diff --git a/.gitignore b/.gitignore
index b518aa047..148de3a0b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -117,4 +117,4 @@ ENV/
examples/tests/ironwood.2.tar.gz
examples/tests/edx-demo-course-*
-!derex/runner/compose_files/openedx_customizations/**
+!derex/runner/compose_files/common/openedx_customizations/**
diff --git a/.isort.cfg b/.isort.cfg
index eab9dbe8a..c1040e553 100644
--- a/.isort.cfg
+++ b/.isort.cfg
@@ -3,6 +3,8 @@ force_alphabetical_sort = True
force_single_line = True
lines_after_imports = 2
line_length = 88
-not_skip = __init__.py
use_parentheses = True
multi_line_output = 3
+
+[tool.isort]
+profile=black
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index efe450871..017d7f1ce 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,4 +1,4 @@
-exclude: derex/runner/compose_files/openedx_customizations/.*
+exclude: derex/runner/compose_files/common/openedx_customizations/.*
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
@@ -21,7 +21,7 @@ repos:
args:
- "--per-file-ignores=\
docker-definition/derex_django/derex_django/settings/default/*.py,\
- derex/runner/compose_files/openedx_customizations/*\
+ derex/runner/compose_files/common/openedx_customizations/*\
:F821,F405,F403,E266"
- repo: https://github.com/pre-commit/mirrors-mypy
diff --git a/derex/runner/__init__.py b/derex/runner/__init__.py
index 574cce5f7..5c4772385 100644
--- a/derex/runner/__init__.py
+++ b/derex/runner/__init__.py
@@ -7,8 +7,29 @@
__version__ = "0.3.1"
+from functools import partial
+from pathlib import Path
+from typing import Optional
+
+import importlib_metadata
import pluggy
hookimpl = pluggy.HookimplMarker("derex.runner")
"""Marker to be imported and used in plugins (and for own implementations)"""
+
+
+def abspath_from_egg(egg: str, path: str) -> Optional[Path]:
+ """Given a path relative to the egg root, find the absolute
+ filesystem path for that resource.
+ For instance this file's absolute path can be found passing
+ derex/runner/utils.py
+ to this function.
+ """
+ for file in importlib_metadata.files(egg):
+ if str(file) == path:
+ return file.locate()
+ return None
+
+
+derex_path = partial(abspath_from_egg, "derex.runner")
diff --git a/derex/runner/cli/__init__.py b/derex/runner/cli/__init__.py
index 1e0ab4d6b..e8d882801 100644
--- a/derex/runner/cli/__init__.py
+++ b/derex/runner/cli/__init__.py
@@ -1,18 +1,20 @@
# -*- coding: utf-8 -*-
"""Console script for derex.runner."""
-from .build import build
-from .mongodb import mongodb
-from .mysql import mysql
-from .test import test
-from .utils import ensure_project
-from .utils import red
from click_plugins import with_plugins
-from derex.runner.logging_utils import setup_logging_decorator
+from derex.runner.cli.build import build
+from derex.runner.cli.caddy import caddy
+from derex.runner.cli.mongodb import mongodb
+from derex.runner.cli.mysql import mysql
+from derex.runner.cli.test import test
+from derex.runner.cli.utils import ensure_project
+from derex.runner.cli.utils import red
+from derex.runner.exceptions import DerexSecretError
+from derex.runner.exceptions import ProjectNotFound
+from derex.runner.logging_utils import setup_logging
from derex.runner.project import DebugBaseImageProject
from derex.runner.project import Project
-from derex.runner.project import ProjectNotFound
+from derex.runner.project import ProjectEnvironment
from derex.runner.project import ProjectRunMode
-from derex.runner.secrets import HAS_MASTER_SECRET
from derex.runner.utils import get_rich_console
from derex.runner.utils import get_rich_table
from typing import Any
@@ -33,7 +35,7 @@
@click.group(invoke_without_command=True)
@click.version_option()
@click.pass_context
-@setup_logging_decorator
+@setup_logging
def derex(ctx):
"""Derex directs edX: commands to manage an Open edX installation"""
# Optimize --help and bash completion by importing
@@ -94,7 +96,7 @@ def reset_mailslurper(project):
@ensure_project
def compile_theme(project):
"""Compile theme sass files"""
- from derex.runner.ddc import run_ddc_project
+ from derex.runner.ddc import run_ddc
if project.themes_dir is None:
click.echo("No theme directory present in this project")
@@ -112,21 +114,21 @@ def compile_theme(project):
paver compile_sass --theme-dirs /openedx/themes --themes {themes}
chown {uid}:{uid} /openedx/themes/* -R""",
]
- run_ddc_project(compose_args, DebugBaseImageProject(), exit_afterwards=True)
+ run_ddc(compose_args, "project", DebugBaseImageProject(), exit_afterwards=True)
@derex.command()
@click.pass_obj
-@click.argument("course_ids", nargs=-1)
@ensure_project
-def reindex_courses(project, course_ids):
+@click.argument("course_ids", nargs=-1)
+def reindex_courses(project: Project, course_ids: list):
"""Reindex all courses on elasticsearch.
Course ids may be specified as arguemnts in order
to reindex specific courses.
e.g. `derex reindex_courses course-v1:edX+DemoX+Demo_Course`"""
- from derex.runner.ddc import run_ddc_project
+ from derex.runner.ddc import run_ddc
django_cmd = ["python", "manage.py", "cms", "reindex_course"]
@@ -139,7 +141,7 @@ def reindex_courses(project, course_ids):
django_cmd.append("--setup")
compose_args = ["run", "--rm", "cms", "sh", "-c", " ".join(django_cmd)]
- run_ddc_project(compose_args, DebugBaseImageProject(), exit_afterwards=True)
+ run_ddc(compose_args, "project", DebugBaseImageProject(), exit_afterwards=True)
@derex.command()
@@ -155,10 +157,12 @@ def create_bucket(project, tty):
"""Create S3 buckets on Minio"""
from derex.runner.docker_utils import run_minio_shell
- click.echo(f"Creating bucket {project.name} with dowload policy on /profile-images")
- command = f"mc mb --ignore-existing local/{project.name}; "
- command += f"mc policy set download local/{project.name}/profile-images"
- run_minio_shell(command, tty=tty)
+ click.echo(
+        f"Creating bucket {project.minio_bucket} with download policy on /profile-images"
+ )
+ command = f"mc mb --ignore-existing local/{project.minio_bucket}; "
+ command += f"mc policy set download local/{project.minio_bucket}/profile-images"
+ run_minio_shell(project, command, tty=tty)
@derex.command()
@@ -166,7 +170,7 @@ def create_bucket(project, tty):
@ensure_project
def reset_rabbitmq(project):
"""Create rabbitmq vhost"""
- from derex.runner.ddc import run_ddc_services
+ from derex.runner.ddc import run_ddc
vhost = f"{project.name}_edxqueue"
compose_args = [
@@ -179,7 +183,7 @@ def reset_rabbitmq(project):
rabbitmqctl set_permissions -p {vhost} guest ".*" ".*" ".*"
""",
]
- run_ddc_services(compose_args, exit_afterwards=True)
+ run_ddc(compose_args, "services", exit_afterwards=True)
click.echo(f"Rabbitmq vhost {vhost} created")
return 0
@@ -191,15 +195,9 @@ def reset_rabbitmq(project):
required=False,
callback=lambda _, __, value: value and ProjectRunMode[value],
)
-@click.option(
- "--force/-f",
- required=False,
- default=False,
- help="Allows switching to production mode without a main secret defined",
-)
@click.pass_obj
@ensure_project
-def runmode(project: Project, runmode: Optional[ProjectRunMode], force):
+def runmode(project: Project, runmode: Optional[ProjectRunMode]):
"""Get/set project runmode (debug/production)"""
if runmode is None:
click.echo(project.runmode.name)
@@ -209,17 +207,6 @@ def runmode(project: Project, runmode: Optional[ProjectRunMode], force):
f"The current project runmode is already {runmode.name}", err=True
)
return
- if not force:
- if runmode is ProjectRunMode.production:
- if not HAS_MASTER_SECRET:
- click.echo(
- red("Set a master secret before switching to production"),
- err=True,
- )
- sys.exit(1)
- return 1
- # We need https://github.com/Santandersecurityresearch/DrHeader/pull/102
- # for the return 1 to work, but it's not released yet
previous_runmode = project.runmode
project.runmode = runmode
click.echo(
@@ -259,27 +246,84 @@ def settings(project: Project, settings: Optional[Any]):
project.settings = settings
+@derex.command()
+@click.argument(
+ "environment",
+ type=click.Choice(ProjectEnvironment.__members__),
+ required=False,
+ callback=lambda _, __, value: value and ProjectEnvironment[value],
+)
+@click.option(
+ "--force/-f",
+ required=False,
+ default=False,
+ help="Allows switching to production environment without a main secret defined",
+)
+@click.pass_obj
+@ensure_project
+def environment(
+ project: Project, environment: Optional[ProjectEnvironment], force: bool
+):
+ """Get/set project environment (development/staging/production)"""
+ if environment is None:
+ click.echo(project.environment.value)
+ else:
+ if project.environment is environment:
+ click.echo(
+ f"The current project environment is already {environment.name}",
+ err=True,
+ )
+ return
+ if not force:
+ if environment in [
+ ProjectEnvironment.production,
+ ProjectEnvironment.staging,
+ ]:
+ try:
+ if not project.has_main_secret(environment):
+ click.echo(
+ red(
+                            "Set a main secret before switching to a production or staging environment"
+ ),
+ err=True,
+ )
+ sys.exit(1)
+ return 1
+ except DerexSecretError as exception:
+ click.echo(red(str(exception)), err=True)
+ return 1
+ previous_environment = project.environment
+ project.environment = environment
+ click.echo(
+ f"Switched environment: {previous_environment.name} → {environment.name}",
+ err=True,
+ )
+
+
@debug.command()
-def minio_shell():
+@click.pass_obj
+@ensure_project
+def minio_shell(project: Project):
from derex.runner.docker_utils import run_minio_shell
- run_minio_shell()
+ run_minio_shell(project)
@debug.command("print-secret")
+@click.pass_obj
+@ensure_project
@click.argument(
"secret",
type=str,
required=True,
)
-def print_secret(secret):
- from derex.runner.secrets import DerexSecrets
- from derex.runner.secrets import get_secret
+def print_secret(project: Project, secret: str):
+ from derex.runner.constants import DerexSecrets
derex_secret = getattr(DerexSecrets, secret, None)
if not derex_secret:
raise click.exceptions.ClickException(f'No secrets found for "{secret}"')
- click.echo(get_secret(derex_secret))
+ click.echo(project.get_secret(derex_secret))
return 0
@@ -292,11 +336,11 @@ def print_secret(secret):
)
def minio_update_key(old_key: str):
"""Run minio to re-key data with the new secret"""
- from derex.runner.ddc import run_ddc_services
- from derex.runner.docker_utils import wait_for_service
- from derex.runner.utils import derex_path
+ from derex.runner import derex_path
+ from derex.runner.ddc import run_ddc
+ from derex.runner.docker_utils import wait_for_container
- wait_for_service("minio")
+ wait_for_container("minio")
MINIO_SCRIPT_PATH = derex_path("derex/runner/compose_files/minio-update-key.sh")
click.echo("Updating MinIO secret key...")
compose_args = [
@@ -313,7 +357,7 @@ def minio_update_key(old_key: str):
"/minio-update-key.sh",
]
try:
- run_ddc_services(compose_args)
+ run_ddc(compose_args, "services")
except RuntimeError:
return 1
@@ -323,15 +367,16 @@ def minio_update_key(old_key: str):
# We'll let `docker-compose up` recreate it for us, if needed
click.echo("\nRecreating MinIO container...")
compose_args = ["up", "-d", "minio"]
- run_ddc_services(compose_args)
+ run_ddc(compose_args, "services")
- wait_for_service("minio")
+ wait_for_container("minio")
click.echo("\nMinIO secret key updated successfully!")
return 0
derex.add_command(mysql)
derex.add_command(mongodb)
+derex.add_command(caddy)
derex.add_command(build)
derex.add_command(test)
diff --git a/derex/runner/cli/build.py b/derex/runner/cli/build.py
index 8724c8926..42e1df6d9 100644
--- a/derex/runner/cli/build.py
+++ b/derex/runner/cli/build.py
@@ -1,8 +1,8 @@
-from .utils import ensure_project
from derex.runner import __version__
+from derex.runner import abspath_from_egg
+from derex.runner.cli.utils import ensure_project
from derex.runner.project import OpenEdXVersions
from derex.runner.project import Project
-from derex.runner.utils import abspath_from_egg
from distutils.spawn import find_executable
import click
diff --git a/derex/runner/cli/caddy.py b/derex/runner/cli/caddy.py
new file mode 100644
index 000000000..d8df7bf03
--- /dev/null
+++ b/derex/runner/cli/caddy.py
@@ -0,0 +1,6 @@
+import click
+
+
+@click.group()
+def caddy():
+ """Commands to manage Caddy server configurations"""
diff --git a/derex/runner/cli/mongodb.py b/derex/runner/cli/mongodb.py
index 8a950e1c8..87688a174 100644
--- a/derex/runner/cli/mongodb.py
+++ b/derex/runner/cli/mongodb.py
@@ -1,3 +1,7 @@
+from derex.runner.cli.utils import ensure_project
+from derex.runner.cli.utils import green
+from derex.runner.cli.utils import red
+from derex.runner.docker_utils import client as docker_client
from derex.runner.project import Project
from derex.runner.utils import get_rich_console
from derex.runner.utils import get_rich_table
@@ -5,6 +9,11 @@
from typing import Tuple
import click
+import docker
+import logging
+
+
+logger = logging.getLogger(__name__)
@click.group(invoke_without_command=True)
@@ -18,35 +27,40 @@ def mongodb(context: click.core.Context):
click.echo()
project = context.obj
- for db in list_databases():
- if db["name"] == project.mongodb_db_name:
+ try:
+ for db in list_databases():
+ if db["name"] == project.mongodb_db_name:
+ click.echo(
+ f'Current MongoDB databases for project "{project.name}"'
+ )
+ console = get_rich_console()
+ table = get_rich_table(
+ "Database", "Tables", "Django users", show_lines=True
+ )
+ table.add_row(
+ db["name"],
+ str(db["sizeOnDisk"]),
+ str(db["empty"]),
+ )
+ console.print(table)
+ break
+ else:
click.echo(
- f'Current MongoDB databases for project "{project.name}"'
- )
- console = get_rich_console()
- table = get_rich_table(
- "Database", "Tables", "Django users", show_lines=True
- )
- table.add_row(
- db["name"],
- str(db["sizeOnDisk"]),
- str(db["empty"]),
+ f'No MongoDB database "{project.mongodb_db_name}" found for project "{project.name}"'
)
- console.print(table)
- break
- else:
- click.echo(
- f'No MongoDB database "{project.mongodb_db_name}" found for project "{project.name}"'
- )
+ except TimeoutError as exception:
+ click.echo(red(str(exception)))
@mongodb.command(name="shell")
+@click.pass_obj
+@ensure_project
@click.argument("command", type=str, required=False)
-def shell(command: Optional[str]):
+def shell(project: Project, command: Optional[str]):
"""Execute a root session of the MongoDB client"""
from derex.runner.mongodb import execute_root_shell
- execute_root_shell(command)
+ execute_root_shell(project, command)
@mongodb.group("list")
@@ -57,16 +71,11 @@ def listing(context: click.core.Context):
@mongodb.command(name="drop")
@click.pass_obj
+@ensure_project
@click.argument("db_name", type=str, required=False)
-def drop_mongodb(project: Optional[Project], db_name: str):
+def drop_mongodb(project: Project, db_name: str):
"""Drop a MongoDB database"""
- if not any([project, db_name]):
- raise click.exceptions.MissingParameter(
- param_hint="db_name",
- param_type="str",
- message="Either specify a destination database name or run in a derex project.",
- )
- if not db_name and project:
+ if not db_name:
db_name = project.mongodb_db_name
if click.confirm(
@@ -80,8 +89,9 @@ def drop_mongodb(project: Optional[Project], db_name: str):
@listing.command(name="databases")
@click.pass_obj
+@ensure_project
@click.argument("db_name", type=str, required=False)
-def list_databases_cmd(project: Optional[Project], db_name: str):
+def list_databases_cmd(project: Project, db_name: str):
"""List all MongoDB databases"""
from derex.runner.mongodb import list_databases
@@ -169,6 +179,8 @@ def copy_mongodb(
@mongodb.command(name="reset-root-password")
+@click.pass_obj
+@ensure_project
@click.argument("current_password", type=str, required=False)
@click.option(
"--force",
@@ -176,17 +188,237 @@ def copy_mongodb(
default=False,
help="Do not ask for confirmation",
)
-def reset_mongodb_password_cmd(current_password: Optional[str], force: bool):
+def reset_mongodb_password_cmd(
+ project: Project, current_password: Optional[str], force: bool
+):
"""Reset the mongodb root user password with the one derived
from the Derex main secret."""
- from derex.runner.constants import MONGODB_ROOT_USER
-
if click.confirm(
- f'This is going to reset the password for the mongodb "{MONGODB_ROOT_USER}" user'
+        f'This is going to reset the password for the mongodb "{project.mongodb_user}" user '
"with the one computed by derex.\n"
"Are you sure you want to continue?"
):
from derex.runner.mongodb import reset_mongodb_password
- reset_mongodb_password(current_password)
+ reset_mongodb_password(project, current_password)
+ return 0
+
+
+@mongodb.group("upgrade")
+@click.pass_context
+def upgrade(context: click.core.Context):
+ """MongoDB upgrade procedures"""
+
+
+@upgrade.command(name="32-to-36")
+@click.pass_obj
+@ensure_project
+@click.option(
+ "--source",
+ "source_data_volume",
+ type=str,
+ help="Source data volume",
+)
+@click.option(
+ "--destination",
+ "destination_data_volume",
+ type=str,
+ help="Destination data volume",
+)
+def upgrade_from_32_to_36(
+ project: Project,
+ source_data_volume: Optional[str],
+ destination_data_volume: Optional[str],
+):
+ """Upgrades the mongodb data volume from version 3.2 to 3.4 to 3.6"""
+ if not source_data_volume:
+ source_data_volume = project.mongodb_docker_volume
+ if not destination_data_volume:
+ destination_project = project
+ destination_project.openedx_version.value["mongodb_image"] = "mongo:3.6"
+ destination_data_volume = destination_project.mongodb_docker_volume
+
+ intermediary_data_volume = "derex_tmp_mongodb34"
+
+ if source_data_volume == destination_data_volume:
+ click.echo(red("Source and destination data volume are the same !"))
+ click.echo(red("Please specify a different source or destination volume"))
+ click.echo(red("Upgrade aborted"))
+ return 0
+
+ try:
+ docker_client.volumes.get(source_data_volume)
+ except docker.errors.NotFound:
+        raise RuntimeError(f'Volume "{source_data_volume}" does not exist')
+ try:
+ docker_client.volumes.get(destination_data_volume)
+ click.echo(
+ red(
+ f'Destination volume "{destination_data_volume}" already exists !\n'
+ "Please specify a different destination volume."
+ )
+ )
+ click.echo(red("Upgrade aborted"))
+ return 0
+ except docker.errors.NotFound:
+ pass
+
+ if click.confirm(
+ f'This is going to copy the source MongoDB data volume "{source_data_volume}" '
+ f'to a new data volume "{destination_data_volume}" and upgrade it '
+ "from version 3.2 to 3.6\n"
+ "Are you sure you want to continue?"
+ ):
+ from derex.runner.mongodb import run_mongodb_upgrade
+
+ try:
+ logger.info(f'Creating data volume "{destination_data_volume}"')
+ docker_client.volumes.create(destination_data_volume)
+ docker_client.volumes.create(intermediary_data_volume)
+
+ click.echo("Running upgrade from mongodb 3.2 to 3.4")
+ run_mongodb_upgrade(
+ project, source_data_volume, intermediary_data_volume, "3.2", "3.4"
+ )
+ click.echo("Running upgrade from mongodb 3.4 to 3.6")
+ run_mongodb_upgrade(
+ project,
+ intermediary_data_volume,
+ destination_data_volume,
+ "3.4",
+ "3.6",
+ )
+ click.echo(
+ green(
+ f'Successfully upgraded the mongodb data volume "{destination_data_volume}" to version 3.6'
+ )
+ )
+ except Exception as exception:
+ click.echo(red("Upgrade failed"))
+ click.echo(red(exception))
+ return 1
+ finally:
+ logger.info(
+ f'Dropping intermediary data volume "{intermediary_data_volume}"'
+ )
+ try:
+ docker_client.volumes.get(intermediary_data_volume).remove()
+ except docker.errors.NotFound:
+ pass
+ return 0
+ click.echo(red("Upgrade aborted"))
+ return 0
+
+
+@upgrade.command(name="36-to-44")
+@click.pass_obj
+@ensure_project
+@click.option(
+ "--source",
+ "source_data_volume",
+ type=str,
+ help="Source data volume",
+)
+@click.option(
+ "--destination",
+ "destination_data_volume",
+ type=str,
+ help="Destination data volume",
+)
+def upgrade_from_36_to_44(
+ project: Project,
+ source_data_volume: Optional[str],
+ destination_data_volume: Optional[str],
+):
+ """Upgrades the mongodb data volume from version 3.6 to 4.0 to 4.2 to 4.4"""
+ if not source_data_volume:
+ source_data_volume = project.mongodb_docker_volume
+ if not destination_data_volume:
+ destination_project = project
+ destination_project.openedx_version.value["mongodb_image"] = "mongo:4.4"
+ destination_data_volume = destination_project.mongodb_docker_volume
+
+ intermediary_data_volume_40 = "derex_tmp_mongodb4.0"
+ intermediary_data_volume_42 = "derex_tmp_mongodb4.2"
+
+ if source_data_volume == destination_data_volume:
+ click.echo(red("Source and destination data volume are the same !"))
+ click.echo(red("Please specify a different source or destination volume"))
+ click.echo(red("Upgrade aborted"))
+ return 0
+
+ try:
+ docker_client.volumes.get(source_data_volume)
+ except docker.errors.NotFound:
+        raise RuntimeError(f'Volume "{source_data_volume}" does not exist')
+ try:
+ docker_client.volumes.get(destination_data_volume)
+ click.echo(
+ red(
+ f'Destination volume "{destination_data_volume}" already exists !\n'
+ "Please specify a different destination volume."
+ )
+ )
+ click.echo(red("Upgrade aborted"))
+ return 0
+ except docker.errors.NotFound:
+ pass
+
+ if click.confirm(
+ f'This is going to copy the source MongoDB data volume "{source_data_volume}" '
+ f'to a new data volume "{destination_data_volume}" and upgrade it '
+ "from version 3.6 to 4.4\n"
+ "Are you sure you want to continue?"
+ ):
+ from derex.runner.mongodb import run_mongodb_upgrade
+
+ try:
+ logger.info(f'Creating data volume "{destination_data_volume}"')
+ docker_client.volumes.create(destination_data_volume)
+ docker_client.volumes.create(intermediary_data_volume_40)
+ docker_client.volumes.create(intermediary_data_volume_42)
+
+ click.echo("Running upgrade from mongodb 3.6 to 4.0")
+ run_mongodb_upgrade(
+ project, source_data_volume, intermediary_data_volume_40, "3.6", "4.0"
+ )
+ click.echo("Running upgrade from mongodb 4.0 to 4.2")
+ run_mongodb_upgrade(
+ project,
+ intermediary_data_volume_40,
+ intermediary_data_volume_42,
+ "4.0",
+ "4.2",
+ )
+ click.echo("Running upgrade from mongodb 4.2 to 4.4")
+ run_mongodb_upgrade(
+ project,
+ intermediary_data_volume_42,
+ destination_data_volume,
+ "4.2",
+ "4.4",
+ )
+ click.echo(
+ green(
+ f'Successfully upgraded the mongodb data volume "{destination_data_volume}" to version 4.4'
+ )
+ )
+ except Exception as exception:
+ click.echo(red("Upgrade failed"))
+ click.echo(red(exception))
+ return 1
+ finally:
+ for intermediary_data_volume in [
+ intermediary_data_volume_40,
+ intermediary_data_volume_42,
+ ]:
+ logger.info(
+ f'Dropping intermediary data volume "{intermediary_data_volume}"'
+ )
+ try:
+ docker_client.volumes.get(intermediary_data_volume).remove()
+ except docker.errors.NotFound:
+ pass
+ return 0
+ click.echo(red("Upgrade aborted"))
return 0
diff --git a/derex/runner/cli/mysql.py b/derex/runner/cli/mysql.py
index 4462d3263..dc4b0b7d9 100644
--- a/derex/runner/cli/mysql.py
+++ b/derex/runner/cli/mysql.py
@@ -1,6 +1,8 @@
+from derex.runner.cli.utils import ensure_project
+from derex.runner.cli.utils import red
from derex.runner.project import DebugBaseImageProject
from derex.runner.project import Project
-from derex.runner.project import ProjectRunMode
+from derex.runner.project import ProjectEnvironment
from derex.runner.utils import get_rich_console
from derex.runner.utils import get_rich_table
from typing import Optional
@@ -19,7 +21,7 @@ def mysql(context: click.core.Context):
if isinstance(context.obj, Project):
click.echo()
project = context.obj
- for db in show_databases():
+ for db in show_databases(project):
if db[0] == project.mysql_db_name:
click.echo(f'Current MySQL databases for project "{project.name}"')
console = get_rich_console()
@@ -37,12 +39,14 @@ def mysql(context: click.core.Context):
@mysql.command(name="shell")
+@click.pass_obj
+@ensure_project
@click.argument("command", type=str, required=False)
-def shell(command: Optional[str]):
+def shell(project: Project, command: Optional[str]):
"""Execute a root session of the mysql client"""
from derex.runner.mysql import execute_root_shell
- execute_root_shell(command)
+ execute_root_shell(project, command)
@mysql.group("create")
@@ -65,101 +69,99 @@ def show(context: click.core.Context):
@create.command(name="database")
@click.pass_obj
+@ensure_project
@click.argument("db_name", type=str, required=False)
-def create_database_cmd(project: Optional[Project], db_name: str):
+def create_database_cmd(project: Project, db_name: str):
"""Create a mysql database."""
- if not any([project, db_name]):
- raise click.exceptions.MissingParameter(
- param_hint="db_name",
- param_type="str",
- message="Either specify a database name or run in a derex project.",
- )
- if not db_name and project:
+ if not db_name:
db_name = project.mysql_db_name
-
from derex.runner.mysql import create_database
- create_database(db_name)
+ create_database(project, db_name)
return 0
@create.command(name="user")
+@click.pass_obj
+@ensure_project
@click.argument("user", type=str)
@click.argument("password", type=str)
@click.argument("host", type=str, default="localhost")
-def create_user_cmd(user: str, password: str, host: str):
+def create_user_cmd(project: Project, user: str, password: str, host: str):
"""Create a mysql user"""
from derex.runner.mysql import create_user
- create_user(user, password, host)
+ create_user(project, user, password, host)
return 0
@drop.command(name="database")
@click.pass_obj
+@ensure_project
@click.argument("db_name", type=str, required=False)
-def drop_database_cmd(project: Optional[Project], db_name: str):
+def drop_database_cmd(project: Project, db_name: str):
"""Drop a mysql database"""
- if not any([project, db_name]):
- raise click.exceptions.MissingParameter(
- param_hint="db_name",
- param_type="str",
- message="Either specify a database name or run in a derex project.",
- )
- if not db_name and project:
+ if not db_name:
db_name = project.mysql_db_name
if click.confirm(f'Are you sure you want to drop database "{db_name}" ?'):
from derex.runner.mysql import drop_database
- drop_database(db_name)
+ drop_database(project, db_name)
return 0
@drop.command(name="user")
+@click.pass_obj
+@ensure_project
@click.argument("user", type=str)
@click.argument("host", type=str, default="localhost")
-def drop_user_cmd(user: str, host: str):
+def drop_user_cmd(project: Project, user: str, host: str):
"""Drop a mysql user"""
if click.confirm(f"Are you sure you want to drop user '{user}'@'{host}' ?"):
from derex.runner.mysql import drop_user
- drop_user(user, host)
+ drop_user(project, user, host)
return 0
@show.command(name="databases")
-def show_databases_cmd():
+@click.pass_obj
+@ensure_project
+def show_databases_cmd(project: Project):
"""List all MySQL databases"""
from derex.runner.mysql import show_databases
console = get_rich_console()
table = get_rich_table("Database", "Tables", "Django users", show_lines=True)
- for database in show_databases():
+ for database in show_databases(project):
table.add_row(database[0], str(database[1]), str(database[2]))
console.print(table)
return 0
@show.command(name="users")
-def show_users_cmd():
+@click.pass_obj
+@ensure_project
+def show_users_cmd(project: Project):
"""List all MySQL users"""
from derex.runner.mysql import list_users
console = get_rich_console()
table = get_rich_table("User", "Host", "Password", show_lines=True)
- for user in list_users():
+ for user in list_users(project):
table.add_row(user[0], user[1], user[2])
console.print(table)
return 0
@mysql.command("copy-database")
+@click.pass_obj
+@ensure_project
@click.argument("source_db_name", type=str, required=True)
@click.argument("destination_db_name", type=str)
-@click.pass_obj
def copy_database_cmd(
- project: Optional[Project], source_db_name: str, destination_db_name: Optional[str]
+ project: Project, source_db_name: str, destination_db_name: Optional[str]
):
"""
Copy an existing mysql database. If no destination database is given it defaults
@@ -186,29 +188,27 @@ def copy_database_cmd(
@mysql.command(name="reset")
-@click.pass_context
+@click.pass_obj
+@ensure_project
@click.option(
"--force",
is_flag=True,
default=False,
help="Do not ask for confirmation and allow resetting mysql database if runmode is production",
)
-def reset_mysql_cmd(context, force):
+def reset_mysql_cmd(project: Project, force: bool):
"""Reset MySQL database for the current project"""
-
- if context.obj is None:
- click.echo("This command needs to be run inside a derex project")
- return 1
- project = context.obj
-
from derex.runner.mysql import reset_mysql_openedx
- if project.runmode is not ProjectRunMode.debug and not force:
+ if project.environment is not ProjectEnvironment.development and not force:
# Safety belt: we don't want people to run this in production
- context.fail(
- "The command mysql reset can only be run in `debug` runmode.\n"
- "Use --force to override"
+ click.echo(
+ red(
+ "The command mysql reset can only be run in `development` environment.\n"
+ "Use --force to override"
+ )
)
+ return 1
if not force:
if not click.confirm(
@@ -222,6 +222,8 @@ def reset_mysql_cmd(context, force):
@mysql.command(name="reset-root-password")
+@click.pass_obj
+@ensure_project
@click.argument("current_password", type=str, required=True)
@click.option(
"--force",
@@ -229,13 +231,11 @@ def reset_mysql_cmd(context, force):
default=False,
help="Do not ask for confirmation",
)
-def reset_mysql_password_cmd(current_password: str, force: bool):
+def reset_mysql_password_cmd(project: Project, current_password: str, force: bool):
"""Reset the mysql root user password with the one derived from
the Derex main secret."""
- from derex.runner.constants import MYSQL_ROOT_USER
-
if click.confirm(
- f'This is going to reset the password for the mysql "{MYSQL_ROOT_USER}" user '
+ f'This is going to reset the password for the mysql "{project.mysql_user}" user '
"with the one computed by derex.\n"
"Are you sure you want to continue?"
):
diff --git a/derex/runner/cli/test.py b/derex/runner/cli/test.py
index c9c4c72dd..e0d925e5e 100644
--- a/derex/runner/cli/test.py
+++ b/derex/runner/cli/test.py
@@ -1,11 +1,9 @@
-from .utils import ensure_project
-from .utils import red
+from derex.runner.cli.utils import ensure_project
+from derex.runner.cli.utils import red
from derex.runner.compose_generation import generate_ddc_test_compose
from derex.runner.ddc import run_docker_compose
-from derex.runner.docker_utils import wait_for_service
import click
-import sys
@click.group()
@@ -22,12 +20,6 @@ def e2e(project):
click.echo(red(f"No e2e tests directory found in {project.root}"), err=True)
return 1
- try:
- wait_for_service("httpserver")
- except (TimeoutError, RuntimeError, NotImplementedError) as exc:
- click.echo(click.style(str(exc), fg="red"))
- sys.exit(1)
-
click.echo(f"Running e2e Cypress tests from {project.e2e_dir}")
test_compose_path = generate_ddc_test_compose(project)
run_docker_compose(
diff --git a/derex/runner/cli/utils.py b/derex/runner/cli/utils.py
index c38f76f9a..778b761b1 100644
--- a/derex/runner/cli/utils.py
+++ b/derex/runner/cli/utils.py
@@ -22,3 +22,11 @@ def wrapper(*args, **kwargs):
def red(string: str) -> str:
return click.style(string, fg="red")
+
+
+def green(string: str) -> str:
+ return click.style(string, fg="green")
+
+
+def yellow(string: str) -> str:
+ return click.style(string, fg="yellow")
diff --git a/derex/runner/compose_files/Caddyfile b/derex/runner/compose_files/Caddyfile
deleted file mode 100644
index 8535336ef..000000000
--- a/derex/runner/compose_files/Caddyfile
+++ /dev/null
@@ -1,26 +0,0 @@
-:80 {
- reverse_proxy {http.request.host}.derex:80
-}
-
-:81 {
- reverse_proxy {http.request.host}.derex:81
-}
-
-# Mailsluprer needs an extra port and thus special treatment
-http://mailslurper.localhost:4301 {
- reverse_proxy mailslurper.localhost.derex:4301
-}
-
-# It's harder than ideal to set Portainer and adminer ports to 80, so here it is:
-http://portainer.localhost:80 {
- reverse_proxy portainer.localhost.derex:9000
-}
-
-http://adminer.localhost:80 {
- reverse_proxy adminer.localhost.derex:8080
-}
-
-# Used by health check
-:8080 {
- respond /health-check 200
-}
diff --git a/derex/runner/templates/docker-compose-test.yml.j2 b/derex/runner/compose_files/common/docker-compose-test.yml.j2
similarity index 100%
rename from derex/runner/templates/docker-compose-test.yml.j2
rename to derex/runner/compose_files/common/docker-compose-test.yml.j2
diff --git a/derex/runner/compose_files/openedx_customizations/README.rst b/derex/runner/compose_files/common/openedx_customizations/README.rst
similarity index 100%
rename from derex/runner/compose_files/openedx_customizations/README.rst
rename to derex/runner/compose_files/common/openedx_customizations/README.rst
diff --git a/derex/runner/compose_files/openedx_customizations/ironwood/common/lib/xmodule/xmodule/mongo_utils.py b/derex/runner/compose_files/common/openedx_customizations/ironwood/common/lib/xmodule/xmodule/mongo_utils.py
similarity index 100%
rename from derex/runner/compose_files/openedx_customizations/ironwood/common/lib/xmodule/xmodule/mongo_utils.py
rename to derex/runner/compose_files/common/openedx_customizations/ironwood/common/lib/xmodule/xmodule/mongo_utils.py
diff --git a/derex/runner/compose_files/openedx_customizations/ironwood/common/lib/xmodule/xmodule/mongo_utils.py.ironwood b/derex/runner/compose_files/common/openedx_customizations/ironwood/common/lib/xmodule/xmodule/mongo_utils.py.ironwood
similarity index 100%
rename from derex/runner/compose_files/openedx_customizations/ironwood/common/lib/xmodule/xmodule/mongo_utils.py.ironwood
rename to derex/runner/compose_files/common/openedx_customizations/ironwood/common/lib/xmodule/xmodule/mongo_utils.py.ironwood
diff --git a/derex/runner/compose_files/openedx_customizations/juniper/cms/djangoapps/contentstore/views/import_export.py b/derex/runner/compose_files/common/openedx_customizations/juniper/cms/djangoapps/contentstore/views/import_export.py
similarity index 100%
rename from derex/runner/compose_files/openedx_customizations/juniper/cms/djangoapps/contentstore/views/import_export.py
rename to derex/runner/compose_files/common/openedx_customizations/juniper/cms/djangoapps/contentstore/views/import_export.py
diff --git a/derex/runner/compose_files/openedx_customizations/juniper/cms/djangoapps/contentstore/views/import_export.py.juniper b/derex/runner/compose_files/common/openedx_customizations/juniper/cms/djangoapps/contentstore/views/import_export.py.juniper
similarity index 100%
rename from derex/runner/compose_files/openedx_customizations/juniper/cms/djangoapps/contentstore/views/import_export.py.juniper
rename to derex/runner/compose_files/common/openedx_customizations/juniper/cms/djangoapps/contentstore/views/import_export.py.juniper
diff --git a/derex/runner/compose_files/openedx_customizations/juniper/cms/djangoapps/contentstore/views/transcripts_ajax.py b/derex/runner/compose_files/common/openedx_customizations/juniper/cms/djangoapps/contentstore/views/transcripts_ajax.py
similarity index 100%
rename from derex/runner/compose_files/openedx_customizations/juniper/cms/djangoapps/contentstore/views/transcripts_ajax.py
rename to derex/runner/compose_files/common/openedx_customizations/juniper/cms/djangoapps/contentstore/views/transcripts_ajax.py
diff --git a/derex/runner/compose_files/openedx_customizations/juniper/cms/djangoapps/contentstore/views/transcripts_ajax.py.juniper b/derex/runner/compose_files/common/openedx_customizations/juniper/cms/djangoapps/contentstore/views/transcripts_ajax.py.juniper
similarity index 100%
rename from derex/runner/compose_files/openedx_customizations/juniper/cms/djangoapps/contentstore/views/transcripts_ajax.py.juniper
rename to derex/runner/compose_files/common/openedx_customizations/juniper/cms/djangoapps/contentstore/views/transcripts_ajax.py.juniper
diff --git a/derex/runner/compose_files/openedx_customizations/juniper/lms/djangoapps/courseware/migrations/0011_csm_id_bigint.py b/derex/runner/compose_files/common/openedx_customizations/juniper/lms/djangoapps/courseware/migrations/0011_csm_id_bigint.py
similarity index 100%
rename from derex/runner/compose_files/openedx_customizations/juniper/lms/djangoapps/courseware/migrations/0011_csm_id_bigint.py
rename to derex/runner/compose_files/common/openedx_customizations/juniper/lms/djangoapps/courseware/migrations/0011_csm_id_bigint.py
diff --git a/derex/runner/compose_files/openedx_customizations/juniper/lms/djangoapps/courseware/migrations/0011_csm_id_bigint.py.juniper b/derex/runner/compose_files/common/openedx_customizations/juniper/lms/djangoapps/courseware/migrations/0011_csm_id_bigint.py.juniper
similarity index 100%
rename from derex/runner/compose_files/openedx_customizations/juniper/lms/djangoapps/courseware/migrations/0011_csm_id_bigint.py.juniper
rename to derex/runner/compose_files/common/openedx_customizations/juniper/lms/djangoapps/courseware/migrations/0011_csm_id_bigint.py.juniper
diff --git a/derex/runner/compose_files/openedx_customizations/juniper/lms/djangoapps/dashboard/sysadmin.py b/derex/runner/compose_files/common/openedx_customizations/juniper/lms/djangoapps/dashboard/sysadmin.py
similarity index 100%
rename from derex/runner/compose_files/openedx_customizations/juniper/lms/djangoapps/dashboard/sysadmin.py
rename to derex/runner/compose_files/common/openedx_customizations/juniper/lms/djangoapps/dashboard/sysadmin.py
diff --git a/derex/runner/compose_files/openedx_customizations/juniper/lms/djangoapps/dashboard/sysadmin.py.juniper b/derex/runner/compose_files/common/openedx_customizations/juniper/lms/djangoapps/dashboard/sysadmin.py.juniper
similarity index 100%
rename from derex/runner/compose_files/openedx_customizations/juniper/lms/djangoapps/dashboard/sysadmin.py.juniper
rename to derex/runner/compose_files/common/openedx_customizations/juniper/lms/djangoapps/dashboard/sysadmin.py.juniper
diff --git a/derex/runner/compose_files/minio-update-key.sh b/derex/runner/compose_files/common/scripts/minio-update-key.sh
similarity index 100%
rename from derex/runner/compose_files/minio-update-key.sh
rename to derex/runner/compose_files/common/scripts/minio-update-key.sh
diff --git a/derex/runner/compose_files/wsgi.py b/derex/runner/compose_files/common/wsgi.py
similarity index 100%
rename from derex/runner/compose_files/wsgi.py
rename to derex/runner/compose_files/common/wsgi.py
diff --git a/derex/runner/templates/docker-compose-project.yml.j2 b/derex/runner/compose_files/development/docker-compose-project.yml.j2
similarity index 85%
rename from derex/runner/templates/docker-compose-project.yml.j2
rename to derex/runner/compose_files/development/docker-compose-project.yml.j2
index 18c1c2280..6c0f5bb10 100644
--- a/derex/runner/templates/docker-compose-project.yml.j2
+++ b/derex/runner/compose_files/development/docker-compose-project.yml.j2
@@ -14,8 +14,8 @@ x-common:
networks:
- derex
volumes:
- - derex_{{ project.name }}_media:/openedx/media
- - derex_{{ project.name }}_data:/openedx/data/
+ - derex_{{ project.name }}_openedx_media:/openedx/media
+ - derex_{{ project.name }}_openedx_data:/openedx/data/
{%- if project.settings_directory_path() %}
- {{ project.settings_directory_path() }}:/openedx/edx-platform/derex_settings
{%- endif %}
@@ -44,12 +44,14 @@ x-common:
DJANGO_SETTINGS_MODULE: {{ project.settings.value }}
DEREX_PROJECT: {{ project.name }}
DEREX_OPENEDX_VERSION: {{ project.openedx_version.name }}
+ MYSQL_HOST: {{ project.mysql_host }}
MYSQL_DB_NAME: {{ project.mysql_db_name }}
MYSQL_USER: {{ project.mysql_user }}
- MYSQL_PASSWORD: {{ project.secret("mysql") }}
+ MYSQL_PASSWORD: {{ project.mysql_password }}
+ MONGODB_HOST: {{ project.mongodb_host }}
MONGODB_DB_NAME: {{ project.mongodb_db_name }}
MONGODB_USER: {{ project.mongodb_user }}
- MONGODB_PASSWORD: {{ project.secret("mongodb") }}
+ MONGODB_PASSWORD: {{ project.mongodb_password }}
DEREX_MINIO_SECRET: {{ project.secret("minio") }}
{%- for key, value in project.get_container_env().items() %}
{{ key }}: {{ value | tojson }}
@@ -66,13 +68,12 @@ services:
environment:
<<: *common-env
SERVICE_VARIANT: lms
- container_name: {{ project.name }}_flower
networks:
- derex
networks:
- derex:
- aliases:
- - flower.{{ project.name }}.localhost.derex
+ derex:
+ aliases:
+ - flower.{{ project.name }}.localhost.derex
lms:
<<: *common-conf
@@ -97,10 +98,10 @@ services:
<<: *common-env
SERVICE_VARIANT: lms
networks:
- derex:
- aliases:
- - {{ project.name }}.localhost.derex
- - preview.{{ project.name }}.localhost.derex
+ derex:
+ aliases:
+ - {{ project.name }}.localhost.derex
+ - preview.{{ project.name }}.localhost.derex
cms:
<<: *common-conf
@@ -125,8 +126,6 @@ services:
environment:
<<: *common-env
SERVICE_VARIANT: cms
- {% if project.runmode.value == "debug" -%}
- {% endif %}
networks:
derex:
aliases:
@@ -146,6 +145,10 @@ services:
<<: *common-env
C_FORCE_ROOT: "True"
SERVICE_VARIANT: lms
+ networks:
+ derex:
+ aliases:
+ - lms-worker.{{ project.name }}.localhost.derex
cms_worker:
<<: *common-conf
@@ -161,11 +164,17 @@ services:
<<: *common-env
C_FORCE_ROOT: "True"
SERVICE_VARIANT: cms
+ networks:
+ derex:
+ aliases:
+ - cms-worker.{{ project.name }}.localhost.derex
networks:
derex:
name: derex
volumes:
- derex_{{ project.name }}_data:
- derex_{{ project.name }}_media:
+ {%- for volume in project.docker_volumes %}
+  {{ volume }}:
+ external: true
+ {%- endfor %}
diff --git a/derex/runner/compose_files/development/docker-compose-services.yml.j2 b/derex/runner/compose_files/development/docker-compose-services.yml.j2
new file mode 100644
index 000000000..ad768e5b8
--- /dev/null
+++ b/derex/runner/compose_files/development/docker-compose-services.yml.j2
@@ -0,0 +1,100 @@
+# Services needed for Open edX to work
+version: "3.5"
+services:
+ {% include "mysql.yml.j2" %}
+ {% include "mongodb.yml.j2" %}
+ {% include "elasticsearch.yml.j2" %}
+
+ rabbitmq:
+ image: rabbitmq:3.6.16-alpine
+ restart: unless-stopped
+ hostname: rabbitmq
+ container_name: rabbitmq
+ healthcheck:
+ test: rabbitmqctl node_health_check | grep "Health check passed" -q
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ volumes:
+ - {{ project.rabbitmq_docker_volume }}:/etc/rabbitmq/
+ - {{ project.rabbitmq_docker_volume }}:/var/lib/rabbitmq
+ - {{ project.rabbitmq_docker_volume }}:/var/log/rabbitmq/
+ networks:
+ - derex
+
+ mailslurper:
+ image: derex/mailslurper:1.14.1
+ restart: unless-stopped
+ container_name: smtp
+ volumes:
+ - ./mailslurper.json:/config.json
+ networks:
+ derex:
+ aliases:
+ - mailslurper.localhost.derex
+
+ memcached:
+ image: memcached:1.6.3-alpine
+ restart: unless-stopped
+ container_name: memcached
+ healthcheck:
+ test: nc -z 127.0.0.1 11211
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ networks:
+ - derex
+
+ minio:
+ image: minio/minio:RELEASE.2021-08-20T18-32-01Z
+ restart: unless-stopped
+ container_name: minio
+ volumes:
+ - {{ project.minio_docker_volume }}:/data
+ environment:
+      MINIO_ROOT_USER: "{{ project.minio_user }}"
+      MINIO_ROOT_PASSWORD: "{{ project.minio_password }}"
+ command: server --console-address :80 /data
+ healthcheck:
+ test: curl --silent --fail http://localhost:80/minio/health/live
+ interval: 30s
+ timeout: 20s
+ retries: 3
+ networks:
+ derex:
+ aliases:
+ - minio.localhost.derex
+ - minio.localhost
+
+ caddy:
+ image: caddy:2-alpine
+ restart: unless-stopped
+ container_name: caddy
+ ports:
+ - 127.0.0.1:80:80
+ - 127.0.0.1:81:81
+ - 127.0.0.1:4301:4301 # Mailslurper port
+ volumes:
+ - {{ host_caddy_dir }}:/etc/caddy
+ {%- if host_caddy_config_path %}
+ - {{ host_caddy_config_path }}:/etc/caddy/Caddyfile
+ {%- endif %}
+ healthcheck:
+ test: wget -q -O - http://localhost:8080
+ interval: 30s
+ timeout: 20s
+ retries: 3
+ networks:
+ - derex
+
+networks:
+ derex:
+ name: derex
+
+volumes:
+ {%- for volume in project.docker_volumes %}
+ {{ volume }}:
+ external: true
+ {%- endfor %}
diff --git a/derex/runner/compose_files/development/elasticsearch.yml.j2 b/derex/runner/compose_files/development/elasticsearch.yml.j2
new file mode 100644
index 000000000..7e913a12c
--- /dev/null
+++ b/derex/runner/compose_files/development/elasticsearch.yml.j2
@@ -0,0 +1,70 @@
+{%- if project.elasticsearch_host == "elasticsearch7" %}
+
+ {{ project.elasticsearch_host }}:
+ image: {{ project.openedx_version.value["elasticsearch_image"] }}
+ restart: unless-stopped
+ container_name: {{ project.elasticsearch_host }}
+ environment:
+ - "ES_JAVA_OPTS=-Xms1g -Xmx1g"
+ - "cluster.name=openedx"
+ # For the memory lock to work, the container should be started with
+ # sufficient high a value for "Max locked memory".
+ # For docker on a systemctl distro (like Ubuntu) this can be achieved with
+ # echo -e "[Service]\nLimitMEMLOCK=infinity" | SYSTEMD_EDITOR=tee sudo -E systemctl edit docker.service
+ # sudo systemctl daemon-reload
+ # sudo systemctl restart docker
+ - "bootstrap.memory_lock=true"
+ - discovery.type=single-node
+ healthcheck:
+ test: curl --silent --fail localhost:9200/_cluster/health || exit 1
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ ulimits:
+ memlock:
+ soft: -1
+ hard: -1
+ nofile:
+ soft: 65536
+ hard: 65536
+ volumes:
+ - {{ project.elasticsearch_docker_volume }}:/usr/share/elasticsearch/data
+ networks:
+ - derex
+
+{%- else %}
+
+ {{ project.elasticsearch_host }}:
+ image: {{ project.openedx_version.value["elasticsearch_image"] }}
+ restart: unless-stopped
+ container_name: {{ project.elasticsearch_host }}
+ environment:
+ - "ES_JAVA_OPTS=-Xms1g -Xmx1g"
+ - "cluster.name=openedx"
+ # For the memory lock to work, the container should be started with
+ # sufficient high a value for "Max locked memory".
+ # For docker on a systemctl distro (like Ubuntu) this can be achieved with
+ # echo -e "[Service]\nLimitMEMLOCK=infinity" | SYSTEMD_EDITOR=tee sudo -E systemctl edit docker.service
+ # sudo systemctl daemon-reload
+ # sudo systemctl restart docker
+ - "bootstrap.memory_lock=true"
+ healthcheck:
+ test: curl --silent --fail localhost:9200/_cluster/health || exit 1
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ ulimits:
+ memlock:
+ soft: -1
+ hard: -1
+ nofile:
+ soft: 65536
+ hard: 65536
+ volumes:
+ - {{ project.elasticsearch_docker_volume }}:/usr/share/elasticsearch/data
+ networks:
+ - derex
+
+{%- endif %}
diff --git a/derex/runner/compose_files/development/host_caddy/Caddyfile.j2 b/derex/runner/compose_files/development/host_caddy/Caddyfile.j2
new file mode 100644
index 000000000..8c1dd7722
--- /dev/null
+++ b/derex/runner/compose_files/development/host_caddy/Caddyfile.j2
@@ -0,0 +1,12 @@
+:80 {
+ reverse_proxy {http.request.host}.derex:80
+}
+
+:81 {
+ reverse_proxy {http.request.host}.derex:81
+}
+
+# Used by health check
+:8080 {
+ respond /health-check 200
+}
diff --git a/derex/runner/compose_files/mailslurper.json.j2 b/derex/runner/compose_files/development/mailslurper.json.j2
similarity index 79%
rename from derex/runner/compose_files/mailslurper.json.j2
rename to derex/runner/compose_files/development/mailslurper.json.j2
index da2be6a11..b69c2de87 100644
--- a/derex/runner/compose_files/mailslurper.json.j2
+++ b/derex/runner/compose_files/development/mailslurper.json.j2
@@ -9,11 +9,11 @@
"smtpAddress": "0.0.0.0",
"smtpPort": 25,
"dbEngine": "MySQL",
- "dbHost": "mysql",
+ "dbHost": "{{ project.mysql_host }}",
"dbPort": 3306,
"dbDatabase": "mailslurper",
- "dbUserName": "root",
- "dbPassword": "{{ MYSQL_ROOT_PASSWORD }}",
+ "dbUserName": "{{ project.mysql_user }}",
+ "dbPassword": "{{ project.mysql_password }}",
"maxWorkers": 1000,
"autoStartBrowser": false,
"keyFile": "",
diff --git a/derex/runner/fixtures/mailslurper.sql b/derex/runner/compose_files/development/mailslurper.sql
similarity index 100%
rename from derex/runner/fixtures/mailslurper.sql
rename to derex/runner/compose_files/development/mailslurper.sql
diff --git a/derex/runner/compose_files/development/mongodb.yml.j2 b/derex/runner/compose_files/development/mongodb.yml.j2
new file mode 100644
index 000000000..0f7627a02
--- /dev/null
+++ b/derex/runner/compose_files/development/mongodb.yml.j2
@@ -0,0 +1,70 @@
+{%- if project.mongodb_host == "mongodb44" %}
+
+ {{ project.mongodb_host }}:
+ image: {{ project.openedx_version.value["mongodb_image"] }}
+ restart: unless-stopped
+ container_name: {{ project.mongodb_host }}
+ command: mongod --nojournal
+ --storageEngine wiredTiger
+ --wiredTigerEngineConfigString="cache_size=${MONGO_CACHE_MB:-200}M"
+ environment:
+ MONGO_INITDB_ROOT_USERNAME: {{ project.mongodb_user }}
+      MONGO_INITDB_ROOT_PASSWORD: {{ project.mongodb_password }}
+ healthcheck:
+ test: echo 'db.runCommand("ping").ok' | mongo --quiet
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ volumes:
+ - {{ project.mongodb_docker_volume }}:/data/db
+ networks:
+ - derex
+
+{%- elif project.mongodb_host == "mongodb36" %}
+
+ {{ project.mongodb_host }}:
+ image: {{ project.openedx_version.value["mongodb_image"] }}
+ restart: unless-stopped
+ container_name: {{ project.mongodb_host }}
+ command: mongod --smallfiles --nojournal
+ --storageEngine wiredTiger
+ --wiredTigerEngineConfigString="cache_size=${MONGO_CACHE_MB:-200}M"
+ environment:
+ MONGO_INITDB_ROOT_USERNAME: {{ project.mongodb_user }}
+      MONGO_INITDB_ROOT_PASSWORD: {{ project.mongodb_password }}
+ healthcheck:
+ test: echo 'db.runCommand("ping").ok' | mongo --quiet
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ volumes:
+ - {{ project.mongodb_docker_volume }}:/data/db
+ networks:
+ - derex
+
+{%- else %}
+
+ {{ project.mongodb_host }}:
+ image: {{ project.openedx_version.value["mongodb_image"] }}
+ restart: unless-stopped
+ container_name: {{ project.mongodb_host }}
+ command: mongod --smallfiles --nojournal
+ --storageEngine wiredTiger
+ --wiredTigerEngineConfigString="cache_size=${MONGO_CACHE_MB:-200}M"
+ environment:
+ MONGO_INITDB_ROOT_USERNAME: {{ project.mongodb_user }}
+      MONGO_INITDB_ROOT_PASSWORD: {{ project.mongodb_password }}
+ healthcheck:
+ test: echo 'db.runCommand("ping").ok' | mongo --quiet
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ volumes:
+ - {{ project.mongodb_docker_volume }}:/data/db
+ networks:
+ - derex
+
+{%- endif %}
diff --git a/derex/runner/compose_files/development/mysql.yml.j2 b/derex/runner/compose_files/development/mysql.yml.j2
new file mode 100644
index 000000000..af977838d
--- /dev/null
+++ b/derex/runner/compose_files/development/mysql.yml.j2
@@ -0,0 +1,41 @@
+{%- if project.mysql_host == "mysql57" %}
+
+ {{ project.mysql_host }}:
+ image: {{ project.openedx_version.value["mysql_image"] }}
+ restart: unless-stopped
+ container_name: {{ project.mysql_host }}
+ command: mysqld --character-set-server=utf8 --collation-server=utf8_general_ci
+ environment:
+ MYSQL_ROOT_PASSWORD: {{ project.mysql_password }}
+ healthcheck:
+ test: mysqladmin ping
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ volumes:
+ - {{ project.mysql_docker_volume }}:/var/lib/mysql
+ networks:
+ - derex
+
+{%- else %}
+
+ {{ project.mysql_host }}:
+ image: {{ project.openedx_version.value["mysql_image"] }}
+ restart: unless-stopped
+ container_name: {{ project.mysql_host }}
+ command: mysqld --character-set-server=utf8 --collation-server=utf8_general_ci
+ environment:
+ MYSQL_ROOT_PASSWORD: {{ project.mysql_password }}
+ healthcheck:
+ test: mysqladmin ping
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ volumes:
+ - {{ project.mysql_docker_volume }}:/var/lib/mysql
+ networks:
+ - derex
+
+{%- endif %}
diff --git a/derex/runner/compose_files/docker-compose-services.yml b/derex/runner/compose_files/docker-compose-services.yml
deleted file mode 100644
index 105ac816a..000000000
--- a/derex/runner/compose_files/docker-compose-services.yml
+++ /dev/null
@@ -1,172 +0,0 @@
-# Services needed for Open edX to work
-version: "3.5"
-services:
- mongodb:
- image: mongo:3.2.21
- restart: unless-stopped
- container_name: mongodb
- command: mongod --smallfiles --nojournal
- --storageEngine wiredTiger
- --wiredTigerEngineConfigString="cache_size=${MONGO_CACHE_MB:-200}M"
- environment:
- MONGO_INITDB_ROOT_USERNAME: "{{ MONGODB_ROOT_USERNAME }}"
- MONGO_INITDB_ROOT_PASSWORD: "{{ MONGODB_ROOT_PASSWORD }}"
- healthcheck:
- test: echo 'db.runCommand("ping").ok' | mongo --quiet
- interval: 30s
- timeout: 30s
- retries: 3
- start_period: 10s
- volumes:
- - derex_mongodb:/data/db
- networks:
- - derex
-
- mysql:
- image: mysql:5.6.36
- restart: unless-stopped
- container_name: mysql
- command: mysqld --character-set-server=utf8 --collation-server=utf8_general_ci
- environment:
- MYSQL_ROOT_PASSWORD: "{{ MYSQL_ROOT_PASSWORD }}"
- healthcheck:
- test: mysqladmin ping
- interval: 30s
- timeout: 30s
- retries: 3
- start_period: 10s
- volumes:
- - derex_mysql:/var/lib/mysql
- networks:
- - derex
-
- elasticsearch:
- image: elasticsearch:1.5.2
- restart: unless-stopped
- container_name: elasticsearch
- environment:
- - "ES_JAVA_OPTS=-Xms1g -Xmx1g"
- - "cluster.name=openedx"
- # For the memory lock to work, the container should be started with
- # sufficient high a value for "Max locked memory".
- # For docker on a systemctl distro (like Ubuntu) this can be achieved with
- # echo -e "[Service]\nLimitMEMLOCK=infinity" | SYSTEMD_EDITOR=tee sudo -E systemctl edit docker.service
- # sudo systemctl daemon-reload
- # sudo systemctl restart docker
- - "bootstrap.memory_lock=true"
- healthcheck:
- test: curl --silent --fail localhost:9200/_cluster/health || exit 1
- interval: 30s
- timeout: 30s
- retries: 3
- start_period: 10s
- ulimits:
- memlock:
- soft: -1
- hard: -1
- nofile:
- soft: 65536
- hard: 65536
- volumes:
- - derex_elasticsearch:/usr/share/elasticsearch/data
- networks:
- - derex
-
- rabbitmq:
- image: rabbitmq:3.6.16-alpine
- restart: unless-stopped
- hostname: rabbitmq
- container_name: rabbitmq
- healthcheck:
- test: rabbitmqctl node_health_check | grep "Health check passed" -q
- interval: 30s
- timeout: 30s
- retries: 3
- start_period: 10s
- volumes:
- - derex_rabbitmq:/etc/rabbitmq/
- - derex_rabbitmq:/var/lib/rabbitmq
- - derex_rabbitmq:/var/log/rabbitmq/
- networks:
- - derex
-
- mailslurper:
- image: derex/mailslurper:1.14.1
- restart: unless-stopped
- container_name: smtp
- volumes:
- - ./mailslurper.json:/config.json
- depends_on:
- - mysql
- networks:
- derex:
- aliases:
- - mailslurper.localhost.derex
-
- memcached:
- image: memcached:1.6.3-alpine
- restart: unless-stopped
- container_name: memcached
- healthcheck:
- test: nc -z 127.0.0.1 11211
- interval: 30s
- timeout: 30s
- retries: 3
- start_period: 10s
- networks:
- - derex
-
- minio:
- image: minio/minio:RELEASE.2020-06-03T22-13-49Z
- restart: unless-stopped
- container_name: minio
- volumes:
- - derex_minio:/data
- environment:
- MINIO_ACCESS_KEY: minio_derex
- MINIO_SECRET_KEY: "{{ MINIO_SECRET_KEY }}"
- command: server --address :80 /data
- healthcheck:
- test: curl --silent --fail http://localhost:80/minio/health/live
- interval: 30s
- timeout: 20s
- retries: 3
- networks:
- derex:
- aliases:
- - minio.localhost.derex
- - minio.localhost
-
- httpserver:
- image: caddy:2.0.0
- restart: unless-stopped
- container_name: httpserver
- ports:
- - 127.0.0.1:80:80
- - 127.0.0.1:81:81
- - 127.0.0.1:4301:4301 # Mailslurper port
- volumes:
- - ./Caddyfile:/etc/caddy/Caddyfile
- healthcheck:
- test: wget -q -O - http://localhost:8080
- interval: 30s
- timeout: 20s
- retries: 3
- networks:
- - derex
-
-volumes:
- derex_mongodb:
- external: true
- derex_mysql:
- external: true
- derex_elasticsearch:
- external: true
- derex_rabbitmq:
- external: true
- derex_minio:
- external: true
-
-networks:
- derex:
- name: derex
diff --git a/derex/runner/compose_files/docker-compose-admin.yml b/derex/runner/compose_files/production/docker-compose-admin.yml.j2
similarity index 100%
rename from derex/runner/compose_files/docker-compose-admin.yml
rename to derex/runner/compose_files/production/docker-compose-admin.yml.j2
diff --git a/derex/runner/compose_files/production/docker-compose-project.yml.j2 b/derex/runner/compose_files/production/docker-compose-project.yml.j2
new file mode 100644
index 000000000..157d07311
--- /dev/null
+++ b/derex/runner/compose_files/production/docker-compose-project.yml.j2
@@ -0,0 +1,309 @@
+# Open edX services
+version: "3.5"
+
+x-common:
+ &common-conf
+ {% if project.runmode.name == "production" -%}
+ image: {{ project.image_name }}
+ restart: unless-stopped
+ {% else -%}
+ image: {{ project.requirements_image_name }}
+ {% endif -%}
+ tmpfs:
+ - /tmp/
+ networks:
+ - {{ project.name }}_{{ project.environment.name }}_network
+ volumes:
+ - {{ project.name }}_{{ project.environment.name }}_openedx_media:/openedx/media
+ - {{ project.name }}_{{ project.environment.name }}_openedx_data:/openedx/data/
+ {%- if project.settings_directory_path() %}
+ - {{ project.settings_directory_path() }}:/openedx/edx-platform/derex_settings
+ {%- endif %}
+ - {{ derex_django_path }}:/openedx/derex_django
+ {%- if openedx_customizations %}{%- for dest, src in openedx_customizations.items() %}
+ - {{ src }}:{{ dest }}
+ {%- endfor %}{%- endif %}
+ {%- if project.requirements_dir and not project.requirements_volumes %}
+ - {{ project.requirements_dir }}:/openedx/derex.requirements
+ {%- endif -%}
+ {%- if project.requirements_volumes %}{%- for src, dest in project.requirements_volumes.items() %}
+ - {{ src }}:{{ dest }}
+ {%- endfor %}{%- endif %}
+ {%- if project.fixtures_dir %}
+ - {{ project.fixtures_dir }}:/openedx/fixtures
+ {%- endif -%}
+ {%- if project.themes_dir %}
+ - {{ project.themes_dir }}:/openedx/themes
+ {%- endif -%}
+ {%- if project.runmode.value == "production" %}
+ - {{ wsgi_py_path }}:/openedx/edx-platform/wsgi.py
+ {%- endif %}
+
+ environment:
+ &common-env
+ DJANGO_SETTINGS_MODULE: {{ project.settings.value }}
+ DEREX_PROJECT: {{ project.name }}_{{ project.environment.name }}
+ DEREX_OPENEDX_VERSION: {{ project.openedx_version.name }}
+ MYSQL_HOST: {{ project.mysql_host }}
+ MYSQL_DB_NAME: {{ project.mysql_db_name }}
+ MYSQL_USER: {{ project.mysql_user }}
+ MYSQL_PASSWORD: {{ project.mysql_password }}
+ MONGODB_HOST: {{ project.mongodb_host }}
+ MONGODB_DB_NAME: {{ project.mongodb_db_name }}
+ MONGODB_USER: {{ project.mongodb_user }}
+ MONGODB_PASSWORD: {{ project.mongodb_password }}
+ DEREX_MINIO_SECRET: {{ project.minio_password }}
+ {%- for key, value in project.get_container_env().items() %}
+ {{ key }}: {{ value | tojson }}
+ {%- endfor %}
+
+services:
+ flower:
+ <<: *common-conf
+ command:
+ sh -c 'echo Obtaining broker configuration from edx. This is a bit slow;
+ export FLOWER_OPTIONS=$$(echo "from django.conf import settings; print(\"--broker=\" + settings.BROKER_URL + \" --broker_api=http://\" + settings.CELERY_BROKER_USER + \":\" + settings.CELERY_BROKER_PASSWORD + \"@\" + settings.CELERY_BROKER_HOSTNAME + \":15672/api/\") " | ./manage.py lms shell);
+ echo Done. Flower options are \"$$FLOWER_OPTIONS\";
+ exec flower --port=80 $$FLOWER_OPTIONS'
+ environment:
+ <<: *common-env
+ SERVICE_VARIANT: lms
+ networks:
+ {{ project.name }}_{{ project.environment.name }}_network:
+ aliases:
+ - flower.{{ project.name }}_{{ project.environment.name }}.localhost.derex
+
+ lms:
+ <<: *common-conf
+ {% if project.runmode.value == "debug" -%}
+ command:
+ sh -c 'exec ./manage.py $${SERVICE_VARIANT} runserver --noreload 0:80'
+ {% else -%}
+ command:
+ sh -c 'exec gunicorn --name $${SERVICE_VARIANT}
+ --bind 0.0.0.0:80
+ --max-requests 1000
+ --workers 3
+ --max-requests-jitter 200
+ --worker-class gevent
+ --worker-tmp-dir /dev/shm
+ --log-file=-
+ wsgi:application'
+ healthcheck:
+ test: ["CMD", "wget", "localhost:80/heartbeat", "-q", "-O", "/dev/null"]
+ {% endif -%}
+ environment:
+ <<: *common-env
+ SERVICE_VARIANT: lms
+ networks:
+ {{ project.name }}_{{ project.environment.name }}_network:
+ aliases:
+ - {{ project.name }}_{{ project.environment.name }}.localhost.derex
+ - preview.{{ project.name }}_{{ project.environment.name }}.localhost.derex
+
+ cms:
+ <<: *common-conf
+ {% if project.runmode.value == "debug" -%}
+ command:
+ sh -c 'exec ./manage.py $${SERVICE_VARIANT} runserver --noreload 0:80'
+ {% else -%}
+ command:
+ sh -c 'exec gunicorn --name $${SERVICE_VARIANT}
+ --bind 0.0.0.0:80
+ --max-requests 1000
+ --workers 3
+ --max-requests-jitter 200
+ --worker-class gevent
+ --worker-tmp-dir /dev/shm
+ --log-file=-
+ --timeout 300
+ wsgi:application'
+ healthcheck:
+ test: ["CMD", "wget", "localhost:80/heartbeat", "-q", "-O", "/dev/null"]
+ {% endif -%}
+ environment:
+ <<: *common-env
+ SERVICE_VARIANT: cms
+ {% if project.runmode.value == "debug" -%}
+ {% endif %}
+ networks:
+ {{ project.name }}_{{ project.environment.name }}_network:
+ aliases:
+ - studio.{{ project.name }}_{{ project.environment.name }}.localhost.derex
+
+ lms_worker:
+ <<: *common-conf
+ command:
+ sh -c 'celery worker -A lms.celery:APP --loglevel=INFO -n openedx.lms -Q openedx.lms.default --beat'
+ healthcheck:
+ test: celery inspect ping -A lms.celery:APP -d celery@openedx.lms
+ interval: 15m
+ timeout: 15s
+ retries: 3
+ start_period: 30s
+ environment:
+ <<: *common-env
+ C_FORCE_ROOT: "True"
+ SERVICE_VARIANT: lms
+
+ cms_worker:
+ <<: *common-conf
+ command:
+ sh -c 'celery worker -A cms.celery:APP --loglevel=INFO -n openedx.cms -Q openedx.cms.default --beat'
+ healthcheck:
+ test: celery inspect ping -A cms.celery:APP -d celery@openedx.cms
+ interval: 15m
+ timeout: 15s
+ retries: 3
+ start_period: 30s
+ environment:
+ <<: *common-env
+ C_FORCE_ROOT: "True"
+ SERVICE_VARIANT: cms
+
+ mongodb:
+ image: {{ project.openedx_version.value["mongodb_image"] }}
+ restart: unless-stopped
+ command: mongod --smallfiles --nojournal
+ --storageEngine wiredTiger
+ --wiredTigerEngineConfigString="cache_size=${MONGO_CACHE_MB:-200}M"
+ environment:
+ MONGO_INITDB_ROOT_USERNAME: "{{ project.mongodb_user }}"
+ MONGO_INITDB_ROOT_PASSWORD: "{{ project.mongodb_password }}"
+ healthcheck:
+ test: echo 'db.runCommand("ping").ok' | mongo --quiet
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ volumes:
+ - {{ project.mongodb_docker_volume }}:/data/db
+ networks:
+ - {{ project.name }}_{{ project.environment.name }}_network
+
+ mysql:
+ image: {{ project.openedx_version.value["mysql_image"] }}
+ restart: unless-stopped
+ command: mysqld --character-set-server=utf8 --collation-server=utf8_general_ci
+ environment:
+ MYSQL_USER: "{{ project.mysql_user }}"
+ MYSQL_ROOT_PASSWORD: "{{ project.mysql_password }}"
+ healthcheck:
+ test: mysqladmin ping
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ volumes:
+ - {{ project.mysql_docker_volume }}:/var/lib/mysql
+ networks:
+ {{ project.name }}_{{ project.environment.name }}_network:
+
+ elasticsearch:
+ image: {{ project.openedx_version.value["elasticsearch_image"] }}
+ restart: unless-stopped
+ environment:
+ - "ES_JAVA_OPTS=-Xms1g -Xmx1g"
+ - "cluster.name=openedx"
+ # For the memory lock to work, the container should be started with
+ # sufficient high a value for "Max locked memory".
+ # For docker on a systemctl distro (like Ubuntu) this can be achieved with
+ # echo -e "[Service]\nLimitMEMLOCK=infinity" | SYSTEMD_EDITOR=tee sudo -E systemctl edit docker.service
+ # sudo systemctl daemon-reload
+ # sudo systemctl restart docker
+ - "bootstrap.memory_lock=true"
+ healthcheck:
+ test: curl --silent --fail localhost:9200/_cluster/health || exit 1
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ ulimits:
+ memlock:
+ soft: -1
+ hard: -1
+ nofile:
+ soft: 65536
+ hard: 65536
+ volumes:
+ - {{ project.elasticsearch_docker_volume }}:/usr/share/elasticsearch/data
+ networks:
+ {{ project.name }}_{{ project.environment.name }}_network:
+
+ rabbitmq:
+ image: rabbitmq:3.6.16-alpine
+ restart: unless-stopped
+ hostname: rabbitmq
+ healthcheck:
+ test: rabbitmqctl node_health_check | grep "Health check passed" -q
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ volumes:
+ - {{ project.rabbitmq_docker_volume }}:/etc/rabbitmq/
+ - {{ project.rabbitmq_docker_volume }}:/var/lib/rabbitmq
+ - {{ project.rabbitmq_docker_volume }}:/var/log/rabbitmq/
+ networks:
+ {{ project.name }}_{{ project.environment.name }}_network:
+
+ memcached:
+ image: memcached:1.6.3-alpine
+ restart: unless-stopped
+ healthcheck:
+ test: nc -z 127.0.0.1 11211
+ interval: 30s
+ timeout: 30s
+ retries: 3
+ start_period: 10s
+ networks:
+ {{ project.name }}_{{ project.environment.name }}_network:
+ aliases:
+ - memcached.{{ project.name }}_{{ project.environment.name }}.localhost.derex
+
+ minio:
+ image: minio/minio:RELEASE.2020-06-03T22-13-49Z
+ restart: unless-stopped
+ volumes:
+ - {{ project.minio_docker_volume }}:/data
+ environment:
+ MINIO_ACCESS_KEY: minio_derex
+ MINIO_SECRET_KEY: "{{ MINIO_SECRET_KEY }}"
+ command: server --address :80 /data
+ healthcheck:
+ test: curl --silent --fail http://localhost:80/minio/health/live
+ interval: 30s
+ timeout: 20s
+ retries: 3
+ networks:
+ {{ project.name }}_{{ project.environment.name }}_network:
+ aliases:
+ - minio.{{ project.name }}_{{ project.environment.name }}.localhost.derex
+
+ caddy:
+ image: caddy:2-alpine
+ restart: unless-stopped
+ volumes:
+ - {{ project.project_caddy_dir }}:/etc/caddy
+ healthcheck:
+ test: wget -q -O - http://localhost:8080
+ interval: 30s
+ timeout: 20s
+ retries: 3
+ networks:
+ - {{ project.name }}_{{ project.environment.name }}_network
+ - derex
+
+
+volumes:
+ {%- for volume in project.docker_volumes %}
+  {{ volume }}:
+ external: true
+ {%- endfor %}
+
+networks:
+ {{ project.name }}_{{ project.environment.name }}_network:
+ name: {{ project.name }}_{{ project.environment.name }}_network
+ derex:
+ external: true
+ name: derex
diff --git a/derex/runner/compose_files/production/docker-compose-services.yml.j2 b/derex/runner/compose_files/production/docker-compose-services.yml.j2
new file mode 100644
index 000000000..3c4db7c3c
--- /dev/null
+++ b/derex/runner/compose_files/production/docker-compose-services.yml.j2
@@ -0,0 +1,26 @@
+# Services needed for Open edX to work
+version: "3.5"
+services:
+ {%- if enable_host_caddy %}
+ caddy:
+ restart: unless-stopped
+ image: caddy:2-alpine
+ ports:
+ - 0.0.0.0:443:443
+ - 0.0.0.0:80:80
+ volumes:
+ - {{ host_caddy_dir }}:/etc/caddy
+ - /var/caddy:/data
+ - /var/log/caddy:/var/log/
+ healthcheck:
+ test: wget -q -O - http://localhost:8080
+ interval: 30s
+ timeout: 20s
+ retries: 3
+ networks:
+ - derex
+ {%- endif %}
+
+networks:
+ derex:
+ name: derex
diff --git a/derex/runner/compose_files/production/host_caddy/Caddyfile.j2 b/derex/runner/compose_files/production/host_caddy/Caddyfile.j2
new file mode 100644
index 000000000..156818743
--- /dev/null
+++ b/derex/runner/compose_files/production/host_caddy/Caddyfile.j2
@@ -0,0 +1,29 @@
+{
+ auto_https off
+}
+(logging) {
+ log {
+ output file /var/log/caddy-https-access.log {
+ roll_size 50mb # Rotate after 50 MB
+ roll_keep 20 # Keep at most 20 log files
+ roll_keep_for 2160h # Keep rotated files for 90 days
+ }
+ }
+}
+(handle_errors) {
+ handle_errors {
+ @5xx {
+ expression {http.error.status_code} > 499
+ expression {http.error.status_code} < 600
+ }
+ rewrite @5xx /5xx.html
+ file_server {
+ root /etc/caddy/server-static
+ }
+ }
+}
+
+# Used by health check
+http://localhost:8080 {
+ respond /health-check 200
+}
diff --git a/derex/runner/compose_files/production/host_caddy/conf.d/project-caddyfile.j2 b/derex/runner/compose_files/production/host_caddy/conf.d/project-caddyfile.j2
new file mode 100644
index 000000000..ee3465ac0
--- /dev/null
+++ b/derex/runner/compose_files/production/host_caddy/conf.d/project-caddyfile.j2
@@ -0,0 +1,19 @@
+{{ project.config.lms_hostname }} {{ project.config.lms_preview_hostname }} {
+ reverse_proxy http://{{ project.name }}_{{ project.environment.name }}.localhost.derex {
+ header_up X-Forwarded-Port 443
+ }
+ import logging
+ import handle_errors
+}
+{{ project.config.cms_hostname }} {
+ reverse_proxy http://studio.{{ project.name }}_{{ project.environment.name }}.localhost.derex
+ import logging
+ import handle_errors
+}
+{{ project.config.flower_hostname }} {
+ reverse_proxy http://flower.{{ project.name }}_{{ project.environment.name }}.localhost.derex
+}
+
+{{ project.config.minio_hostname }} {
+ reverse_proxy http://minio.{{ project.name }}_{{ project.environment.name }}.localhost.derex
+}
diff --git a/derex/runner/compose_files/production/host_caddy/server-static/5xx.html b/derex/runner/compose_files/production/host_caddy/server-static/5xx.html
new file mode 100644
index 000000000..ed8457027
--- /dev/null
+++ b/derex/runner/compose_files/production/host_caddy/server-static/5xx.html
@@ -0,0 +1,13 @@
+
+
+
+
+
+ Server error
+
+
+
+
+ 500 Server Error Oops, looks like something is wrong.
+
+
diff --git a/derex/runner/compose_files/production/project_caddy/Caddyfile.j2 b/derex/runner/compose_files/production/project_caddy/Caddyfile.j2
new file mode 100644
index 000000000..8c1dd7722
--- /dev/null
+++ b/derex/runner/compose_files/production/project_caddy/Caddyfile.j2
@@ -0,0 +1,12 @@
+:80 {
+ reverse_proxy {http.request.host}.derex:80
+}
+
+:81 {
+ reverse_proxy {http.request.host}.derex:81
+}
+
+# Used by health check
+:8080 {
+ respond /health-check 200
+}
diff --git a/derex/runner/compose_generation.py b/derex/runner/compose_generation.py
index a68c93822..0a3f2135f 100644
--- a/derex/runner/compose_generation.py
+++ b/derex/runner/compose_generation.py
@@ -8,31 +8,31 @@
so a class is put in place to hold each of them.
"""
from derex.runner import hookimpl
-from derex.runner.constants import DDC_ADMIN_PATH
-from derex.runner.constants import DDC_PROJECT_TEMPLATE_PATH
-from derex.runner.constants import DDC_SERVICES_YML_PATH
+from derex.runner.constants import CADDY_DEVELOPMENT_HOST_CADDYFILE_TEMPLATE
+from derex.runner.constants import CADDY_PRODUCTION_HOST_CADDYFILE_TEMPLATE
+from derex.runner.constants import CADDY_PRODUCTION_PROJECT_CADDYFILE_TEMPLATE
+from derex.runner.constants import DDC_PROJECT_DEVELOPMENT_ENVIRONMENT_TEMPLATE_PATH
+from derex.runner.constants import DDC_PROJECT_PRODUCTION_ENVIRONMENT_TEMPLATE_PATH
+from derex.runner.constants import DDC_SERVICES_DEVELOPMENT_ENVIRONMENT_TEMPLATE_PATH
+from derex.runner.constants import DDC_SERVICES_PRODUCTION_ENVIRONMENT_TEMPLATE_PATH
from derex.runner.constants import DDC_TEST_TEMPLATE_PATH
from derex.runner.constants import DEREX_DJANGO_PATH
-from derex.runner.constants import DEREX_ETC_PATH
-from derex.runner.constants import MAILSLURPER_JSON_TEMPLATE
-from derex.runner.constants import MONGODB_ROOT_USER
+from derex.runner.constants import MAILSLURPER_CONFIG_TEMPLATE
+from derex.runner.constants import ProjectEnvironment
from derex.runner.constants import WSGI_PY_PATH
from derex.runner.docker_utils import image_exists
from derex.runner.local_appdir import DEREX_DIR
from derex.runner.local_appdir import ensure_dir
from derex.runner.project import Project
-from derex.runner.secrets import DerexSecrets
-from derex.runner.secrets import get_secret
from derex.runner.utils import asbool
-from distutils import dir_util
-from jinja2 import Template
+from derex.runner.utils import compile_jinja_template
from pathlib import Path
+from typing import Any
from typing import Dict
from typing import List
from typing import Union
import logging
-import os
logger = logging.getLogger(__name__)
@@ -41,17 +41,22 @@
class BaseServices:
@staticmethod
@hookimpl
- def ddc_services_options() -> Dict[str, Union[str, List[str]]]:
+ def ddc_services_options(project: Project) -> Dict[str, Union[str, List[str]]]:
"""See derex.runner.plugin_spec.ddc_services_options docstring."""
- services_compose_path = generate_ddc_services_compose()
+ if project.environment is ProjectEnvironment.development:
+ project_name = "derex_services"
+ else:
+ project_name = project.name
+
options = [
"--project-name",
- "derex_services",
+ project_name,
"-f",
- str(services_compose_path),
+ str(generate_ddc_services_compose(project)),
]
- if asbool(os.environ.get("DEREX_ADMIN_SERVICES", True)):
- options += ["-f", str(DDC_ADMIN_PATH)]
+        # Move this into a separate plugin
+ # if asbool(os.environ.get("DEREX_ADMIN_SERVICES", True)):
+ # options += ["-f", str(DDC_ADMIN_PATH)]
return {
"options": options,
"name": "base-services",
@@ -72,12 +77,9 @@ def ddc_project_options(project: Project) -> Dict[str, Union[str, List[str]]]:
class LocalServices:
@staticmethod
@hookimpl
- def ddc_services_options() -> Dict[str, Union[str, List[str]]]:
+ def ddc_services_options(project: Project) -> Dict[str, Union[str, List[str]]]:
"""See derex.runner.plugin_spec.ddc_services_options docstring."""
- local_path = (
- Path(os.getenv("DEREX_ETC_PATH", DEREX_ETC_PATH))
- / "docker-compose-services.yml"
- )
+ local_path = project.etc_path / "docker-compose-services.yml"
options: List[str] = []
if local_path.is_file():
options = ["-f", str(local_path)]
@@ -103,12 +105,32 @@ def ddc_project_options(project: Project) -> Dict[str, Union[str, List[str]]]:
}
+class LocalProjectEnvironment:
+ @staticmethod
+ @hookimpl
+ def ddc_project_options(project: Project) -> Dict[str, Union[str, List[str]]]:
+ """See derex.runner.plugin_spec.ddc_project_options docstring"""
+ local_path = (
+ project.root / f"docker-compose-env-{project.environment.value}.yml"
+ )
+ options: List[str] = []
+ if local_path.is_file():
+ options = ["-f", str(local_path)]
+ return {
+ "options": options,
+ "name": "local-project-environment",
+ "priority": "_end",
+ }
+
+
class LocalProjectRunmode:
@staticmethod
@hookimpl
def ddc_project_options(project: Project) -> Dict[str, Union[str, List[str]]]:
"""See derex.runner.plugin_spec.ddc_project_options docstring"""
- local_path = project.root / f"docker-compose-{project.runmode.value}.yml"
+ local_path = (
+ project.root / f"docker-compose-runmode-{project.runmode.value}.yml"
+ )
options: List[str] = []
if local_path.is_file():
options = ["-f", str(local_path)]
@@ -120,8 +142,11 @@ def generate_ddc_project_compose(project: Project) -> Path:
It assembles a docker-compose file from the given configuration.
It should execute as fast as possible.
"""
- project_compose_path = project.private_filepath("docker-compose.yml")
- template_path = DDC_PROJECT_TEMPLATE_PATH
+ if project.environment is ProjectEnvironment.development:
+ template_path = DDC_PROJECT_DEVELOPMENT_ENVIRONMENT_TEMPLATE_PATH
+ else:
+ template_path = DDC_PROJECT_PRODUCTION_ENVIRONMENT_TEMPLATE_PATH
+
final_image = None
if image_exists(project.image_name):
final_image = project.image_name
@@ -133,15 +158,28 @@ def generate_ddc_project_compose(project: Project) -> Path:
openedx_customizations = project.get_openedx_customizations()
- tmpl = Template(template_path.read_text())
- text = tmpl.render(
- project=project,
- final_image=final_image,
- wsgi_py_path=WSGI_PY_PATH,
- derex_django_path=DEREX_DJANGO_PATH,
- openedx_customizations=openedx_customizations,
+ context = {
+ "project": project,
+ "final_image": final_image,
+ "wsgi_py_path": WSGI_PY_PATH,
+ "derex_django_path": DEREX_DJANGO_PATH,
+ "openedx_customizations": openedx_customizations,
+ }
+ project_compose_path = compile_jinja_template(
+ template_path,
+ project.private_filepath("docker-compose.yml"),
+ context=context,
)
- project_compose_path.write_text(text)
+
+ if (
+ not project.project_caddy_dir
+ and project.environment is ProjectEnvironment.production
+ ):
+ project_caddy_config = generate_project_caddy_config(project)
+ context.update({"project_caddy_dir": project_caddy_config.parent})
+ else:
+ context.update({"project_caddy_dir": project.project_caddy_dir})
+
return project_compose_path
@@ -150,43 +188,95 @@ def generate_ddc_test_compose(project: Project) -> Path:
the given project.
It should execute as fast as possible.
"""
- test_compose_path = project.private_filepath("docker-compose-test.yml")
- template_path = DDC_TEST_TEMPLATE_PATH
-
- tmpl = Template(template_path.read_text())
- text = tmpl.render(project=project)
- test_compose_path.write_text(text)
+ test_compose_path = compile_jinja_template(
+ DDC_TEST_TEMPLATE_PATH,
+ project.private_filepath("docker-compose-test.yml"),
+ context={"project": project},
+ )
return test_compose_path
-def generate_ddc_services_compose() -> Path:
+def generate_ddc_services_compose(project: Project) -> Path:
"""Generate the global docker-compose config file that will drive
ddc-services and return its path.
"""
- local_path = DEREX_DIR / "services" / DDC_SERVICES_YML_PATH.name
- # Copy all files
- dir_util.copy_tree(
- str(DDC_SERVICES_YML_PATH.parent),
- str(local_path.parent),
- update=1, # Do not copy files more than once
- verbose=1,
- )
- # Compile the mailslurper template to include the mysql password
- tmpl = Template(MAILSLURPER_JSON_TEMPLATE.read_text())
- MYSQL_ROOT_PASSWORD = get_secret(DerexSecrets.mysql)
- text = tmpl.render(MYSQL_ROOT_PASSWORD=MYSQL_ROOT_PASSWORD)
- (local_path.parent / MAILSLURPER_JSON_TEMPLATE.name.replace(".j2", "")).write_text(
- text
- )
+ context: Dict[str, Any] = {}
+ if project.environment is ProjectEnvironment.development:
+ ddc_services_template_path = DDC_SERVICES_DEVELOPMENT_ENVIRONMENT_TEMPLATE_PATH
+ # Mailslurper config file generation should be moved elsewhere,
+        # ddc-services should not be responsible for generating it.
+ # Probably a client interface like we are already doing
+ # with `derex reset mailslurper`
+ templates_paths = [MAILSLURPER_CONFIG_TEMPLATE, ddc_services_template_path]
+ else:
+ ddc_services_template_path = DDC_SERVICES_PRODUCTION_ENVIRONMENT_TEMPLATE_PATH
+ templates_paths = [ddc_services_template_path]
+
+ if asbool(project.enable_host_caddy):
+ context.update({"enable_host_caddy": True})
+ if not project.host_caddy_dir:
+ host_caddy_config_path = generate_host_caddy_config(project)
+ context.update(
+ {
+ "host_caddy_dir": host_caddy_config_path.parent,
+ "host_caddy_config_path": host_caddy_config_path,
+ }
+ )
+ else:
+ context.update({"host_caddy_dir": project.host_caddy_dir})
- # Compile the docker compose yaml template
+ # Add the project object to the template context
+ context.update({"project": project})
+ local_path = DEREX_DIR / "compose_files"
ensure_dir(local_path)
- tmpl = Template(DDC_SERVICES_YML_PATH.read_text())
- text = tmpl.render(
- MINIO_SECRET_KEY=get_secret(DerexSecrets.minio),
- MONGODB_ROOT_USERNAME=MONGODB_ROOT_USER,
- MONGODB_ROOT_PASSWORD=get_secret(DerexSecrets.mongodb),
- MYSQL_ROOT_PASSWORD=MYSQL_ROOT_PASSWORD,
- )
- local_path.write_text(text)
- return local_path
+ for template_path in templates_paths:
+ destination = local_path / template_path.name.replace(".j2", "")
+ compile_jinja_template(template_path, destination, context=context)
+ return destination
+
+
+def generate_project_caddy_config(project: Project) -> Path:
+ """Generate Caddyfile needed to serve the project through a Caddy HTTP server.
+ In a development environment there is a single caddy server running on the host
+ serving all projects.
+ """
+ if project.environment is ProjectEnvironment.development:
+ raise RuntimeError(
+            "In a development environment we don't need a project caddy server!"
+ )
+ else:
+ # In a production environment configure an internal caddy server for every project.
+ # This will be the only entry point to the project internal network.
+ template_path = CADDY_PRODUCTION_PROJECT_CADDYFILE_TEMPLATE
+ if project.project_caddy_dir:
+ template_path = project.project_caddy_dir / "Caddyfile"
+ if not template_path.exists():
+ raise RuntimeError(
+ f"No caddyfile exists at {template_path}."
+                f" Add one or delete {project.project_caddy_dir}."
+ )
+ context = {"project": project}
+ destination = project.private_filepath("Caddyfile")
+ ensure_dir(destination.parent)
+ compile_jinja_template(template_path, destination, context=context)
+ return destination
+
+
+def generate_host_caddy_config(project: Project) -> Path:
+ """Generate Caddyfile needed for the host Caddy HTTP server.
+ In a development environment this will be Caddy server serving all projects
+ and will route requests directly to docker containers.
+
+ In a production environment this server will route
+ requests to an internally facing Caddy server specific to every project.
+ """
+ if project.environment is ProjectEnvironment.development:
+ template_path = CADDY_DEVELOPMENT_HOST_CADDYFILE_TEMPLATE
+ else:
+ template_path = CADDY_PRODUCTION_HOST_CADDYFILE_TEMPLATE
+
+ local_path = DEREX_DIR / "caddy" / "host"
+ ensure_dir(local_path)
+ destination = local_path / template_path.name.replace(".j2", "")
+ compile_jinja_template(template_path, destination)
+ return destination
diff --git a/derex/runner/compose_utils.py b/derex/runner/compose_utils.py
index 33919dc20..8cae96157 100644
--- a/derex/runner/compose_utils.py
+++ b/derex/runner/compose_utils.py
@@ -23,7 +23,7 @@ def run_docker_compose(
if exit_afterwards:
main()
else:
- with exit_cm():
+ with exit_context_manager():
main()
else:
click.echo("Would have run:\n")
@@ -33,7 +33,7 @@ def run_docker_compose(
@contextmanager
-def exit_cm():
+def exit_context_manager():
# Context manager to monkey patch sys.exit calls
import sys
diff --git a/derex/runner/constants.py b/derex/runner/constants.py
index 1e4916161..b57ed54c4 100644
--- a/derex/runner/constants.py
+++ b/derex/runner/constants.py
@@ -1,41 +1,145 @@
-from derex.runner.utils import derex_path
+from derex.runner import derex_path
+from enum import Enum
from pathlib import Path
DEREX_ETC_PATH = Path("/etc/derex")
-WSGI_PY_PATH = derex_path("derex/runner/compose_files/wsgi.py")
-DDC_SERVICES_YML_PATH = derex_path(
- "derex/runner/compose_files/docker-compose-services.yml"
+WSGI_PY_PATH = derex_path("derex/runner/compose_files/common/wsgi.py")
+DDC_SERVICES_DEVELOPMENT_ENVIRONMENT_TEMPLATE_PATH = derex_path(
+ "derex/runner/compose_files/development/docker-compose-services.yml.j2"
)
-DDC_ADMIN_PATH = derex_path("derex/runner/compose_files/docker-compose-admin.yml")
-DDC_PROJECT_TEMPLATE_PATH = derex_path(
- "derex/runner/templates/docker-compose-project.yml.j2"
+DDC_SERVICES_PRODUCTION_ENVIRONMENT_TEMPLATE_PATH = derex_path(
+ "derex/runner/compose_files/production/docker-compose-services.yml.j2"
)
-DDC_TEST_TEMPLATE_PATH = derex_path("derex/runner/templates/docker-compose-test.yml.j2")
-MAILSLURPER_JSON_TEMPLATE = derex_path("derex/runner/compose_files/mailslurper.json.j2")
+DDC_ADMIN_PATH = derex_path(
+ "derex/runner/compose_files/production/docker-compose-admin.yml.j2"
+)
+DDC_PROJECT_DEVELOPMENT_ENVIRONMENT_TEMPLATE_PATH = derex_path(
+ "derex/runner/compose_files/development/docker-compose-project.yml.j2"
+)
+DDC_PROJECT_PRODUCTION_ENVIRONMENT_TEMPLATE_PATH = derex_path(
+ "derex/runner/compose_files/production/docker-compose-project.yml.j2"
+)
+DDC_TEST_TEMPLATE_PATH = derex_path(
+ "derex/runner/compose_files/common/docker-compose-test.yml.j2"
+)
+
+MAILSLURPER_CONFIG_TEMPLATE = derex_path(
+ "derex/runner/compose_files/development/mailslurper.json.j2"
+)
+
DEREX_DJANGO_PATH = derex_path("derex/django/__init__.py").parent
DEREX_DJANGO_SETTINGS_PATH = DEREX_DJANGO_PATH / "settings"
+
DEREX_OPENEDX_CUSTOMIZATIONS_PATH = derex_path(
- "derex/runner/compose_files/openedx_customizations/README.rst"
+ "derex/runner/compose_files/common/openedx_customizations/README.rst"
).parent
+CADDY_DEVELOPMENT_HOST_CADDYFILE_TEMPLATE = derex_path(
+ "derex/runner/compose_files/development/host_caddy/Caddyfile.j2"
+)
+CADDY_PRODUCTION_PROJECT_CADDYFILE_TEMPLATE = derex_path(
+ "derex/runner/compose_files/production/project_caddy/Caddyfile.j2"
+)
+CADDY_PRODUCTION_HOST_CADDYFILE_TEMPLATE = derex_path(
+ "derex/runner/compose_files/production/host_caddy/Caddyfile.j2"
+)
+
CONF_FILENAME = "derex.config.yaml"
SECRETS_CONF_FILENAME = "derex.secrets.yaml"
MYSQL_ROOT_USER = "root"
MONGODB_ROOT_USER = "root"
+MINIO_ROOT_USER = "minio_derex"
+
+DEREX_MAIN_SECRET_DEFAULT_MAX_SIZE = 1024
+DEREX_MAIN_SECRET_DEFAULT_MIN_SIZE = 8
+DEREX_MAIN_SECRET_DEFAULT_MIN_ENTROPY = 128
+DEREX_MAIN_SECRET_DEFAULT_PATH = "/etc/derex/main_secret"
assert all(
(
WSGI_PY_PATH,
- DDC_SERVICES_YML_PATH,
+ CADDY_DEVELOPMENT_HOST_CADDYFILE_TEMPLATE,
+ CADDY_PRODUCTION_PROJECT_CADDYFILE_TEMPLATE,
+ CADDY_PRODUCTION_HOST_CADDYFILE_TEMPLATE,
+ DDC_SERVICES_DEVELOPMENT_ENVIRONMENT_TEMPLATE_PATH,
+ DDC_SERVICES_PRODUCTION_ENVIRONMENT_TEMPLATE_PATH,
DDC_ADMIN_PATH,
- DDC_PROJECT_TEMPLATE_PATH,
+ DDC_PROJECT_DEVELOPMENT_ENVIRONMENT_TEMPLATE_PATH,
+ DDC_PROJECT_PRODUCTION_ENVIRONMENT_TEMPLATE_PATH,
DDC_TEST_TEMPLATE_PATH,
- MAILSLURPER_JSON_TEMPLATE,
+ MAILSLURPER_CONFIG_TEMPLATE,
DEREX_DJANGO_PATH,
DEREX_DJANGO_SETTINGS_PATH,
DEREX_OPENEDX_CUSTOMIZATIONS_PATH,
)
), "Some distribution files were not found"
+
+
+class OpenEdXVersions(Enum):
+ # Values will be passed as uppercased named arguments to the docker build
+ # e.g. --build-arg EDX_PLATFORM_RELEASE=koa
+ ironwood = {
+ "edx_platform_repository": "https://github.com/edx/edx-platform.git",
+ "edx_platform_version": "open-release/ironwood.master",
+ "edx_platform_release": "ironwood",
+ "docker_image_prefix": "derex/openedx-ironwood",
+ "alpine_version": "alpine3.11",
+ "python_version": "2.7",
+ "pip_version": "20.3.4",
+ # The latest node release does not work on ironwood
+ # (node-sass version fails to compile)
+ "node_version": "v10.22.1",
+ "mysql_image": "mysql:5.6.36",
+ "mongodb_image": "mongo:3.2.21",
+ "elasticsearch_image": "elasticsearch:1.5.2",
+ }
+ juniper = {
+ "edx_platform_repository": "https://github.com/edx/edx-platform.git",
+ "edx_platform_version": "open-release/juniper.master",
+ "edx_platform_release": "juniper",
+ "docker_image_prefix": "derex/openedx-juniper",
+ "alpine_version": "alpine3.11",
+ "python_version": "3.6",
+ "pip_version": "21.0.1",
+ "node_version": "v12.19.0",
+ "mysql_image": "mysql:5.6.36",
+ "mongodb_image": "mongo:3.6.23",
+ "elasticsearch_image": "elasticsearch:1.5.2",
+ }
+ koa = {
+ "edx_platform_repository": "https://github.com/edx/edx-platform.git",
+        # We set koa.3 since as of today (20 May 2021) the koa.master codebase is broken
+ "edx_platform_version": "open-release/koa.3",
+ "edx_platform_release": "koa",
+ "docker_image_prefix": "derex/openedx-koa",
+ # We are stuck on alpine3.12 since SciPy won't build
+ # on gcc>=10 due to fortran incompatibility issues.
+ # See more at https://gcc.gnu.org/gcc-10/porting_to.html
+ "alpine_version": "alpine3.12",
+ "python_version": "3.8",
+ "pip_version": "21.0.1",
+ "node_version": "v12.19.0",
+ "mysql_image": "mysql:5.7.34",
+ "mongodb_image": "mongo:3.6.23",
+ "elasticsearch_image": "elasticsearch:1.5.2",
+ }
+
+
+class ProjectRunMode(Enum):
+ debug = "debug" # The first is the default
+ production = "production"
+
+
+class ProjectEnvironment(Enum):
+ development = "development" # The first is the default
+ staging = "staging"
+ production = "production"
+
+
+class DerexSecrets(Enum):
+ minio = "minio"
+ mysql = "mysql"
+ mongodb = "mongodb"
diff --git a/derex/runner/ddc.py b/derex/runner/ddc.py
index 1f49ead8d..ca6504fa4 100644
--- a/derex/runner/ddc.py
+++ b/derex/runner/ddc.py
@@ -4,10 +4,10 @@
They put a `docker.compose.yml` file in place based on user configuration.
"""
from derex.runner.compose_utils import run_docker_compose
+from derex.runner.constants import ProjectEnvironment
from derex.runner.docker_utils import ensure_volumes_present
from derex.runner.docker_utils import is_docker_working
-from derex.runner.docker_utils import wait_for_service
-from derex.runner.logging_utils import setup_logging
+from derex.runner.docker_utils import wait_for_container
from derex.runner.plugins import setup_plugin_manager
from derex.runner.plugins import sort_and_validate_plugins
from derex.runner.project import DebugBaseImageProject
@@ -40,78 +40,50 @@ def ddc_parse_args(compose_args: List[str]) -> Tuple[List[str], bool]:
def ddc_services():
"""Derex docker-compose: run docker-compose with additional parameters.
- Adds docker compose file paths for services and administrative tools.
- If the environment variable DEREX_ADMIN_SERVICES is set to a falsey value,
- only the core ones will be started (mysql, mongodb etc) and the nice-to-have
- will not (portainer and adminer).
+ Adds docker compose file paths for services relative to the host.
Besides the regular docker-compose options it also accepts the --dry-run
option; in case it's specified docker-compose will not be invoked, but
a line will be printed showing what would have been invoked.
"""
- check_docker()
- setup_logging()
- args, dry_run = ddc_parse_args(sys.argv)
- run_ddc_services(args, dry_run=dry_run, exit_afterwards=True)
+ compose_args, dry_run = ddc_parse_args(sys.argv)
+ run_ddc(list(compose_args), "services", dry_run=dry_run, exit_afterwards=True)
def ddc_project():
"""Proxy for docker-compose: writes a docker-compose.yml file with the
configuration of this project, and then run `docker-compose` on it.
- You probably want do run `ddc-project up -d` and `ddc-project logs -f`.
+ Besides the regular docker-compose options it also accepts the --dry-run
+ option; in case it's specified docker-compose will not be invoked, but
+ a line will be printed showing what would have been invoked.
+
+ You probably want to run `ddc-project up -d` and `ddc-project logs -f`.
"""
- check_docker()
- setup_logging()
- try:
- project = Project()
- except ValueError as exc:
- click.echo(str(exc))
- sys.exit(1)
compose_args, dry_run = ddc_parse_args(sys.argv)
- # If trying to start up containers, first check that needed services are running
- is_start_cmd = any(param in compose_args for param in ["up", "start"])
- if is_start_cmd:
- for service in ["mysql", "mongodb", "rabbitmq"]:
- try:
- wait_for_service(service)
- except (TimeoutError, RuntimeError, NotImplementedError) as exc:
- click.echo(click.style(str(exc), fg="red"))
- sys.exit(1)
- run_ddc_project(list(compose_args), project, dry_run=dry_run, exit_afterwards=True)
-
-
-def check_docker():
- if not is_docker_working():
- click.echo(click.style("Could not connect to docker.", fg="red"))
- click.echo(
- "Is it installed and running? Make sure the docker command works and try again."
- )
- sys.exit(1)
+ run_ddc(list(compose_args), "project", dry_run=dry_run, exit_afterwards=True)
-def run_ddc_services(
- argv: List[str],
- dry_run: bool = False,
- exit_afterwards: bool = False,
-):
- """Run a docker-compose command relative to the system services.
- Plugin arguments are added to arguments passed in this function sorted by
- plugin priority.
+def check_docker(func):
+ """Decorator to check if docker is working before executing the decorated function."""
- Used by ddc-services cli command.
- """
- ensure_volumes_present()
- plugins_argv = sort_and_validate_plugins(
- setup_plugin_manager().hook.ddc_services_options()
- )
- compose_argv = plugins_argv + argv
- run_docker_compose(compose_argv, dry_run, exit_afterwards)
+ def inner(*args, **kwargs):
+ if not is_docker_working():
+ click.echo(click.style("Could not connect to docker.", fg="red"))
+ click.echo(
+ "Is it installed and running? Make sure the docker command works and try again."
+ )
+ sys.exit(1)
+ func(*args, **kwargs)
+ return inner
-def run_ddc_project(
- argv: List[str],
- project: Project,
+
+@check_docker
+def run_ddc(
+ compose_args: List[str],
+ variant: str,
+ project: Optional[Project] = None,
dry_run: bool = False,
exit_afterwards: bool = False,
):
@@ -119,13 +91,41 @@ def run_ddc_project(
Plugin arguments are added to arguments passed in this function sorted by
plugin priority.
- Used by ddc-project cli command.
+ Used by both ddc-services and ddc-project cli command.
"""
- plugins_argv = sort_and_validate_plugins(
- setup_plugin_manager().hook.ddc_project_options(project=project),
- )
- compose_argv = plugins_argv + argv
- run_docker_compose(compose_argv, dry_run, exit_afterwards)
+ if not project:
+ try:
+ project = Project()
+ except ValueError as exc:
+ click.echo(str(exc))
+ sys.exit(1)
+
+ ensure_volumes_present(project)
+
+ if variant == "project":
+ plugins_args = sort_and_validate_plugins(
+ setup_plugin_manager().hook.ddc_project_options(project=project),
+ )
+ if project.environment is ProjectEnvironment.development:
+ # If trying to start up containers, first check that needed services are running
+ is_start_cmd = any(param in compose_args for param in ["up", "start"])
+ if is_start_cmd:
+ for service in [project.mysql_host, project.mongodb_host, "rabbitmq"]:
+ try:
+ wait_for_container(service)
+ except (TimeoutError, RuntimeError, NotImplementedError) as exc:
+ click.echo(click.style(str(exc), fg="red"))
+ sys.exit(1)
+ elif variant == "services":
+ plugins_args = sort_and_validate_plugins(
+ setup_plugin_manager().hook.ddc_services_options(project=project)
+ )
+ else:
+ raise RuntimeError(
+ "ddc variant argument must be either `project` or `services`"
+ )
+ compose_args = plugins_args + compose_args
+ run_docker_compose(compose_args, dry_run, exit_afterwards)
def run_django_script(
@@ -153,7 +153,7 @@ def run_django_script(
]
try:
- run_ddc_project(compose_args, project=DebugBaseImageProject())
+ run_ddc(compose_args, "project", DebugBaseImageProject())
finally:
result_json = open(result_path).read()
try:
diff --git a/derex/runner/docker_utils.py b/derex/runner/docker_utils.py
index 4b59ac25f..48ba6f1b7 100644
--- a/derex/runner/docker_utils.py
+++ b/derex/runner/docker_utils.py
@@ -1,9 +1,10 @@
# -coding: utf8-
"""Utility functions to deal with docker.
"""
-from derex.runner.secrets import DerexSecrets
-from derex.runner.secrets import get_secret
-from derex.runner.utils import abspath_from_egg
+from derex.runner import abspath_from_egg
+from derex.runner.constants import DerexSecrets
+from derex.runner.exceptions import BuildError
+from derex.runner.project import Project
from pathlib import Path
from requests.exceptions import RequestException
from typing import Dict
@@ -23,14 +24,6 @@
client = docker.from_env()
api_client = docker.APIClient()
logger = logging.getLogger(__name__)
-VOLUMES = {
- "derex_elasticsearch",
- "derex_mongodb",
- "derex_mysql",
- "derex_rabbitmq",
- "derex_portainer_data",
- "derex_minio",
-}
def is_docker_working() -> bool:
@@ -49,18 +42,18 @@ def docker_has_experimental() -> bool:
return bool(client.api.info().get("ExperimentalBuild"))
-def ensure_volumes_present():
+def ensure_volumes_present(project: Project):
"""Make sure the derex network necessary for our docker-compose files to
work is in place.
"""
- missing = VOLUMES - {el.name for el in client.volumes.list()}
+ missing = project.docker_volumes - {el.name for el in client.volumes.list()}
for volume in missing:
logger.warning("Creating docker volume '%s'", volume)
client.volumes.create(volume)
-def wait_for_service(service: str, max_seconds: int = 35) -> int:
- """With a freshly created container services might need a bit of time to start.
+def wait_for_container(container_name: str, max_seconds: int = 35) -> int:
+ """A freshly created container might need a bit of time to start.
This functions waits up to max_seconds seconds for the healthcheck on the container
to report as healthy.
Returns an exit code 0 or raises an exception:
@@ -71,17 +64,17 @@ def wait_for_service(service: str, max_seconds: int = 35) -> int:
"""
for i in range(max_seconds):
try:
- container_info = api_client.inspect_container(service)
+ container_info = api_client.inspect_container(container_name)
except docker.errors.NotFound:
raise RuntimeError(
- f"{service} service not found.\n"
+ f"{container_name} container not found.\n"
"Maybe you forgot to run\n"
"ddc-services up -d"
)
container_status = container_info.get("State").get("Status")
if container_status not in ["running", "restarting"]:
raise RuntimeError(
- f'Service {service} is not running (status="{container_status}")\n'
+ f'{container_name} container is not running (status="{container_status}")\n'
"Maybe you forgot to run\n"
"ddc-services up -d"
)
@@ -89,34 +82,32 @@ def wait_for_service(service: str, max_seconds: int = 35) -> int:
healthcheck = container_info.get("State").get("Health").get("Status")
except AttributeError:
raise NotImplementedError(
- f"{service} service doesn't declare any healthcheck.\n"
+ f"{container_name} container doesn't declare any healthcheck.\n"
)
if healthcheck == "healthy":
return 0
time.sleep(1)
- logger.warning(f"Waiting for {service} to be ready")
- raise TimeoutError(f"Can't connect to {service} service")
+ logger.warning(f"Waiting for {container_name} to be ready")
+ raise TimeoutError(f"Can't connect to {container_name} container")
-def check_services(services: Iterable[str], max_seconds: int = 1) -> bool:
- """Check if the specified services are running and healthy.
- For every service it will retry for a `max_seconds` amount of time.
- Returns False if any of the service is unhealthy, True otherwise.
+def check_containers(containers: Iterable[str], max_seconds: int = 1) -> bool:
+ """Check if the specified containers are running and healthy.
+ For every container it will retry for a `max_seconds` amount of time.
+    Returns False if any of the containers is unhealthy, True otherwise.
"""
try:
- for service in services:
- wait_for_service(service, max_seconds)
+ for container in containers:
+ wait_for_container(container, max_seconds)
except (TimeoutError, RuntimeError, NotImplementedError):
return False
return True
-def load_dump(relpath):
+def load_dump(project: Project, relative_path: Path):
"""Loads a mysql dump into the derex mysql database."""
- from derex.runner.mysql import MYSQL_ROOT_PASSWORD
-
- dump_path = abspath_from_egg("derex.runner", relpath)
- image = client.containers.get("mysql").image
+ dump_path = abspath_from_egg("derex.runner", relative_path)
+ image = client.containers.get(project.mysql_host).image
logger.info("Resetting email database")
try:
client.containers.run(
@@ -124,7 +115,7 @@ def load_dump(relpath):
[
"sh",
"-c",
- f"mysql -h mysql -p{MYSQL_ROOT_PASSWORD} < /dump/{dump_path.name}",
+ f"mysql -h {project.mysql_host} -p{project.mysql_password} < /dump/{dump_path.name}",
],
network="derex",
volumes={dump_path.parent: {"bind": "/dump"}},
@@ -202,10 +193,6 @@ def image_exists(needle: str) -> bool:
return False
-class BuildError(RuntimeError):
- """An error occurred while building a docker image"""
-
-
def get_running_containers() -> Dict:
if "derex" in [network.name for network in client.networks.list()]:
return {
@@ -234,9 +221,9 @@ def get_exposed_container_names() -> List:
return result
-def run_minio_shell(command: str = "sh", tty: bool = True):
+def run_minio_shell(project: Project, command: str = "sh", tty: bool = True):
"""Invoke a minio shell"""
- minio_key = get_secret(DerexSecrets.minio)
+ minio_key = project.get_secret(DerexSecrets.minio)
os.system(
f"docker run {'-ti ' if tty else ''}--rm --network derex --entrypoint /bin/sh minio/mc -c '"
f'mc config host add local http://minio:80 minio_derex "{minio_key}" --api s3v4 ; set -ex; {command}\''
diff --git a/derex/runner/exceptions.py b/derex/runner/exceptions.py
new file mode 100644
index 000000000..ee8f779ec
--- /dev/null
+++ b/derex/runner/exceptions.py
@@ -0,0 +1,10 @@
+class ProjectNotFound(ValueError):
+ """No derex project could be found."""
+
+
+class DerexSecretError(ValueError):
+ """The main secret provided to derex is not valid or could not be found."""
+
+
+class BuildError(RuntimeError):
+ """An error occurred while building a docker image"""
diff --git a/derex/runner/logging_utils.py b/derex/runner/logging_utils.py
index 0dc9ec7b8..aa306c023 100644
--- a/derex/runner/logging_utils.py
+++ b/derex/runner/logging_utils.py
@@ -6,8 +6,8 @@
import sys
-def setup_logging():
- loglevel = getattr(logging, os.environ.get("DEREX_LOGLEVEL", "WARN"))
+def do_setup_logging():
+ loglevel = getattr(logging, os.environ.get("DEREX_LOGLEVEL", "WARNING"))
for logger in ("urllib3.connectionpool", "compose", "docker"):
logging.getLogger(logger).setLevel(loglevel)
@@ -24,11 +24,11 @@ def setup_logging():
)
-def setup_logging_decorator(func):
+def setup_logging(func):
"""Decorator to run the setup_logging function before the decorated one."""
def inner(*args, **kwargs):
- setup_logging()
+ do_setup_logging()
func(*args, **kwargs)
return inner
diff --git a/derex/runner/mongodb.py b/derex/runner/mongodb.py
index 2513b2b08..aab65adbf 100644
--- a/derex/runner/mongodb.py
+++ b/derex/runner/mongodb.py
@@ -1,9 +1,8 @@
-from derex.runner.constants import MONGODB_ROOT_USER
-from derex.runner.ddc import run_ddc_services
+from derex.runner.ddc import run_ddc
+from derex.runner.docker_utils import check_containers
from derex.runner.docker_utils import client as docker_client
-from derex.runner.docker_utils import wait_for_service
-from derex.runner.secrets import DerexSecrets
-from derex.runner.secrets import get_secret
+from derex.runner.docker_utils import wait_for_container
+from derex.runner.project import Project
from functools import wraps
from pymongo import MongoClient
from typing import cast
@@ -11,24 +10,22 @@
from typing import Optional
import logging
+import time
import urllib.parse
logger = logging.getLogger(__name__)
-MONGODB_ROOT_PASSWORD = get_secret(DerexSecrets.mongodb)
-try:
- wait_for_service("mongodb")
- container = docker_client.containers.get("mongodb")
+
+def get_mongodb_client(project: Project):
+ wait_for_container(project.mongodb_host)
+ container = docker_client.containers.get(project.mongodb_host)
mongo_address = container.attrs["NetworkSettings"]["Networks"]["derex"]["IPAddress"]
- user = urllib.parse.quote_plus(MONGODB_ROOT_USER)
- password = urllib.parse.quote_plus(MONGODB_ROOT_PASSWORD)
- MONGODB_CLIENT = MongoClient(
+ user = urllib.parse.quote_plus(project.mongodb_user)
+ password = urllib.parse.quote_plus(project.mongodb_password)
+ return MongoClient(
f"mongodb://{user}:{password}@{mongo_address}:27017/", authSource="admin"
)
-except RuntimeError as e:
- MONGODB_CLIENT = None
- logger.warning(e)
def ensure_mongodb(func):
@@ -38,85 +35,88 @@ def ensure_mongodb(func):
@wraps(func)
def inner(*args, **kwargs):
- if MONGODB_CLIENT is None:
- raise RuntimeError(
- "MongoDB service not found.\nMaybe you forgot to run\nddc-services up -d"
- )
+ project = Project()
+ wait_for_container(project.mongodb_host, 0)
return func(*args, **kwargs)
return inner
@ensure_mongodb
-def execute_root_shell(command: Optional[str]):
+def execute_root_shell(project: Project, command: Optional[str]):
"""Open a root shell on the MongoDB database. If a command is given
it is executed."""
compose_args = [
"exec",
- "mongodb",
+ project.mongodb_host,
"mongo",
"--authenticationDatabase",
"admin",
"-u",
- MONGODB_ROOT_USER,
- f"-p{MONGODB_ROOT_PASSWORD}",
+ project.mongodb_user,
+ f"-p{project.mongodb_password}",
]
if command:
compose_args.insert(1, "-T")
compose_args.extend(["--eval", command])
- run_ddc_services(compose_args, exit_afterwards=True)
+ run_ddc(compose_args, "services", exit_afterwards=True)
@ensure_mongodb
-def list_databases() -> List[dict]:
+def list_databases(project: Project) -> List[dict]:
"""List all existing databases"""
logger.info("Listing MongoDB databases...")
databases = [
- database for database in cast(MongoClient, MONGODB_CLIENT).list_databases()
+ database
+ for database in cast(MongoClient, get_mongodb_client(project)).list_databases()
]
return databases
@ensure_mongodb
-def list_users() -> List[dict]:
+def list_users(project: Project) -> List[dict]:
"""List all existing users"""
logger.info("Listing MongoDB users...")
- return cast(MongoClient, MONGODB_CLIENT).admin.command("usersInfo").get("users")
+ return (
+ cast(MongoClient, get_mongodb_client(project))
+ .admin.command("usersInfo")
+ .get("users")
+ )
@ensure_mongodb
-def create_user(user: str, password: str, roles: List[str]):
+def create_user(project: Project, user: str, password: str, roles: List[str]):
"""Create a new user"""
logger.info(f'Creating user "{user}"...')
- cast(MongoClient, MONGODB_CLIENT).admin.command(
+ cast(MongoClient, get_mongodb_client(project)).admin.command(
"createUser", user, pwd=password, roles=roles
)
@ensure_mongodb
-def drop_database(database_name: str):
+def drop_database(project: Project, database_name: str):
"""Drop the selected database"""
logger.info(f'Dropping database "{database_name}"...')
- cast(MongoClient, MONGODB_CLIENT).drop_database(database_name)
+ cast(MongoClient, get_mongodb_client(project)).drop_database(database_name)
@ensure_mongodb
-def copy_database(source_db_name: str, destination_db_name: str):
+def copy_database(project: Project, source_db_name: str, destination_db_name: str):
"""Copy an existing database"""
logger.info(f'Copying database "{source_db_name}" to "{destination_db_name}...')
- cast(MongoClient, MONGODB_CLIENT).admin.command(
+ cast(MongoClient, get_mongodb_client(project)).admin.command(
"copydb", fromdb=source_db_name, todb=destination_db_name
)
@ensure_mongodb
-def create_root_user():
+def create_root_user(project: Project):
"""Create the root user"""
- create_user(MONGODB_ROOT_USER, MONGODB_ROOT_PASSWORD, ["root"])
+ create_user(project.mongodb_user, project.mongodb_password, ["root"])
@ensure_mongodb
-def reset_mongodb_password(current_password: str = None):
+def reset_mongodb_password(project: Project, current_password: str = None):
"""Reset the mongodb root user password"""
mongo_command_args = [
"mongo",
@@ -124,14 +124,76 @@ def reset_mongodb_password(current_password: str = None):
"admin",
"admin",
"--eval",
- f'"db.changeUserPassword(\\"{MONGODB_ROOT_USER}\\",'
- f'\\"{MONGODB_ROOT_PASSWORD}\\");"',
+ f'"db.changeUserPassword(\\"{project.mongodb_user}\\",'
+ f'\\"{project.mongodb_password}\\");"',
]
if current_password:
- mongo_command_args.extend(["-u", MONGODB_ROOT_USER, f"-p{current_password}"])
+ mongo_command_args.extend(["-u", project.mongodb_user, f"-p{current_password}"])
mongo_command = " ".join(mongo_command_args)
- compose_args = ["exec", "-T", "mongodb", "bash", "-c", f"{mongo_command}"]
+ compose_args = [
+ "exec",
+ "-T",
+ project.mongodb_host,
+ "bash",
+ "-c",
+ f"{mongo_command}",
+ ]
- run_ddc_services(compose_args, exit_afterwards=True)
+ run_ddc(compose_args, "services", exit_afterwards=True)
+ return 0
+
+
+def run_mongodb_upgrade(
+ project: Project,
+ data_volume: str,
+ upgrade_volume: str,
+ from_version: str,
+ to_version: str,
+):
+ if check_containers([project.mongodb_host]):
+ logger.info(f"Stopping running mongodb service {project.mongodb_host}")
+ run_ddc(["stop", project.mongodb_host], "services")
+
+ version_map = {
+ "mongodb34": "mongo:3.4.24",
+ "mongodb36": "mongo:3.6.23",
+ "mongodb40": "mongo:4.0.26",
+ "mongodb42": "mongo:4.2.15",
+ "mongodb44": "mongo:4.4.8",
+ }
+ logger.info(f'Copying data volume "{data_volume}" to "{upgrade_volume}"')
+ output = docker_client.containers.run(
+ "alpine",
+ 'sh -c "cd /source; cp -av . /destination"',
+ auto_remove=True,
+ volumes={
+ data_volume: {"bind": "/source", "mode": "ro"},
+ upgrade_volume: {"bind": "/destination", "mode": "rw"},
+ },
+ )
+ logger.debug(output)
+ logger.info(
+ f'Running mongodb upgrade for volume "{upgrade_volume}" from version {from_version} to version {to_version}'
+ )
+ container = docker_client.containers.run(
+ version_map[f"mongodb{to_version.replace('.', '')}"],
+ "mongod",
+ auto_remove=True,
+ detach=True,
+ volumes={upgrade_volume: {"bind": "/data/db", "mode": "rw"}},
+ )
+ # We are being lazy here.
+ # We should probably implement a healthcheck in the container and wait for it
+ # to become healthy. Or abort the operation if a timeout is reached.
+ time.sleep(5)
+ try:
+ exit_code, output = container.exec_run(
+ f"mongo --eval 'db.adminCommand({{setFeatureCompatibilityVersion:\"{to_version}\"}})'"
+ )
+ output = output.decode("utf-8")
+ if exit_code or "errmsg" in output:
+ raise Exception(output)
+ finally:
+ container.stop()
return 0
diff --git a/derex/runner/mysql.py b/derex/runner/mysql.py
index 321aba202..b310a6cbc 100644
--- a/derex/runner/mysql.py
+++ b/derex/runner/mysql.py
@@ -1,12 +1,8 @@
-from derex.runner.constants import MYSQL_ROOT_USER
-from derex.runner.ddc import run_ddc_project
-from derex.runner.ddc import run_ddc_services
+from derex.runner import abspath_from_egg
+from derex.runner.ddc import run_ddc
from derex.runner.docker_utils import client as docker_client
-from derex.runner.docker_utils import wait_for_service
+from derex.runner.docker_utils import wait_for_container
from derex.runner.project import Project
-from derex.runner.secrets import DerexSecrets
-from derex.runner.secrets import get_secret
-from derex.runner.utils import abspath_from_egg
from functools import wraps
from typing import cast
from typing import List
@@ -18,7 +14,6 @@
logger = logging.getLogger(__name__)
-MYSQL_ROOT_PASSWORD = get_secret(DerexSecrets.mysql)
def ensure_mysql(func):
@@ -28,28 +23,20 @@ def ensure_mysql(func):
@wraps(func)
def inner(*args, **kwargs):
- wait_for_service("mysql")
+ wait_for_container(Project().mysql_host)
return func(*args, **kwargs)
return inner
-@ensure_mysql
-def get_system_mysql_client() -> pymysql.cursors.Cursor:
- container = docker_client.containers.get("mysql")
- mysql_host = container.attrs["NetworkSettings"]["Networks"]["derex"]["IPAddress"]
- return get_mysql_client(
- host=mysql_host, user=MYSQL_ROOT_USER, password=MYSQL_ROOT_PASSWORD
- )
-
-
@ensure_mysql
def get_project_mysql_client(project: Project) -> pymysql.cursors.Cursor:
+ container = docker_client.containers.get(project.mysql_host)
+ mysql_host_ip = container.attrs["NetworkSettings"]["Networks"]["derex"]["IPAddress"]
return get_mysql_client(
- host=project.mysql_db_host,
- user=project.mysql_db_user,
- password=project.mysql_db_password,
- database=project.mysql_db_name,
+ host=mysql_host_ip,
+ user=project.mysql_user,
+ password=project.mysql_password,
)
@@ -76,11 +63,11 @@ def get_mysql_client(
return connection.cursor()
-def show_databases() -> List[Tuple[str, int, int]]:
+def show_databases(project: Project) -> List[Tuple[str, int, int]]:
"""List all existing databases together with some
useful infos (number of tables, number of Django users).
"""
- client = get_system_mysql_client()
+ client = get_project_mysql_client(project)
try:
databases_tuples = []
client.execute("SHOW DATABASES;")
@@ -105,82 +92,83 @@ def show_databases() -> List[Tuple[str, int, int]]:
return databases_tuples
-def list_users() -> Optional[Tuple[Tuple[str, str, str]]]:
+def list_users(project: Project) -> Optional[Tuple[Tuple[str, str, str]]]:
"""List all mysql users."""
- client = get_system_mysql_client()
+ client = get_project_mysql_client(project)
client.execute("SELECT user, host, password FROM mysql.user;")
users = cast(Tuple[Tuple[str, str, str]], client.fetchall())
return users
-def create_database(database_name: str):
+def create_database(project: Project, database_name: str):
"""Create a database if doesn't exists."""
- client = get_system_mysql_client()
+ client = get_project_mysql_client(project)
logger.info(f'Creating database "{database_name}"...')
client.execute(f"CREATE DATABASE `{database_name}` CHARACTER SET utf8")
logger.info(f'Successfully created database "{database_name}"')
-def create_user(user: str, password: str, host: str):
+def create_user(project: Project, user: str, password: str, host: str):
"""Create a user if doesn't exists."""
- client = get_system_mysql_client()
+ client = get_project_mysql_client(project)
logger.info(f"Creating user '{user}'@'{host}'...")
client.execute(f"CREATE USER '{user}'@'{host}' IDENTIFIED BY '{password}';")
logger.info(f"Successfully created user '{user}'@'{host}'")
-def drop_database(database_name: str):
+def drop_database(project: Project, database_name: str):
"""Drops the selected database."""
- client = get_system_mysql_client()
+ client = get_project_mysql_client(project)
logger.info(f'Dropping database "{database_name}"...')
client.execute(f"DROP DATABASE IF EXISTS `{database_name}`;")
logger.info(f'Successfully dropped database "{database_name}"')
-def drop_user(user: str, host: str):
+def drop_user(project: Project, user: str, host: str):
"""Drops the selected user."""
- client = get_system_mysql_client()
+ client = get_project_mysql_client(project)
logger.info(f"Dropping user '{user}'@'{host}'...")
client.execute(f"DROP USER '{user}'@'{host}';")
logger.info(f"Successfully dropped user '{user}'@'{host}'")
@ensure_mysql
-def execute_root_shell(command: Optional[str]):
+def execute_root_shell(project: Project, command: Optional[str]):
"""Open a root shell on the mysql database. If a command is given
it is executed."""
compose_args = [
"exec",
- "mysql",
+ project.mysql_host,
"mysql",
"-u",
- MYSQL_ROOT_USER,
- f"-p{MYSQL_ROOT_PASSWORD}",
+ project.mysql_user,
+ f"-p{project.mysql_password}",
]
if command:
compose_args.insert(1, "-T")
compose_args.extend(["-e", command])
- run_ddc_services(compose_args, exit_afterwards=True)
+ run_ddc(compose_args, "services", exit_afterwards=True)
@ensure_mysql
-def copy_database(source_db_name: str, destination_db_name: str):
+def copy_database(project: Project, source_db_name: str, destination_db_name: str):
"""Copy an existing MySQL database. This actually involves exporting and importing back
the database with a different name."""
- create_database(destination_db_name)
+ create_database(project, destination_db_name)
logger.info(f"Copying database {source_db_name} to {destination_db_name}")
- run_ddc_services(
+ run_ddc(
[
"exec",
"-T",
- "mysql",
+ project.mysql_host,
"sh",
"-c",
f"""set -ex
- mysqldump -u root -p{MYSQL_ROOT_PASSWORD} {source_db_name} --no-create-db |
- mysql --user=root -p{MYSQL_ROOT_PASSWORD} {destination_db_name}
+ mysqldump -u root -p{project.mysql_password} {source_db_name} --no-create-db |
+ mysql --user=root -p{project.mysql_password} {destination_db_name}
""",
- ]
+ ],
+ "services",
)
logger.info(
f"Successfully copied database {source_db_name} to {destination_db_name}"
@@ -196,7 +184,7 @@ def reset_mysql_openedx(project: Project, dry_run: bool = False):
assert (
restore_dump_path
), "Could not find restore_dump.py in derex.runner distribution"
- run_ddc_project(
+ run_ddc(
[
"run",
"--rm",
@@ -206,29 +194,31 @@ def reset_mysql_openedx(project: Project, dry_run: bool = False):
"python",
"/restore_dump.py",
],
- project=project,
+ "project",
+ project,
dry_run=dry_run,
)
@ensure_mysql
-def reset_mysql_password(current_password: str):
+def reset_mysql_password(project: Project, current_password: str):
"""Reset the mysql root user password."""
- logger.info(f'Resetting password for mysql user "{MYSQL_ROOT_USER}"')
+ logger.info(f'Resetting password for mysql user "{project.mysql_user}"')
- run_ddc_services(
+ run_ddc(
[
"exec",
- "mysql",
+ project.mysql_host,
"mysql",
"-u",
- MYSQL_ROOT_USER,
+ project.mysql_user,
f"-p{current_password}",
"-e",
- f"""SET PASSWORD FOR '{MYSQL_ROOT_USER}'@'localhost' = PASSWORD('{MYSQL_ROOT_PASSWORD}');
- SET PASSWORD FOR '{MYSQL_ROOT_USER}'@'%' = PASSWORD('{MYSQL_ROOT_PASSWORD}');
- GRANT ALL PRIVILEGES ON *.* TO '{MYSQL_ROOT_USER}'@'%' WITH GRANT OPTION;
+ f"""SET PASSWORD FOR '{project.mysql_user}'@'localhost' = PASSWORD('{project.mysql_password}');
+ SET PASSWORD FOR '{project.mysql_user}'@'%' = PASSWORD('{project.mysql_password}');
+ GRANT ALL PRIVILEGES ON *.* TO '{project.mysql_user}'@'%' WITH GRANT OPTION;
FLUSH PRIVILEGES;""",
],
+ "services",
exit_afterwards=True,
)
diff --git a/derex/runner/plugin_spec.py b/derex/runner/plugin_spec.py
index d2155cf08..29e316293 100644
--- a/derex/runner/plugin_spec.py
+++ b/derex/runner/plugin_spec.py
@@ -10,7 +10,9 @@
@hookspec
-def ddc_services_options() -> Dict[str, Union[str, float, int, List[str]]]:
+def ddc_services_options(
+ project: Project,
+) -> Dict[str, Union[str, float, int, List[str]]]:
"""Return a dict describing how to add this plugin.
The dict `name` and `priority` keys will be used to determine ordering.
The `options` key contains a list of strings pointing to docker-compose yml files
diff --git a/derex/runner/plugins.py b/derex/runner/plugins.py
index d844959d0..569c83eb4 100644
--- a/derex/runner/plugins.py
+++ b/derex/runner/plugins.py
@@ -20,6 +20,7 @@ def setup_plugin_manager():
plugin_manager.register(compose_generation.BaseProject)
plugin_manager.register(compose_generation.LocalServices)
plugin_manager.register(compose_generation.LocalProject)
+ plugin_manager.register(compose_generation.LocalProjectEnvironment)
plugin_manager.register(compose_generation.LocalProjectRunmode)
return plugin_manager
diff --git a/derex/runner/project.py b/derex/runner/project.py
index b49cd09e7..cf08cafe3 100644
--- a/derex/runner/project.py
+++ b/derex/runner/project.py
@@ -1,13 +1,27 @@
+from base64 import b64encode
from derex.runner import __version__
from derex.runner.constants import CONF_FILENAME
from derex.runner.constants import DEREX_DJANGO_SETTINGS_PATH
+from derex.runner.constants import DEREX_ETC_PATH
+from derex.runner.constants import DEREX_MAIN_SECRET_DEFAULT_MAX_SIZE
+from derex.runner.constants import DEREX_MAIN_SECRET_DEFAULT_MIN_ENTROPY
+from derex.runner.constants import DEREX_MAIN_SECRET_DEFAULT_MIN_SIZE
from derex.runner.constants import DEREX_OPENEDX_CUSTOMIZATIONS_PATH
+from derex.runner.constants import DerexSecrets
+from derex.runner.constants import MINIO_ROOT_USER
from derex.runner.constants import MONGODB_ROOT_USER
from derex.runner.constants import MYSQL_ROOT_USER
+from derex.runner.constants import OpenEdXVersions
+from derex.runner.constants import ProjectEnvironment
+from derex.runner.constants import ProjectRunMode
from derex.runner.constants import SECRETS_CONF_FILENAME
-from derex.runner.secrets import DerexSecrets
-from derex.runner.secrets import get_secret
+from derex.runner.exceptions import DerexSecretError
+from derex.runner.secrets import compute_entropy
+from derex.runner.secrets import get_derex_secrets_env
+from derex.runner.secrets import scrypt_hash
+from derex.runner.utils import find_project_root
from derex.runner.utils import get_dir_hash
+from derex.runner.utils import get_requirements_hash
from enum import Enum
from logging import getLogger
from pathlib import Path
@@ -16,7 +30,6 @@
from typing import Union
import difflib
-import hashlib
import json
import os
import re
@@ -28,59 +41,7 @@
DEREX_RUNNER_PROJECT_DIR = ".derex"
-class OpenEdXVersions(Enum):
- # Values will be passed as uppercased named arguments to the docker build
- # e.g. --build-arg EDX_PLATFORM_RELEASE=koa
- ironwood = {
- "edx_platform_repository": "https://github.com/edx/edx-platform.git",
- "edx_platform_version": "open-release/ironwood.master",
- "edx_platform_release": "ironwood",
- "docker_image_prefix": "derex/openedx-ironwood",
- "alpine_version": "alpine3.11",
- "python_version": "2.7",
- "pip_version": "20.3.4",
- # The latest node release does not work on ironwood
- # (node-sass version fails to compile)
- "node_version": "v10.22.1",
- "mysql_image": "mysql:5.6.36",
- "mongodb_image": "mongo:3.2.21",
- }
- juniper = {
- "edx_platform_repository": "https://github.com/edx/edx-platform.git",
- "edx_platform_version": "open-release/juniper.master",
- "edx_platform_release": "juniper",
- "docker_image_prefix": "derex/openedx-juniper",
- "alpine_version": "alpine3.11",
- "python_version": "3.6",
- "pip_version": "21.0.1",
- "node_version": "v12.19.0",
- "mysql_image": "mysql:5.6.36",
- "mongodb_image": "mongo:3.6.23",
- }
- koa = {
- "edx_platform_repository": "https://github.com/edx/edx-platform.git",
- # We set koa.3 since as today (20 may 2021) koa.master codebase is broken
- "edx_platform_version": "open-release/koa.3",
- "edx_platform_release": "koa",
- "docker_image_prefix": "derex/openedx-koa",
- # We are stuck on alpine3.12 since SciPy won't build
- # on gcc>=10 due to fortran incompatibility issues.
- # See more at https://gcc.gnu.org/gcc-10/porting_to.html
- "alpine_version": "alpine3.12",
- "python_version": "3.8",
- "pip_version": "21.0.1",
- "node_version": "v12.19.0",
- "mysql_image": "mysql:5.7.34",
- "mongodb_image": "mongo:3.6.23",
- }
-
-
-class ProjectRunMode(Enum):
- debug = "debug" # The first is the default
- production = "production"
-
-
-class Project:
+class BaseProject:
"""Represents a derex.runner project, i.e. a directory with a
`derex.config.yaml` file and optionally a "themes", "settings" and
"requirements" directory.
@@ -127,6 +88,18 @@ class Project:
# The directory containing cypress tests
e2e_dir: Optional[Path] = None
+ # The host directory where to lookup for global host configurations
+ derex_etc_path: Optional[Path] = None
+
+ # Whether the host Caddy server should be enabled
+ enable_host_caddy: bool
+
+ # The directory containing the project Caddy configuration files
+ project_caddy_dir: Optional[Path] = None
+
+ # The directory containing the host Caddy configuration files
+ host_caddy_dir: Optional[Path] = None
+
# The image name of the image that includes requirements
requirements_image_name: str
@@ -156,12 +129,104 @@ class Project:
_available_settings = None
@property
- def mysql_db_name(self) -> str:
- return self.config.get("mysql_db_name", f"{self.name}_openedx")
+ def etc_path(self) -> Path:
+ return Path(os.getenv("DEREX_ETC_PATH", DEREX_ETC_PATH))
@property
- def mysql_user(self) -> str:
- return self.config.get("mysql_user", MYSQL_ROOT_USER)
+ def main_secret_path(self) -> Path:
+ return self.config.get("main_secret_path", self.root / "main_secret")
+
+ @property
+ def main_secret_max_size(self) -> int:
+ return self.config.get(
+ "main_secret_max_size", DEREX_MAIN_SECRET_DEFAULT_MAX_SIZE
+ )
+
+ @property
+ def main_secret_min_size(self) -> int:
+ return self.config.get(
+ "main_secret_min_size", DEREX_MAIN_SECRET_DEFAULT_MIN_SIZE
+ )
+
+ @property
+ def main_secret_min_entropy(self) -> int:
+ return self.config.get(
+ "main_secret_min_entropy", DEREX_MAIN_SECRET_DEFAULT_MIN_ENTROPY
+ )
+
+ @property
+ def main_secret(self) -> str:
+ """Derex uses a main secret to derive all other secrets.
+ This functions finds the main secret for the current project,
+ and if it can't find it, it will return a default one.
+ """
+ return self.get_main_secret(self.environment) or "Default secret"
+
+ def has_main_secret(self, environment: ProjectEnvironment) -> bool:
+ """Return whether a main secret exists for a given environment"""
+ return bool(self.get_main_secret(environment))
+
+ def get_main_secret(self, environment: ProjectEnvironment) -> Optional[str]:
+ """In a development environment the main secret is shared among projects.
+ Its location can be customized through the environment variable `DEREX_MAIN_SECRET_PATH`.
+
+ In a staging or production environment the main secret is tied to a project.
+ The default location is in the project root, but can be customized
+ via the project configuration `main_secret_path`.
+ """
+
+ if environment == ProjectEnvironment.development:
+ # Get configurations from environment
+ filepath = get_derex_secrets_env("path", Path)
+ max_size = get_derex_secrets_env("max_size", int)
+ min_size = get_derex_secrets_env("min_size", int)
+ min_entropy = get_derex_secrets_env("min_entropy", int)
+ else:
+ # Get configurations from project
+ filepath = self.main_secret_path
+ max_size = self.main_secret_max_size
+ min_size = self.main_secret_min_size
+ min_entropy = self.main_secret_min_entropy
+
+ if os.access(filepath, os.R_OK):
+ main_secret = filepath.read_text().strip()
+ if len(main_secret) > max_size:
+ raise DerexSecretError(
+ f"Main secret in {filepath} is too large: {len(main_secret)} (should be {max_size} at most)"
+ )
+ if len(main_secret) < min_size:
+ raise DerexSecretError(
+ f"Main secret in {filepath} is too small: {len(main_secret)} (should be {min_size} at least)"
+ )
+ if compute_entropy(main_secret) < min_entropy:
+ raise DerexSecretError(
+ f"Main secret in {filepath} has not enough entropy: {compute_entropy(main_secret)} (should be {min_entropy} at least)"
+ )
+ return main_secret
+
+ if filepath.exists():
+ logger.error(
+ f"File {filepath} is not readable; using default master secret"
+ )
+ return None
+
+ @property
+ def elasticsearch_host(self) -> str:
+ elasticsearch_version = self.openedx_version.value["elasticsearch_image"].split(
+ ":"
+ )[1]
+ elasticsearch_major_version = elasticsearch_version.split(".")[0]
+ elasticsearch_minor_version = elasticsearch_version.split(".")[1]
+ return (
+ f"elasticsearch{elasticsearch_major_version}{elasticsearch_minor_version}"
+ )
+
+ @property
+ def mongodb_host(self) -> str:
+ mongo_version = self.openedx_version.value["mongodb_image"].split(":")[1]
+ mongo_major_version = mongo_version.split(".")[0]
+ mongo_minor_version = mongo_version.split(".")[1]
+ return f"mongodb{mongo_major_version}{mongo_minor_version}"
@property
def mongodb_db_name(self) -> str:
@@ -171,6 +236,162 @@ def mongodb_db_name(self) -> str:
def mongodb_user(self) -> str:
return self.config.get("mongodb_user", MONGODB_ROOT_USER)
+ @property
+ def mongodb_password(self) -> str:
+ return self.config.get(
+ "mongodb_password", self.get_secret(DerexSecrets.mongodb)
+ )
+
+ @property
+ def minio_user(self) -> str:
+ return self.config.get("minio_user", MINIO_ROOT_USER)
+
+ @property
+ def minio_password(self) -> str:
+ return self.config.get("minio_password", self.get_secret(DerexSecrets.minio))
+
+ @property
+ def minio_bucket(self) -> str:
+ return self.config.get("minio_bucket", self.name)
+
+ @property
+ def lms_hostname(self) -> str:
+ return self.config.get("lms_hostname", f"{self.name}.localhost")
+
+ @property
+ def preview_hostname(self) -> str:
+ return self.config.get("preview_hostname", f"preview.{self.lms_hostname}")
+
+ @property
+ def cms_hostname(self) -> str:
+ return self.config.get("cms_hostname", f"studio.{self.lms_hostname}")
+
+ @property
+ def flower_hostname(self) -> str:
+ return self.config.get("flower_hostname", f"flower.{self.lms_hostname}")
+
+ @property
+ def minio_hostname(self) -> str:
+ return self.config.get("minio_hostname", f"minio.{self.lms_hostname}")
+
+ @property
+ def mongodb_docker_volume(self) -> str:
+ if self.environment is ProjectEnvironment.development:
+ mongodb_docker_volume = f"derex_{self.mongodb_host}"
+ else:
+ mongodb_docker_volume = (
+ f"{self.name}_{self.environment.name}_{self.mongodb_host}"
+ )
+ return self.config.get("mongodb_docker_volume", mongodb_docker_volume)
+
+ @property
+ def elasticsearch_docker_volume(self) -> str:
+ if self.environment is ProjectEnvironment.development:
+ elasticsearch_docker_volume = f"derex_{self.elasticsearch_host}"
+ else:
+ elasticsearch_docker_volume = (
+ f"{self.name}_{self.environment.name}_{self.elasticsearch_host}"
+ )
+ return self.config.get(
+ "elasticsearch_docker_volume", elasticsearch_docker_volume
+ )
+
+ @property
+ def mysql_docker_volume(self) -> str:
+ if self.environment is ProjectEnvironment.development:
+ mysql_docker_volume = f"derex_{self.mysql_host}"
+ else:
+ mysql_docker_volume = (
+ f"{self.name}_{self.environment.name}_{self.mysql_host}"
+ )
+ return self.config.get("mysql_docker_volume", mysql_docker_volume)
+
+ @property
+ def rabbitmq_docker_volume(self) -> str:
+ if self.environment is ProjectEnvironment.development:
+ rabbitmq_docker_volume = "derex_rabbitmq"
+ else:
+ rabbitmq_docker_volume = f"{self.name}_{self.environment.name}_rabbitmq"
+ return self.config.get("rabbitmq_docker_volume", rabbitmq_docker_volume)
+
+ @property
+ def minio_docker_volume(self) -> str:
+ if self.environment is ProjectEnvironment.development:
+ minio_docker_volume = "derex_minio"
+ else:
+ minio_docker_volume = f"{self.name}_{self.environment.name}_minio"
+ return self.config.get("minio_docker_volume", minio_docker_volume)
+
+ @property
+ def openedx_data_docker_volume(self) -> str:
+ if self.environment is ProjectEnvironment.development:
+ openedx_data_docker_volume = f"derex_{self.name}_openedx_data"
+ else:
+ openedx_data_docker_volume = (
+ f"{self.name}_{self.environment.name}_openedx_data"
+ )
+ return self.config.get("openedx_data_docker_volume", openedx_data_docker_volume)
+
+ @property
+ def openedx_media_docker_volume(self) -> str:
+ if self.environment is ProjectEnvironment.development:
+ openedx_media_docker_volume = f"derex_{self.name}_openedx_media"
+ else:
+ openedx_media_docker_volume = (
+ f"{self.name}_{self.environment.name}_openedx_media"
+ )
+ return self.config.get(
+ "openedx_media_docker_volume", openedx_media_docker_volume
+ )
+
+ @property
+ def docker_volumes(self):
+ return {
+ self.mongodb_docker_volume,
+ self.elasticsearch_docker_volume,
+ self.mysql_docker_volume,
+ self.rabbitmq_docker_volume,
+ self.minio_docker_volume,
+ self.openedx_media_docker_volume,
+ self.openedx_data_docker_volume,
+ }
+
+ @property
+ def environment(self) -> ProjectEnvironment:
+ """The environment of this project, either development, staging or production.
+ In a development environment secret and services (like the HTTP server, databases,
+ search backends and message brokers) are shared among projects.
+ In a production environment instead services are bound to a specific project.
+
+ The environment is also used to give a name to project containers and volumes.
+ """
+ name = "environment"
+ mode_str = self._get_status(name)
+ if mode_str is not None:
+ if mode_str in ProjectEnvironment.__members__:
+ return ProjectEnvironment[mode_str]
+ # We found a string but we don't recognize it: warn the user
+ logger.warning(
+ f"Value `{mode_str}` found in `{self.private_filepath(name)}` "
+ "is not valid as environment "
+ f"(valid values are {[environment for environment in ProjectEnvironment.__members__]})"
+ )
+ default = self.config.get(f"default_{name}")
+ if default:
+ if default not in ProjectEnvironment.__members__:
+ logger.warning(
+ f"Value `{default}` found in config `{self.root / CONF_FILENAME}` "
+ "is not a valid default for environment "
+ f"(valid values are {[environment for environment in ProjectEnvironment.__members__]})"
+ )
+ else:
+ return ProjectEnvironment[default]
+ return next(iter(ProjectEnvironment)) # Return the first by default
+
+ @environment.setter
+ def environment(self, value: ProjectEnvironment):
+ self._set_status("environment", value.name)
+
@property
def runmode(self) -> ProjectRunMode:
"""The run mode of this project, either debug or production.
@@ -187,7 +408,7 @@ def runmode(self) -> ProjectRunMode:
logger.warning(
f"Value `{mode_str}` found in `{self.private_filepath(name)}` "
"is not valid as runmode "
- "(valid values are `debug` and `production`)"
+ f"(valid values are {[runmode for runmode in ProjectRunMode.__members__]})"
)
default = self.config.get(f"default_{name}")
if default:
@@ -195,7 +416,7 @@ def runmode(self) -> ProjectRunMode:
logger.warning(
f"Value `{default}` found in config `{self.root / CONF_FILENAME}` "
"is not a valid default for runmode "
- "(valid values are `debug` and `production`)"
+ f"(valid values are {[runmode for runmode in ProjectRunMode.__members__]})"
)
else:
return ProjectRunMode[default]
@@ -352,12 +573,28 @@ def _load(self, path: Union[Path, str] = None):
if e2e_dir.is_dir():
self.e2e_dir = e2e_dir
+ project_caddy_dir = self.root / "caddy" / "internal_caddy"
+ if self.config.get("project_caddy_dir"):
+ self.project_caddy_dir = self.config.get("project_caddy_dir")
+ elif project_caddy_dir.is_dir():
+ self.project_caddy_dir = project_caddy_dir
+ else:
+ self.project_caddy_dir = self.private_filepath("caddy")
+
+ host_caddy_dir = self.etc_path / "caddy"
+ if host_caddy_dir.is_dir():
+ self.host_caddy_dir = host_caddy_dir
+
+ self.enable_host_caddy = self.config.get("enable_host_caddy", True)
+
self.image_name = self.themes_image_name
self.materialize_derex_settings = self.config.get(
"materialize_derex_settings", True
)
- def update_default_settings(self, default_settings_dir, destination_settings_dir):
+ def update_default_settings(
+ self, default_settings_dir: Path, destination_settings_dir: Path
+ ):
"""Update default settings in a specified directory.
Given a directory where to look for default settings modules, recursively
copy or update them into the destination directory.
@@ -466,8 +703,16 @@ def get_container_env(self):
result[f"DEREX_{variable.upper()}"] = value
return result
+ def get_secret(self, secret: DerexSecrets) -> str:
+ """Derive a secret using the master secret and the provided name."""
+ binary_secret = scrypt_hash(self.main_secret, secret.name)
+ # Pad the binary string so that its length is a multiple of 3
+ # This will make sure its base64 representation is equals-free
+ new_length = len(binary_secret) + (3 - len(binary_secret) % 3)
+ return b64encode(binary_secret.rjust(new_length, b" ")).decode()
+
def secret(self, name: str) -> str:
- return get_secret(DerexSecrets[name])
+ return self.get_secret(DerexSecrets[name])
def get_openedx_customizations(self) -> dict:
"""Return a mapping of customized files to be mounted in
@@ -489,33 +734,6 @@ def get_openedx_customizations(self) -> dict:
return openedx_customizations
-def get_requirements_hash(path: Path) -> str:
- """Given a directory, return a hash of the contents of the text files it contains."""
- hasher = hashlib.sha256()
- logger.debug(
- f"Calculating hash for requirements dir {path}; initial (empty) hash is {hasher.hexdigest()}"
- )
- for file in sorted(path.iterdir()):
- if file.is_file():
- hasher.update(file.read_bytes())
- logger.debug(f"Examined contents of {file}; hash so far: {hasher.hexdigest()}")
- return hasher.hexdigest()
-
-
-def find_project_root(path: Path) -> Path:
- """Find the project directory walking up the filesystem starting on the
- given path until a configuration file is found.
- """
- current = path
- while current != current.parent:
- if (current / CONF_FILENAME).is_file():
- return current
- current = current.parent
- raise ProjectNotFound(
- f"No directory found with a {CONF_FILENAME} file in it, starting from {path}"
- )
-
-
class DebugBaseImageProject(Project):
"""A project that is always in debug mode and always uses the base image,
irregardless of the presence of requirements.
@@ -532,5 +750,106 @@ def requirements_image_name(self, value):
pass
-class ProjectNotFound(ValueError):
- """No derex project could be found."""
+class MysqlProject(BaseProject):
+ @property
+ def mysql_host(self) -> str:
+ mysql_version = self.openedx_version.value["mysql_image"].split(":")[1]
+ mysql_major_version = mysql_version.split(".")[0]
+ mysql_minor_version = mysql_version.split(".")[1]
+ return f"mysql{mysql_major_version}{mysql_minor_version}"
+
+ @property
+ def mysql_db_name(self) -> str:
+ return self.config.get("mysql_db_name", f"{self.name}_openedx")
+
+ @property
+ def mysql_user(self) -> str:
+ return self.config.get("mysql_user", MYSQL_ROOT_USER)
+
+ @property
+ def mysql_password(self) -> str:
+ return self.config.get("mysql_password", self.get_secret(DerexSecrets.mysql))
+
+ @property
+    def volumes(self) -> list:
+        volumes = super().volumes
+
+ if self.environment is ProjectEnvironment.development:
+ mysql_docker_volume = f"derex_{self.mysql_host}"
+ else:
+ mysql_docker_volume = (
+ f"{self.name}_{self.environment.name}_{self.mysql_host}"
+ )
+ mysql_docker_volume = self.config.get(
+ "mysql_docker_volume", mysql_docker_volume
+ )
+
+ volumes.append(mysql_docker_volume)
+ return volumes
+
+
+class MysqlProjectPlugin:
+ project = None
+
+ def load(self, project):
+ print(f"Loading Mysql on project {project.name}!")
+ self.project = project
+ project.mysql_user = self.mysql_user(project)
+ return
+
+ def mysql_user(self, project) -> str:
+ return project.config.get("mysql_user", "mysql")
+
+ @property
+    def volumes(self):
+        # Attribute lookups must go through the loaded project, not the plugin
+        project = self.project
+        if project.environment is ProjectEnvironment.development:
+            mysql_docker_volume = f"derex_{project.mysql_host}"
+        else:
+            mysql_docker_volume = (
+                f"{project.name}_{project.environment.name}_{project.mysql_host}"
+            )
+        return [
+            project.config.get("mysql_docker_volume", mysql_docker_volume)
+        ]
+
+
+class MongodbProjectPlugin:
+ project = None
+
+ def load(self, project):
+ print(f"Loading MongoDB on project {project.name}!")
+ self.project = project
+ project.mongodb_user = self.mongodb_user(project)
+
+ return project
+
+ def mongodb_user(self, project) -> str:
+ return project.config.get("mongodb_user", "default_mongodb")
+
+ @property
+ def volumes(self):
+ return ["mongodb"]
+
+
+class Project(
+    MysqlProject,
+    MongodbProject,
+    ElasticsearchProject,
+    RabbitmqProject,
+    MinioProject,
+    OpenedxProject,
+    BaseProject,
+):
+ @property
+ def docker_volumes(self):
+ return {
+ self.mongodb_docker_volume,
+ self.elasticsearch_docker_volume,
+ self.mysql_docker_volume,
+ self.rabbitmq_docker_volume,
+ self.minio_docker_volume,
+ self.openedx_media_docker_volume,
+ self.openedx_data_docker_volume,
+ }
diff --git a/derex/runner/project_plugins.py b/derex/runner/project_plugins.py
new file mode 100644
index 000000000..3309bac87
--- /dev/null
+++ b/derex/runner/project_plugins.py
@@ -0,0 +1,87 @@
+class BaseProject:
+ """
+ The BaseProject defines some common attributes for a project
+    and methods to collect information from loaded project services.
+
+ This will allow Derex Plugins to define additional services and
+ specify their requirements, like:
+
+ * start command
+ * mounted volumes
+ * additional data volumes
+ * service specific info (host, credentials)
+
+    All of this information may be overridden from the project derex.config.yaml
+ file.
+ """
+
+ config: dict = {
+ "mysql_user": "test_mysql_user",
+ }
+ name: str = "BaseProject"
+ services: list = []
+ data_volumes: list = []
+    modules: list = []
+
+ @property
+ def volumes(self):
+ volumes = []
+ for module in self.modules:
+ volumes.extend(module.volumes)
+ return volumes
+
+
+class BaseService:
+    project = None  # set by load(); no Project import available in this module
+ name: str = None
+
+    def load(self, project):
+        """Attach this service to *project* and expose service attributes.
+
+        Concrete services must override this method.
+        """
+        raise NotImplementedError
+
+
+class MysqlService(BaseService):
+ def load(self, project):
+ print(f"Loading Mysql info on project {project.name}!")
+ self.project = project
+ project.mysql_user = self.mysql_user(project)
+ return
+
+ def mysql_user(self, project) -> str:
+ return project.config.get("mysql_user", "mysql")
+
+ @property
+    def volumes(self):
+        # Attribute lookups must go through the loaded project, not the service
+        project = self.project
+        if project.environment is ProjectEnvironment.development:
+            mysql_docker_volume = f"derex_{project.mysql_host}"
+        else:
+            mysql_docker_volume = (
+                f"{project.name}_{project.environment.name}_{project.mysql_host}"
+            )
+        return [
+            project.config.get("mysql_docker_volume", mysql_docker_volume)
+        ]
+
+
+class MongodbService(BaseService):
+ project = None
+
+ def load(self, project):
+ print(f"Loading MongoDB on project {project.name}!")
+ self.project = project
+
+ project.mongodb_user = self.mongodb_user(project)
+
+ return project
+
+ def mongodb_user(self, project) -> str:
+ return project.config.get("mongodb_user", "default_mongodb")
+
+ @property
+ def volumes(self):
+ return ["mongodb"]
diff --git a/derex/runner/secrets.py b/derex/runner/secrets.py
index a6d9835d7..f13a82915 100644
--- a/derex/runner/secrets.py
+++ b/derex/runner/secrets.py
@@ -1,26 +1,17 @@
"""Tools to deal with secrets in derex.
"""
-from base64 import b64encode
+
from collections import Counter
-from enum import Enum
-from pathlib import Path
+from derex.runner.constants import DEREX_MAIN_SECRET_DEFAULT_MAX_SIZE # noqa
+from derex.runner.constants import DEREX_MAIN_SECRET_DEFAULT_MIN_ENTROPY # noqa
+from derex.runner.constants import DEREX_MAIN_SECRET_DEFAULT_MIN_SIZE # noqa
+from derex.runner.constants import DEREX_MAIN_SECRET_DEFAULT_PATH # noqa
from typing import Any
-from typing import Optional
-import logging
import math
import os
-logger = logging.getLogger(__name__)
-
-
-DEREX_MAIN_SECRET_MAX_SIZE = 1024
-DEREX_MAIN_SECRET_MIN_SIZE = 8
-DEREX_MAIN_SECRET_MIN_ENTROPY = 128
-DEREX_MAIN_SECRET_PATH = "/etc/derex/main_secret"
-
-
def scrypt_hash_stdlib(main_secret: str, name: str) -> bytes:
from hashlib import scrypt
@@ -50,62 +41,10 @@ def scrypt_hash_addon(main_secret: str, name: str) -> bytes:
scrypt_hash = scrypt_hash_addon
-class DerexSecrets(Enum):
- minio = "minio"
- mysql = "mysql"
- mongodb = "mongodb"
-
-
-def get_var(name: str, vartype: type) -> Any:
+def get_derex_secrets_env(name: str, vartype: type) -> Any:
varname = f"DEREX_MAIN_SECRET_{name.upper()}"
- return vartype(os.environ.get(varname, globals()[varname]))
-
-
-def _get_master_secret() -> Optional[str]:
- """Derex uses a master secret to derive all other secrets.
- This functions finds the master secret on the current machine,
- and if it can't find it it will return a default one.
-
- The default location is `/etc/derex/main_secret`, but can be customized
- via the environment variable DEREX_MAIN_SECRET_PATH.
- """
- filepath = get_var("path", Path)
- max_size = get_var("max_size", int)
- min_size = get_var("min_size", int)
- min_entropy = get_var("min_entropy", int)
-
- if os.access(filepath, os.R_OK):
- master_secret = filepath.read_text().strip()
- if len(master_secret) > max_size:
- raise DerexSecretError(
- f"Master secret in {filepath} is too large: {len(master_secret)} (should be {max_size} at most)"
- )
- if len(master_secret) < min_size:
- raise DerexSecretError(
- f"Master secret in {filepath} is too small: {len(master_secret)} (should be {min_size} at least)"
- )
- if compute_entropy(master_secret) < min_entropy:
- raise DerexSecretError(
- f"Master secret in {filepath} has not enough entropy: {compute_entropy(master_secret)} (should be {min_entropy} at least)"
- )
- return master_secret
-
- if filepath.exists():
- logger.error(f"File {filepath} is not readable; using default master secret")
- return None
-
-
-def get_secret(secret: DerexSecrets) -> str:
- """Derive a secret using the master secret and the provided name."""
- binary_secret = scrypt_hash(MASTER_SECRET, secret.name)
- # Pad the binary string so that its length is a multiple of 3
- # This will make sure its base64 representation is equals-free
- new_length = len(binary_secret) + (3 - len(binary_secret) % 3)
- return b64encode(binary_secret.rjust(new_length, b" ")).decode()
-
-
-class DerexSecretError(ValueError):
- """The master secret provided to derex is not valid or could not be found."""
+ default_varname = f"DEREX_MAIN_SECRET_DEFAULT_{name.upper()}"
+ return vartype(os.environ.get(varname, globals()[default_varname]))
def compute_entropy(s: str) -> float:
@@ -119,20 +58,6 @@ def compute_entropy(s: str) -> float:
return per_char_entropy * len(s)
-_MASTER_SECRET = _get_master_secret()
-if _MASTER_SECRET is None:
- _MASTER_SECRET = "Default secret"
- HAS_MASTER_SECRET = False
-else:
- HAS_MASTER_SECRET = True
-
-MASTER_SECRET = _MASTER_SECRET
-"The main secret derex uses to derive all other secrets"
-
__all__ = [
- "MASTER_SECRET",
"compute_entropy",
- "DerexSecretError",
- "DerexSecrets",
- "get_secret",
]
diff --git a/derex/runner/utils.py b/derex/runner/utils.py
index e39abbcea..0e5483420 100644
--- a/derex/runner/utils.py
+++ b/derex/runner/utils.py
@@ -1,18 +1,24 @@
-from functools import partial
+from derex.runner.constants import CONF_FILENAME
+from derex.runner.exceptions import ProjectNotFound
+from jinja2 import Environment
+from jinja2 import FileSystemLoader
+from logging import getLogger
from pathlib import Path
from rich.console import Console
from rich.table import Table
from typing import Any
from typing import List
-from typing import Optional
from typing import Union
import hashlib
-import importlib_metadata
import os
import re
+logger = getLogger(__name__)
+truthy = frozenset(("t", "true", "y", "yes", "on", "1"))
+
+
def get_dir_hash(
dirname: Union[Path, str],
excluded_files: List = [],
@@ -60,9 +66,6 @@ def get_dir_hash(
return hasher.hexdigest()
-truthy = frozenset(("t", "true", "y", "yes", "on", "1"))
-
-
def asbool(s: Any) -> bool:
"""Return the boolean value ``True`` if the case-lowered value of string
input ``s`` is a `truthy string`. If ``s`` is already one of the
@@ -77,19 +80,6 @@ def asbool(s: Any) -> bool:
return s.lower() in truthy
-def abspath_from_egg(egg: str, path: str) -> Optional[Path]:
- """Given a path relative to the egg root, find the absolute
- filesystem path for that resource.
- For instance this file's absolute path can be found passing
- derex/runner/utils.py
- to this function.
- """
- for file in importlib_metadata.files(egg):
- if str(file) == path:
- return file.locate()
- return None
-
-
def get_rich_console(*args, **kwargs):
return Console(*args, **kwargs)
@@ -98,4 +88,40 @@ def get_rich_table(*args, **kwargs):
return Table(*args, show_header=True, **kwargs)
-derex_path = partial(abspath_from_egg, "derex.runner")
+def get_requirements_hash(path: Path) -> str:
+ """Given a directory, return a hash of the contents of the text files it contains."""
+ hasher = hashlib.sha256()
+ logger.debug(
+ f"Calculating hash for requirements dir {path}; initial (empty) hash is {hasher.hexdigest()}"
+ )
+ for file in sorted(path.iterdir()):
+ if file.is_file():
+ hasher.update(file.read_bytes())
+ logger.debug(f"Examined contents of {file}; hash so far: {hasher.hexdigest()}")
+ return hasher.hexdigest()
+
+
+def find_project_root(path: Path) -> Path:
+ """Find the project directory walking up the filesystem starting on the
+ given path until a configuration file is found.
+ """
+ current = path
+ while current != current.parent:
+ if (current / CONF_FILENAME).is_file():
+ return current
+ current = current.parent
+ raise ProjectNotFound(
+ f"No directory found with a {CONF_FILENAME} file in it, starting from {path}"
+ )
+
+
+def compile_jinja_template(
+ template_path: Path, destination: Path, context: dict = {}
+) -> Path:
+ """Write a compiled jinja2 template using the given context variables"""
+ template = Environment(loader=FileSystemLoader(template_path.parent)).from_string(
+ template_path.read_text()
+ )
+ rendered_template = template.render(**context)
+ destination.write_text(rendered_template)
+ return destination
diff --git a/docker-definition/derex_django/derex_django/settings/default/__init__.py b/docker-definition/derex_django/derex_django/settings/default/__init__.py
index 82a5d0e18..f8a0ea177 100644
--- a/docker-definition/derex_django/derex_django/settings/default/__init__.py
+++ b/docker-definition/derex_django/derex_django/settings/default/__init__.py
@@ -44,6 +44,7 @@
if SERVICE_VARIANT == "cms":
from cms.envs.common import * # noqa: F401, F403
+
_settings_modules = [
"django_settings",
"mysql",
diff --git a/docker-definition/derex_django/derex_django/settings/default/mongo.py b/docker-definition/derex_django/derex_django/settings/default/mongo.py
index 84f0d1714..5628a5972 100644
--- a/docker-definition/derex_django/derex_django/settings/default/mongo.py
+++ b/docker-definition/derex_django/derex_django/settings/default/mongo.py
@@ -9,6 +9,11 @@
DATA_DIR = Path("/openedx/data")
MONGODB_HOST = "mongodb"
+if DEREX_OPENEDX_VERSION == "lilac":
+ MONGODB_HOST = "mongodb4"
+elif DEREX_OPENEDX_VERSION == "juniper":
+ MONGODB_HOST = "mongodb36"
+
MONGODB_DB_NAME = os.environ["MONGODB_DB_NAME"]
DOC_STORE_CONFIG = {
"host": MONGODB_HOST,
diff --git a/docker-definition/derex_django/derex_django/settings/default/mysql.py b/docker-definition/derex_django/derex_django/settings/default/mysql.py
index cc5dcdfe4..92afd3061 100644
--- a/docker-definition/derex_django/derex_django/settings/default/mysql.py
+++ b/docker-definition/derex_django/derex_django/settings/default/mysql.py
@@ -5,7 +5,7 @@
"default": {
"ATOMIC_REQUESTS": True,
"ENGINE": "django.db.backends.mysql",
- "HOST": "mysql",
+ "HOST": os.environ["MYSQL_HOST"],
"NAME": os.environ["MYSQL_DB_NAME"],
"USER": os.environ["MYSQL_USER"],
"PASSWORD": os.environ["MYSQL_PASSWORD"],
diff --git a/examples/koa/complete/derex_etc_dir/docker-compose-services.yml b/examples/koa/complete/derex_etc_dir/docker-compose-services.yml
index 6c87579f4..21557b109 100644
--- a/examples/koa/complete/derex_etc_dir/docker-compose-services.yml
+++ b/examples/koa/complete/derex_etc_dir/docker-compose-services.yml
@@ -1,5 +1,5 @@
version: "3.5"
services:
- mysql:
+ mysql57:
environment:
MYSQL_ROOT_PASSWORD: my-overridden-secret-password
diff --git a/requirements.txt b/requirements.txt
index 16d6696ad..254e09a1e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -12,12 +12,12 @@ bcrypt==3.2.0
# via paramiko
certifi==2021.5.30
# via requests
-cffi==1.14.5
+cffi==1.14.6
# via
# bcrypt
# cryptography
# pynacl
-chardet==4.0.0
+charset-normalizer==2.0.4
# via requests
click==8.0.1
# via
@@ -29,9 +29,9 @@ colorama==0.4.4
# via rich
commonmark==0.9.1
# via rich
-cryptography==3.4.7
+cryptography==3.4.8
# via paramiko
-distro==1.5.0
+distro==1.6.0
# via docker-compose
docker[ssh]==5.0.0
# via docker-compose
@@ -41,9 +41,9 @@ dockerpty==0.4.1
# via docker-compose
docopt==0.6.2
# via docker-compose
-idna==2.10
+idna==3.2
# via requests
-importlib-metadata==4.5.0
+importlib-metadata==4.6.4
# via derex.runner (setup.py)
jinja2==3.0.1
# via derex.runner (setup.py)
@@ -59,27 +59,27 @@ py==1.10.0
# via derex.runner (setup.py)
pycparser==2.20
# via cffi
-pygments==2.9.0
+pygments==2.10.0
# via rich
-pymongo==3.11.4
+pymongo==3.12.0
# via derex.runner (setup.py)
pymysql==1.0.2
# via derex.runner (setup.py)
pynacl==1.4.0
# via paramiko
-pyrsistent==0.17.3
+pyrsistent==0.18.0
# via jsonschema
-python-dotenv==0.18.0
+python-dotenv==0.19.0
# via docker-compose
pyyaml==5.4.1
# via
# derex.runner (setup.py)
# docker-compose
-requests==2.25.1
+requests==2.26.0
# via
# docker
# docker-compose
-rich==10.4.0
+rich==10.7.0
# via derex.runner (setup.py)
six==1.16.0
# via
@@ -88,9 +88,9 @@ six==1.16.0
# jsonschema
# pynacl
# websocket-client
-texttable==1.6.3
+texttable==1.6.4
# via docker-compose
-urllib3==1.26.5
+urllib3==1.26.6
# via
# derex.runner (setup.py)
# requests
@@ -98,7 +98,7 @@ websocket-client==0.59.0
# via
# docker
# docker-compose
-zipp==3.4.1
+zipp==3.5.0
# via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
diff --git a/requirements_dev.txt b/requirements_dev.txt
index 93a28e530..f804b7fda 100644
--- a/requirements_dev.txt
+++ b/requirements_dev.txt
@@ -11,8 +11,7 @@ appdirs==1.4.4
# -c requirements.txt
# -r requirements.txt
# black
- # virtualenv
-astroid==2.6.0
+astroid==2.7.2
# via sphinx-autoapi
attrs==21.2.0
# via
@@ -22,16 +21,18 @@ attrs==21.2.0
# pytest
babel==2.9.1
# via sphinx
+backports.entry-points-selectable==1.1.0
+ # via virtualenv
bcrypt==3.2.0
# via
# -c requirements.txt
# -r requirements.txt
# paramiko
-black==21.6b0
+black==21.7b0
# via
# -r requirements_dev.in
# pytest-black
-bleach==3.3.0
+bleach==4.0.0
# via readme-renderer
bump2version==1.0.1
# via -r requirements_dev.in
@@ -40,7 +41,7 @@ certifi==2021.5.30
# -c requirements.txt
# -r requirements.txt
# requests
-cffi==1.14.5
+cffi==1.14.6
# via
# -c requirements.txt
# -r requirements.txt
@@ -49,7 +50,7 @@ cffi==1.14.5
# pynacl
cfgv==3.3.0
# via pre-commit
-chardet==4.0.0
+charset-normalizer==2.0.4
# via
# -c requirements.txt
# -r requirements.txt
@@ -80,7 +81,7 @@ coverage==5.5
# via
# -r requirements_dev.in
# pytest-cov
-cryptography==3.4.7
+cryptography==3.4.8
# via
# -c requirements.txt
# -r requirements.txt
@@ -88,7 +89,7 @@ cryptography==3.4.7
# secretstorage
distlib==0.3.2
# via virtualenv
-distro==1.5.0
+distro==1.6.0
# via
# -c requirements.txt
# -r requirements.txt
@@ -124,16 +125,16 @@ flake8==3.9.2
# via -r requirements_dev.in
flake8-import-order==0.18.1
# via -r requirements_dev.in
-identify==2.2.10
+identify==2.2.13
# via pre-commit
-idna==2.10
+idna==3.2
# via
# -c requirements.txt
# -r requirements.txt
# requests
imagesize==1.2.0
# via sphinx
-importlib-metadata==4.5.0
+importlib-metadata==4.6.4
# via
# -c requirements.txt
# -r requirements.txt
@@ -141,9 +142,9 @@ importlib-metadata==4.5.0
# twine
iniconfig==1.1.1
# via pytest
-isort==5.9.1
+isort==5.9.3
# via -r requirements_dev.in
-jeepney==0.6.0
+jeepney==0.7.1
# via
# keyring
# secretstorage
@@ -158,7 +159,7 @@ jsonschema==3.2.0
# -c requirements.txt
# -r requirements.txt
# docker-compose
-keyring==23.0.1
+keyring==23.1.0
# via twine
lazy-object-proxy==1.6.0
# via astroid
@@ -177,7 +178,7 @@ mypy-extensions==0.4.3
# mypy
nodeenv==1.6.0
# via pre-commit
-packaging==20.9
+packaging==21.0
# via
# bleach
# pytest
@@ -188,21 +189,23 @@ paramiko==2.7.2
# -c requirements.txt
# -r requirements.txt
# docker
-pathspec==0.8.1
+pathspec==0.9.0
# via black
-pep517==0.10.0
+pep517==0.11.0
# via pip-tools
pip-tools==6.2.0
# via -r requirements_dev.in
-pkginfo==1.7.0
+pkginfo==1.7.1
# via twine
+platformdirs==2.2.0
+ # via virtualenv
pluggy==0.13.1
# via
# -c requirements.txt
# -r requirements.txt
# pytest
# tox
-pre-commit==2.13.0
+pre-commit==2.14.0
# via -r requirements_dev.in
py==1.10.0
# via
@@ -221,14 +224,14 @@ pycparser==2.20
# cffi
pyflakes==2.3.1
# via flake8
-pygments==2.9.0
+pygments==2.10.0
# via
# -c requirements.txt
# -r requirements.txt
# readme-renderer
# rich
# sphinx
-pymongo==3.11.4
+pymongo==3.12.0
# via
# -c requirements.txt
# -r requirements.txt
@@ -243,7 +246,7 @@ pynacl==1.4.0
# paramiko
pyparsing==2.4.7
# via packaging
-pyrsistent==0.17.3
+pyrsistent==0.18.0
# via
# -c requirements.txt
# -r requirements.txt
@@ -262,7 +265,7 @@ pytest-mock==3.6.1
# via -r requirements_dev.in
pytest-runner==5.3.1
# via -r requirements_dev.in
-python-dotenv==0.18.0
+python-dotenv==0.19.0
# via
# -c requirements.txt
# -r requirements.txt
@@ -279,9 +282,9 @@ pyyaml==5.4.1
# sphinx-autoapi
readme-renderer==29.0
# via twine
-regex==2021.4.4
+regex==2021.8.21
# via black
-requests==2.25.1
+requests==2.26.0
# via
# -c requirements.txt
# -r requirements.txt
@@ -295,7 +298,7 @@ requests-toolbelt==0.9.1
# via twine
rfc3986==1.5.0
# via twine
-rich==10.4.0
+rich==10.7.0
# via
# -c requirements.txt
# -r requirements.txt
@@ -318,14 +321,14 @@ six==1.16.0
# websocket-client
snowballstemmer==2.1.0
# via sphinx
-sphinx==4.0.2
+sphinx==4.1.2
# via
# -r requirements_doc.in
# sphinx-autoapi
# sphinx-press-theme
-sphinx-autoapi==1.8.1
+sphinx-autoapi==1.8.4
# via -r requirements_doc.in
-sphinx-press-theme==0.7.3
+sphinx-press-theme==0.8.0
# via -r requirements_doc.in
sphinxcontrib-applehelp==1.0.2
# via sphinx
@@ -339,25 +342,27 @@ sphinxcontrib-qthelp==1.0.3
# via sphinx
sphinxcontrib-serializinghtml==1.1.5
# via sphinx
-texttable==1.6.3
+texttable==1.6.4
# via
# -c requirements.txt
# -r requirements.txt
# docker-compose
toml==0.10.2
# via
- # black
- # pep517
# pre-commit
# pytest
# pytest-black
# pytest-cov
# tox
-tox==3.23.1
+tomli==1.2.1
+ # via
+ # black
+ # pep517
+tox==3.24.3
# via -r requirements_dev.in
-tqdm==4.61.1
+tqdm==4.62.2
# via twine
-twine==3.4.1
+twine==3.4.2
# via -r requirements_dev.in
typed-ast==1.4.3
# via mypy
@@ -365,16 +370,16 @@ typing-extensions==3.10.0.0
# via mypy
unidecode==1.2.0
# via sphinx-autoapi
-urllib3==1.26.5
+urllib3==1.26.6
# via
# -c requirements.txt
# -r requirements.txt
# requests
-virtualenv==20.4.7
+virtualenv==20.7.2
# via
# pre-commit
# tox
-watchdog==2.1.2
+watchdog==2.1.5
# via -r requirements_dev.in
webencodings==0.5.1
# via bleach
@@ -384,13 +389,13 @@ websocket-client==0.59.0
# -r requirements.txt
# docker
# docker-compose
-wheel==0.36.2
+wheel==0.37.0
# via
# -r requirements_dev.in
# pip-tools
wrapt==1.12.1
# via astroid
-zipp==3.4.1
+zipp==3.5.0
# via
# -c requirements.txt
# -r requirements.txt
diff --git a/tests/conftest.py b/tests/conftest.py
index 278d6f977..82f474703 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -78,7 +78,7 @@ def complete_project(request, workdir_copy):
@pytest.fixture(scope=("session"))
def sys_argv(session_mocker):
@contextlib.contextmanager
- def my_cm(eargs):
+ def context_manager(eargs):
with session_mocker.mock_module.patch.object(sys, "argv", eargs):
try:
yield
@@ -86,7 +86,7 @@ def my_cm(eargs):
if exc.code != 0:
raise
- return my_cm
+ return context_manager
def pytest_configure(config):
diff --git a/tests/test_ddc.py b/tests/test_ddc.py
index 5477c16e7..80b2b5370 100644
--- a/tests/test_ddc.py
+++ b/tests/test_ddc.py
@@ -1,8 +1,9 @@
# -*- coding: utf-8 -*-
"""Tests for `derex.runner.ddc` module."""
+from derex.runner.project import Project
+
import logging
-import os
import pytest
import sys
import yaml
@@ -11,47 +12,41 @@
def test_ddc_services(sys_argv, capsys, monkeypatch, complete_project):
"""Test the derex docker compose shortcut."""
from derex.runner.ddc import ddc_services
- from derex.runner.project import Project
-
- os.environ["DEREX_ADMIN_SERVICES"] = "False"
- with sys_argv(["ddc-services", "config"]):
- ddc_services()
- output = capsys.readouterr().out
- assert "mongodb" in output
- assert "adminer" not in output
-
- os.environ["DEREX_ADMIN_SERVICES"] = "True"
- with sys_argv(["ddc-services", "config"]):
- ddc_services()
- output = capsys.readouterr().out
- assert "adminer" in output
with complete_project:
+ project = Project()
+ with sys_argv(["ddc-services", "config"]):
+ ddc_services()
+ output = capsys.readouterr().out
+ assert project.mongodb_host in output
+ assert project.mysql_host in output
+ assert project.elasticsearch_host in output
+
monkeypatch.setenv("DEREX_ETC_PATH", str(Project().root / "derex_etc_dir"))
with sys_argv(["ddc-services", "config"]):
ddc_services()
- output = capsys.readouterr().out
- assert "my-overridden-secret-password" in output
+ output = capsys.readouterr().out
+ assert "my-overridden-secret-password" in output
def test_ddc_project_minimal(sys_argv, mocker, minimal_project, capsys):
+ """Test the Open edX docker compose shortcut."""
+
from derex.runner.ddc import ddc_project
- from derex.runner.project import Project
- """Test the open edx ironwood docker compose shortcut."""
# It should check for services to be up before trying to do anything
- wait_for_service = mocker.patch("derex.runner.ddc.wait_for_service")
+ wait_for_container = mocker.patch("derex.runner.ddc.wait_for_container")
with minimal_project:
for param in ["up", "start"]:
- wait_for_service.return_value = 0
- wait_for_service.side_effect = None
+ wait_for_container.return_value = 0
+ wait_for_container.side_effect = None
with sys_argv(["ddc-project", param, "--dry-run"]):
ddc_project()
assert "Would have run" in capsys.readouterr().out
- wait_for_service.side_effect = RuntimeError(
+ wait_for_container.side_effect = RuntimeError(
"mysql service not found.\n"
"Maybe you forgot to run\n"
"ddc-services up -d"
@@ -69,7 +64,7 @@ def test_ddc_project_minimal(sys_argv, mocker, minimal_project, capsys):
with sys_argv(["ddc-project", "config"]):
ddc_project()
assert (
- "/derex/runner/compose_files/openedx_customizations/juniper/"
+ "/derex/runner/compose_files/common/openedx_customizations/juniper/"
in capsys.readouterr().out
)
@@ -91,9 +86,8 @@ def test_ddc_project_symlink_mounting(sys_argv, mocker, complete_project, capsys
are mounted in the Open edX containers.
"""
from derex.runner.ddc import ddc_project
- from derex.runner.project import Project
- mocker.patch("derex.runner.ddc.wait_for_service", return_value=0)
+ mocker.patch("derex.runner.ddc.wait_for_container", return_value=0)
with complete_project:
with sys_argv(["ddc-project", "config"]):
ddc_project()
diff --git a/tests/test_derex.py b/tests/test_derex.py
index 928614866..3cbe97350 100644
--- a/tests/test_derex.py
+++ b/tests/test_derex.py
@@ -26,33 +26,24 @@ def test_derex_runmode(minimal_project, mocker):
result = runner.invoke(derex_cli_group, ["runmode"])
assert result.exit_code == 0, result.output
assert result.output == "debug\n"
- # Until this PR is merged we can't peek into `.stderr`
- # https://github.com/pallets/click/pull/1194
- assert result.stderr_bytes == b""
+ assert result.stderr == ""
result = runner.invoke(derex_cli_group, ["runmode", "aaa"])
assert result.exit_code == 2, result.output
assert "Usage:" in result.stderr
- mocker.patch("derex.runner.cli.HAS_MASTER_SECRET", new=False)
- result = runner.invoke(derex_cli_group, ["runmode", "production"])
- assert result.exit_code == 1, result.output
- assert "Set a master secret" in result.stderr_bytes.decode("utf8")
-
- mocker.patch("derex.runner.cli.HAS_MASTER_SECRET", new=True)
result = runner.invoke(derex_cli_group, ["runmode", "production"])
assert result.exit_code == 0, result.output
- assert "debug → production" in result.stderr_bytes.decode("utf8")
+ assert "debug → production" in result.stderr
- mocker.patch("derex.runner.cli.HAS_MASTER_SECRET", new=True)
result = runner.invoke(derex_cli_group, ["runmode", "production"])
assert result.exit_code == 0, result.output
- assert "already production" in result.stderr_bytes.decode("utf8")
+ assert "already production" in result.stderr
result = runner.invoke(derex_cli_group, ["runmode"])
assert result.exit_code == 0, result.output
assert result.output == "production\n"
- assert result.stderr_bytes == b""
+ assert result.stderr == ""
def test_derex_runmode_wrong(minimal_project):
@@ -67,6 +58,50 @@ def test_derex_runmode_wrong(minimal_project):
assert "is not valid as" in result.stderr
+def test_derex_environment(minimal_project, mocker):
+ with minimal_project:
+ result = runner.invoke(derex_cli_group, ["environment"])
+ assert result.exit_code == 0, result.output
+ assert result.output == "development\n"
+ assert result.stderr == ""
+
+ result = runner.invoke(derex_cli_group, ["environment", "aaa"])
+ assert result.exit_code == 2, result.output
+ assert "Usage:" in result.stderr
+
+ mocker.patch("derex.runner.project.Project.has_main_secret", return_value=False)
+ result = runner.invoke(derex_cli_group, ["environment", "production"])
+ assert result.exit_code == 1, result.output
+ assert "Set a main secret" in result.stderr
+
+ mocker.patch("derex.runner.project.Project.has_main_secret", return_value=True)
+ result = runner.invoke(derex_cli_group, ["environment", "production"])
+ assert result.exit_code == 0, result.output
+ assert "development → production" in result.stderr
+
+ mocker.patch("derex.runner.project.Project.has_main_secret", return_value=True)
+ result = runner.invoke(derex_cli_group, ["environment", "production"])
+ assert result.exit_code == 0, result.output
+ assert "already production" in result.stderr
+
+ result = runner.invoke(derex_cli_group, ["environment"])
+ assert result.exit_code == 0, result.output
+ assert result.output == "production\n"
+ assert result.stderr == ""
+
+
+def test_derex_environment_wrong(minimal_project):
+ with minimal_project:
+ project = Project()
+ # Use low level API to inject invalid value
+ project._set_status("environment", "garbage-not-a-valid-environment")
+
+ result = runner.invoke(derex_cli_group, "environment")
+ # Ensure presence of error message
+ assert "garbage-not-a-valid-environment" in result.stderr
+ assert "is not valid as" in result.stderr
+
+
def test_derex_cli_group_no_containers_running(monkeypatch):
from derex.runner import docker_utils
diff --git a/tests/test_docker.py b/tests/test_docker.py
index f48d9e3df..f66003335 100644
--- a/tests/test_docker.py
+++ b/tests/test_docker.py
@@ -6,46 +6,52 @@
import pytest
-def test_ensure_volumes_present(mocker):
+def test_ensure_volumes_present(mocker, minimal_project):
from derex.runner.docker_utils import ensure_volumes_present
- from derex.runner.docker_utils import VOLUMES
client = mocker.patch("derex.runner.docker_utils.client")
- client.volumes.list.return_value = []
- ensure_volumes_present()
- assert client.volumes.create.call_count > 3
- client.volumes.create.assert_any_call("derex_mysql")
- client.volumes.create.assert_any_call("derex_mongodb")
-
- client.volumes.create.reset_mock()
- client.volumes.list.return_value = [SimpleNamespace(name=name) for name in VOLUMES]
- ensure_volumes_present()
- client.volumes.create.assert_not_called()
-
-
-def test_check_services(mocker):
- from derex.runner.docker_utils import check_services
+ with minimal_project:
+ project = Project()
+ client.volumes.list.return_value = []
+ ensure_volumes_present(project)
+ assert client.volumes.create.call_count > 3
+ client.volumes.create.assert_any_call(project.mysql_docker_volume)
+ client.volumes.create.assert_any_call(project.mongodb_docker_volume)
+ client.volumes.create.assert_any_call(project.elasticsearch_docker_volume)
+ client.volumes.create.assert_any_call(project.rabbitmq_docker_volume)
+ client.volumes.create.assert_any_call(project.minio_docker_volume)
+
+ client.volumes.create.reset_mock()
+ client.volumes.list.return_value = [
+ SimpleNamespace(name=name) for name in project.docker_volumes
+ ]
+ ensure_volumes_present(project)
+ client.volumes.create.assert_not_called()
+
+
+def test_check_containers(mocker):
+ from derex.runner.docker_utils import check_containers
api_client = mocker.patch("derex.runner.docker_utils.api_client")
container_info = {"State": {"Status": "running", "Health": {"Status": "healthy"}}}
api_client.inspect_container.return_value = container_info
- assert check_services(["mysql"])
+ assert check_containers(["mysql"])
api_client.inspect_container.side_effect = docker.errors.NotFound(
"mysql container not found"
)
- assert check_services(["mysql"]) is False
+ assert check_containers(["mysql"]) is False
-def test_wait_for_service(mocker):
- from derex.runner.docker_utils import wait_for_service
+def test_wait_for_container(mocker):
+ from derex.runner.docker_utils import wait_for_container
# Test that a RuntimeError is raised if the container doesn't
# exists
with pytest.raises(RuntimeError):
- wait_for_service("service", 1)
+ wait_for_container("container", 1)
container_info = {"State": {"Status": "running", "Health": {"Status": "healthy"}}}
api_client = mocker.patch("derex.runner.docker_utils.api_client")
@@ -53,32 +59,32 @@ def test_wait_for_service(mocker):
# Test that the result is successfull when the container
# is running or restarting and healthy
- result = wait_for_service("service", 1)
- api_client.inspect_container.assert_called_with("service")
+ result = wait_for_container("container", 1)
+ api_client.inspect_container.assert_called_with("container")
assert result == 0
container_info["State"]["Status"] = "restarting"
- result = wait_for_service("service", 1)
+ result = wait_for_container("container", 1)
assert result == 0
# Test that a RuntimeError is raised if the container status is
# exited
container_info["State"]["Status"] = "exited"
with pytest.raises(RuntimeError):
- wait_for_service("service", 1)
+        wait_for_container("container", 1)
# Test that a TimeoutError is raised if the container status is
# unhealthy
container_info["State"]["Status"] = "running"
container_info["State"]["Health"]["Status"] = "unhealthy"
with pytest.raises(TimeoutError):
- wait_for_service("service", 1)
+        wait_for_container("container", 1)
# Test that a NotImplementedError is raised if the container doesn't
# define an healtcheck
container_info["State"]["Health"] = None
with pytest.raises(NotImplementedError):
- wait_for_service("service", 1)
+        wait_for_container("container", 1)
def test_get_final_image(mocker, minimal_project):
diff --git a/tests/test_mongodb.py b/tests/test_mongodb.py
index c12035001..e12e84bcd 100644
--- a/tests/test_mongodb.py
+++ b/tests/test_mongodb.py
@@ -1,87 +1,101 @@
-from .conftest import assert_result_ok
-from .conftest import DEREX_TEST_USER
-from click.testing import CliRunner
-from derex.runner.ddc import ddc_services
-from importlib import reload
+# from .conftest import assert_result_ok
+# from .conftest import DEREX_TEST_USER
+# from click.testing import CliRunner
+# from derex.runner.docker_utils import wait_for_container
+# from derex.runner.ddc import run_ddc
+# from importlib import reload
-import pytest
-import uuid
+# from derex.runner.project import Project
+# from derex.runner.mongodb import get_mongodb_client
+# import pytest
+# import uuid
-runner = CliRunner(mix_stderr=False)
+# runner = CliRunner(mix_stderr=False)
-@pytest.fixture(scope="session")
-def start_mongodb(sys_argv):
- """Ensure the mongodb service is up"""
- with sys_argv(["ddc-services", "up", "-d", "mongodb"]):
- ddc_services()
+# def start_mongodb(project):
+# """Ensure the mongodb service is up"""
+# run_ddc(["up", "-d", project.mongodb_host], "services", project)
+# wait_for_container(project.mongodb_host)
-@pytest.fixture(autouse=True)
-def cleanup_mongodb(start_mongodb):
- """Ensure no test database is left behind"""
- from derex.runner.mongodb import MONGODB_CLIENT
- yield
+# def stop_mongodb(project):
+#     with project:
+# run_ddc(
+# ["down"],
+# "services",
+# project
+# )
- for database_name in [
- database["name"]
- for database in MONGODB_CLIENT.list_databases()
- if "derex_test_db_" in database["name"]
- ]:
- MONGODB_CLIENT.drop_database(database_name)
- for user in MONGODB_CLIENT.admin.command("usersInfo").get("users"):
- if DEREX_TEST_USER in user["user"]:
- MONGODB_CLIENT.admin.command("dropUser", DEREX_TEST_USER)
+# def cleanup_mongodb(project):
+# """Ensure no test database is left behind"""
+# start_mongodb(project)
+# project = Project()
+# mongodb_client = get_mongodb_client(project)
+# for database_name in [
+# database["name"]
+# for database in mongodb_client.list_databases()
+# if "derex_test_db_" in database["name"]
+# ]:
+# mongodb_client.drop_database(database_name)
+# for user in mongodb_client.admin.command("usersInfo").get("users"):
+# if DEREX_TEST_USER in user["user"]:
+# mongodb_client.admin.command("dropUser", DEREX_TEST_USER)
-def test_derex_mongodb(start_mongodb):
- from derex.runner.cli.mongodb import copy_mongodb
- from derex.runner.cli.mongodb import drop_mongodb
- from derex.runner.mongodb import list_databases
- import derex.runner.mongodb
+# def test_derex_mongodb(minimal_project):
+# from derex.runner.cli.mongodb import copy_mongodb
+# from derex.runner.cli.mongodb import drop_mongodb
+# from derex.runner.mongodb import list_databases
- reload(derex.runner.mongodb)
- MONGODB_CLIENT = derex.runner.mongodb.MONGODB_CLIENT
+# with minimal_project:
+# project = Project()
+# start_mongodb(project)
- test_db_name = f"derex_test_db_{uuid.uuid4().hex[:20]}"
- test_db_copy_name = f"derex_test_db_copy_{uuid.uuid4().hex[:20]}"
- random_value = uuid.uuid4().hex[:20]
- test_data = {"data": random_value}
+# mongodb_client = get_mongodb_client(project)
- MONGODB_CLIENT[test_db_name]["test_collection"].insert_one(test_data)
- assert test_db_name in [database["name"] for database in list_databases()]
+# test_db_name = f"derex_test_db_{uuid.uuid4().hex[:20]}"
+# test_db_copy_name = f"derex_test_db_copy_{uuid.uuid4().hex[:20]}"
+# random_value = uuid.uuid4().hex[:20]
+# test_data = {"data": random_value}
- runner.invoke(copy_mongodb, f"{test_db_name} {test_db_copy_name}", input="y")
- assert test_db_copy_name in [database["name"] for database in list_databases()]
- assert MONGODB_CLIENT[test_db_copy_name]["test_collection"].find_one(test_data)
+# mongodb_client[test_db_name]["test_collection"].insert_one(test_data)
+# assert test_db_name in [database["name"] for database in list_databases()]
- runner.invoke(drop_mongodb, test_db_name, input="y")
- runner.invoke(drop_mongodb, test_db_copy_name, input="y")
- assert test_db_name not in [database["name"] for database in list_databases()]
- assert test_db_copy_name not in [database["name"] for database in list_databases()]
+# runner.invoke(copy_mongodb, f"{test_db_name} {test_db_copy_name}", input="y")
+# assert test_db_copy_name in [database["name"] for database in list_databases()]
+# assert mongodb_client[test_db_copy_name]["test_collection"].find_one(test_data)
+# runner.invoke(drop_mongodb, test_db_name, input="y")
+# runner.invoke(drop_mongodb, test_db_copy_name, input="y")
+# assert test_db_name not in [database["name"] for database in list_databases()]
+# assert test_db_copy_name not in [database["name"] for database in list_databases()]
-def test_derex_mongodb_reset_password(mocker, start_mongodb):
- from derex.runner.cli.mongodb import create_user_cmd
- from derex.runner.cli.mongodb import reset_mongodb_password_cmd
- from derex.runner.cli.mongodb import shell
+# cleanup_mongodb(project)
+# stop_mongodb(project)
- assert_result_ok(
- runner.invoke(create_user_cmd, [DEREX_TEST_USER, "secret", "--role=root"])
- )
- mocker.patch("derex.runner.mongodb.MONGODB_ROOT_USER", new=DEREX_TEST_USER)
+# def test_derex_mongodb_reset_password(mocker, start_mongodb, minimal_project):
+# from derex.runner.cli.mongodb import create_user_cmd
+# from derex.runner.cli.mongodb import reset_mongodb_password_cmd
+# from derex.runner.cli.mongodb import shell
- # This is expected to fail since we set a custom password for the root user
- result = runner.invoke(shell)
- assert result.exit_code == 1
+# with minimal_project:
+# assert_result_ok(
+# runner.invoke(create_user_cmd, [DEREX_TEST_USER, "secret", "--role=root"])
+# )
+# mocker.patch("derex.runner.project.MONGODB_ROOT_USER", new=DEREX_TEST_USER)
- # We reset the password to the derex generated one
- assert_result_ok(runner.invoke(reset_mongodb_password_cmd, ["secret"], input="y"))
+# # This is expected to fail since we set a custom password for the root user
+# result = runner.invoke(shell)
+# assert result.exit_code == 1
- # If the password is still not resetted to the value of the derex generated password
- # but still set to "secret" the next test will fail
- assert_result_ok(runner.invoke(shell))
+# # We reset the password to the derex generated one
+# assert_result_ok(runner.invoke(reset_mongodb_password_cmd, ["secret"], input="y"))
+
+# # If the password is still not reset to the value of the derex generated password
+# # but still set to "secret" the next test will fail
+# assert_result_ok(runner.invoke(shell))
diff --git a/tests/test_mysql.py b/tests/test_mysql.py
index 99e2be80b..5e6650896 100644
--- a/tests/test_mysql.py
+++ b/tests/test_mysql.py
@@ -1,8 +1,9 @@
from .conftest import assert_result_ok
from .conftest import DEREX_TEST_USER
from click.testing import CliRunner
-from derex.runner.mysql import get_system_mysql_client
+from derex.runner.mysql import get_project_mysql_client
from derex.runner.mysql import show_databases
+from derex.runner.project import Project
from itertools import repeat
from types import SimpleNamespace
@@ -13,72 +14,70 @@
runner = CliRunner(mix_stderr=False)
-@pytest.fixture(scope="session")
-def start_mysql(sys_argv):
- """Ensure the mysql service is up"""
- from derex.runner.ddc import ddc_services
+# @pytest.fixture(autouse=True)
+# def cleanup_mysql(minimal_project):
+# """Ensure no test database or user is left behind"""
+# yield
- with sys_argv(["ddc-services", "up", "-d", "mysql"]):
- ddc_services()
+# with minimal_project:
+# project = Project()
+# mysql_client = get_project_mysql_client(project)
+# mysql_client.execute("SHOW DATABASES;")
+# for database in mysql_client.fetchall():
+# if "derex_test_db_" in database[0]:
+# mysql_client.execute(f"DROP DATABASE {database[0]};")
+# mysql_client.execute("SELECT user,host from mysql.user;")
+# for user in mysql_client.fetchall():
+# if DEREX_TEST_USER in user[0]:
+# mysql_client.execute(f"DROP USER '{user[0]}'@'{user[1]}'")
-@pytest.fixture(autouse=True)
-def cleanup_mysql(start_mysql):
- """Ensure no test database or user is left behind"""
- yield
-
- client = get_system_mysql_client()
- client.execute("SHOW DATABASES;")
- for database in client.fetchall():
- if "derex_test_db_" in database[0]:
- client.execute(f"DROP DATABASE {database[0]};")
-
- client.execute("SELECT user,host from mysql.user;")
- for user in client.fetchall():
- if DEREX_TEST_USER in user[0]:
- client.execute(f"DROP USER '{user[0]}'@'{user[1]}'")
-
- client.connection.close()
+# mysql_client.connection.close()
@pytest.mark.slowtest
-def test_derex_mysql(start_mysql):
+def test_derex_mysql(minimal_project):
"""Test the `derex mysql copy` cli command"""
from derex.runner.cli.mysql import copy_database_cmd
from derex.runner.cli.mysql import create_database_cmd
from derex.runner.cli.mysql import drop_database_cmd
- test_db_name = f"derex_test_db_{uuid.uuid4().hex[:20]}"
- test_db_copy_name = f"derex_test_db_copy_{uuid.uuid4().hex[:20]}"
- random_value = uuid.uuid4().hex[:20]
+ with minimal_project:
+ project = Project()
+
+ test_db_name = f"derex_test_db_{uuid.uuid4().hex[:20]}"
+ test_db_copy_name = f"derex_test_db_copy_{uuid.uuid4().hex[:20]}"
+ random_value = uuid.uuid4().hex[:20]
- runner.invoke(create_database_cmd, test_db_name)
- assert test_db_name in [database[0] for database in show_databases()]
+ runner.invoke(create_database_cmd, test_db_name)
+ assert test_db_name in [database[0] for database in show_databases(project)]
- mysql_client = get_system_mysql_client()
- mysql_client.connection.autocommit(True)
- mysql_client.execute(f"USE {test_db_name};")
- mysql_client.execute("CREATE TABLE test (field VARCHAR(255) NOT NULL);")
- mysql_client.execute(f"INSERT INTO test (field) VALUES ('{random_value}');")
+ mysql_client = get_project_mysql_client(project=project)
+ mysql_client.connection.autocommit(True)
+ mysql_client.execute(f"USE {test_db_name};")
+ mysql_client.execute("CREATE TABLE test (field VARCHAR(255) NOT NULL);")
+ mysql_client.execute(f"INSERT INTO test (field) VALUES ('{random_value}');")
- runner.invoke(copy_database_cmd, [test_db_name, test_db_copy_name], input="y")
- mysql_client.execute(f"USE {test_db_copy_name};")
- mysql_client.execute("SELECT * from test;")
- assert mysql_client.fetchone()[0] == random_value
+ runner.invoke(copy_database_cmd, [test_db_name, test_db_copy_name], input="y")
+ mysql_client.execute(f"USE {test_db_copy_name};")
+ mysql_client.execute("SELECT * from test;")
+ assert mysql_client.fetchone()[0] == random_value
- runner.invoke(drop_database_cmd, test_db_name, input="y")
- runner.invoke(drop_database_cmd, test_db_copy_name, input="y")
+ runner.invoke(drop_database_cmd, test_db_name, input="y")
+ runner.invoke(drop_database_cmd, test_db_copy_name, input="y")
- assert test_db_name not in [database[0] for database in show_databases()]
- assert test_db_copy_name not in [database[0] for database in show_databases()]
+ assert test_db_name not in [database[0] for database in show_databases(project)]
+ assert test_db_copy_name not in [
+ database[0] for database in show_databases(project)
+ ]
@pytest.mark.slowtest
-def test_derex_mysql_reset(start_mysql, mocker, minimal_project):
- """Test the open edx ironwood docker compose shortcut."""
+def test_derex_mysql_reset(mocker, minimal_project):
+ """Test the `derex mysql reset` cli command"""
from derex.runner.cli.mysql import reset_mysql_cmd
- mocker.patch("derex.runner.ddc.wait_for_service", return_value=0)
+ mocker.patch("derex.runner.ddc.wait_for_container", return_value=0)
client = mocker.patch("derex.runner.docker_utils.client")
client.containers.get.return_value.exec_run.side_effect = [
SimpleNamespace(exit_code=-1)
@@ -91,30 +90,31 @@ def test_derex_mysql_reset(start_mysql, mocker, minimal_project):
@pytest.mark.slowtest
-def test_derex_mysql_reset_password(start_mysql, mocker):
+def test_derex_mysql_reset_password(mocker, minimal_project):
"""Test the `derex mysql copy` cli command"""
from derex.runner.cli.mysql import create_user_cmd
from derex.runner.cli.mysql import reset_mysql_password_cmd
from derex.runner.cli.mysql import shell
- for host in ["localhost", "%"]:
- runner.invoke(create_user_cmd, [DEREX_TEST_USER, "secret", host])
- result = runner.invoke(
- shell,
- [
- f"GRANT ALL ON *.* TO '{DEREX_TEST_USER}'@'{host}' WITH GRANT OPTION;"
- "FLUSH PRIVILEGES;"
- ],
- )
-
- mocker.patch("derex.runner.mysql.MYSQL_ROOT_USER", new=DEREX_TEST_USER)
-
- # This is expected to fail since we set a custom password for the root user
- result = runner.invoke(shell, ["SHOW DATABASES;"])
- assert result.exit_code == 1
-
- # We reset the password to the derex generated one
- assert_result_ok(runner.invoke(reset_mysql_password_cmd, ["secret"], input="y"))
-
- # Now this should be
- assert_result_ok(result=runner.invoke(shell, ["SHOW DATABASES;"]))
+ with minimal_project:
+ for host in ["localhost", "%"]:
+ runner.invoke(create_user_cmd, [DEREX_TEST_USER, "secret", host])
+ result = runner.invoke(
+ shell,
+ [
+ f"GRANT ALL ON *.* TO '{DEREX_TEST_USER}'@'{host}' WITH GRANT OPTION;"
+ "FLUSH PRIVILEGES;"
+ ],
+ )
+
+ mocker.patch("derex.runner.project.MYSQL_ROOT_USER", new=DEREX_TEST_USER)
+
+ # This is expected to fail since we set a custom password for the root user
+ result = runner.invoke(shell, ["SHOW DATABASES;"])
+ assert result.exit_code == 1
+
+ # We reset the password to the derex generated one
+ assert_result_ok(runner.invoke(reset_mysql_password_cmd, ["secret"], input="y"))
+
+        # Now this should succeed
+ assert_result_ok(result=runner.invoke(shell, ["SHOW DATABASES;"]))
diff --git a/tests/test_project.py b/tests/test_project.py
index fd6347561..81587816f 100644
--- a/tests/test_project.py
+++ b/tests/test_project.py
@@ -1,6 +1,6 @@
from derex.runner.constants import CONF_FILENAME
from derex.runner.constants import SECRETS_CONF_FILENAME
-from derex.runner.ddc import run_ddc_project
+from derex.runner.ddc import run_ddc
from derex.runner.project import Project
from derex.runner.project import ProjectRunMode
from pathlib import Path
@@ -92,7 +92,7 @@ def ddc_project_options(
with docker_compose_path.open("w") as fh:
fh.write("lms:\n image: foobar\n")
project = Project()
- run_ddc_project([], project, dry_run=True)
+ run_ddc([], "project", project, dry_run=True)
output = capsys.readouterr().out
# The last option should be the path of the user docker compose file for this project
assert output.endswith(f"-f {docker_compose_path}\n")
@@ -100,11 +100,11 @@ def ddc_project_options(
def test_docker_compose_addition_per_runmode(minimal_project, mocker, capsys):
with minimal_project:
- docker_compose_debug_path = Project().root / "docker-compose-debug.yml"
+ docker_compose_debug_path = Project().root / "docker-compose-runmode-debug.yml"
with docker_compose_debug_path.open("w") as fh:
fh.write("lms:\n image: foobar\n")
project = Project()
- run_ddc_project([], project, dry_run=True)
+ run_ddc([], "project", project, dry_run=True)
output = capsys.readouterr().out
# The last option should be the path of the debug docker compose
assert output.endswith(f"-f {docker_compose_debug_path}\n")
@@ -113,7 +113,7 @@ def test_docker_compose_addition_per_runmode(minimal_project, mocker, capsys):
default_project_docker_compose_file = project.private_filepath(
"docker-compose.yml"
)
- run_ddc_project([], project, dry_run=True)
+ run_ddc([], "project", project, dry_run=True)
output = capsys.readouterr().out
# The last option should be the path of the project default docker compose file
assert output.endswith(f"-f {default_project_docker_compose_file}\n")
diff --git a/tests/test_secrets.py b/tests/test_secrets.py
index 150dd55d0..716399c48 100644
--- a/tests/test_secrets.py
+++ b/tests/test_secrets.py
@@ -1,3 +1,4 @@
+from derex.runner.project import Project
from enum import Enum
from importlib import reload
@@ -7,85 +8,100 @@
CUSTOM_SECRET = "0123456789abcdefghijklmnopqrstuvwxyz"
-def test_master_secret(mocker):
- from derex.runner.secrets import _get_master_secret
+def test_unreadable_main_secret(mocker, minimal_project):
+ with minimal_project:
+ project = Project()
- mocker.patch("derex.runner.secrets.os.access", return_value=False)
- assert _get_master_secret() is None
+ mocker.patch("derex.runner.secrets.os.access", return_value=False)
+ assert project.main_secret == "Default secret"
-def test_master_secret_default_filename(mocker):
+def test_main_secret_default_filename(mocker, minimal_project):
"""If a file exists on the default path it should be taken into consideration.
Also make sure file contents are stripped from whitespace.
"""
- from derex.runner.secrets import _get_master_secret
+ with minimal_project:
+ project = Project()
- mocker.patch("derex.runner.secrets.os.access", return_value=True)
- mocker.patch(
- "derex.runner.secrets.Path.read_text", return_value=CUSTOM_SECRET + "\n"
- )
- assert _get_master_secret() == CUSTOM_SECRET
+ mocker.patch("derex.runner.secrets.os.access", return_value=True)
+ mocker.patch(
+ "derex.runner.project.Path.read_text", return_value=CUSTOM_SECRET + "\n"
+ )
+ assert project.main_secret == CUSTOM_SECRET
-def test_master_secret_default_filename_not_readable(mocker):
+def test_main_secret_default_filename_not_readable(mocker, minimal_project):
"""If the file exists but is not readable we should log an error."""
- from derex.runner.secrets import _get_master_secret
+ with minimal_project:
+ project = Project()
+ environment = project.environment
- mocker.patch("derex.runner.secrets.os.access", return_value=False)
- mocker.patch("derex.runner.secrets.Path.exists", return_value=True)
- logger = mocker.patch("derex.runner.secrets.logger")
+ mocker.patch("derex.runner.secrets.os.access", return_value=False)
+ mocker.patch("derex.runner.project.Path.exists", return_value=True)
+ logger = mocker.patch("derex.runner.project.logger")
- assert _get_master_secret() is None
- logger.error.assert_called_once()
+ # Since we are patching derex.runner.project.Path.exists we can't call
+ # project.main_secret since that will fail when checking the existence of a
+ # project environment file
+ assert project.get_main_secret(environment) is None
+ logger.error.assert_called_once()
-def test_master_secret_custom_filename(tmp_path, monkeypatch):
+def test_main_secret_custom_filename(tmp_path, monkeypatch, minimal_project):
"""If the file exists but is not readable we should log an error.
If the file contains a bad secret (too short, too long or not enough entropy)
an exception is raised.
"""
- from derex.runner.secrets import _get_master_secret
- from derex.runner.secrets import DerexSecretError
+ from derex.runner.exceptions import DerexSecretError
- secret_path = tmp_path / "main_secret"
- secret_path.write_text("\n" + CUSTOM_SECRET + "\n")
- monkeypatch.setenv("DEREX_MAIN_SECRET_PATH", str(secret_path))
- assert _get_master_secret() == CUSTOM_SECRET
+ with minimal_project:
+ project = Project()
- secret_path.write_text("a" * 5000)
- with pytest.raises(DerexSecretError):
- _get_master_secret() # Too long
+ secret_path = tmp_path / "main_secret"
+ secret_path.write_text("\n" + CUSTOM_SECRET + "\n")
+ monkeypatch.setenv("DEREX_MAIN_SECRET_PATH", str(secret_path))
+ assert project.main_secret == CUSTOM_SECRET
- secret_path.write_text("a")
- with pytest.raises(DerexSecretError):
- _get_master_secret() # Too short
+ secret_path.write_text("a" * 5000)
+ with pytest.raises(DerexSecretError):
+ project.main_secret # Too long
- secret_path.write_text("a" * 20)
- with pytest.raises(DerexSecretError):
- _get_master_secret() # Not enough entropy
+ secret_path.write_text("a")
+ with pytest.raises(DerexSecretError):
+ project.main_secret # Too short
+ secret_path.write_text("a" * 20)
+ with pytest.raises(DerexSecretError):
+ project.main_secret # Not enough entropy
-def test_derived_secret():
+
+def test_derived_secret(minimal_project):
from derex.runner.secrets import compute_entropy
- from derex.runner.secrets import get_secret
- foo_secret = get_secret(FooSecrets.foo)
- # The same name should always yield the same secrets
- assert get_secret(FooSecrets.foo) == foo_secret
+ with minimal_project:
+ project = Project()
+
+ foo_secret = project.get_secret(FooSecrets.foo)
+ # The same name should always yield the same secrets
+ assert project.get_secret(FooSecrets.foo) == foo_secret
+
+ # Two names should have different secrets
+ assert foo_secret != project.get_secret(FooSecrets.bar)
- # Two names should have different secrets
- assert foo_secret != get_secret(FooSecrets.bar)
+ # Secrets must have enough entropy
+ assert compute_entropy(foo_secret) > 256
- # Secrets must have enough entropy
- assert compute_entropy(foo_secret) > 256
+def test_derived_secret_no_scrypt_available(no_scrypt, minimal_project):
+ with minimal_project:
+ import derex.runner.project
-def test_derived_secret_no_scrypt_available(no_scrypt):
- import derex.runner.secrets
+ project = derex.runner.project.Project()
- reload(derex.runner.secrets)
+ reload(derex.runner.secrets)
+ reload(derex.runner.project)
- derex.runner.secrets.get_secret(FooSecrets.foo)
+ project.get_secret(FooSecrets.foo)
try:
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 9122100b2..34eb04114 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -12,9 +12,7 @@ def test_asbool():
def test_abspath_from_egg():
- import derex.runner.utils
+ from derex.runner import abspath_from_egg
- assert derex.runner.utils.abspath_from_egg("derex.runner", "derex/runner/utils.py")
- assert derex.runner.utils.abspath_from_egg(
- "derex.runner", "derex/runner/templates/docker-compose-project.yml.j2"
- )
+ assert abspath_from_egg("derex.runner", "derex/runner/utils.py")
+ assert abspath_from_egg("derex.runner", "derex/runner/__init__.py")