diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 090b7315eb..7d5cda44ef 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -180,6 +180,7 @@ upload-artifacts: - build-for-glinet-b1300 - build-for-turris-omnia + .run-test-in-docker: stage: test extends: .in-prplmesh-builder @@ -198,6 +199,15 @@ bwl_dummy_unit_tests: bcl_unit_tests: extends: .run-test-in-docker +dctest_one_test: + stage: test + script: + - ./dctest.py + tags: + - boardfarm-compose + needs: + - job: build-in-docker + mapf_common_encryption_tests: extends: .run-test-in-docker diff --git a/dctest.py b/dctest.py new file mode 100755 index 0000000000..3855209703 --- /dev/null +++ b/dctest.py @@ -0,0 +1,255 @@ +#!/usr/bin/env python3 +# +# Launch the test suite using docker and docker-compose. This script wraps +# the creation of the bridge(s) to be able to connect external devices with +# the docker network, launching the service for boardfarm. +# +# As this script is run outside containers, it does not use anything apart +# from Python 3.5 (will work on later versions but only uses 3.5 features) +# +# The best way to make sure no Python 3.5+ features are used is running the +# script with a Python 3.5.0 interpreter. 
Compile it from: +# +# https://www.python.org/ftp/python/3.5.0/Python-3.5.0.tgz +# +# Also, when calling a function look for 'New in version 3.X' where X > 5 +# +from __future__ import print_function # To check for python2 or < 3.5 execution +import argparse +import fcntl +import os +import grp +import shutil +import getpass +import sys +from subprocess import Popen, PIPE + + +if not (sys.version_info.major == 3 and sys.version_info.minor >= 5): + print("This script requires Python 3.5 or higher!") + print("You are using Python {}.{}.".format(sys.version_info.major, sys.version_info.minor)) + sys.exit(1) + + +def check_docker_versions(): + DOCKER_MAJOR = 19 + DC_MAJOR = 1 + DC_MINOR = 25 + docker_version = os.popen('docker --version').read().split(' ')[2] + docker_major = int(docker_version.split('.')[0]) + if docker_major < DOCKER_MAJOR: + fmt = "This script requires docker {}.0 or higher" + print(fmt.format(DOCKER_MAJOR)) + print("You are using version {}".format(docker_version)) + sys.exit(1) + dc_version = os.popen('docker-compose --version').read().split(' ')[2] + dc_major = int(dc_version.split('.')[0]) + dc_minor = int(dc_version.split('.')[1]) + if dc_major < DC_MAJOR: + fmt = "This script requires docker-compose {}.{} or higher" + print(fmt.format(DC_MAJOR, DC_MINOR)) + print("You are using version {}".format(dc_version)) + sys.exit(1) + if dc_minor < DC_MINOR: + fmt = "This script requires docker-compose {}.{} or higher" + print(fmt.format(DC_MAJOR, DC_MINOR)) + print("You are using version {}".format(dc_version)) + sys.exit(1) + + +class Services: + def __init__(self, bid=None): + self.scriptdir = os.path.dirname(os.path.realpath(__file__)) + os.chdir(self.scriptdir) + self.rootdir = self.scriptdir + + if bid is not None: + self.build_id = bid + print('Using ID {}'.format(self.build_id)) + # return + else: + self.build_id = self.get_build_id() + + self.logdir = os.path.join(self.scriptdir, 'logs') + device_name = 'dockerized_device-{}'.format(self.build_id) 
+ self.devicedir = os.path.join(self.logdir, device_name) + repeater_name = 'repeater1-{}'.format(self.build_id) + self.repeaterdir = os.path.join(self.logdir, repeater_name) + if not os.path.exists(self.logdir): + os.makedirs(self.logdir) + if not os.path.exists(self.devicedir): + print('Making {}'.format(self.devicedir)) + os.makedirs(self.devicedir) + if not os.path.exists(self.repeaterdir): + print('Making {}'.format(self.repeaterdir)) + os.makedirs(self.repeaterdir) + + def cleanlogs(self): + shutil.rmtree(os.path.join(self.scriptdir, 'logs')) + + def _setNonBlocking(self, fd): + """ + Set the file descriptor of the given file descriptor to non-blocking. + """ + flags = fcntl.fcntl(fd, fcntl.F_GETFL) + flags = flags | os.O_NONBLOCK + fcntl.fcntl(fd, fcntl.F_SETFL, flags) + + def get_build_id(self): + ci_pipeline_id = os.getenv('CI_PIPELINE_ID') + if ci_pipeline_id is not None: + return ci_pipeline_id + + # Otherwise we are running on the local machine, just find last id + # created and add one + last_id = 0 + if not os.path.exists('logs'): + return str(1) + for d in os.listdir('logs'): + if d.startswith('dockerized_device-'): + suffix = d[len('dockerized_device-'):] + isuffix = int(suffix) + if isuffix > last_id: + last_id = isuffix + if last_id == 0: + new_id = 1 + else: + new_id = last_id + 1 + return str(new_id) + + def copy_build_dir(self): + new_id = self.build_id + self.build_dir = 'build-{}'.format(new_id) + shutil.copytree('build', self.build_dir) + print('Copied build/ into {}'.format(self.build_dir)) + + def dc(self, args, interactive=False): + params = ['docker-compose', '-f', + 'tools/docker/boardfarm-ci/docker-compose.yml'] + # params += ['-p', 'boardfarm-ci-{}'.format(self.build_id)] + params += args + local_env = os.environ + local_env['ROOT_DIR'] = self.rootdir + docker_gid = grp.getgrnam('docker')[2] + local_env['CURRENT_UID'] = str(os.getuid()) + ':' + str(docker_gid) + local_env['CURRENT_ID'] = str(os.getuid()) + 
local_env['RUN_ID'] = self.build_id + + if os.getenv('CI_PIPELINE_ID') is None: + # Running locally + local_env['CI_PIPELINE_ID'] = 'latest' + local_env['FINAL_ROOT_DIR'] = self.rootdir + else: + # Running inside gitlab-ci + local_env['FINAL_ROOT_DIR'] = '/builds/prpl-foundation/prplMesh' + + # local_env['CURRENT_UID']= str(os.getuid()) + ':' + str(os.getgid()) + if not interactive: + proc = Popen(params, stdout=PIPE, stderr=PIPE) + for line in proc.stdout: + print(line.decode(), end='') + proc.stdout.close() + else: + proc = Popen(params) + return_code = proc.wait() + return return_code + + +def vararg_callback(option, opt_str, value, parser): + assert value is None + value = [] + + def floatable(str): + try: + float(str) + return True + except ValueError: + return False + + for arg in parser.rargs: + # stop on --foo like options + if arg[:2] == "--" and len(arg) > 2: + break + # stop on -a, but not on -3 or -3.0 + if arg[:1] == "-" and len(arg) > 1 and not floatable(arg): + break + value.append(arg) + + del parser.rargs[:len(value)] + setattr(parser.values, option.dest, value) + + +def cleanup(rc): + if rc != 0: + print('Return code !=0 -> {}'.format(rc)) + if getpass.getuser() == 'gitlab-runner': + os.system('chown -R gitlab-runner:gitlab-runner .') + sys.exit(rc) + + +if __name__ == '__main__': + check_docker_versions() + parser = argparse.ArgumentParser(description='Dockerized test launcher') + parser.add_argument('--test', dest='test', type=str, help='Test to be run') + parser.add_argument('--clean', dest='clean', action='store_true', + help='Clean containers images and networks') + parser.add_argument('--build', dest='build', action='store_true', + help='Rebuild containers') + parser.add_argument('--shell', dest='shell', action='store_true', + help='Run a shell on the bf container') + parser.add_argument('--comp', dest='comp', action='store_true', + help='Pass the rest of arguments to docker-compose') + parser.add_argument('--id', dest='bid', type=str, + 
help='Specify the id to use for build/shell/comp/clean') + args, rest = parser.parse_known_args() + + if os.getenv('CI_PIPELINE_ID') is not None: + args.bid = os.getenv('CI_PIPELINE_ID') + + if args.comp: + if args.bid is None: + print('Specify --id for the --comp parameter') + sys.exit(0) + services = Services(bid=args.bid) + if len(rest) == 0: + print('Usage: dctest --id --comp ') + sys.exit(1) + sys.exit(services.dc(rest, interactive=True)) + else: + if len(rest) > 0: + print('Unknown parameters: {}'.format(rest)) + sys.exit(1) + + if args.clean: + if args.bid is None: + print('Specify --id for the --clean parameter') + sys.exit(0) + services = Services(bid=args.bid) + rc = services.dc(['down', '--remove-orphans', '--rmi', 'all']) + cleanup(rc) + elif args.shell: + if not args.bid: + print('Specify --id for the shell parameter') + sys.exit(0) + services = Services(bid=args.bid) + rc = services.dc(['run', '--rm', '--service-ports', '--entrypoint', + '/bin/bash', 'boardfarm'], interactive=True) + cleanup(rc) + elif args.build: + if not args.bid: + print('Specify --id for the build parameter') + sys.exit(0) + services = Services(bid=args.bid) + rc = services.dc(['build'], interactive=True) + cleanup(rc) + else: + if args.bid: + services = Services(bid=args.bid) # With new build id + else: + services = Services() # With new build id + # rc = services.dc(['up', 'boardfarm']) + # rc = services.dc(['run', '--service-ports', '--entrypoint', + # '/bin/bash', 'boardfarm'], interactive=True) + rc = services.dc(['run', '--rm', '--service-ports', '--use-aliases', + 'boardfarm'], interactive=True) + cleanup(rc) diff --git a/tests/boardfarm_plugins/boardfarm_prplmesh/devices/prplmesh_base.py b/tests/boardfarm_plugins/boardfarm_prplmesh/devices/prplmesh_base.py index 700c5c2863..b8cd850d72 100644 --- a/tests/boardfarm_plugins/boardfarm_prplmesh/devices/prplmesh_base.py +++ b/tests/boardfarm_plugins/boardfarm_prplmesh/devices/prplmesh_base.py @@ -4,6 +4,7 @@ # See LICENSE file 
for more details. import pexpect +from typing import Dict from boardfarm.devices import linux @@ -16,14 +17,18 @@ class CommandError(Exception): class PrplMeshBase(linux.LinuxDevice): """PrplMesh abstract device.""" - def _run_shell_cmd(self, cmd: str = "", args: list = None, timeout: int = 30): + def _run_shell_cmd(self, cmd: str = "", args: list = None, timeout: int = 30, + env: Dict[str, str] = None): """Wrapper that executes command with specified args on host machine and logs output.""" - - res, exitstatus = pexpect.run(cmd, args=args, timeout=timeout, encoding="utf-8", - withexitstatus=1) + if env is not None: + res, exitstatus = pexpect.run(cmd, args=args, timeout=timeout, encoding="utf-8", + withexitstatus=1, env=env) + else: + res, exitstatus = pexpect.run(cmd, args=args, timeout=timeout, encoding="utf-8", + withexitstatus=1) entry = " ".join((cmd, " ".join(args))) if exitstatus != 0: - raise CommandError("Error executing {}:\n{}".format(entry, res)) + raise CommandError("Error executing {}".format(entry)) self.log_calls += entry self.log += "$ " + entry + "\r\n" + res @@ -34,25 +39,14 @@ def check_status(self): It is used by boardfarm to indicate that spawned device instance is ready for test and also after test - to insure that device still operational. """ - pass - - def close(self): - """Method required by boardfarm. - - Purpose is to close connection to device's consoles. - """ + # self._run_shell_cmd(os.path.join(rootdir, "tools", "docker", "test.sh"), + # ["-v", "-n", "controller"]) pass def isalive(self): """Method required by boardfarm. States that device is operational and its consoles are accessible. - """ - pass - def touch(self): - """Method required by boardfarm. - - Purpose is to keep consoles active, so they don't disconnect for long running activities. 
""" - pass + return True diff --git a/tests/boardfarm_plugins/boardfarm_prplmesh/devices/prplmesh_compose.py b/tests/boardfarm_plugins/boardfarm_prplmesh/devices/prplmesh_compose.py new file mode 100644 index 0000000000..b8e3ff821a --- /dev/null +++ b/tests/boardfarm_plugins/boardfarm_prplmesh/devices/prplmesh_compose.py @@ -0,0 +1,117 @@ +# SPDX-License-Identifier: BSD-2-Clause-Patent +# SPDX-FileCopyrightText: 2020 the prplMesh contributors (see AUTHORS.md) +# This code is subject to the terms of the BSD+Patent license. +# See LICENSE file for more details. + +import os +import time + +import boardfarm +from environment import ALEntityDocker, _get_bridge_interface +from .prplmesh_base import PrplMeshBase +from sniffer import Sniffer + +rootdir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../..')) + + +class PrplMeshCompose(PrplMeshBase): + """Dockerized prplMesh device.""" + + model = ("prplmesh_compose") + agent_entity = None + controller_entity = None + + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + + config = kwargs.get("config", kwargs) + + # List of device's consoles test can interact with + self.consoles = [self] + + # Getting unic ID to distinguish devices and network they belong to + self.unique_id = os.getenv("RUN_ID") + self.user_id = os.getenv("SUDO_USER", os.getenv("USER", "")) + + self.docker_name = "-".join((config.get("name", "prplmesh_compose"), self.unique_id)) + self.name = config.get("name", "prplmesh_compose") + print('config.get("name") {}'.format(config.get("name"))) + # self.name = config.get("name") + self.role = config.get("role", "agent") + self.cleanup_cmd = config.get("cleanup_cmd", None) + self.conn_cmd = config.get("conn_cmd", None) + self.delay = config.get("delay", 7) + self.docker_network = "boardfarm-ci_default" + + if self.role == "controller": + self._docker_compose(["-d", "--name", self.docker_name, "controller"], + "run", "start-controller-agent") + 
time.sleep(self.delay) + self.controller_entity = \ + ALEntityDocker(self.name, device=self, is_controller=True, compose=True) + else: + self._docker_compose(["-d", "--name", self.docker_name, "agent"], + "run", "start-agent") + time.sleep(self.delay) + self.agent_entity = ALEntityDocker(self.name, device=self, is_controller=False, compose=True) + + self.wired_sniffer = Sniffer(_get_bridge_interface(self.docker_network), + boardfarm.config.output_dir) + self.check_status() + + def _docker_compose(self, args, parameter=None, start=None): + print('_docker_compose: args {}'.format(args)) + yml_path = "tools/docker/boardfarm-ci/docker-compose.yml" + full_args = ["-f", os.path.join(rootdir, yml_path)] + if parameter == "run": + log_path = os.path.join(rootdir, "logs/{}".format(self.docker_name)) + if not os.path.exists(log_path): + os.mkdir(log_path) + + pipeline_id = os.getenv('CI_PIPELINE_ID') + if pipeline_id is None or pipeline_id == 'latest': + vol = '{}:/tmp/{}/beerocks/logs'.format(log_path, self.user_id) + else: + vol = '{}:/tmp/beerocks/logs'.format(log_path) + + full_args += ["run", "--rm", "-v", vol] + # full_args += ["--entrypoint", entrypoint + ' ' + start] + full_args += args + + print('_docker_compose: {}'.format(' '.join(full_args))) + # os.environ['CURRENT_UID'] = '1000:998' + if os.getenv('CI_PIPELINE_ID') is None: + print('Setting CI_PIPELINE_ID "latest"') + os.environ['CI_PIPELINE_ID'] = 'latest' + self._run_shell_cmd("/usr/local/bin/docker-compose", + full_args, env=os.environ) + else: + self._run_shell_cmd("/usr/local/bin/docker-compose", full_args) + + def __del__(self): + # self._docker_compose(["stop", self.name + self._run_shell_cmd("docker", ["stop", self.docker_name]) + self._run_shell_cmd("docker", ["container", "rm", "-f", self.docker_name]) + + def check_status(self): + """Method required by boardfarm. 
+ + It is used by boardfarm to indicate that spawned device instance is ready for test + and also after test - to insure that device still operational. + """ + # self._run_shell_cmd(os.path.join(rootdir, "tools", "docker", "test.sh"), + # ["-v", "-n", "controller"]) + pass + + def isalive(self): + """Method required by boardfarm. + + States that device is operational and its consoles are accessible. + + """ + return True + + def prprlmesh_status_check(self): + return True + diff --git a/tests/boardfarm_plugins/boardfarm_prplmesh/prplmesh_config_compose.json b/tests/boardfarm_plugins/boardfarm_prplmesh/prplmesh_config_compose.json new file mode 100644 index 0000000000..dbd3897cb8 --- /dev/null +++ b/tests/boardfarm_plugins/boardfarm_prplmesh/prplmesh_config_compose.json @@ -0,0 +1,15 @@ +{ + "prplmesh_docker": { + "name": "dockerized_device", + "board_type": "prplmesh_compose", + "role": "controller", + "conn_cmd": "", + "devices": [ + { + "name": "repeater1", + "type": "prplmesh_compose", + "conn_cmd": "" + } + ] + } +} diff --git a/tests/environment.py b/tests/environment.py index 7da67bb6b0..715c6cd5ff 100644 --- a/tests/environment.py +++ b/tests/environment.py @@ -191,6 +191,9 @@ def _docker_wait_for_log(container: str, programs: [str], regex: str, start_line timeout: float) -> bool: def logfilename(program): logfilename = os.path.join(rootdir, 'logs', container, 'beerocks_{}.log'.format(program)) + + print(' --- logfilename: {}'.format(logfilename)) + # WSL doesn't support symlinks on NTFS, so resolve the symlink manually if on_wsl: logfilename = os.path.join( @@ -249,9 +252,9 @@ class ALEntityDocker(ALEntity): The entity is defined from the name of the container, the rest is derived from that. ''' - # NOTE: name arg can be also extracted from the device class itself, but test_flows.py - # don't have it. 
We can remove this arg as soon, as we drop test_flows.py - def __init__(self, name: str, device: None = None, is_controller: bool = False): + def __init__(self, name: str, device: None = None, is_controller: bool = False, + compose: bool = False): + self.name = name self.bridge_name = 'br-lan' if device: @@ -268,16 +271,19 @@ def __init__(self, name: str, device: None = None, is_controller: bool = False): config_file.read()).group('port') # On WSL, connect to the locally exposed container port - if on_wsl: - published_port_output = subprocess.check_output( - ["docker", "port", name, ucc_port]).decode('utf-8').split(":") - device_ip = published_port_output[0] - ucc_port = int(published_port_output[1]) + if not compose: + if on_wsl: + published_port_output = subprocess.check_output( + ["docker", "port", name, ucc_port]).decode('utf-8').split(":") + device_ip = published_port_output[0] + ucc_port = int(published_port_output[1]) + else: + device_ip_output = self.command( + 'ip', '-f', 'inet', 'addr', 'show', self.bridge_name) + device_ip = re.search( + r'inet (?P[0-9.]+)', device_ip_output.decode('utf-8')).group('ip') else: - device_ip_output = self.command( - 'ip', '-f', 'inet', 'addr', 'show', self.bridge_name) - device_ip = re.search(r'inet (?P[0-9.]+)', - device_ip_output.decode('utf-8')).group('ip') + device_ip = self.device.docker_name ucc_socket = UCCSocket(device_ip, ucc_port) mac = ucc_socket.dev_get_parameter('ALid') diff --git a/tests/run_bf_compose.sh b/tests/run_bf_compose.sh new file mode 100755 index 0000000000..df9b144dcc --- /dev/null +++ b/tests/run_bf_compose.sh @@ -0,0 +1,18 @@ +#!/bin/bash +# SPDX-License-Identifier: BSD-2-Clause-Patent +# SPDX-FileCopyrightText: 2020 the prplMesh contributors (see AUTHORS.md) +# This code is subject to the terms of the BSD+Patent license. +# See LICENSE file for more details. 
+ +scriptdir=$(dirname "$(readlink -f "${0}")") +bf_plugins_dir=${scriptdir}/boardfarm_plugins + +if [ -n "${PYTHONPATH}" ]; then + PYTHONPATH="${bf_plugins_dir}:${scriptdir}:${PYTHONPATH}" +else + PYTHONPATH="${bf_plugins_dir}:${scriptdir}" +fi +echo "$PYTHONPATH" +export PYTHONPATH +export BFT_DEBUG=y +exec bft -c "${bf_plugins_dir}"/boardfarm_prplmesh/prplmesh_config_compose.json -n prplmesh_docker -x test_flows diff --git a/tools/docker/boardfarm-ci/Dockerfile b/tools/docker/boardfarm-ci/Dockerfile index 67fd3ed294..a5d2979de6 100644 --- a/tools/docker/boardfarm-ci/Dockerfile +++ b/tools/docker/boardfarm-ci/Dockerfile @@ -1,41 +1,29 @@ FROM python:3.8-slim-buster +# FROM tiangolo/docker-with-compose RUN apt-get update \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - apt-transport-https \ - ca-certificates curl \ - curl \ - gcc \ - git \ - gnupg \ - gnupg-agent \ - libsnmp-dev \ - netcat \ - software-properties-common \ - wireshark-common \ - && rm -rf /var/lib/apt/lists/* +&& apt-get install gcc libsnmp-dev -y \ +&& apt-get clean COPY requirements.txt /app/requirements.txt WORKDIR app RUN pip3 install -r requirements.txt -# TODO: what needs this? 
-#RUN pip3 install jsonschema distro +RUN apt-get install apt-transport-https ca-certificates curl gnupg-agent software-properties-common -y +RUN apt-get install curl -y RUN curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/debian $(lsb_release -cs) stable" RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - docker-ce \ - docker-ce-cli \ - containerd.io \ - && rm -rf /var/lib/apt/lists/* - +&& apt-get install docker-ce docker-ce-cli containerd.io -y RUN curl -L "https://github.com/docker/compose/releases/download/1.26.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose RUN chmod 755 /usr/local/bin/docker-compose +RUN DEBIAN_FRONTEND=noninteractive apt-get install wireshark-common -y +# VOLUME "/home/pablo/assia/prpl/git" +# ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] RUN git clone https://github.com/mattsm/boardfarm.git \ && cd boardfarm \ && git checkout 100521fde1fb67536682cafecc2f91a6e2e8a6f8 \ diff --git a/tools/docker/boardfarm-ci/docker-compose.yml b/tools/docker/boardfarm-ci/docker-compose.yml new file mode 100644 index 0000000000..1f50b9cc75 --- /dev/null +++ b/tools/docker/boardfarm-ci/docker-compose.yml @@ -0,0 +1,60 @@ +version: '3' +services: + # Controller and agent are run from inside the boardfarm container + controller: + image: registry.gitlab.com/prpl-foundation/prplmesh/prplmesh-runner:${CI_PIPELINE_ID} + privileged: true # For the creation of the bridge to work + container_name: controller + environment: + - USER + - INSTALL_DIR=${FINAL_ROOT_DIR}/build/install + - ROOT_DIR=${FINAL_ROOT_DIR} + - CURRENT_ID + - CI_PIPELINE_ID + expose: + - "5000" + - "8002" + volumes: + - "$ROOT_DIR:${FINAL_ROOT_DIR}" + entrypoint: ["/root/entrypoint.sh", "/usr/bin/start-controller-agent"] + + agent: + image: registry.gitlab.com/prpl-foundation/prplmesh/prplmesh-runner:${CI_PIPELINE_ID} + privileged: true + container_name: 
agent + environment: + - USER + - INSTALL_DIR=${FINAL_ROOT_DIR}/build/install + - ROOT_DIR=${FINAL_ROOT_DIR} + # /builds/prpl-foundation/prplMesh + - CURRENT_ID + - CI_PIPELINE_ID + expose: + - "5000" + - "8002" + volumes: + - "$ROOT_DIR:${FINAL_ROOT_DIR}" + entrypoint: ["/root/entrypoint.sh", "/usr/bin/start-agent"] + + # Boardfarm image is launched from dctest.py, TARGET_DIR refers to the + # path inside the controller, will be referred as $ROOT_DIR inside + boardfarm: + build: . + privileged: true + container_name: boardfarm + environment: + - USER + - ROOT_DIR + - CURRENT_ID + - RUN_ID + - CI_PIPELINE_ID + - FINAL_ROOT_DIR + ports: + - "5000:5000" + volumes: + - "$ROOT_DIR:$ROOT_DIR" + - "/var/run/docker:/var/run/docker" + - "/var/run/docker.sock:/var/run/docker.sock" + user: ${CURRENT_UID} + working_dir: $ROOT_DIR/tests + entrypoint: ["bash", "run_bf_compose.sh"]