mirror of
https://github.com/docker/compose.git
synced 2026-02-12 19:49:22 +08:00
Compare commits
110 Commits
Commit SHAs (author, message, and date columns not captured in this view):
0773730525, c81046aac0, 84c816e887, 1607674374, 683fac0dbf, ddee2958ec, 1ab1cd202b, 1752927dcd,
c2ddd71e5f, 72bbd9c3a6, 3c9ee678e7, 92fefbc9cc, e496c64127, 84afa518e8, 0f6a55e036, 8ce5e235e4,
c1dddbe608, 981b0cd641, 5ec8af582c, f5342b600c, 4a26d95de4, 5b7851f55b, eaa22df151, 551f680751,
3e071ec8d9, 858ff26731, 2a7c06a050, d0b7bc3110, fe4f16e448, 1da4301650, c594cb3fc3, 89ad637d50,
6ca2aed7ec, fc744a0cc9, 245ede1d75, 72f7b086d7, 2f48b6f5e9, e6fcde422c, 75b2d7905f, efa5969086,
2a4aca7f54, 9c8f5a5705, 62bbc5cfe2, 66375c2871, c760600a65, 4daad056c4, 74c09cac66, 36e470d640,
d28d717884, 42c2cfd7a6, 5b983ac653, 93425218eb, 49d0ee2de5, a92c6d7e17, b8800db52e, ccabfde353,
3297bb50bb, e688006444, e4a83c15ff, 824b9f138e, 8654eb2ea3, 9407ee65e5, 66c6d2757a, 17daa93edf,
9795e39d0c, 393abc5b33, d0866c8c18, 546133c977, 9a2f94713e, b88f635514, 31002aeacd, 6209baccf3,
28f8b8549d, 76a19ec8c5, bba8cd0322, f2ec6a2176, 7f7f1607de, 4990a7f935, 72f8551466, 487779960c,
99b6776fd2, 4e382b9c28, 862107a32a, 6a3af5b707, 205d520805, 8f2bb66e73, af4eaae006, 1c547b270e,
1c499bb2eb, 4fa72a066a, b9249168bd, e36ac32120, 5be6bde76c, c380604a9e, 369eb3220a, 2e273c5029,
21e196f20a, b9d86f4b51, 1b5278f977, affb0d504d, 8034bc3bd6, 89fcfc5499, 40a4ec1624, 6c55ef6a5d,
3f46dc1d76, f2bc89a876, fee4756e33, 030b347673, 4139d701f3, 3ebfa4b089
CHANGELOG.md (91 lines changed)
@@ -1,6 +1,96 @@
Change log
==========

1.29.0 (2021-04-06)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/56?closed=1)

### Features

- Add profile filter to `docker-compose config`

- Add a `depends_on` condition to wait for successful service completion

### Miscellaneous

- Add image scan message on build

- Update warning message for `--no-ansi` to mention `--ansi never` as alternative

- Bump docker-py to 5.0.0

- Bump PyYAML to 5.4.1

- Bump python-dotenv to 0.17.0

1.28.6 (2021-03-23)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/57?closed=1)

### Bugs

- Make `--env-file` relative to the current working directory and error out for invalid paths. Environment file paths set with `--env-file` are relative to the current working directory while the default `.env` file is located in the project directory which by default is the base directory of the Compose file.

- Fix missing service property `storage_opt` by updating the compose schema

- Fix build `extra_hosts` list format

- Remove extra error message on `exec`

### Miscellaneous

- Add `compose.yml` and `compose.yaml` to default filename list

1.28.5 (2021-02-25)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/55?closed=1)

### Bugs

- Fix OpenSSL version mismatch error when shelling out to the ssh client (via bump to docker-py 4.4.4 which contains the fix)

- Add missing build flags to the native builder: `platform`, `isolation` and `extra_hosts`

- Remove info message on native build

- Avoid fetching logs when service logging driver is set to 'none'

1.28.4 (2021-02-18)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/54?closed=1)

### Bugs

- Fix SSH port parsing by bumping docker-py to 4.4.3

### Miscellaneous

- Bump Python to 3.7.10

1.28.3 (2021-02-17)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/53?closed=1)

### Bugs

- Fix SSH hostname parsing when it contains leading s/h, and remove the quiet option that was hiding the error (via docker-py bump to 4.4.2)

- Fix key error for '--no-log-prefix' option

- Fix incorrect CLI environment variable name for service profiles: `COMPOSE_PROFILES` instead of `COMPOSE_PROFILE`

- Fix fish completion

### Miscellaneous

- Bump cryptography to 3.3.2

- Remove log driver filter

1.28.2 (2021-01-26)
-------------------

@@ -81,6 +171,7 @@ Change log

- Updates of READMEs


1.27.4 (2020-09-24)
-------------------
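To make the headline 1.29.0 feature concrete: the new `depends_on` condition can be expressed in a Compose service config just as the integration tests further down in this changeset do. A minimal sketch in the same Python-dict form the tests use; the `migrations`/`web` service names and images are illustrative, not part of this changeset:

# Minimal sketch of the new depends_on condition added in 1.29.0.
config_dict = {
    'version': '2.1',
    'services': {
        'migrations': {
            'image': 'busybox',
            'command': 'true',   # one-off job that must exit with code 0
        },
        'web': {
            'image': 'busybox',
            'command': 'top',
            'depends_on': {
                # web only starts once migrations has exited successfully
                'migrations': {'condition': 'service_completed_successfully'},
            },
        },
    },
}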
@@ -1,5 +1,5 @@
ARG DOCKER_VERSION=19.03
- ARG PYTHON_VERSION=3.7.9
+ ARG PYTHON_VERSION=3.7.10

ARG BUILD_ALPINE_VERSION=3.12
ARG BUILD_CENTOS_VERSION=7
Jenkinsfile (vendored, 10 lines changed)
@@ -23,7 +23,7 @@ pipeline {
parallel {
stage('alpine') {
agent {
- label 'ubuntu && amd64 && !zfs'
+ label 'ubuntu-2004 && amd64 && !zfs && cgroup1'
}
steps {
buildImage('alpine')
@@ -31,7 +31,7 @@ pipeline {
}
stage('debian') {
agent {
- label 'ubuntu && amd64 && !zfs'
+ label 'ubuntu-2004 && amd64 && !zfs && cgroup1'
}
steps {
buildImage('debian')
@@ -62,7 +62,7 @@ pipeline {

def buildImage(baseImage) {
def scmvar = checkout(scm)
- def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+ def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
image = docker.image(imageName)

withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
@@ -89,7 +89,7 @@ def runTests(dockerVersion, pythonVersion, baseImage) {
stage("python=${pythonVersion} docker=${dockerVersion} ${baseImage}") {
node("ubuntu && amd64 && !zfs") {
def scmvar = checkout(scm)
- def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+ def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim()
echo "Using local system's storage driver: ${storageDriver}"
withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
@@ -99,6 +99,8 @@ def runTests(dockerVersion, pythonVersion, baseImage) {
--privileged \\
--volume="\$(pwd)/.git:/code/.git" \\
--volume="/var/run/docker.sock:/var/run/docker.sock" \\
+ --volume="\${DOCKER_CONFIG}/config.json:/root/.docker/config.json" \\
+ -e "DOCKER_TLS_CERTDIR=" \\
-e "TAG=${imageName}" \\
-e "STORAGE_DRIVER=${storageDriver}" \\
-e "DOCKER_VERSIONS=${dockerVersion}" \\
@@ -23,7 +23,7 @@ pipeline {
parallel {
stage('alpine') {
agent {
- label 'linux && docker && ubuntu-2004 && cgroup1'
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
}
steps {
buildImage('alpine')
@@ -31,7 +31,7 @@ pipeline {
}
stage('debian') {
agent {
- label 'linux && docker && ubuntu-2004 && cgroup1'
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
}
steps {
buildImage('debian')
@@ -41,7 +41,7 @@ pipeline {
}
stage('Test') {
agent {
- label 'linux && docker && ubuntu-2004 && cgroup1'
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
}
steps {
// TODO use declarative 1.5.0 `matrix` once available on CI
@@ -61,7 +61,7 @@ pipeline {
}
stage('Generate Changelog') {
agent {
- label 'linux && docker && ubuntu-2004 && cgroup1'
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
}
steps {
checkout scm
@@ -98,7 +98,7 @@ pipeline {
}
stage('linux binary') {
agent {
- label 'linux && docker && ubuntu-2004 && cgroup1'
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
}
steps {
checkout scm
@@ -134,7 +134,7 @@ pipeline {
}
stage('alpine image') {
agent {
- label 'linux && docker && ubuntu-2004 && cgroup1'
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
}
steps {
buildRuntimeImage('alpine')
@@ -142,7 +142,7 @@ pipeline {
}
stage('debian image') {
agent {
- label 'linux && docker && ubuntu-2004 && cgroup1'
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
}
steps {
buildRuntimeImage('debian')
@@ -157,7 +157,7 @@ pipeline {
parallel {
stage('Pushing images') {
agent {
- label 'linux && docker && ubuntu-2004 && cgroup1'
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
}
steps {
pushRuntimeImage('alpine')
@@ -166,7 +166,7 @@ pipeline {
}
stage('Creating Github Release') {
agent {
- label 'linux && docker && ubuntu-2004 && cgroup1'
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
}
environment {
GITHUB_TOKEN = credentials('github-release-token')
@@ -198,7 +198,7 @@ pipeline {
}
stage('Publishing Python packages') {
agent {
- label 'linux && docker && ubuntu-2004 && cgroup1'
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
}
environment {
PYPIRC = credentials('pypirc-docker-dsg-cibot')
@@ -222,7 +222,7 @@ pipeline {

def buildImage(baseImage) {
def scmvar = checkout(scm)
- def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+ def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
image = docker.image(imageName)

withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
@@ -247,9 +247,9 @@ def buildImage(baseImage) {
def runTests(dockerVersion, pythonVersion, baseImage) {
return {
stage("python=${pythonVersion} docker=${dockerVersion} ${baseImage}") {
- node("linux && docker && ubuntu-2004 && cgroup1") {
+ node("linux && docker && ubuntu-2004 && amd64 && cgroup1") {
def scmvar = checkout(scm)
- def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+ def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim()
echo "Using local system's storage driver: ${storageDriver}"
withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
@@ -259,6 +259,8 @@ def runTests(dockerVersion, pythonVersion, baseImage) {
--privileged \\
--volume="\$(pwd)/.git:/code/.git" \\
--volume="/var/run/docker.sock:/var/run/docker.sock" \\
+ --volume="\${DOCKER_CONFIG}/config.json:/root/.docker/config.json" \\
+ -e "DOCKER_TLS_CERTDIR=" \\
-e "TAG=${imageName}" \\
-e "STORAGE_DRIVER=${storageDriver}" \\
-e "DOCKER_VERSIONS=${dockerVersion}" \\
@@ -1 +1 @@
- __version__ = '1.28.2'
+ __version__ = '1.29.0'
@@ -129,7 +129,7 @@ def get_profiles_from_options(options, environment):
if profile_option:
return profile_option

- profiles = environment.get('COMPOSE_PROFILE')
+ profiles = environment.get('COMPOSE_PROFILES')
if profiles:
return profiles.split(',')
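A rough sketch of the resolution order implied by the hunk above: an explicit `--profile` option wins, otherwise the `COMPOSE_PROFILES` variable is split on commas. This is a simplified re-implementation for illustration, not the actual Compose function:

import os

def resolve_profiles(profile_option, environment=None):
    # --profile values passed on the command line take precedence
    if profile_option:
        return profile_option
    env = environment if environment is not None else os.environ
    profiles = env.get('COMPOSE_PROFILES')  # trailing S, the name fixed in 1.28.3
    if profiles:
        return profiles.split(',')
    return []

# e.g. with COMPOSE_PROFILES=frontend,debug -> ['frontend', 'debug']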
@@ -158,10 +158,8 @@ class QueueItem(namedtuple('_QueueItem', 'item is_stop exc')):


def tail_container_logs(container, presenter, queue, log_args):
- generator = get_log_generator(container)

try:
- for item in generator(container, log_args):
+ for item in build_log_generator(container, log_args):
queue.put(QueueItem.new(presenter.present(container, item)))
except Exception as e:
queue.put(QueueItem.exception(e))
@@ -171,20 +169,6 @@ def tail_container_logs(container, presenter, queue, log_args):
queue.put(QueueItem.stop(container.name))


- def get_log_generator(container):
- if container.has_api_logs:
- return build_log_generator
- return build_no_log_generator


- def build_no_log_generator(container, log_args):
- """Return a generator that prints a warning about logs and waits for
- container to exit.
- """
- yield "WARNING: no logs are available with the '{}' log driver\n".format(
- container.log_driver)


def build_log_generator(container, log_args):
# if the container doesn't have a log_stream we need to attach to container
# before log printer starts running
@@ -23,6 +23,7 @@ from ..config import resolve_build_args
from ..config.environment import Environment
from ..config.serialize import serialize_config
from ..config.types import VolumeSpec
+ from ..const import IS_LINUX_PLATFORM
from ..const import IS_WINDOWS_PLATFORM
from ..errors import StreamParseError
from ..metrics.decorator import metrics
@@ -78,6 +79,8 @@ def main(): # noqa: C901
try:
command_func = dispatch()
command_func()
+ if not IS_LINUX_PLATFORM and command == 'help':
+ print("\nDocker Compose is now in the Docker CLI, try `docker compose` help")
except (KeyboardInterrupt, signals.ShutdownException):
exit_with_metrics(command, "Aborting.", status=Status.FAILURE)
except (UserError, NoSuchService, ConfigurationError,
@@ -98,6 +101,8 @@ def main(): # noqa: C901
e.service.name), status=Status.FAILURE)
except NoSuchCommand as e:
commands = "\n".join(parse_doc_section("commands:", getdoc(e.supercommand)))
+ if not IS_LINUX_PLATFORM:
+ commands += "\n\nDocker Compose is now in the Docker CLI, try `docker compose`"
exit_with_metrics(e.command, "No such command: {}\n\n{}".format(e.command, commands))
except (errors.ConnectionError, StreamParseError):
exit_with_metrics(command, status=Status.FAILURE)
@@ -116,6 +121,10 @@ def main(): # noqa: C901
code = 0
if isinstance(e.code, int):
code = e.code

+ if not IS_LINUX_PLATFORM and not command:
+ msg += "\n\nDocker Compose is now in the Docker CLI, try `docker compose`"

exit_with_metrics(command, log_msg=msg, status=status,
exit_code=code)

@@ -128,7 +137,7 @@ def get_filtered_args(args):


def exit_with_metrics(command, log_msg=None, status=Status.SUCCESS, exit_code=1):
- if log_msg:
+ if log_msg and command != 'exec':
if not exit_code:
log.info(log_msg)
else:
@@ -162,7 +171,8 @@ def dispatch():
if options.get("--no-ansi"):
if options.get("--ansi"):
raise UserError("--no-ansi and --ansi cannot be combined.")
- log.warning('--no-ansi option is deprecated and will be removed in future versions.')
+ log.warning('--no-ansi option is deprecated and will be removed in future versions. '
+ 'Use `--ansi never` instead.')
ansi_mode = AnsiMode.NEVER

setup_console_handler(console_handler,
@@ -381,6 +391,7 @@ class TopLevelCommand:
--no-interpolate Don't interpolate environment variables.
-q, --quiet Only validate the configuration, don't print
anything.
+ --profiles Print the profile names, one per line.
--services Print the service names, one per line.
--volumes Print the volume names, one per line.
--hash="*" Print the service config hash, one per line.
@@ -400,6 +411,15 @@ class TopLevelCommand:
if options['--quiet']:
return

+ if options['--profiles']:
+ profiles = set()
+ for service in compose_config.services:
+ if 'profiles' in service:
+ for profile in service['profiles']:
+ profiles.add(profile)
+ print('\n'.join(sorted(profiles)))
+ return

if options['--services']:
print('\n'.join(service['name'] for service in compose_config.services))
return
@@ -1121,7 +1141,10 @@ class TopLevelCommand:
detached = options.get('--detach')
no_start = options.get('--no-start')
attach_dependencies = options.get('--attach-dependencies')
- keep_prefix = not options['--no-log-prefix']
+ keep_prefix = not options.get('--no-log-prefix')

+ if not IS_LINUX_PLATFORM:
+ print('Docker Compose is now in the Docker CLI, try `docker compose up`\n')

if detached and (cascade_stop or exit_value_from or attach_dependencies):
raise UserError(
@@ -1482,7 +1505,7 @@ def log_printer_from_project(
keep_prefix=True,
):
return LogPrinter(
- containers,
+ [c for c in containers if c.log_driver not in (None, 'none')],
build_log_presenters(project.service_names, monochrome, keep_prefix),
event_stream or project.events(),
cascade_stop=cascade_stop,
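The new `docker-compose config --profiles` flag shown above simply collects the profile names declared across services and prints them sorted. A simplified standalone sketch of that collection step (not the actual TopLevelCommand method):

def list_profiles(services):
    # services: list of service dicts as produced by the config loader
    profiles = set()
    for service in services:
        for profile in service.get('profiles', []):
            profiles.add(profile)
    return sorted(profiles)

# e.g. list_profiles([{'name': 'web', 'profiles': ['frontend', 'gui']},
#                     {'name': 'db'}]) -> ['frontend', 'gui']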
compose/cli/scan_suggest.py (new file, 85 lines)
@@ -0,0 +1,85 @@
import json
import logging
import os
from distutils.util import strtobool

from docker.constants import IS_WINDOWS_PLATFORM
from docker.utils.config import find_config_file


SCAN_BINARY_NAME = "docker-scan" + (".exe" if IS_WINDOWS_PLATFORM else "")

log = logging.getLogger(__name__)


class ScanConfig:
def __init__(self, d):
self.optin = False
vars(self).update(d)


def display_scan_suggest_msg():
if environment_scan_avoid_suggest() or \
scan_available() is None or \
scan_already_invoked():
return
log.info("Use 'docker scan' to run Snyk tests against images to find vulnerabilities "
"and learn how to fix them")


def environment_scan_avoid_suggest():
return os.getenv('DOCKER_SCAN_SUGGEST', 'true').lower() == 'false'


def scan_already_invoked():
docker_folder = docker_config_folder()
if docker_folder is None:
return False

scan_config_file = os.path.join(docker_folder, 'scan', "config.json")
if not os.path.exists(scan_config_file):
return False

try:
data = ''
with open(scan_config_file) as f:
data = f.read()
scan_config = json.loads(data, object_hook=ScanConfig)
return scan_config.optin if isinstance(scan_config.optin, bool) else strtobool(scan_config.optin)
except Exception: # pylint:disable=broad-except
return True


def scan_available():
docker_folder = docker_config_folder()
if docker_folder:
home_scan_bin = os.path.join(docker_folder, 'cli-plugins', SCAN_BINARY_NAME)
if os.path.isfile(home_scan_bin) or os.path.islink(home_scan_bin):
return home_scan_bin

if IS_WINDOWS_PLATFORM:
program_data_scan_bin = os.path.join('C:\\', 'ProgramData', 'Docker', 'cli-plugins',
SCAN_BINARY_NAME)
if os.path.isfile(program_data_scan_bin) or os.path.islink(program_data_scan_bin):
return program_data_scan_bin
else:
lib_scan_bin = os.path.join('/usr', 'local', 'lib', 'docker', 'cli-plugins', SCAN_BINARY_NAME)
if os.path.isfile(lib_scan_bin) or os.path.islink(lib_scan_bin):
return lib_scan_bin
lib_exec_scan_bin = os.path.join('/usr', 'local', 'libexec', 'docker', 'cli-plugins',
SCAN_BINARY_NAME)
if os.path.isfile(lib_exec_scan_bin) or os.path.islink(lib_exec_scan_bin):
return lib_exec_scan_bin
lib_scan_bin = os.path.join('/usr', 'lib', 'docker', 'cli-plugins', SCAN_BINARY_NAME)
if os.path.isfile(lib_scan_bin) or os.path.islink(lib_scan_bin):
return lib_scan_bin
lib_exec_scan_bin = os.path.join('/usr', 'libexec', 'docker', 'cli-plugins', SCAN_BINARY_NAME)
if os.path.isfile(lib_exec_scan_bin) or os.path.islink(lib_exec_scan_bin):
return lib_exec_scan_bin
return None


def docker_config_folder():
docker_config_file = find_config_file()
return None if not docker_config_file \
else os.path.dirname(os.path.abspath(docker_config_file))
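Per the module above, the new image-scan hint is skipped when the DOCKER_SCAN_SUGGEST environment variable is set to 'false', when the docker-scan plugin is not installed, or when scan has already been opted in. A small usage sketch, assuming the module is importable as compose.cli.scan_suggest (its path in this changeset):

import logging
import os

from compose.cli.scan_suggest import display_scan_suggest_msg

logging.basicConfig(level=logging.INFO)

# Opt out of the suggestion entirely, e.g. in CI environments.
os.environ['DOCKER_SCAN_SUGGEST'] = 'false'

# With the variable set to 'false' this is a no-op; otherwise it logs the
# "Use 'docker scan' ..." hint when the docker-scan plugin is found and
# scan has not been opted in yet.
display_scan_suggest_msg()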
@@ -188,7 +188,7 @@
"properties": {
"condition": {
"type": "string",
- "enum": ["service_started", "service_healthy"]
+ "enum": ["service_started", "service_healthy", "service_completed_successfully"]
}
},
"required": ["condition"]
@@ -335,7 +335,6 @@
"read_only": {"type": "boolean"},
"restart": {"type": "string"},
"runtime": {
"deprecated": true,
"type": "string"
},
"scale": {
@@ -367,6 +366,7 @@
"stdin_open": {"type": "boolean"},
"stop_grace_period": {"type": "string", "format": "duration"},
"stop_signal": {"type": "string"},
+ "storage_opt": {"type": "object"},
"tmpfs": {"$ref": "#/definitions/string_or_list"},
"tty": {"type": "boolean"},
"ulimits": {
@@ -10,7 +10,11 @@ from operator import attrgetter
from operator import itemgetter

import yaml
- from cached_property import cached_property

+ try:
+ from functools import cached_property
+ except ImportError:
+ from cached_property import cached_property

from . import types
from ..const import COMPOSE_SPEC as VERSION
@@ -149,9 +153,14 @@ DOCKER_VALID_URL_PREFIXES = (
SUPPORTED_FILENAMES = [
'docker-compose.yml',
'docker-compose.yaml',
+ 'compose.yml',
+ 'compose.yaml',
]

- DEFAULT_OVERRIDE_FILENAMES = ('docker-compose.override.yml', 'docker-compose.override.yaml')
+ DEFAULT_OVERRIDE_FILENAMES = ('docker-compose.override.yml',
+ 'docker-compose.override.yaml',
+ 'compose.override.yml',
+ 'compose.override.yaml')


log = logging.getLogger(__name__)
@@ -304,7 +313,16 @@ def find(base_dir, filenames, environment, override_dir=None):
if filenames:
filenames = [os.path.join(base_dir, f) for f in filenames]
else:
# search for compose files in the base dir and its parents
filenames = get_default_config_files(base_dir)
if not filenames and not override_dir:
# none found in base_dir and no override_dir defined
raise ComposeFileNotFound(SUPPORTED_FILENAMES)
if not filenames:
# search for compose files in the project directory and its parents
filenames = get_default_config_files(override_dir)
if not filenames:
raise ComposeFileNotFound(SUPPORTED_FILENAMES)

log.debug("Using configuration files: {}".format(",".join(filenames)))
return ConfigDetails(
@@ -335,7 +353,7 @@ def get_default_config_files(base_dir):
(candidates, path) = find_candidates_in_parent_dirs(SUPPORTED_FILENAMES, base_dir)

if not candidates:
- raise ComposeFileNotFound(SUPPORTED_FILENAMES)
+ return None

winner = candidates[0]

@@ -556,8 +574,7 @@ def process_config_section(config_file, config, section, environment, interpolat
config_file.version,
config,
section,
- environment
- )
+ environment)
else:
return config
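Two things change in the lookup above: `compose.yml` / `compose.yaml` join the default filename list, and `get_default_config_files()` now returns None instead of raising, so `find()` can retry against the override directory. A simplified sketch of the lookup idea (the real function also walks parent directories, which is omitted here):

import os

SUPPORTED_FILENAMES = [
    'docker-compose.yml',
    'docker-compose.yaml',
    'compose.yml',
    'compose.yaml',
]

def first_config_file(directory):
    # Simplified stand-in: return the first supported filename that exists
    # in `directory`, or None if none match (mirroring the new return None).
    for name in SUPPORTED_FILENAMES:
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return None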
@@ -54,9 +54,10 @@ class Environment(dict):
if base_dir is None:
return result
if env_file:
- env_file_path = os.path.join(base_dir, env_file)
- else:
- env_file_path = os.path.join(base_dir, '.env')
+ env_file_path = os.path.join(os.getcwd(), env_file)
+ return cls(env_vars_from_file(env_file_path))

+ env_file_path = os.path.join(base_dir, '.env')
try:
return cls(env_vars_from_file(env_file_path))
except EnvFileNotFound:
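This is the fix described in the 1.28.6 changelog: a path given to `--env-file` is now resolved against the directory where docker-compose is invoked, while the default `.env` keeps being read from the project directory. A minimal sketch of that resolution rule, not the Environment class itself:

import os

def resolve_env_file(project_dir, env_file=None):
    if env_file:
        # --env-file is taken relative to the current working directory
        return os.path.join(os.getcwd(), env_file)
    # the default .env still lives next to the Compose file (project dir)
    return os.path.join(project_dir, '.env')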
@@ -5,6 +5,7 @@ from .version import ComposeVersion
DEFAULT_TIMEOUT = 10
HTTP_TIMEOUT = 60
IS_WINDOWS_PLATFORM = (sys.platform == "win32")
+ IS_LINUX_PLATFORM = (sys.platform == "linux")
LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number'
LABEL_ONE_OFF = 'com.docker.compose.oneoff'
LABEL_PROJECT = 'com.docker.compose.project'
@@ -186,11 +186,6 @@ class Container:
def log_driver(self):
return self.get('HostConfig.LogConfig.Type')

- @property
- def has_api_logs(self):
- log_type = self.log_driver
- return not log_type or log_type in ('json-file', 'journald', 'local')

@property
def human_readable_health_status(self):
""" Generate UP status string with up time and health
@@ -204,11 +199,7 @@ class Container:
return status_string

def attach_log_stream(self):
- """A log stream can only be attached if the container uses a
- json-file, journald or local log driver.
- """
- if self.has_api_logs:
- self.log_stream = self.attach(stdout=True, stderr=True, stream=True)
+ self.log_stream = self.attach(stdout=True, stderr=True, stream=True)

def get(self, key):
"""Return a value from the container or None if the value is not set.
@@ -27,3 +27,8 @@ class NoHealthCheckConfigured(HealthCheckException):
service_name
)
)


+ class CompletedUnsuccessfully(Exception):
+ def __init__(self, container_id, exit_code):
+ self.msg = 'Container "{}" exited with code {}.'.format(container_id, exit_code)
@@ -16,6 +16,7 @@ from compose.cli.colors import green
from compose.cli.colors import red
from compose.cli.signals import ShutdownException
from compose.const import PARALLEL_LIMIT
+ from compose.errors import CompletedUnsuccessfully
from compose.errors import HealthCheckFailed
from compose.errors import NoHealthCheckConfigured
from compose.errors import OperationFailedError
@@ -61,7 +62,8 @@ def parallel_execute_watch(events, writer, errors, results, msg, get_name, fail_
elif isinstance(exception, APIError):
errors[get_name(obj)] = exception.explanation
writer.write(msg, get_name(obj), 'error', red)
- elif isinstance(exception, (OperationFailedError, HealthCheckFailed, NoHealthCheckConfigured)):
+ elif isinstance(exception, (OperationFailedError, HealthCheckFailed, NoHealthCheckConfigured,
+ CompletedUnsuccessfully)):
errors[get_name(obj)] = exception.msg
writer.write(msg, get_name(obj), 'error', red)
elif isinstance(exception, UpstreamError):
@@ -241,6 +243,12 @@ def feed_queue(objects, func, get_deps, results, state, limiter):
'not processing'.format(obj)
)
results.put((obj, None, e))
+ except CompletedUnsuccessfully as e:
+ log.debug(
+ 'Service(s) upstream of {} did not completed successfully - '
+ 'not processing'.format(obj)
+ )
+ results.put((obj, None, e))

if state.is_done():
results.put(STOP)
@@ -13,6 +13,7 @@ from docker.utils import version_lt

from . import parallel
from .cli.errors import UserError
+ from .cli.scan_suggest import display_scan_suggest_msg
from .config import ConfigurationError
from .config.config import V1
from .config.sort_services import get_container_name_from_network_mode
@@ -490,8 +491,6 @@ class Project:
log.info('%s uses an image, skipping' % service.name)

if cli:
- log.info("Building with native build. Learn about native build in Compose here: "
- "https://docs.docker.com/go/compose-native-build/")
if parallel_build:
log.warning("Flag '--parallel' is ignored when building with "
"COMPOSE_DOCKER_CLI_BUILD=1")
@@ -520,6 +519,9 @@ class Project:
for service in services:
build_service(service)

+ if services:
+ display_scan_suggest_msg()

def create(
self,
service_names=None,
@@ -651,10 +653,6 @@ class Project:
override_options=None,
):

- if cli:
- log.info("Building with native build. Learn about native build in Compose here: "
- "https://docs.docker.com/go/compose-native-build/")

self.initialize()
if not ignore_orphans:
self.find_orphan_containers(remove_orphans)
@@ -666,8 +664,15 @@ class Project:
service_names,
include_deps=start_deps)

+ must_build = False
for svc in services:
+ if svc.must_build(do_build=do_build):
+ must_build = True
svc.ensure_image_exists(do_build=do_build, silent=silent, cli=cli)

+ if must_build:
+ display_scan_suggest_msg()

plans = self._get_convergence_plans(
services,
strategy,
@@ -45,6 +45,7 @@ from .const import LABEL_VERSION
from .const import NANOCPUS_SCALE
from .const import WINDOWS_LONGPATH_PREFIX
from .container import Container
+ from .errors import CompletedUnsuccessfully
from .errors import HealthCheckFailed
from .errors import NoHealthCheckConfigured
from .errors import OperationFailedError
@@ -112,6 +113,7 @@ HOST_CONFIG_KEYS = [

CONDITION_STARTED = 'service_started'
CONDITION_HEALTHY = 'service_healthy'
+ CONDITION_COMPLETED_SUCCESSFULLY = 'service_completed_successfully'


class BuildError(Exception):
@@ -366,6 +368,24 @@ class Service:
"rebuild this image you must use `docker-compose build` or "
"`docker-compose up --build`.".format(self.name))

+ def must_build(self, do_build=BuildAction.none):
+ if self.can_be_built() and do_build == BuildAction.force:
+ return True

+ try:
+ self.image()
+ return False
+ except NoSuchImageError:
+ pass

+ if not self.can_be_built():
+ return False

+ if do_build == BuildAction.skip:
+ return False

+ return True

def get_image_registry_data(self):
try:
return self.client.inspect_distribution(self.image_name)
@@ -753,6 +773,8 @@ class Service:
configs[svc] = lambda s: True
elif config['condition'] == CONDITION_HEALTHY:
configs[svc] = lambda s: s.is_healthy()
+ elif config['condition'] == CONDITION_COMPLETED_SUCCESSFULLY:
+ configs[svc] = lambda s: s.is_completed_successfully()
else:
# The config schema already prevents this, but it might be
# bypassed if Compose is called programmatically.
@@ -1304,6 +1326,21 @@ class Service:
raise HealthCheckFailed(ctnr.short_id)
return result

+ def is_completed_successfully(self):
+ """ Check that all containers for this service has completed successfully
+ Returns false if at least one container does not exited and
+ raises CompletedUnsuccessfully exception if at least one container
+ exited with non-zero exit code.
+ """
+ result = True
+ for ctnr in self.containers(stopped=True):
+ ctnr.inspect()
+ if ctnr.get('State.Status') != 'exited':
+ result = False
+ elif ctnr.exit_code != 0:
+ raise CompletedUnsuccessfully(ctnr.short_id, ctnr.exit_code)
+ return result

def _parse_proxy_config(self):
client = self.client
if 'proxies' not in client._general_configs:
@@ -1855,7 +1892,7 @@ class _CLIBuilder:
Returns:
A generator for the build output.
"""
- if dockerfile:
+ if dockerfile and os.path.isdir(path):
dockerfile = os.path.join(path, dockerfile)
iidfile = tempfile.mktemp()

@@ -1873,6 +1910,15 @@ class _CLIBuilder:
command_builder.add_arg("--tag", tag)
command_builder.add_arg("--target", target)
command_builder.add_arg("--iidfile", iidfile)
+ command_builder.add_arg("--platform", platform)
+ command_builder.add_arg("--isolation", isolation)

+ if extra_hosts:
+ if isinstance(extra_hosts, dict):
+ extra_hosts = ["{}:{}".format(host, ip) for host, ip in extra_hosts.items()]
+ for host in extra_hosts:
+ command_builder.add_arg("--add-host", "{}".format(host))

args = command_builder.build([path])

magic_word = "Successfully built "
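The `extra_hosts` fix above normalizes a mapping into "host:ip" strings before emitting one `--add-host` flag per entry when shelling out to `docker build`. A standalone sketch of that normalization, not the _CLIBuilder code itself:

def extra_hosts_to_args(extra_hosts):
    # Accepts either a {"host": "ip"} mapping or a ["host:ip", ...] list and
    # returns the docker build --add-host arguments, mirroring the fix above.
    if isinstance(extra_hosts, dict):
        extra_hosts = ["{}:{}".format(host, ip) for host, ip in extra_hosts.items()]
    args = []
    for host in extra_hosts:
        args += ["--add-host", host]
    return args

# e.g. extra_hosts_to_args({"somehost": "162.242.195.82"})
# -> ['--add-host', 'somehost:162.242.195.82']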
@@ -138,7 +138,7 @@ _docker_compose_config() {
;;
esac

- COMPREPLY=( $( compgen -W "--hash --help --no-interpolate --quiet -q --resolve-image-digests --services --volumes" -- "$cur" ) )
+ COMPREPLY=( $( compgen -W "--hash --help --no-interpolate --profiles --quiet -q --resolve-image-digests --services --volumes" -- "$cur" ) )
}


@@ -172,6 +172,10 @@ _docker_compose_docker_compose() {
COMPREPLY=( $( compgen -W "debug info warning error critical" -- "$cur" ) )
return
;;
+ --profile)
+ COMPREPLY=( $( compgen -W "$(__docker_compose_q config --profiles)" -- "$cur" ) )
+ return
+ ;;
--project-directory)
_filedir -d
return
@@ -618,10 +622,11 @@ _docker_compose() {
--tlskey
"

- # These options are require special treatment when searching the command.
+ # These options require special treatment when searching the command.
local top_level_options_with_args="
--ansi
--log-level
+ --profile
"

COMPREPLY=()
@@ -22,6 +22,6 @@ complete -c docker-compose -l tlskey -r -d 'Path to TLS key fi
complete -c docker-compose -l tlsverify -d 'Use TLS and verify the remote'
complete -c docker-compose -l skip-hostname-check -d "Don't check the daemon's hostname against the name specified in the client certificate (for example if your docker host is an IP address)"
complete -c docker-compose -l no-ansi -d 'Do not print ANSI control characters'
- complete -c docker-compose -l ansi -a never always auto -d 'Control when to print ANSI control characters'
+ complete -c docker-compose -l ansi -a 'never always auto' -d 'Control when to print ANSI control characters'
complete -c docker-compose -s h -l help -d 'Print usage'
complete -c docker-compose -s v -l version -d 'Print version and exit'
@@ -1,5 +1,5 @@
Click==7.1.2
- coverage==5.2.1
+ coverage==5.5
ddt==1.4.1
flake8==3.8.3
gitpython==3.1.11
@@ -7,4 +7,3 @@ mock==3.0.5
pytest==6.0.1; python_version >= '3.5'
pytest==4.6.5; python_version < '3.5'
pytest-cov==2.10.1
- PyYAML==5.3.1
@@ -3,7 +3,7 @@ appdirs==1.4.4
attrs==20.3.0
bcrypt==3.2.0
cffi==1.14.4
- cryptography==3.2.1
+ cryptography==3.3.2
distlib==0.3.1
entrypoints==0.3
filelock==3.0.12
@@ -11,7 +11,7 @@ gitdb2==4.0.2
mccabe==0.6.1
more-itertools==8.6.0; python_version >= '3.5'
more-itertools==5.0.0; python_version < '3.5'
- packaging==20.4
+ packaging==20.9
pluggy==0.13.1
py==1.9.0
pycodestyle==2.6.0
@@ -1,10 +1,10 @@
backports.shutil_get_terminal_size==1.0.0
- cached-property==1.5.1
+ cached-property==1.5.1; python_version < '3.8'
certifi==2020.6.20
chardet==3.0.4
colorama==0.4.3; sys_platform == 'win32'
distro==1.5.0
- docker==4.4.1
+ docker==5.0.0
docker-pycreds==0.4.0
dockerpty==0.4.1
docopt==0.6.2
@@ -13,9 +13,9 @@ ipaddress==1.0.23
jsonschema==3.2.0
paramiko==2.7.1
PySocks==1.7.1
- python-dotenv==0.14.0
+ python-dotenv==0.17.0
pywin32==227; sys_platform == 'win32'
- PyYAML==5.3.1
+ PyYAML==5.4.1
requests==2.24.0
texttable==1.6.2
urllib3==1.25.10; python_version == '3.3'
@@ -15,7 +15,7 @@

set -e

- VERSION="1.28.2"
+ VERSION="1.29.0"
IMAGE="docker/compose:$VERSION"

@@ -38,17 +38,19 @@ for version in $DOCKER_VERSIONS; do

trap "on_exit" EXIT

repo="dockerswarm/dind"

docker run \
-d \
--name "$daemon_container" \
--privileged \
--volume="/var/lib/docker" \
"$repo:$version" \
-v $DOCKER_CONFIG/config.json:/root/.docker/config.json \
-e "DOCKER_TLS_CERTDIR=" \
"docker:$version-dind" \
dockerd -H tcp://0.0.0.0:2375 $DOCKER_DAEMON_ARGS \
2>&1 | tail -n 10

docker exec "$daemon_container" sh -c "apk add --no-cache git"

docker run \
--rm \
--tty \
setup.py (4 lines changed)
@@ -25,14 +25,13 @@ def find_version(*file_paths):


install_requires = [
- 'cached-property >= 1.2.0, < 2',
'docopt >= 0.6.1, < 1',
'PyYAML >= 3.10, < 6',
'requests >= 2.20.0, < 3',
'texttable >= 0.9.0, < 2',
'websocket-client >= 0.32.0, < 1',
'distro >= 1.5.0, < 2',
- 'docker[ssh] >= 4.4.0, < 5',
+ 'docker[ssh] >= 5',
'dockerpty >= 0.4.1, < 1',
'jsonschema >= 2.5.1, < 4',
'python-dotenv >= 0.13.0, < 1',
@@ -50,6 +49,7 @@ if sys.version_info[:2] < (3, 4):

extras_require = {
':python_version < "3.5"': ['backports.ssl_match_hostname >= 3.5, < 4'],
+ ':python_version < "3.8"': ['cached-property >= 1.2.0, < 2'],
':sys_platform == "win32"': ['colorama >= 0.4, < 1'],
'socks': ['PySocks >= 1.5.6, != 1.5.7, < 2'],
'tests': tests_require,
@@ -237,6 +237,11 @@ class CLITestCase(DockerClientTestCase):
result = self.dispatch(['-H=tcp://doesnotexist:8000', 'ps'], returncode=1)
assert "Couldn't connect to Docker daemon" in result.stderr

+ def test_config_list_profiles(self):
+ self.base_dir = 'tests/fixtures/config-profiles'
+ result = self.dispatch(['config', '--profiles'])
+ assert set(result.stdout.rstrip().split('\n')) == {'debug', 'frontend', 'gui'}

def test_config_list_services(self):
self.base_dir = 'tests/fixtures/v2-full'
result = self.dispatch(['config', '--services'])
tests/fixtures/config-profiles/docker-compose.yml (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
version: '3.8'
services:
  frontend:
    image: frontend
    profiles: ["frontend", "gui"]
  phpmyadmin:
    image: phpmyadmin
    depends_on:
      - db
    profiles:
      - debug
  backend:
    image: backend
  db:
    image: mysql
tests/fixtures/env-file-override/.env (vendored, new file, 1 line)
@@ -0,0 +1 @@
WHEREAMI=default
@@ -1,5 +1,6 @@
import tempfile

+ import pytest
from ddt import data
from ddt import ddt

@@ -8,6 +9,7 @@ from ..acceptance.cli_test import dispatch
from compose.cli.command import get_project
from compose.cli.command import project_from_options
from compose.config.environment import Environment
+ from compose.config.errors import EnvFileNotFound
from tests.integration.testcases import DockerClientTestCase


@@ -55,13 +57,36 @@ services:
class EnvironmentOverrideFileTest(DockerClientTestCase):
def test_env_file_override(self):
base_dir = 'tests/fixtures/env-file-override'
+ # '--env-file' are relative to the current working dir
+ env = Environment.from_env_file(base_dir, base_dir+'/.env.override')
dispatch(base_dir, ['--env-file', '.env.override', 'up'])
project = get_project(project_dir=base_dir,
config_path=['docker-compose.yml'],
- environment=Environment.from_env_file(base_dir, '.env.override'),
+ environment=env,
override_dir=base_dir)
containers = project.containers(stopped=True)
assert len(containers) == 1
assert "WHEREAMI=override" in containers[0].get('Config.Env')
assert "DEFAULT_CONF_LOADED=true" in containers[0].get('Config.Env')
dispatch(base_dir, ['--env-file', '.env.override', 'down'], None)

+ def test_env_file_not_found_error(self):
+ base_dir = 'tests/fixtures/env-file-override'
+ with pytest.raises(EnvFileNotFound) as excinfo:
+ Environment.from_env_file(base_dir, '.env.override')

+ assert "Couldn't find env file" in excinfo.exconly()

+ def test_dot_env_file(self):
+ base_dir = 'tests/fixtures/env-file-override'
+ # '.env' is relative to the project_dir (base_dir)
+ env = Environment.from_env_file(base_dir, None)
+ dispatch(base_dir, ['up'])
+ project = get_project(project_dir=base_dir,
+ config_path=['docker-compose.yml'],
+ environment=env,
+ override_dir=base_dir)
+ containers = project.containers(stopped=True)
+ assert len(containers) == 1
+ assert "WHEREAMI=default" in containers[0].get('Config.Env')
+ dispatch(base_dir, ['down'], None)
@@ -25,6 +25,7 @@ from compose.const import COMPOSE_SPEC as VERSION
from compose.const import LABEL_PROJECT
from compose.const import LABEL_SERVICE
from compose.container import Container
+ from compose.errors import CompletedUnsuccessfully
from compose.errors import HealthCheckFailed
from compose.errors import NoHealthCheckConfigured
from compose.project import Project
@@ -1899,6 +1900,110 @@ class ProjectTest(DockerClientTestCase):
with pytest.raises(NoHealthCheckConfigured):
svc1.is_healthy()

def test_project_up_completed_successfully_dependency(self):
config_dict = {
'version': '2.1',
'services': {
'svc1': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'true'
},
'svc2': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'depends_on': {
'svc1': {'condition': 'service_completed_successfully'},
}
}
}
}
config_data = load_config(config_dict)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
project.up()

svc1 = project.get_service('svc1')
svc2 = project.get_service('svc2')

assert 'svc1' in svc2.get_dependency_names()
assert svc2.containers()[0].is_running
assert len(svc1.containers()) == 0
assert svc1.is_completed_successfully()

def test_project_up_completed_unsuccessfully_dependency(self):
config_dict = {
'version': '2.1',
'services': {
'svc1': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'false'
},
'svc2': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'depends_on': {
'svc1': {'condition': 'service_completed_successfully'},
}
}
}
}
config_data = load_config(config_dict)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
with pytest.raises(ProjectError):
project.up()

containers = project.containers()
assert len(containers) == 0

svc1 = project.get_service('svc1')
svc2 = project.get_service('svc2')
assert 'svc1' in svc2.get_dependency_names()
with pytest.raises(CompletedUnsuccessfully):
svc1.is_completed_successfully()

def test_project_up_completed_differently_dependencies(self):
config_dict = {
'version': '2.1',
'services': {
'svc1': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'true'
},
'svc2': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'false'
},
'svc3': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'depends_on': {
'svc1': {'condition': 'service_completed_successfully'},
'svc2': {'condition': 'service_completed_successfully'},
}
}
}
}
config_data = load_config(config_dict)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
with pytest.raises(ProjectError):
project.up()

containers = project.containers()
assert len(containers) == 0

svc1 = project.get_service('svc1')
svc2 = project.get_service('svc2')
svc3 = project.get_service('svc3')
assert ['svc1', 'svc2'] == svc3.get_dependency_names()
assert svc1.is_completed_successfully()
with pytest.raises(CompletedUnsuccessfully):
svc2.is_completed_successfully()

def test_project_up_seccomp_profile(self):
seccomp_data = {
'defaultAction': 'SCMP_ACT_ALLOW',
@@ -8,7 +8,6 @@ from docker.errors import APIError

from compose.cli.log_printer import build_log_generator
from compose.cli.log_printer import build_log_presenters
- from compose.cli.log_printer import build_no_log_generator
from compose.cli.log_printer import consume_queue
from compose.cli.log_printer import QueueItem
from compose.cli.log_printer import wait_on_exit
@@ -75,14 +74,6 @@ def test_wait_on_exit_raises():
assert expected in wait_on_exit(mock_container)


- def test_build_no_log_generator(mock_container):
- mock_container.has_api_logs = False
- mock_container.log_driver = 'none'
- output, = build_no_log_generator(mock_container, None)
- assert "WARNING: no logs are available with the 'none' log driver\n" in output
- assert "exited with code" not in output


class TestBuildLogGenerator:

def test_no_log_stream(self, mock_container):
@@ -2397,7 +2397,8 @@ web:
'image': 'busybox',
'depends_on': {
'app1': {'condition': 'service_started'},
- 'app2': {'condition': 'service_healthy'}
+ 'app2': {'condition': 'service_healthy'},
+ 'app3': {'condition': 'service_completed_successfully'}
}
}
override = {}
@@ -2409,11 +2410,12 @@ web:
'image': 'busybox',
'depends_on': {
'app1': {'condition': 'service_started'},
- 'app2': {'condition': 'service_healthy'}
+ 'app2': {'condition': 'service_healthy'},
+ 'app3': {'condition': 'service_completed_successfully'}
}
}
override = {
- 'depends_on': ['app3']
+ 'depends_on': ['app4']
}

actual = config.merge_service_dicts(base, override, VERSION)
@@ -2422,7 +2424,8 @@ web:
'depends_on': {
'app1': {'condition': 'service_started'},
'app2': {'condition': 'service_healthy'},
- 'app3': {'condition': 'service_started'}
+ 'app3': {'condition': 'service_completed_successfully'},
+ 'app4': {'condition': 'service_started'},
}
}

@@ -3567,9 +3570,11 @@ class InterpolationTest(unittest.TestCase):
@mock.patch.dict(os.environ)
def test_config_file_with_options_environment_file(self):
project_dir = 'tests/fixtures/default-env-file'
+ # env-file is relative to current working dir
+ env = Environment.from_env_file(project_dir, project_dir + '/.env2')
service_dicts = config.load(
config.find(
- project_dir, None, Environment.from_env_file(project_dir, '.env2')
+ project_dir, None, env
)
).services

@@ -5233,6 +5238,8 @@ class GetDefaultConfigFilesTestCase(unittest.TestCase):
files = [
'docker-compose.yml',
'docker-compose.yaml',
+ 'compose.yml',
+ 'compose.yaml',
]

def test_get_config_path_default_file_in_basedir(self):
@@ -5266,8 +5273,10 @@ def get_config_filename_for_files(filenames, subdir=None):
base_dir = tempfile.mkdtemp(dir=project_dir)
else:
base_dir = project_dir
- filename, = config.get_default_config_files(base_dir)
- return os.path.basename(filename)
+ filenames = config.get_default_config_files(base_dir)
+ if not filenames:
+ raise config.ComposeFileNotFound(config.SUPPORTED_FILENAMES)
+ return os.path.basename(filenames[0])
finally:
shutil.rmtree(project_dir)
@@ -221,34 +221,6 @@ class ContainerTest(unittest.TestCase):
container = Container(None, self.container_dict, has_been_inspected=True)
assert container.short_id == self.container_id[:12]

- def test_has_api_logs(self):
- container_dict = {
- 'HostConfig': {
- 'LogConfig': {
- 'Type': 'json-file'
- }
- }
- }

- container = Container(None, container_dict, has_been_inspected=True)
- assert container.has_api_logs is True

- container_dict['HostConfig']['LogConfig']['Type'] = 'none'
- container = Container(None, container_dict, has_been_inspected=True)
- assert container.has_api_logs is False

- container_dict['HostConfig']['LogConfig']['Type'] = 'syslog'
- container = Container(None, container_dict, has_been_inspected=True)
- assert container.has_api_logs is False

- container_dict['HostConfig']['LogConfig']['Type'] = 'journald'
- container = Container(None, container_dict, has_been_inspected=True)
- assert container.has_api_logs is True

- container_dict['HostConfig']['LogConfig']['Type'] = 'foobar'
- container = Container(None, container_dict, has_been_inspected=True)
- assert container.has_api_logs is False


class GetContainerNameTestCase(unittest.TestCase):