Mirror of https://github.com/docker/compose.git (synced 2026-02-10 02:29:25 +08:00)

Compare commits (187 commits)
Commits in this comparison, by abbreviated SHA1:

5becea4ca9, 3ecf001f96, 96a8e3b3c5, aca6edd631, 39ab3aee50, 39c2d42a0e, aa45dedb3d,
afb36f236b, f01e535cf4, 853efbfb54, 6ea24001fa, c3d8e558a2, 8f9f1111f3, 1b42ecba14,
c34c88b217, 575d67618e, adae403b27, 0bea52b18d, 2d0c9366ff, 8a6dc88f9e, d79745a2cd,
1250bb7481, 3ba41b98d3, 7f7370b811, a9e8ae190f, 3eee3e093a, ad770b272c, e5cab3ced5,
7984767db2, 0773730525, c81046aac0, 84c816e887, 1607674374, 683fac0dbf, ddee2958ec,
1ab1cd202b, 1752927dcd, c2ddd71e5f, 72bbd9c3a6, 3c9ee678e7, 92fefbc9cc, e496c64127,
84afa518e8, 0f6a55e036, 8ce5e235e4, c1dddbe608, 981b0cd641, 5ec8af582c, f5342b600c,
4a26d95de4, 5b7851f55b, eaa22df151, 551f680751, 3e071ec8d9, 858ff26731, 2a7c06a050,
d0b7bc3110, fe4f16e448, 1da4301650, c594cb3fc3, 89ad637d50, 6ca2aed7ec, fc744a0cc9,
245ede1d75, 72f7b086d7, 2f48b6f5e9, e6fcde422c, 75b2d7905f, efa5969086, 2a4aca7f54,
9c8f5a5705, 62bbc5cfe2, 66375c2871, c760600a65, 4daad056c4, 74c09cac66, 36e470d640,
d28d717884, 42c2cfd7a6, 5b983ac653, 93425218eb, 49d0ee2de5, a92c6d7e17, b8800db52e,
ccabfde353, 3297bb50bb, e688006444, e4a83c15ff, 824b9f138e, 8654eb2ea3, 9407ee65e5,
66c6d2757a, 17daa93edf, 9795e39d0c, 393abc5b33, d0866c8c18, 546133c977, 9a2f94713e,
b88f635514, 31002aeacd, 6209baccf3, 28f8b8549d, 76a19ec8c5, bba8cd0322, f2ec6a2176,
7f7f1607de, 4990a7f935, 72f8551466, 487779960c, 99b6776fd2, 4e382b9c28, 862107a32a,
6a3af5b707, 205d520805, 8f2bb66e73, af4eaae006, 1c547b270e, 1c499bb2eb, 4fa72a066a,
b9249168bd, e36ac32120, 5be6bde76c, c380604a9e, 369eb3220a, 2e273c5029, 21e196f20a,
b9d86f4b51, 1b5278f977, affb0d504d, 8034bc3bd6, 89fcfc5499, 40a4ec1624, 6c55ef6a5d,
3f46dc1d76, f2bc89a876, fee4756e33, 030b347673, e0edc908b5, 6f3f696bd1, 3f4d1ea97e,
7b5be97c45, 3e31f80977, 059fd29ec3, f1059d75ed, c45e93971f, ff42a783de, 6ec45cf2d2,
4139d701f3, c56f57da12, 687fa65557, 5e3708e605, d6e3af36dd, ac06e35c00, 9a913b110c,
929ca84db1, 57f8a0b039, 21f1d7c5e6, c87844c504, 21c07bd76c, 8f2dbd9b12, 7ca88de76b,
2d2a8a0469, b187f19f94, 5c6c300ba5, a3e6e28eeb, be8523708e, 8785279ffd, e28c948f34,
854c003359, 3ebfa4b089, 843621dfb8, ea28d8edac, 8633939080, f965401569, bf61244f37,
f825cec2fc, 3cfccc1d64, 675c9674e1, df99124d72, cddaa77fea, d51249acf4, 062deb19c0,
a24843e1e4, df05472bcc, ce59a4c223, 1ff05ac060, 1192a4e817
CHANGELOG.md (+223 lines)
```markdown
@@ -1,6 +1,229 @@
Change log
==========

1.29.2 (2021-05-10)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/59?closed=1)

### Miscellaneous

- Remove advertisement for `docker compose` in the `up` command to avoid annoyance
- Bump `py` to `1.10.0` in `requirements-indirect.txt`

1.29.1 (2021-04-13)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/58?closed=1)

### Bugs

- Fix for invalid handler warning on Windows builds
- Fix config hash to trigger container recreation on IPC mode updates
- Fix conversion map for `placement.max_replicas_per_node`
- Remove extra scan suggestion on build

1.29.0 (2021-04-06)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/56?closed=1)

### Features

- Add profile filter to `docker-compose config`
- Add a `depends_on` condition to wait for successful service completion

### Miscellaneous

- Add image scan message on build
- Update warning message for `--no-ansi` to mention `--ansi never` as alternative
- Bump docker-py to 5.0.0
- Bump PyYAML to 5.4.1
- Bump python-dotenv to 0.17.0

1.28.6 (2021-03-23)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/57?closed=1)

### Bugs

- Make `--env-file` relative to the current working directory and error out for invalid paths. Environment file paths set with `--env-file` are relative to the current working directory, while the default `.env` file is located in the project directory, which by default is the base directory of the Compose file.
- Fix missing service property `storage_opt` by updating the compose schema
- Fix build `extra_hosts` list format
- Remove extra error message on `exec`

### Miscellaneous

- Add `compose.yml` and `compose.yaml` to the default filename list

1.28.5 (2021-02-25)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/55?closed=1)

### Bugs

- Fix OpenSSL version mismatch error when shelling out to the ssh client (via bump to docker-py 4.4.4, which contains the fix)
- Add missing build flags to the native builder: `platform`, `isolation` and `extra_hosts`
- Remove info message on native build
- Avoid fetching logs when the service logging driver is set to 'none'

1.28.4 (2021-02-18)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/54?closed=1)

### Bugs

- Fix SSH port parsing by bumping docker-py to 4.4.3

### Miscellaneous

- Bump Python to 3.7.10

1.28.3 (2021-02-17)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/53?closed=1)

### Bugs

- Fix SSH hostname parsing when it contains a leading s/h, and remove the quiet option that was hiding the error (via docker-py bump to 4.4.2)
- Fix key error for the '--no-log-prefix' option
- Fix incorrect CLI environment variable name for service profiles: `COMPOSE_PROFILES` instead of `COMPOSE_PROFILE`
- Fix fish completion

### Miscellaneous

- Bump cryptography to 3.3.2
- Remove log driver filter

1.28.2 (2021-01-26)
-------------------

### Miscellaneous

- CI setup update

1.28.1 (2021-01-25)
-------------------

### Bugs

- Revert to Python 3.7 for Linux static builds
- Add bash completion for `docker-compose logs|up --no-log-prefix`

1.28.0 (2021-01-20)
-------------------

### Features

- Support for Nvidia GPUs via device requests
- Support for service profiles
- Change the SSH connection approach to match the Docker CLI's, by shelling out to the local SSH client (the old behaviour can be re-enabled by setting the `COMPOSE_PARAMIKO_SSH` environment variable)
- Add flag to disable log prefix
- Add flag for ANSI output control

### Bugs

- Make `parallel_pull=True` by default
- Bring back warning for configs in non-swarm mode
- Take `--file` into account when defining `project_dir`
- On `compose up`, attach only to services we read logs from

### Miscellaneous

- Make COMPOSE_DOCKER_CLI_BUILD=1 the default
- Add usage metrics
- Sync schema with the Compose specification
- Improve failure report for missing mandatory environment variables
- Bump attrs to 20.3.0
- Bump more_itertools to 8.6.0
- Bump cryptography to 3.2.1
- Bump cffi to 1.14.4
- Bump virtualenv to 20.2.2
- Bump bcrypt to 3.2.0
- Bump gitpython to 3.1.11
- Bump docker-py to 4.4.1
- Bump Python to 3.9
- Linux: bump the Debian base image from stretch to buster (required for Python 3.9)
- macOS: OpenSSL 1.1.1g to 1.1.1h, Python 3.7.7 to 3.9.0
- Bump pyinstaller to 4.1
- Loosen restriction on base images to latest minor
- Updates of READMEs

1.27.4 (2020-09-24)
-------------------

### Bugs

- Remove path checks for bind mounts
- Fix port rendering to output long-form syntax for non-v1
- Add protocol to the docker socket address

1.27.3 (2020-09-16)
-------------------

### Bugs

- Merge `max_replicas_per_node` on `docker-compose config`
- Fix `depends_on` serialization on `docker-compose config`
- Fix scaling when some containers are not running on `docker-compose up`
- Enable relative paths for `driver_opts.device` for the `local` driver
- Allow strings for `cpus` fields

1.27.2 (2020-09-10)
-------------------
```
Dockerfile (64 changed lines)
```diff
@@ -1,11 +1,15 @@
ARG DOCKER_VERSION=19.03.8
ARG PYTHON_VERSION=3.7.7
ARG BUILD_ALPINE_VERSION=3.11
ARG BUILD_DEBIAN_VERSION=slim-stretch
ARG RUNTIME_ALPINE_VERSION=3.11.5
ARG RUNTIME_DEBIAN_VERSION=stretch-20200414-slim
ARG DOCKER_VERSION=19.03
ARG PYTHON_VERSION=3.7.10

ARG BUILD_PLATFORM=alpine
ARG BUILD_ALPINE_VERSION=3.12
ARG BUILD_CENTOS_VERSION=7
ARG BUILD_DEBIAN_VERSION=slim-stretch

ARG RUNTIME_ALPINE_VERSION=3.12
ARG RUNTIME_CENTOS_VERSION=7
ARG RUNTIME_DEBIAN_VERSION=stretch-slim

ARG DISTRO=alpine

FROM docker:${DOCKER_VERSION} AS docker-cli

@@ -40,32 +44,56 @@ RUN apt-get update && apt-get install --no-install-recommends -y \
    openssl \
    zlib1g-dev

FROM build-${BUILD_PLATFORM} AS build
COPY docker-compose-entrypoint.sh /usr/local/bin/
ENTRYPOINT ["sh", "/usr/local/bin/docker-compose-entrypoint.sh"]
COPY --from=docker-cli /usr/local/bin/docker /usr/local/bin/docker
WORKDIR /code/
# FIXME(chris-crone): virtualenv 16.3.0 breaks build, force 16.2.0 until fixed
RUN pip install virtualenv==20.0.30
RUN pip install tox==3.19.0
FROM centos:${BUILD_CENTOS_VERSION} AS build-centos
RUN yum install -y \
    gcc \
    git \
    libffi-devel \
    make \
    openssl \
    openssl-devel
WORKDIR /tmp/python3/
ARG PYTHON_VERSION
RUN curl -L https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz | tar xzf - \
    && cd Python-${PYTHON_VERSION} \
    && ./configure --enable-optimizations --enable-shared --prefix=/usr LDFLAGS="-Wl,-rpath /usr/lib" \
    && make altinstall
RUN alternatives --install /usr/bin/python python /usr/bin/python2.7 50
RUN alternatives --install /usr/bin/python python /usr/bin/python$(echo "${PYTHON_VERSION%.*}") 60
RUN curl https://bootstrap.pypa.io/get-pip.py | python -

FROM build-${DISTRO} AS build
ENTRYPOINT ["sh", "/usr/local/bin/docker-compose-entrypoint.sh"]
WORKDIR /code/
COPY docker-compose-entrypoint.sh /usr/local/bin/
COPY --from=docker-cli /usr/local/bin/docker /usr/local/bin/docker
RUN pip install \
    virtualenv==20.4.0 \
    tox==3.21.2
COPY requirements-dev.txt .
COPY requirements-indirect.txt .
COPY requirements.txt .
COPY requirements-dev.txt .
RUN pip install -r requirements.txt -r requirements-indirect.txt -r requirements-dev.txt
COPY .pre-commit-config.yaml .
COPY tox.ini .
COPY setup.py .
COPY README.md .
COPY compose compose/
RUN tox --notest
RUN tox -e py37 --notest
COPY . .
ARG GIT_COMMIT=unknown
ENV DOCKER_COMPOSE_GITSHA=$GIT_COMMIT
RUN script/build/linux-entrypoint

FROM scratch AS bin
ARG TARGETARCH
ARG TARGETOS
COPY --from=build /usr/local/bin/docker-compose /docker-compose-${TARGETOS}-${TARGETARCH}

FROM alpine:${RUNTIME_ALPINE_VERSION} AS runtime-alpine
FROM debian:${RUNTIME_DEBIAN_VERSION} AS runtime-debian
FROM runtime-${BUILD_PLATFORM} AS runtime
FROM centos:${RUNTIME_CENTOS_VERSION} AS runtime-centos
FROM runtime-${DISTRO} AS runtime
COPY docker-compose-entrypoint.sh /usr/local/bin/
ENTRYPOINT ["sh", "/usr/local/bin/docker-compose-entrypoint.sh"]
COPY --from=docker-cli /usr/local/bin/docker /usr/local/bin/docker
```
Jenkinsfile (vendored, 19 changed lines)
```diff
@@ -1,6 +1,6 @@
#!groovy

def dockerVersions = ['19.03.8']
def dockerVersions = ['19.03.13']
def baseImages = ['alpine', 'debian']
def pythonVersions = ['py37']

@@ -13,6 +13,9 @@ pipeline {
        timeout(time: 2, unit: 'HOURS')
        timestamps()
    }
    environment {
        DOCKER_BUILDKIT="1"
    }

    stages {
        stage('Build test images') {
@@ -20,7 +23,7 @@ pipeline {
            parallel {
                stage('alpine') {
                    agent {
                        label 'ubuntu && amd64 && !zfs'
                        label 'ubuntu-2004 && amd64 && !zfs && cgroup1'
                    }
                    steps {
                        buildImage('alpine')
@@ -28,7 +31,7 @@ pipeline {
                }
                stage('debian') {
                    agent {
                        label 'ubuntu && amd64 && !zfs'
                        label 'ubuntu-2004 && amd64 && !zfs && cgroup1'
                    }
                    steps {
                        buildImage('debian')
@@ -59,7 +62,7 @@ pipeline {

def buildImage(baseImage) {
    def scmvar = checkout(scm)
    def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
    def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
    image = docker.image(imageName)

    withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
@@ -69,7 +72,7 @@ def buildImage(baseImage) {
        ansiColor('xterm') {
            sh """docker build -t ${imageName} \\
                --target build \\
                --build-arg BUILD_PLATFORM="${baseImage}" \\
                --build-arg DISTRO="${baseImage}" \\
                --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT}" \\
                .\\
            """
@@ -84,9 +87,9 @@ def buildImage(baseImage) {
def runTests(dockerVersion, pythonVersion, baseImage) {
    return {
        stage("python=${pythonVersion} docker=${dockerVersion} ${baseImage}") {
            node("ubuntu && amd64 && !zfs") {
            node("ubuntu-2004 && amd64 && !zfs && cgroup1") {
                def scmvar = checkout(scm)
                def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
                def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
                def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim()
                echo "Using local system's storage driver: ${storageDriver}"
                withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
@@ -96,6 +99,8 @@ def runTests(dockerVersion, pythonVersion, baseImage) {
                    --privileged \\
                    --volume="\$(pwd)/.git:/code/.git" \\
                    --volume="/var/run/docker.sock:/var/run/docker.sock" \\
                    --volume="\${DOCKER_CONFIG}/config.json:/root/.docker/config.json" \\
                    -e "DOCKER_TLS_CERTDIR=" \\
                    -e "TAG=${imageName}" \\
                    -e "STORAGE_DRIVER=${storageDriver}" \\
                    -e "DOCKER_VERSIONS=${dockerVersion}" \\
```
Makefile (new file, 57 lines)
```diff
@@ -0,0 +1,57 @@
TAG = "docker-compose:alpine-$(shell git rev-parse --short HEAD)"
GIT_VOLUME = "--volume=$(shell pwd)/.git:/code/.git"

DOCKERFILE ?="Dockerfile"
DOCKER_BUILD_TARGET ?="build"

UNAME_S := $(shell uname -s)
ifeq ($(UNAME_S),Linux)
	BUILD_SCRIPT = linux
endif
ifeq ($(UNAME_S),Darwin)
	BUILD_SCRIPT = osx
endif

COMPOSE_SPEC_SCHEMA_PATH = "compose/config/compose_spec.json"
COMPOSE_SPEC_RAW_URL = "https://raw.githubusercontent.com/compose-spec/compose-spec/master/schema/compose-spec.json"

all: cli

cli: download-compose-spec ## Compile the cli
	./script/build/$(BUILD_SCRIPT)

download-compose-spec: ## Download the compose-spec schema from it's repo
	curl -so $(COMPOSE_SPEC_SCHEMA_PATH) $(COMPOSE_SPEC_RAW_URL)

cache-clear: ## Clear the builder cache
	@docker builder prune --force --filter type=exec.cachemount --filter=unused-for=24h

base-image: ## Builds base image
	docker build -f $(DOCKERFILE) -t $(TAG) --target $(DOCKER_BUILD_TARGET) .

lint: base-image ## Run linter
	docker run --rm \
		--tty \
		$(GIT_VOLUME) \
		$(TAG) \
		tox -e pre-commit

test-unit: base-image ## Run tests
	docker run --rm \
		--tty \
		$(GIT_VOLUME) \
		$(TAG) \
		pytest -v tests/unit/

test: ## Run all tests
	./script/test/default

pre-commit: lint test-unit cli

help: ## Show help
	@echo Please specify a build target. The choices are:
	@grep -E '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'

FORCE:

.PHONY: all cli download-compose-spec cache-clear base-image lint test-unit test pre-commit help
```
README.md (106 changed lines)
````diff
@@ -1,62 +1,86 @@
Docker Compose
==============
[](https://ci-next.docker.com/public/job/compose/job/master/)

Compose is a tool for defining and running multi-container Docker applications.
With Compose, you use a Compose file to configure your application's services.
Then, using a single command, you create and start all the services
from your configuration. To learn more about all the features of Compose
see [the list of features](https://github.com/docker/docker.github.io/blob/master/compose/index.md#features).
Docker Compose is a tool for running multi-container applications on Docker
defined using the [Compose file format](https://compose-spec.io).
A Compose file is used to define how the one or more containers that make up
your application are configured.
Once you have a Compose file, you can create and start your application with a
single command: `docker-compose up`.

Compose is great for development, testing, and staging environments, as well as
CI workflows. You can learn more about each case in
[Common Use Cases](https://github.com/docker/docker.github.io/blob/master/compose/index.md#common-use-cases).
Compose files can be used to deploy applications locally, or to the cloud on
[Amazon ECS](https://aws.amazon.com/ecs) or
[Microsoft ACI](https://azure.microsoft.com/services/container-instances/) using
the Docker CLI. You can read more about how to do this:
- [Compose for Amazon ECS](https://docs.docker.com/engine/context/ecs-integration/)
- [Compose for Microsoft ACI](https://docs.docker.com/engine/context/aci-integration/)

Using Compose is basically a three-step process.
Where to get Docker Compose
----------------------------

### Windows and macOS

Docker Compose is included in
[Docker Desktop](https://www.docker.com/products/docker-desktop)
for Windows and macOS.

### Linux

You can download Docker Compose binaries from the
[release page](https://github.com/docker/compose/releases) on this repository.

### Using pip

If your platform is not supported, you can download Docker Compose using `pip`:

```console
pip install docker-compose
```

> **Note:** Docker Compose requires Python 3.6 or later.

Quick Start
-----------

Using Docker Compose is basically a three-step process:
1. Define your app's environment with a `Dockerfile` so it can be
reproduced anywhere.
reproduced anywhere.
2. Define the services that make up your app in `docker-compose.yml` so
they can be run together in an isolated environment.
3. Lastly, run `docker-compose up` and Compose will start and run your entire app.
they can be run together in an isolated environment.
3. Lastly, run `docker-compose up` and Compose will start and run your entire
app.

A `docker-compose.yml` looks like this:
A Compose file looks like this:

version: '2'
```yaml
services:
  web:
    build: .
    ports:
      - "5000:5000"
    volumes:
      - .:/code
  redis:
    image: redis
```

services:
  web:
    build: .
    ports:
      - "5000:5000"
    volumes:
      - .:/code
  redis:
    image: redis
You can find examples of Compose applications in our
[Awesome Compose repository](https://github.com/docker/awesome-compose).

For more information about the Compose file, see the
[Compose file reference](https://github.com/docker/docker.github.io/blob/master/compose/compose-file/compose-versioning.md).

Compose has commands for managing the whole lifecycle of your application:

* Start, stop and rebuild services
* View the status of running services
* Stream the log output of running services
* Run a one-off command on a service

Installation and documentation
------------------------------

- Full documentation is available on [Docker's website](https://docs.docker.com/compose/).
- Code repository for Compose is on [GitHub](https://github.com/docker/compose).
- If you find any problems please fill out an [issue](https://github.com/docker/compose/issues/new/choose). Thank you!
For more information about the Compose format, see the
[Compose file reference](https://docs.docker.com/compose/compose-file/).

Contributing
------------

[](https://ci-next.docker.com/public/job/compose/job/master/)
Want to help develop Docker Compose? Check out our
[contributing documentation](https://github.com/docker/compose/blob/master/CONTRIBUTING.md).

Want to help build Compose? Check out our [contributing documentation](https://github.com/docker/compose/blob/master/CONTRIBUTING.md).
If you find an issue, please report it on the
[issue tracker](https://github.com/docker/compose/issues/new/choose).

Releasing
---------
````
```diff
@@ -1,6 +1,6 @@
#!groovy

def dockerVersions = ['19.03.8', '18.09.9']
def dockerVersions = ['19.03.13', '18.09.9']
def baseImages = ['alpine', 'debian']
def pythonVersions = ['py37']

@@ -13,6 +13,9 @@ pipeline {
        timeout(time: 2, unit: 'HOURS')
        timestamps()
    }
    environment {
        DOCKER_BUILDKIT="1"
    }

    stages {
        stage('Build test images') {
@@ -20,7 +23,7 @@ pipeline {
            parallel {
                stage('alpine') {
                    agent {
                        label 'linux && docker && ubuntu-2004'
                        label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
                    }
                    steps {
                        buildImage('alpine')
@@ -28,7 +31,7 @@ pipeline {
                }
                stage('debian') {
                    agent {
                        label 'linux && docker && ubuntu-2004'
                        label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
                    }
                    steps {
                        buildImage('debian')
@@ -38,7 +41,7 @@ pipeline {
        }
        stage('Test') {
            agent {
                label 'linux && docker && ubuntu-2004'
                label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
            }
            steps {
                // TODO use declarative 1.5.0 `matrix` once available on CI
@@ -58,7 +61,7 @@ pipeline {
        }
        stage('Generate Changelog') {
            agent {
                label 'linux && docker && ubuntu-2004'
                label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
            }
            steps {
                checkout scm
@@ -81,7 +84,7 @@ pipeline {
            steps {
                checkout scm
                sh './script/setup/osx'
                sh 'tox -e py37 -- tests/unit'
                sh 'tox -e py39 -- tests/unit'
                sh './script/build/osx'
                dir ('dist') {
                    checksum('docker-compose-Darwin-x86_64')
@@ -95,7 +98,7 @@ pipeline {
            }
            stage('linux binary') {
                agent {
                    label 'linux && docker && ubuntu-2004'
                    label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
                }
                steps {
                    checkout scm
@@ -114,11 +117,11 @@ pipeline {
                    label 'windows-python'
                }
                environment {
                    PATH = "$PATH;C:\\Python37;C:\\Python37\\Scripts"
                    PATH = "C:\\Python39;C:\\Python39\\Scripts;$PATH"
                }
                steps {
                    checkout scm
                    bat 'tox.exe -e py37 -- tests/unit'
                    bat 'tox.exe -e py39 -- tests/unit'
                    powershell '.\\script\\build\\windows.ps1'
                    dir ('dist') {
                        checksum('docker-compose-Windows-x86_64.exe')
@@ -131,7 +134,7 @@ pipeline {
            }
            stage('alpine image') {
                agent {
                    label 'linux && docker && ubuntu-2004'
                    label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
                }
                steps {
                    buildRuntimeImage('alpine')
@@ -139,7 +142,7 @@ pipeline {
            }
            stage('debian image') {
                agent {
                    label 'linux && docker && ubuntu-2004'
                    label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
                }
                steps {
                    buildRuntimeImage('debian')
@@ -154,7 +157,7 @@ pipeline {
            parallel {
                stage('Pushing images') {
                    agent {
                        label 'linux && docker && ubuntu-2004'
                        label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
                    }
                    steps {
                        pushRuntimeImage('alpine')
@@ -163,7 +166,7 @@ pipeline {
                }
                stage('Creating Github Release') {
                    agent {
                        label 'linux && docker && ubuntu-2004'
                        label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
                    }
                    environment {
                        GITHUB_TOKEN = credentials('github-release-token')
@@ -195,7 +198,7 @@ pipeline {
                }
                stage('Publishing Python packages') {
                    agent {
                        label 'linux && docker && ubuntu-2004'
                        label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
                    }
                    environment {
                        PYPIRC = credentials('pypirc-docker-dsg-cibot')
@@ -219,7 +222,7 @@ pipeline {

def buildImage(baseImage) {
    def scmvar = checkout(scm)
    def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
    def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
    image = docker.image(imageName)

    withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
@@ -229,7 +232,7 @@ def buildImage(baseImage) {
        ansiColor('xterm') {
            sh """docker build -t ${imageName} \\
                --target build \\
                --build-arg BUILD_PLATFORM="${baseImage}" \\
                --build-arg DISTRO="${baseImage}" \\
                --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT}" \\
                .\\
            """
@@ -244,9 +247,9 @@ def buildImage(baseImage) {
def runTests(dockerVersion, pythonVersion, baseImage) {
    return {
        stage("python=${pythonVersion} docker=${dockerVersion} ${baseImage}") {
            node("linux && docker && ubuntu-2004") {
            node("linux && docker && ubuntu-2004 && amd64 && cgroup1") {
                def scmvar = checkout(scm)
                def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
                def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
                def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim()
                echo "Using local system's storage driver: ${storageDriver}"
                withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
@@ -256,6 +259,8 @@ def runTests(dockerVersion, pythonVersion, baseImage) {
                    --privileged \\
                    --volume="\$(pwd)/.git:/code/.git" \\
                    --volume="/var/run/docker.sock:/var/run/docker.sock" \\
                    --volume="\${DOCKER_CONFIG}/config.json:/root/.docker/config.json" \\
                    -e "DOCKER_TLS_CERTDIR=" \\
                    -e "TAG=${imageName}" \\
                    -e "STORAGE_DRIVER=${storageDriver}" \\
                    -e "DOCKER_VERSIONS=${dockerVersion}" \\
@@ -276,7 +281,7 @@ def buildRuntimeImage(baseImage) {
    def imageName = "docker/compose:${baseImage}-${env.BRANCH_NAME}"
    ansiColor('xterm') {
        sh """docker build -t ${imageName} \\
            --build-arg BUILD_PLATFORM="${baseImage}" \\
            --build-arg DISTRO="${baseImage}" \\
            --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT.take(7)}" \\
            .
        """
```
```diff
@@ -1 +1 @@
-__version__ = '1.28.0dev'
+__version__ = '1.29.2'
```
```diff
@@ -1,3 +1,6 @@
import enum
import os

from ..const import IS_WINDOWS_PLATFORM

NAMES = [
@@ -12,6 +15,21 @@ NAMES = [
]


@enum.unique
class AnsiMode(enum.Enum):
    """Enumeration for when to output ANSI colors."""
    NEVER = "never"
    ALWAYS = "always"
    AUTO = "auto"

    def use_ansi_codes(self, stream):
        if self is AnsiMode.ALWAYS:
            return True
        if self is AnsiMode.NEVER or os.environ.get('CLICOLOR') == '0':
            return False
        return stream.isatty()


def get_pairs():
    for i, name in enumerate(NAMES):
        yield (name, str(30 + i))
```
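To make the new behaviour concrete, here is a standalone sketch exercising the same decision table as `AnsiMode.use_ansi_codes`; the `FakeStream` class is invented for the demo, while the enum body mirrors the code above:

```python
import enum
import os


@enum.unique
class AnsiMode(enum.Enum):
    """Copy of compose.cli.colors.AnsiMode, for demonstration only."""
    NEVER = "never"
    ALWAYS = "always"
    AUTO = "auto"

    def use_ansi_codes(self, stream):
        if self is AnsiMode.ALWAYS:
            return True
        if self is AnsiMode.NEVER or os.environ.get('CLICOLOR') == '0':
            return False
        return stream.isatty()


class FakeStream:
    """Stand-in for sys.stderr with a controllable isatty() result."""
    def __init__(self, tty):
        self._tty = tty

    def isatty(self):
        return self._tty


tty, pipe = FakeStream(True), FakeStream(False)
assert AnsiMode.ALWAYS.use_ansi_codes(pipe)        # forced on, even when piped
assert not AnsiMode.NEVER.use_ansi_codes(tty)      # forced off, even on a TTY
assert AnsiMode.AUTO.use_ansi_codes(tty)           # auto follows isatty()
os.environ['CLICOLOR'] = '0'
assert not AnsiMode.AUTO.use_ansi_codes(tty)       # CLICOLOR=0 disables auto mode
```

In `dispatch()` further down, the selected mode is applied to the stderr handler, so `--ansi never` and `CLICOLOR=0` both suppress colors even on a terminal.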
```diff
@@ -35,7 +35,7 @@ SILENT_COMMANDS = {

def project_from_options(project_dir, options, additional_options=None):
    additional_options = additional_options or {}
    override_dir = options.get('--project-directory')
    override_dir = get_project_dir(options)
    environment_file = options.get('--env-file')
    environment = Environment.from_env_file(override_dir or project_dir, environment_file)
    environment.silent = options.get('COMMAND', None) in SILENT_COMMANDS
@@ -59,14 +59,15 @@ def project_from_options(project_dir, options, additional_options=None):

    return get_project(
        project_dir,
        get_config_path_from_options(project_dir, options, environment),
        get_config_path_from_options(options, environment),
        project_name=options.get('--project-name'),
        verbose=options.get('--verbose'),
        context=context,
        environment=environment,
        override_dir=override_dir,
        interpolate=(not additional_options.get('--no-interpolate')),
        environment_file=environment_file
        environment_file=environment_file,
        enabled_profiles=get_profiles_from_options(options, environment)
    )

@@ -86,21 +87,29 @@ def set_parallel_limit(environment):
    parallel.GlobalLimit.set_global_limit(parallel_limit)


def get_project_dir(options):
    override_dir = None
    files = get_config_path_from_options(options, os.environ)
    if files:
        if files[0] == '-':
            return '.'
        override_dir = os.path.dirname(files[0])
    return options.get('--project-directory') or override_dir


def get_config_from_options(base_dir, options, additional_options=None):
    additional_options = additional_options or {}
    override_dir = options.get('--project-directory')
    override_dir = get_project_dir(options)
    environment_file = options.get('--env-file')
    environment = Environment.from_env_file(override_dir or base_dir, environment_file)
    config_path = get_config_path_from_options(
        base_dir, options, environment
    )
    config_path = get_config_path_from_options(options, environment)
    return config.load(
        config.find(base_dir, config_path, environment, override_dir),
        not additional_options.get('--no-interpolate')
    )


def get_config_path_from_options(base_dir, options, environment):
def get_config_path_from_options(options, environment):
    def unicode_paths(paths):
        return [p.decode('utf-8') if isinstance(p, bytes) else p for p in paths]

@@ -115,9 +124,21 @@ def get_config_path_from_options(options, environment):
    return None


def get_profiles_from_options(options, environment):
    profile_option = options.get('--profile')
    if profile_option:
        return profile_option

    profiles = environment.get('COMPOSE_PROFILES')
    if profiles:
        return profiles.split(',')

    return []


def get_project(project_dir, config_path=None, project_name=None, verbose=False,
                context=None, environment=None, override_dir=None,
                interpolate=True, environment_file=None):
                interpolate=True, environment_file=None, enabled_profiles=None):
    if not environment:
        environment = Environment.from_env_file(project_dir)
    config_details = config.find(project_dir, config_path, environment, override_dir)
@@ -139,6 +160,7 @@ def get_project(project_dir, config_path=None, project_name=None, verbose=False,
        client,
        environment.get('DOCKER_DEFAULT_PLATFORM'),
        execution_context_labels(config_details, environment_file),
        enabled_profiles,
    )
```
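The precedence that `get_profiles_from_options` implements is easy to verify in isolation. This sketch re-implements the same three-step fallback with plain dicts standing in for the docopt options and compose's `Environment` object:

```python
def resolve_profiles(options, environment):
    # An explicit --profile flag wins (docopt collects repeats into a list),
    # then the COMPOSE_PROFILES variable, then no profiles at all.
    profile_option = options.get('--profile')
    if profile_option:
        return profile_option
    profiles = environment.get('COMPOSE_PROFILES')
    if profiles:
        return profiles.split(',')
    return []


assert resolve_profiles({'--profile': ['debug']}, {'COMPOSE_PROFILES': 'web'}) == ['debug']
assert resolve_profiles({}, {'COMPOSE_PROFILES': 'web,db'}) == ['web', 'db']
assert resolve_profiles({}, {}) == []
```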
```diff
@@ -166,8 +166,8 @@ def docker_client(environment, version=None, context=None, tls_version=None):
        kwargs['credstore_env'] = {
            'LD_LIBRARY_PATH': environment.get('LD_LIBRARY_PATH_ORIG'),
        }

    client = APIClient(**kwargs)
    use_paramiko_ssh = int(environment.get('COMPOSE_PARAMIKO_SSH', 0))
    client = APIClient(use_ssh_client=not use_paramiko_ssh, **kwargs)
    client._original_base_url = kwargs.get('base_url')

    return client
```
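A small sketch of the toggle added here: compose now asks docker-py to shell out to the local `ssh` binary by default, and setting `COMPOSE_PARAMIKO_SSH=1` restores the older in-process paramiko transport. The `wants_ssh_shellout` helper is hypothetical; only the `int(...)` negation logic comes from the diff:

```python
def wants_ssh_shellout(environment):
    # Mirrors the expression passed to APIClient(use_ssh_client=...):
    # unset or "0" -> shell out to the ssh client; "1" -> keep paramiko.
    use_paramiko_ssh = int(environment.get('COMPOSE_PARAMIKO_SSH', 0))
    return not use_paramiko_ssh


assert wants_ssh_shellout({}) is True                              # new default
assert wants_ssh_shellout({'COMPOSE_PARAMIKO_SSH': '1'}) is False  # opt out
```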
```diff
@@ -17,10 +17,16 @@ class DocoptDispatcher:
        self.command_class = command_class
        self.options = options

    @classmethod
    def get_command_and_options(cls, doc_entity, argv, options):
        command_help = getdoc(doc_entity)
        opt = docopt_full_help(command_help, argv, **options)
        command = opt['COMMAND']
        return command_help, opt, command

    def parse(self, argv):
        command_help = getdoc(self.command_class)
        options = docopt_full_help(command_help, argv, **self.options)
        command = options['COMMAND']
        command_help, options, command = DocoptDispatcher.get_command_and_options(
            self.command_class, argv, self.options)

        if command is None:
            raise SystemExit(command_help)
```
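The point of extracting `get_command_and_options` into a classmethod is that `main()` can recover the subcommand name for metrics reporting before full dispatch runs. A minimal sketch of that pre-parse, using the real `docopt` package and a toy usage string (not `TopLevelCommand`'s actual docstring):

```python
from docopt import docopt

doc = """Define and run multi-container applications with Docker.

Usage:
  docker-compose [options] [COMMAND] [ARGS...]

Options:
  --verbose  Show more output
"""

# options_first=True stops option parsing at the first positional argument,
# so the subcommand name is available even before per-command parsing.
opt = docopt(doc, ['up', '-d'], options_first=True)
assert opt['COMMAND'] == 'up'   # the command name, recovered up front
assert opt['ARGS'] == ['-d']    # remaining args, parsed later per-command
```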
```diff
@@ -16,18 +16,22 @@ from compose.utils import split_buffer

class LogPresenter:

    def __init__(self, prefix_width, color_func):
    def __init__(self, prefix_width, color_func, keep_prefix=True):
        self.prefix_width = prefix_width
        self.color_func = color_func
        self.keep_prefix = keep_prefix

    def present(self, container, line):
        prefix = container.name_without_project.ljust(self.prefix_width)
        return '{prefix} {line}'.format(
            prefix=self.color_func(prefix + ' |'),
            line=line)
        to_log = '{line}'.format(line=line)

        if self.keep_prefix:
            prefix = container.name_without_project.ljust(self.prefix_width)
            to_log = '{prefix} '.format(prefix=self.color_func(prefix + ' |')) + to_log

        return to_log


def build_log_presenters(service_names, monochrome):
def build_log_presenters(service_names, monochrome, keep_prefix=True):
    """Return an iterable of functions.

    Each function can be used to format the logs output of a container.

@@ -38,7 +42,7 @@ def build_log_presenters(service_names, monochrome):
        return text

    for color_func in cycle([no_color] if monochrome else colors.rainbow()):
        yield LogPresenter(prefix_width, color_func)
        yield LogPresenter(prefix_width, color_func, keep_prefix)


def max_name_width(service_names, max_index_width=3):
@@ -154,10 +158,8 @@ class QueueItem(namedtuple('_QueueItem', 'item is_stop exc')):


def tail_container_logs(container, presenter, queue, log_args):
    generator = get_log_generator(container)

    try:
        for item in generator(container, log_args):
        for item in build_log_generator(container, log_args):
            queue.put(QueueItem.new(presenter.present(container, item)))
    except Exception as e:
        queue.put(QueueItem.exception(e))
@@ -167,20 +169,6 @@ def tail_container_logs(container, presenter, queue, log_args):
    queue.put(QueueItem.stop(container.name))


def get_log_generator(container):
    if container.has_api_logs:
        return build_log_generator
    return build_no_log_generator


def build_no_log_generator(container, log_args):
    """Return a generator that prints a warning about logs and waits for
    container to exit.
    """
    yield "WARNING: no logs are available with the '{}' log driver\n".format(
        container.log_driver)


def build_log_generator(container, log_args):
    # if the container doesn't have a log_stream we need to attach to container
    # before log printer starts running
```
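A trimmed, runnable copy of the new `LogPresenter` behaviour, showing exactly what `--no-log-prefix` changes; the `FakeContainer` stand-in and `no_color` helper are invented for the demo:

```python
class FakeContainer:
    """Stand-in for compose's Container object."""
    name_without_project = 'web_1'


class LogPresenter:
    """Trimmed copy of compose.cli.log_printer.LogPresenter."""
    def __init__(self, prefix_width, color_func, keep_prefix=True):
        self.prefix_width = prefix_width
        self.color_func = color_func
        self.keep_prefix = keep_prefix

    def present(self, container, line):
        to_log = line
        if self.keep_prefix:
            # Pad the name so prefixes line up across services, then tag with '|'.
            prefix = container.name_without_project.ljust(self.prefix_width)
            to_log = self.color_func(prefix + ' |') + ' ' + to_log
        return to_log


def no_color(text):
    return text


with_prefix = LogPresenter(10, no_color)
without_prefix = LogPresenter(10, no_color, keep_prefix=False)
assert with_prefix.present(FakeContainer(), 'ready') == 'web_1      | ready'
assert without_prefix.present(FakeContainer(), 'ready') == 'ready'
```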
```diff
@@ -2,7 +2,6 @@ import contextlib
import functools
import json
import logging
import os
import pipes
import re
import subprocess
@@ -24,8 +23,11 @@ from ..config import resolve_build_args
from ..config.environment import Environment
from ..config.serialize import serialize_config
from ..config.types import VolumeSpec
from ..const import IS_LINUX_PLATFORM
from ..const import IS_WINDOWS_PLATFORM
from ..errors import StreamParseError
from ..metrics.decorator import metrics
from ..parallel import ParallelStreamWriter
from ..progress_stream import StreamOutputError
from ..project import get_image_digests
from ..project import MissingDigests
@@ -38,7 +40,10 @@ from ..service import ConvergenceStrategy
from ..service import ImageType
from ..service import NeedsBuildError
from ..service import OperationFailedError
from ..utils import filter_attached_for_up
from .colors import AnsiMode
from .command import get_config_from_options
from .command import get_project_dir
from .command import project_from_options
from .docopt_command import DocoptDispatcher
from .docopt_command import get_handler
@@ -51,60 +56,132 @@ from .log_printer import LogPrinter
from .utils import get_version_info
from .utils import human_readable_file_size
from .utils import yesno
from compose.metrics.client import MetricsCommand
from compose.metrics.client import Status


if not IS_WINDOWS_PLATFORM:
    from dockerpty.pty import PseudoTerminal, RunOperation, ExecOperation

log = logging.getLogger(__name__)
console_handler = logging.StreamHandler(sys.stderr)


def main():
def main():  # noqa: C901
    signals.ignore_sigpipe()
    command = None
    try:
        command = dispatch()
        command()
        _, opts, command = DocoptDispatcher.get_command_and_options(
            TopLevelCommand,
            get_filtered_args(sys.argv[1:]),
            {'options_first': True, 'version': get_version_info('compose')})
    except Exception:
        pass
    try:
        command_func = dispatch()
        command_func()
        if not IS_LINUX_PLATFORM and command == 'help':
            print("\nDocker Compose is now in the Docker CLI, try `docker compose` help")
    except (KeyboardInterrupt, signals.ShutdownException):
        log.error("Aborting.")
        sys.exit(1)
        exit_with_metrics(command, "Aborting.", status=Status.CANCELED)
    except (UserError, NoSuchService, ConfigurationError,
            ProjectError, OperationFailedError) as e:
        log.error(e.msg)
        sys.exit(1)
        exit_with_metrics(command, e.msg, status=Status.FAILURE)
    except BuildError as e:
        reason = ""
        if e.reason:
            reason = " : " + e.reason
        log.error("Service '{}' failed to build{}".format(e.service.name, reason))
        sys.exit(1)
        exit_with_metrics(command,
                          "Service '{}' failed to build{}".format(e.service.name, reason),
                          status=Status.FAILURE)
    except StreamOutputError as e:
        log.error(e)
        sys.exit(1)
        exit_with_metrics(command, e, status=Status.FAILURE)
    except NeedsBuildError as e:
        log.error("Service '{}' needs to be built, but --no-build was passed.".format(e.service.name))
        sys.exit(1)
        exit_with_metrics(command,
                          "Service '{}' needs to be built, but --no-build was passed.".format(
                              e.service.name), status=Status.FAILURE)
    except NoSuchCommand as e:
        commands = "\n".join(parse_doc_section("commands:", getdoc(e.supercommand)))
        log.error("No such command: %s\n\n%s", e.command, commands)
        sys.exit(1)
        if not IS_LINUX_PLATFORM:
            commands += "\n\nDocker Compose is now in the Docker CLI, try `docker compose`"
        exit_with_metrics("", log_msg="No such command: {}\n\n{}".format(
            e.command, commands), status=Status.FAILURE)
    except (errors.ConnectionError, StreamParseError):
        sys.exit(1)
        exit_with_metrics(command, status=Status.FAILURE)
    except SystemExit as e:
        status = Status.SUCCESS
        if len(sys.argv) > 1 and '--help' not in sys.argv:
            status = Status.FAILURE

        if command and len(sys.argv) >= 3 and sys.argv[2] == '--help':
            command = '--help ' + command

        if not command and len(sys.argv) >= 2 and sys.argv[1] == '--help':
            command = '--help'

        msg = e.args[0] if len(e.args) else ""
        code = 0
        if isinstance(e.code, int):
            code = e.code

        if not IS_LINUX_PLATFORM and not command:
            msg += "\n\nDocker Compose is now in the Docker CLI, try `docker compose`"

        exit_with_metrics(command, log_msg=msg, status=status,
                          exit_code=code)


def get_filtered_args(args):
    if args[0] in ('-h', '--help'):
        return []
    if args[0] == '--version':
        return ['version']


def exit_with_metrics(command, log_msg=None, status=Status.SUCCESS, exit_code=1):
    if log_msg and command != 'exec':
        if not exit_code:
            log.info(log_msg)
        else:
            log.error(log_msg)

    MetricsCommand(command, status=status).send_metrics()
    sys.exit(exit_code)
```
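The shape of the rewrite above: every exit path in `main()` now funnels through `exit_with_metrics`, so a usage metric is emitted whether a run succeeds, fails, or is cancelled. A minimal sketch of that pattern, with `FakeMetricsClient` standing in for `compose.metrics.client.MetricsCommand`:

```python
import sys


class FakeMetricsClient:
    """Stand-in for MetricsCommand; just records the outcome."""
    def __init__(self, command, status):
        self.command, self.status = command, status

    def send_metrics(self):
        print('metric: command={} status={}'.format(self.command, self.status))


def exit_with_metrics(command, status, exit_code=1):
    # Report the outcome first, then terminate with the requested code.
    FakeMetricsClient(command, status).send_metrics()
    sys.exit(exit_code)


def main():
    try:
        raise KeyboardInterrupt()  # simulate Ctrl-C during `docker-compose up`
    except KeyboardInterrupt:
        exit_with_metrics('up', 'canceled', exit_code=1)


if __name__ == '__main__':
    main()  # prints the metric, then exits with code 1
```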
```diff
def dispatch():
    setup_logging()
    console_stream = sys.stderr
    console_handler = logging.StreamHandler(console_stream)
    setup_logging(console_handler)
    dispatcher = DocoptDispatcher(
        TopLevelCommand,
        {'options_first': True, 'version': get_version_info('compose')})

    options, handler, command_options = dispatcher.parse(sys.argv[1:])

    ansi_mode = AnsiMode.AUTO
    try:
        if options.get("--ansi"):
            ansi_mode = AnsiMode(options.get("--ansi"))
    except ValueError:
        raise UserError(
            'Invalid value for --ansi: {}. Expected one of {}.'.format(
                options.get("--ansi"),
                ', '.join(m.value for m in AnsiMode)
            )
        )
    if options.get("--no-ansi"):
        if options.get("--ansi"):
            raise UserError("--no-ansi and --ansi cannot be combined.")
        log.warning('--no-ansi option is deprecated and will be removed in future versions. '
                    'Use `--ansi never` instead.')
        ansi_mode = AnsiMode.NEVER

    setup_console_handler(console_handler,
                          options.get('--verbose'),
                          set_no_color_if_clicolor(options.get('--no-ansi')),
                          ansi_mode.use_ansi_codes(console_handler.stream),
                          options.get("--log-level"))
    setup_parallel_logger(set_no_color_if_clicolor(options.get('--no-ansi')))
    if options.get('--no-ansi'):
    setup_parallel_logger(ansi_mode)
    if ansi_mode is AnsiMode.NEVER:
        command_options['--no-color'] = True
    return functools.partial(perform_command, options, handler, command_options)
```
@@ -126,23 +203,23 @@ def perform_command(options, handler, command_options):
|
||||
handler(command, command_options)
|
||||
|
||||
|
||||
def setup_logging():
|
||||
def setup_logging(console_handler):
|
||||
root_logger = logging.getLogger()
|
||||
root_logger.addHandler(console_handler)
|
||||
root_logger.setLevel(logging.DEBUG)
|
||||
|
||||
# Disable requests logging
|
||||
# Disable requests and docker-py logging
|
||||
logging.getLogger("urllib3").propagate = False
|
||||
logging.getLogger("requests").propagate = False
|
||||
logging.getLogger("docker").propagate = False
|
||||
|
||||
|
||||
def setup_parallel_logger(noansi):
|
||||
if noansi:
|
||||
import compose.parallel
|
||||
compose.parallel.ParallelStreamWriter.set_noansi()
|
||||
def setup_parallel_logger(ansi_mode):
|
||||
ParallelStreamWriter.set_default_ansi_mode(ansi_mode)
|
||||
|
||||
|
||||
def setup_console_handler(handler, verbose, noansi=False, level=None):
|
||||
if handler.stream.isatty() and noansi is False:
|
||||
def setup_console_handler(handler, verbose, use_console_formatter=True, level=None):
|
||||
if use_console_formatter:
|
||||
format_class = ConsoleWarningFormatter
|
||||
else:
|
||||
format_class = logging.Formatter
|
||||
@@ -182,7 +259,7 @@ class TopLevelCommand:
|
||||
"""Define and run multi-container applications with Docker.
|
||||
|
||||
Usage:
|
||||
docker-compose [-f <arg>...] [options] [--] [COMMAND] [ARGS...]
|
||||
docker-compose [-f <arg>...] [--profile <name>...] [options] [--] [COMMAND] [ARGS...]
|
||||
docker-compose -h|--help
|
||||
|
||||
Options:
|
||||
@@ -190,10 +267,12 @@ class TopLevelCommand:
|
||||
(default: docker-compose.yml)
|
||||
-p, --project-name NAME Specify an alternate project name
|
||||
(default: directory name)
|
||||
--profile NAME Specify a profile to enable
|
||||
-c, --context NAME Specify a context name
|
||||
--verbose Show more output
|
||||
--log-level LEVEL Set log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
|
||||
--no-ansi Do not print ANSI control characters
|
||||
--ansi (never|always|auto) Control when to print ANSI control characters
|
||||
--no-ansi Do not print ANSI control characters (DEPRECATED)
|
||||
-v, --version Print version and exit
|
||||
-H, --host HOST Daemon socket to connect to
|
||||
|
||||
@@ -214,7 +293,7 @@ class TopLevelCommand:
|
||||
build Build or rebuild services
|
||||
config Validate and view the Compose file
|
||||
create Create services
|
||||
down Stop and remove containers, networks, images, and volumes
|
||||
down Stop and remove resources
|
||||
events Receive real time events from containers
|
||||
exec Execute a command in a running container
|
||||
help Get help on a command
|
||||
@@ -244,13 +323,14 @@ class TopLevelCommand:
|
||||
|
||||
@property
|
||||
def project_dir(self):
|
||||
return self.toplevel_options.get('--project-directory') or '.'
|
||||
return get_project_dir(self.toplevel_options)
|
||||
|
||||
@property
|
||||
def toplevel_environment(self):
|
||||
environment_file = self.toplevel_options.get('--env-file')
|
||||
return Environment.from_env_file(self.project_dir, environment_file)
|
||||
|
||||
@metrics()
|
||||
def build(self, options):
|
||||
"""
|
||||
Build or rebuild services.
|
||||
@@ -270,8 +350,6 @@ class TopLevelCommand:
|
||||
--no-rm Do not remove intermediate containers after a successful build.
|
||||
--parallel Build images in parallel.
|
||||
--progress string Set type of progress output (auto, plain, tty).
|
||||
EXPERIMENTAL flag for native builder.
|
||||
To enable, run with COMPOSE_DOCKER_CLI_BUILD=1)
|
||||
--pull Always attempt to pull a newer version of the image.
|
||||
-q, --quiet Don't print anything to STDOUT
|
||||
"""
|
||||
@@ -285,7 +363,7 @@ class TopLevelCommand:
|
||||
)
|
||||
build_args = resolve_build_args(build_args, self.toplevel_environment)
|
||||
|
||||
native_builder = self.toplevel_environment.get_boolean('COMPOSE_DOCKER_CLI_BUILD')
|
||||
native_builder = self.toplevel_environment.get_boolean('COMPOSE_DOCKER_CLI_BUILD', True)
|
||||
|
||||
self.project.build(
|
||||
service_names=options['SERVICE'],
|
||||
@@ -302,6 +380,7 @@ class TopLevelCommand:
|
||||
progress=options.get('--progress'),
|
||||
)
|
||||
|
||||
@metrics()
|
||||
def config(self, options):
|
||||
"""
|
||||
Validate and view the Compose file.
|
||||
@@ -313,6 +392,7 @@ class TopLevelCommand:
|
||||
--no-interpolate Don't interpolate environment variables.
|
||||
-q, --quiet Only validate the configuration, don't print
|
||||
anything.
|
||||
--profiles Print the profile names, one per line.
|
||||
--services Print the service names, one per line.
|
||||
--volumes Print the volume names, one per line.
|
||||
--hash="*" Print the service config hash, one per line.
|
||||
@@ -332,6 +412,15 @@ class TopLevelCommand:
|
||||
if options['--quiet']:
|
||||
return
|
||||
|
||||
if options['--profiles']:
|
||||
profiles = set()
|
||||
for service in compose_config.services:
|
||||
if 'profiles' in service:
|
||||
for profile in service['profiles']:
|
||||
profiles.add(profile)
|
||||
print('\n'.join(sorted(profiles)))
|
||||
return
|
||||
|
||||
if options['--services']:
|
||||
print('\n'.join(service['name'] for service in compose_config.services))
|
||||
return
|
||||
@@ -351,6 +440,7 @@ class TopLevelCommand:
|
||||
|
||||
print(serialize_config(compose_config, image_digests, not options['--no-interpolate']))
|
||||
|
||||
@metrics()
|
||||
def create(self, options):
|
||||
"""
|
||||
Creates containers for a service.
|
||||
@@ -379,6 +469,7 @@ class TopLevelCommand:
|
||||
do_build=build_action_from_opts(options),
|
||||
)
|
||||
|
||||
@metrics()
|
||||
def down(self, options):
|
||||
"""
|
||||
Stops containers and removes containers, networks, volumes, and images
|
||||
@@ -430,6 +521,7 @@ class TopLevelCommand:
|
||||
Options:
|
||||
--json Output events as a stream of json objects
|
||||
"""
|
||||
|
||||
def format_event(event):
|
||||
attributes = ["%s=%s" % item for item in event['attributes'].items()]
|
||||
return ("{time} {type} {action} {id} ({attrs})").format(
|
||||
@@ -446,6 +538,7 @@ class TopLevelCommand:
|
||||
print(formatter(event))
|
||||
sys.stdout.flush()
|
||||
|
||||
@metrics("exec")
|
||||
def exec_command(self, options):
|
||||
"""
|
||||
Execute a command in a running container
|
||||
@@ -522,6 +615,7 @@ class TopLevelCommand:
|
||||
sys.exit(exit_code)
|
||||
|
||||
@classmethod
|
||||
@metrics()
|
||||
def help(cls, options):
|
||||
"""
|
||||
Get help on a command.
|
||||
@@ -535,6 +629,7 @@ class TopLevelCommand:
|
||||
|
||||
print(getdoc(subject))
|
||||
|
||||
@metrics()
|
||||
def images(self, options):
|
||||
"""
|
||||
List images used by the created containers.
|
||||
@@ -589,6 +684,7 @@ class TopLevelCommand:
|
||||
])
|
||||
print(Formatter.table(headers, rows))
|
||||
|
||||
@metrics()
|
||||
def kill(self, options):
|
||||
"""
|
||||
Force stop service containers.
|
||||
@@ -603,6 +699,7 @@ class TopLevelCommand:
|
||||
|
||||
self.project.kill(service_names=options['SERVICE'], signal=signal)
|
||||
|
||||
@metrics()
|
||||
def logs(self, options):
|
||||
"""
|
||||
View output from containers.
|
||||
@@ -610,11 +707,12 @@ class TopLevelCommand:
|
||||
Usage: logs [options] [--] [SERVICE...]
|
||||
|
||||
Options:
|
||||
--no-color Produce monochrome output.
|
||||
-f, --follow Follow log output.
|
||||
-t, --timestamps Show timestamps.
|
||||
--tail="all" Number of lines to show from the end of the logs
|
||||
for each container.
|
||||
--no-color Produce monochrome output.
|
||||
-f, --follow Follow log output.
|
||||
-t, --timestamps Show timestamps.
|
||||
--tail="all" Number of lines to show from the end of the logs
|
||||
for each container.
|
||||
--no-log-prefix Don't print prefix in logs.
|
||||
"""
|
||||
containers = self.project.containers(service_names=options['SERVICE'], stopped=True)
|
||||
|
||||
@@ -633,10 +731,12 @@ class TopLevelCommand:
|
||||
log_printer_from_project(
|
||||
self.project,
|
||||
containers,
|
||||
set_no_color_if_clicolor(options['--no-color']),
|
||||
options['--no-color'],
|
||||
log_args,
|
||||
event_stream=self.project.events(service_names=options['SERVICE'])).run()
|
||||
event_stream=self.project.events(service_names=options['SERVICE']),
|
||||
keep_prefix=not options['--no-log-prefix']).run()
|
||||
|
||||
@metrics()
|
||||
def pause(self, options):
|
||||
"""
|
||||
Pause services.
|
||||
@@ -646,6 +746,7 @@ class TopLevelCommand:
|
||||
containers = self.project.pause(service_names=options['SERVICE'])
|
||||
exit_if(not containers, 'No containers to pause', 1)
|
||||
|
||||
@metrics()
|
||||
def port(self, options):
|
||||
"""
|
||||
Print the public port for a port binding.
|
||||
@@ -667,6 +768,7 @@ class TopLevelCommand:
|
||||
options['PRIVATE_PORT'],
|
||||
protocol=options.get('--protocol') or 'tcp') or '')
|
||||
|
||||
@metrics()
|
||||
def ps(self, options):
|
||||
"""
|
||||
List containers.
|
||||
@@ -723,6 +825,7 @@ class TopLevelCommand:
|
||||
])
|
||||
print(Formatter.table(headers, rows))
|
||||
|
||||
@metrics()
|
||||
def pull(self, options):
|
||||
"""
|
||||
Pulls images for services defined in a Compose file, but does not start the containers.
|
||||
@@ -746,6 +849,7 @@ class TopLevelCommand:
|
||||
include_deps=options.get('--include-deps'),
|
||||
)
|
||||
|
||||
@metrics()
|
||||
def push(self, options):
|
||||
"""
|
||||
Pushes images for services.
|
||||
@@ -760,6 +864,7 @@ class TopLevelCommand:
|
||||
ignore_push_failures=options.get('--ignore-push-failures')
|
||||
)
|
||||
|
||||
@metrics()
|
||||
def rm(self, options):
|
||||
"""
|
||||
Removes stopped service containers.
|
||||
@@ -804,6 +909,7 @@ class TopLevelCommand:
|
||||
else:
|
||||
print("No stopped containers")
|
||||
|
||||

@metrics()
def run(self, options):
"""
Run a one-off command on a service.
@@ -864,6 +970,7 @@ class TopLevelCommand:
self.toplevel_options, self.toplevel_environment
)

@metrics()
def scale(self, options):
"""
Set number of containers to run for a service.
@@ -892,6 +999,7 @@ class TopLevelCommand:
for service_name, num in parse_scale_args(options['SERVICE=NUM']).items():
self.project.get_service(service_name).scale(num, timeout=timeout)

@metrics()
def start(self, options):
"""
Start existing containers.
@@ -901,6 +1009,7 @@ class TopLevelCommand:
containers = self.project.start(service_names=options['SERVICE'])
exit_if(not containers, 'No containers to start', 1)

@metrics()
def stop(self, options):
"""
Stop running containers without removing them.
@@ -916,6 +1025,7 @@ class TopLevelCommand:
timeout = timeout_from_opts(options)
self.project.stop(service_names=options['SERVICE'], timeout=timeout)

@metrics()
def restart(self, options):
"""
Restart running containers.
@@ -930,6 +1040,7 @@ class TopLevelCommand:
containers = self.project.restart(service_names=options['SERVICE'], timeout=timeout)
exit_if(not containers, 'No containers to restart', 1)

@metrics()
def top(self, options):
"""
Display the running processes
@@ -957,6 +1068,7 @@ class TopLevelCommand:
print(container.name)
print(Formatter.table(headers, rows))

@metrics()
def unpause(self, options):
"""
Unpause services.
@@ -966,6 +1078,7 @@ class TopLevelCommand:
containers = self.project.unpause(service_names=options['SERVICE'])
exit_if(not containers, 'No containers to unpause', 1)

@metrics()
def up(self, options):
"""
Builds, (re)creates, starts, and attaches to containers for a service.
@@ -1017,6 +1130,7 @@ class TopLevelCommand:
container. Implies --abort-on-container-exit.
--scale SERVICE=NUM Scale SERVICE to NUM instances. Overrides the
`scale` setting in the Compose file if present.
--no-log-prefix Don't print prefix in logs.
"""
start_deps = not options['--no-deps']
always_recreate_deps = options['--always-recreate-deps']
@@ -1028,6 +1142,7 @@ class TopLevelCommand:
detached = options.get('--detach')
no_start = options.get('--no-start')
attach_dependencies = options.get('--attach-dependencies')
keep_prefix = not options.get('--no-log-prefix')

if detached and (cascade_stop or exit_value_from or attach_dependencies):
raise UserError(
@@ -1042,7 +1157,7 @@ class TopLevelCommand:
for excluded in [x for x in opts if options.get(x) and no_start]:
raise UserError('--no-start and {} cannot be combined.'.format(excluded))

native_builder = self.toplevel_environment.get_boolean('COMPOSE_DOCKER_CLI_BUILD')
native_builder = self.toplevel_environment.get_boolean('COMPOSE_DOCKER_CLI_BUILD', True)

with up_shutdown_context(self.project, service_names, timeout, detached):
warn_for_swarm_mode(self.project.client)
@@ -1064,6 +1179,7 @@ class TopLevelCommand:
renew_anonymous_volumes=options.get('--renew-anon-volumes'),
silent=options.get('--quiet-pull'),
cli=native_builder,
attach_dependencies=attach_dependencies,
)

try:
@@ -1091,10 +1207,11 @@ class TopLevelCommand:
log_printer = log_printer_from_project(
self.project,
attached_containers,
set_no_color_if_clicolor(options['--no-color']),
options['--no-color'],
{'follow': True},
cascade_stop,
event_stream=self.project.events(service_names=service_names))
event_stream=self.project.events(service_names=service_names),
keep_prefix=keep_prefix)
print("Attaching to", list_containers(log_printer.containers))
cascade_starter = log_printer.run()

@@ -1112,6 +1229,7 @@ class TopLevelCommand:
sys.exit(exit_code)

@classmethod
@metrics()
def version(cls, options):
"""
Show version information and quit.
@@ -1376,29 +1494,28 @@ def get_docker_start_call(container_options, container_id):


def log_printer_from_project(
project,
containers,
monochrome,
log_args,
cascade_stop=False,
event_stream=None,
project,
containers,
monochrome,
log_args,
cascade_stop=False,
event_stream=None,
keep_prefix=True,
):
return LogPrinter(
containers,
build_log_presenters(project.service_names, monochrome),
[c for c in containers if c.log_driver not in (None, 'none')],
build_log_presenters(project.service_names, monochrome, keep_prefix),
event_stream or project.events(),
cascade_stop=cascade_stop,
log_args=log_args)
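
The refactor above threads a new keep_prefix flag (driven by --no-log-prefix) down to build_log_presenters, and also drops containers whose log driver is none before attaching. A minimal sketch of what the flag toggles, using a hypothetical present_line helper rather than the real presenter class:

    # Hypothetical sketch of the behaviour keep_prefix controls; the real
    # presenter also handles colors and name-column padding.
    def present_line(service_name, line, keep_prefix=True):
        prefix = '{}  | '.format(service_name) if keep_prefix else ''
        return prefix + line

    print(present_line('web', 'GET / 200'))                     # "web  | GET / 200"
    print(present_line('web', 'GET / 200', keep_prefix=False))  # "GET / 200"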


def filter_attached_containers(containers, service_names, attach_dependencies=False):
if attach_dependencies or not service_names:
return containers

return [
container
for container in containers if container.service in service_names
]
return filter_attached_for_up(
containers,
service_names,
attach_dependencies,
lambda container: container.service)


@contextlib.contextmanager
@@ -1574,7 +1691,3 @@ def warn_for_swarm_mode(client):
"To deploy your application across the swarm, "
"use `docker stack deploy`.\n"
)


def set_no_color_if_clicolor(no_color_flag):
return no_color_flag or os.environ.get('CLICOLOR') == "0"

@@ -1,14 +1,16 @@
{
"$schema": "http://json-schema.org/draft/2019-09/schema#",
"id": "config_schema_compose_spec.json",
"id": "compose_spec.json",
"type": "object",
"title": "Compose Specification",
"description": "The Compose file is a YAML file defining a multi-container application.",

"properties": {
"version": {
"type": "string",
"description": "Version of the Compose specification used. Tools not implementing required version MUST reject the configuration file."
},

"services": {
"id": "#/properties/services",
"type": "object",
@@ -19,6 +21,7 @@
},
"additionalProperties": false
},

"networks": {
"id": "#/properties/networks",
"type": "object",
@@ -28,6 +31,7 @@
}
}
},

"volumes": {
"id": "#/properties/volumes",
"type": "object",
@@ -38,6 +42,7 @@
},
"additionalProperties": false
},

"secrets": {
"id": "#/properties/secrets",
"type": "object",
@@ -48,6 +53,7 @@
},
"additionalProperties": false
},

"configs": {
"id": "#/properties/configs",
"type": "object",
@@ -59,12 +65,16 @@
"additionalProperties": false
}
},

"patternProperties": {"^x-": {}},
"additionalProperties": false,

"definitions": {

"service": {
"id": "#/definitions/service",
"type": "object",

"properties": {
"deploy": {"$ref": "#/definitions/deployment"},
"build": {
@@ -77,7 +87,7 @@
"dockerfile": {"type": "string"},
"args": {"$ref": "#/definitions/list_or_dict"},
"labels": {"$ref": "#/definitions/list_or_dict"},
"cache_from": {"$ref": "#/definitions/list_of_strings"},
"cache_from": {"type": "array", "items": {"type": "string"}},
"network": {"type": "string"},
"target": {"type": "string"},
"shm_size": {"type": ["integer", "string"]},
@@ -178,7 +188,7 @@
"properties": {
"condition": {
"type": "string",
"enum": ["service_started", "service_healthy"]
"enum": ["service_started", "service_healthy", "service_completed_successfully"]
}
},
"required": ["condition"]
@@ -190,7 +200,6 @@
"device_cgroup_rules": {"$ref": "#/definitions/list_of_strings"},
"devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"dns": {"$ref": "#/definitions/string_or_list"},

"dns_opt": {"type": "array","items": {"type": "string"}, "uniqueItems": true},
"dns_search": {"$ref": "#/definitions/string_or_list"},
"domainname": {"type": "string"},
@@ -211,12 +220,12 @@
},
"uniqueItems": true
},

"extends": {
"oneOf": [
{"type": "string"},
{
"type": "object",

"properties": {
"service": {"type": "string"},
"file": {"type": "string"}
@@ -245,6 +254,7 @@
"links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"logging": {
"type": "object",

"properties": {
"driver": {"type": "string"},
"options": {
@@ -258,7 +268,7 @@
"patternProperties": {"^x-": {}}
},
"mac_address": {"type": "string"},
"mem_limit": {"type": "string"},
"mem_limit": {"type": ["number", "string"]},
"mem_reservation": {"type": ["string", "integer"]},
"mem_swappiness": {"type": "integer"},
"memswap_limit": {"type": ["number", "string"]},
@@ -318,13 +328,13 @@
"uniqueItems": true
},
"privileged": {"type": "boolean"},
"profiles": {"$ref": "#/definitions/list_of_strings"},
"pull_policy": {"type": "string", "enum": [
"always", "never", "if_not_present"
"always", "never", "if_not_present", "build"
]},
"read_only": {"type": "boolean"},
"restart": {"type": "string"},
"runtime": {
"deprecated": true,
"type": "string"
},
"scale": {
@@ -356,6 +366,7 @@
"stdin_open": {"type": "boolean"},
"stop_grace_period": {"type": "string", "format": "duration"},
"stop_signal": {"type": "string"},
"storage_opt": {"type": "object"},
"tmpfs": {"$ref": "#/definitions/string_or_list"},
"tty": {"type": "boolean"},
"ulimits": {
@@ -425,9 +436,9 @@
"additionalProperties": false,
"patternProperties": {"^x-": {}}
}
],
"uniqueItems": true
}
]
},
"uniqueItems": true
},
"volumes_from": {
"type": "array",
@@ -514,7 +525,8 @@
"properties": {
"cpus": {"type": ["number", "string"]},
"memory": {"type": "string"},
"generic_resources": {"$ref": "#/definitions/generic_resources"}
"generic_resources": {"$ref": "#/definitions/generic_resources"},
"devices": {"$ref": "#/definitions/devices"}
},
"additionalProperties": false,
"patternProperties": {"^x-": {}}
@@ -558,6 +570,7 @@
"additionalProperties": false,
"patternProperties": {"^x-": {}}
},

"generic_resources": {
"id": "#/definitions/generic_resources",
"type": "array",
@@ -578,6 +591,24 @@
"patternProperties": {"^x-": {}}
}
},

"devices": {
"id": "#/definitions/devices",
"type": "array",
"items": {
"type": "object",
"properties": {
"capabilities": {"$ref": "#/definitions/list_of_strings"},
"count": {"type": ["string", "integer"]},
"device_ids": {"$ref": "#/definitions/list_of_strings"},
"driver":{"type": "string"},
"options":{"$ref": "#/definitions/list_or_dict"}
},
"additionalProperties": false,
"patternProperties": {"^x-": {}}
}
},

"network": {
"id": "#/definitions/network",
"type": ["object", "null"],
@@ -607,10 +638,10 @@
"additionalProperties": false,
"patternProperties": {"^.+$": {"type": "string"}}
}
}
},
"additionalProperties": false,
"patternProperties": {"^x-": {}}
},
"additionalProperties": false,
"patternProperties": {"^x-": {}}
}
},
"options": {
"type": "object",
@@ -640,6 +671,7 @@
"additionalProperties": false,
"patternProperties": {"^x-": {}}
},

"volume": {
"id": "#/definitions/volume",
"type": ["object", "null"],
@@ -668,6 +700,7 @@
"additionalProperties": false,
"patternProperties": {"^x-": {}}
},

"secret": {
"id": "#/definitions/secret",
"type": "object",
@@ -693,6 +726,7 @@
"additionalProperties": false,
"patternProperties": {"^x-": {}}
},

"config": {
"id": "#/definitions/config",
"type": "object",
@@ -714,17 +748,20 @@
"additionalProperties": false,
"patternProperties": {"^x-": {}}
},

"string_or_list": {
"oneOf": [
{"type": "string"},
{"$ref": "#/definitions/list_of_strings"}
]
},

"list_of_strings": {
"type": "array",
"items": {"type": "string"},
"uniqueItems": true
},

"list_or_dict": {
"oneOf": [
{
@@ -739,6 +776,7 @@
{"type": "array", "items": {"type": "string"}, "uniqueItems": true}
]
},

"blkio_limit": {
"type": "object",
"properties": {
@@ -755,6 +793,7 @@
},
"additionalProperties": false
},

"constraints": {
"service": {
"id": "#/definitions/constraints/service",
@@ -10,7 +10,11 @@ from operator import attrgetter
from operator import itemgetter

import yaml
from cached_property import cached_property

try:
from functools import cached_property
except ImportError:
from cached_property import cached_property

from . import types
from ..const import COMPOSE_SPEC as VERSION
@@ -20,6 +24,7 @@ from ..utils import json_hash
from ..utils import parse_bytes
from ..utils import parse_nanoseconds_int
from ..utils import splitdrive
from ..version import ComposeVersion
from .environment import env_vars_from_file
from .environment import Environment
from .environment import split_env
@@ -132,6 +137,7 @@ ALLOWED_KEYS = DOCKER_CONFIG_KEYS + [
'logging',
'network_mode',
'platform',
'profiles',
'scale',
'stop_grace_period',
]
@@ -147,9 +153,14 @@ DOCKER_VALID_URL_PREFIXES = (
SUPPORTED_FILENAMES = [
'docker-compose.yml',
'docker-compose.yaml',
'compose.yml',
'compose.yaml',
]

DEFAULT_OVERRIDE_FILENAMES = ('docker-compose.override.yml', 'docker-compose.override.yaml')
DEFAULT_OVERRIDE_FILENAMES = ('docker-compose.override.yml',
'docker-compose.override.yaml',
'compose.override.yml',
'compose.override.yaml')
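
With compose.yml / compose.yaml (and their .override variants) now supported alongside the docker-compose names, discovery tries the candidate names in order and the first match wins. A rough, self-contained sketch of the lookup, assuming a plain single-directory scan instead of the real parent-directory search:

    import os

    SUPPORTED_FILENAMES = ['docker-compose.yml', 'docker-compose.yaml',
                           'compose.yml', 'compose.yaml']

    def find_config(directory):
        # First existing candidate wins, in the order listed above.
        for name in SUPPORTED_FILENAMES:
            candidate = os.path.join(directory, name)
            if os.path.exists(candidate):
                return candidate
        return None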


log = logging.getLogger(__name__)
@@ -184,6 +195,13 @@ class ConfigFile(namedtuple('_ConfigFile', 'filename config')):
def from_filename(cls, filename):
return cls(filename, load_yaml(filename))

@cached_property
def config_version(self):
version = self.config.get('version', None)
if isinstance(version, dict):
return V1
return ComposeVersion(version) if version else self.version
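
config_version preserves whatever version string the file declared (it is what serialization writes back out), while the reworked version property below collapses all modern files onto the single Compose specification version. Since ConfigFile is a namedtuple, the difference is easy to see; a sketch assuming the class above is in scope:

    cfg = ConfigFile('docker-compose.yml', {'version': '3.8', 'services': {}})
    cfg.config_version  # ComposeVersion('3.8'): echoes what the file said
    cfg.version         # the normalized Compose spec version used for validation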

@cached_property
def version(self):
version = self.config.get('version', None)
@@ -222,15 +240,13 @@ class ConfigFile(namedtuple('_ConfigFile', 'filename config')):
'Version "{}" in "{}" is invalid.'
.format(version, self.filename))

if version.startswith("1"):
version = V1

if version == V1:
if version.startswith("1"):
raise ConfigurationError(
'Version in "{}" is invalid. {}'
.format(self.filename, VERSION_EXPLANATION)
)
return version

return VERSION

def get_service(self, name):
return self.get_service_dicts()[name]
@@ -253,8 +269,10 @@ class ConfigFile(namedtuple('_ConfigFile', 'filename config')):
return {} if self.version == V1 else self.config.get('configs', {})


class Config(namedtuple('_Config', 'version services volumes networks secrets configs')):
class Config(namedtuple('_Config', 'config_version version services volumes networks secrets configs')):
"""
:param config_version: configuration file version
:type config_version: int
:param version: configuration version
:type version: int
:param services: List of service description dictionaries
@@ -295,7 +313,16 @@ def find(base_dir, filenames, environment, override_dir=None):
if filenames:
filenames = [os.path.join(base_dir, f) for f in filenames]
else:
# search for compose files in the base dir and its parents
filenames = get_default_config_files(base_dir)
if not filenames and not override_dir:
# none found in base_dir and no override_dir defined
raise ComposeFileNotFound(SUPPORTED_FILENAMES)
if not filenames:
# search for compose files in the project directory and its parents
filenames = get_default_config_files(override_dir)
if not filenames:
raise ComposeFileNotFound(SUPPORTED_FILENAMES)

log.debug("Using configuration files: {}".format(",".join(filenames)))
return ConfigDetails(
@@ -326,7 +353,7 @@ def get_default_config_files(base_dir):
(candidates, path) = find_candidates_in_parent_dirs(SUPPORTED_FILENAMES, base_dir)

if not candidates:
raise ComposeFileNotFound(SUPPORTED_FILENAMES)
return None

winner = candidates[0]

@@ -365,6 +392,23 @@ def find_candidates_in_parent_dirs(filenames, path):
return (candidates, path)


def check_swarm_only_config(service_dicts):
warning_template = (
"Some services ({services}) use the '{key}' key, which will be ignored. "
"Compose does not support '{key}' configuration - use "
"`docker stack deploy` to deploy to a swarm."
)
key = 'configs'
services = [s for s in service_dicts if s.get(key)]
if services:
log.warning(
warning_template.format(
services=", ".join(sorted(s['name'] for s in services)),
key=key
)
)


def load(config_details, interpolate=True):
"""Load the configuration from a working directory and a list of
configuration files. Files are loaded in order, and merged on top
@@ -401,9 +445,10 @@ def load(config_details, interpolate=True):
for service_dict in service_dicts:
match_named_volumes(service_dict, volumes)

version = main_file.version
check_swarm_only_config(service_dicts)

return Config(version, service_dicts, volumes, networks, secrets, configs)
return Config(main_file.config_version, main_file.version,
service_dicts, volumes, networks, secrets, configs)


def load_mapping(config_files, get_func, entity_type, working_dir=None):
@@ -450,9 +495,6 @@ def format_device_option(entity_type, config):
device = config['driver_opts'].get('device')
if o and o == 'bind' and device:
fullpath = os.path.abspath(os.path.expanduser(device))
if not os.path.exists(fullpath):
raise ConfigurationError(
"Device path {} does not exist.".format(fullpath))
return fullpath


@@ -532,8 +574,7 @@ def process_config_section(config_file, config, section, environment, interpolat
config_file.version,
config,
section,
environment
)
environment)
else:
return config

@@ -1043,7 +1084,7 @@ def merge_service_dicts(base, override, version):

for field in [
'cap_add', 'cap_drop', 'expose', 'external_links',
'volumes_from', 'device_cgroup_rules',
'volumes_from', 'device_cgroup_rules', 'profiles',
]:
md.merge_field(field, merge_unique_items_lists, default=[])

@@ -1162,6 +1203,7 @@ def merge_reservations(base, override):
md.merge_scalar('cpus')
md.merge_scalar('memory')
md.merge_sequence('generic_resources', types.GenericResource.parse)
md.merge_field('devices', merge_unique_objects_lists, default=[])
return dict(md)


@@ -54,9 +54,10 @@ class Environment(dict):
if base_dir is None:
return result
if env_file:
env_file_path = os.path.join(base_dir, env_file)
else:
env_file_path = os.path.join(base_dir, '.env')
env_file_path = os.path.join(os.getcwd(), env_file)
return cls(env_vars_from_file(env_file_path))

env_file_path = os.path.join(base_dir, '.env')
try:
return cls(env_vars_from_file(env_file_path))
except EnvFileNotFound:
@@ -113,13 +114,13 @@ class Environment(dict):
)
return super().get(key, *args, **kwargs)

def get_boolean(self, key):
def get_boolean(self, key, default=False):
# Convert a value to a boolean using "common sense" rules.
# Unset, empty, "0" and "false" (i-case) yield False.
# All other values yield True.
value = self.get(key)
if not value:
return False
return default
if value.lower() in ['0', 'false']:
return False
return True
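
The new default parameter is what allows COMPOSE_DOCKER_CLI_BUILD to become opt-out (note the get_boolean(..., True) call in the up hunk earlier). A self-contained sketch of the same semantics on a plain dict:

    def get_boolean(env, key, default=False):
        # Unset or empty -> default; "0"/"false" (any case) -> False; else True.
        value = env.get(key)
        if not value:
            return default
        return value.lower() not in ('0', 'false')

    assert get_boolean({}, 'COMPOSE_DOCKER_CLI_BUILD', True) is True
    assert get_boolean({'COMPOSE_DOCKER_CLI_BUILD': '0'}, 'COMPOSE_DOCKER_CLI_BUILD', True) is False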

@@ -111,12 +111,14 @@ class TemplateWithDefaults(Template):
var, _, err = braced.partition(':?')
result = mapping.get(var)
if not result:
err = err or var
raise UnsetRequiredSubstitution(err)
return result
elif '?' == sep:
var, _, err = braced.partition('?')
if var in mapping:
return mapping.get(var)
err = err or var
raise UnsetRequiredSubstitution(err)
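
The new '?' branch distinguishes ${VAR?err} (error only when VAR is truly absent) from the existing ${VAR:?err} (error when VAR is absent or empty). A standalone model of the two rules, with ValueError standing in for UnsetRequiredSubstitution:

    def required(mapping, var, err, empty_counts_as_unset):
        # empty_counts_as_unset=True models "${VAR:?err}"; False models "${VAR?err}".
        present = var in mapping and (bool(mapping[var]) or not empty_counts_as_unset)
        if not present:
            raise ValueError(err or var)
        return mapping.get(var)

    assert required({'TAG': ''}, 'TAG', 'TAG is required', False) == ''   # ${TAG?...}
    # required({'TAG': ''}, 'TAG', 'TAG is required', True) raises        # ${TAG:?...}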

# Modified from python2.7/string.py
@@ -241,6 +243,7 @@ class ConversionMap:
service_path('healthcheck', 'disable'): to_boolean,
service_path('deploy', 'labels', PATH_JOKER): to_str,
service_path('deploy', 'replicas'): to_int,
service_path('deploy', 'placement', 'max_replicas_per_node'): to_int,
service_path('deploy', 'resources', 'limits', "cpus"): to_float,
service_path('deploy', 'update_config', 'parallelism'): to_int,
service_path('deploy', 'update_config', 'max_failure_ratio'): to_float,

@@ -44,7 +44,7 @@ yaml.SafeDumper.add_representer(types.ServicePort, serialize_dict_type)


def denormalize_config(config, image_digests=None):
result = {'version': str(config.version)}
result = {'version': str(config.config_version)}
denormalized_services = [
denormalize_service_dict(
service_dict,

@@ -502,13 +502,13 @@ def get_schema_path():


def load_jsonschema(version):
suffix = "compose_spec"
name = "compose_spec"
if version == V1:
suffix = "v1"
name = "config_schema_v1"

filename = os.path.join(
get_schema_path(),
"config_schema_{}.json".format(suffix))
"{}.json".format(name))

if not os.path.exists(filename):
raise ConfigurationError(

@@ -5,6 +5,7 @@ from .version import ComposeVersion
DEFAULT_TIMEOUT = 10
HTTP_TIMEOUT = 60
IS_WINDOWS_PLATFORM = (sys.platform == "win32")
IS_LINUX_PLATFORM = (sys.platform == "linux")
LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number'
LABEL_ONE_OFF = 'com.docker.compose.oneoff'
LABEL_PROJECT = 'com.docker.compose.project'

@@ -186,11 +186,6 @@ class Container:
def log_driver(self):
return self.get('HostConfig.LogConfig.Type')

@property
def has_api_logs(self):
log_type = self.log_driver
return not log_type or log_type in ('json-file', 'journald', 'local')

@property
def human_readable_health_status(self):
""" Generate UP status string with up time and health
@@ -204,11 +199,7 @@ class Container:
return status_string

def attach_log_stream(self):
"""A log stream can only be attached if the container uses a
json-file, journald or local log driver.
"""
if self.has_api_logs:
self.log_stream = self.attach(stdout=True, stderr=True, stream=True)
self.log_stream = self.attach(stdout=True, stderr=True, stream=True)

def get(self, key):
"""Return a value from the container or None if the value is not set.

@@ -27,3 +27,8 @@ class NoHealthCheckConfigured(HealthCheckException):
service_name
)
)


class CompletedUnsuccessfully(Exception):
def __init__(self, container_id, exit_code):
self.msg = 'Container "{}" exited with code {}.'.format(container_id, exit_code)

compose/metrics/__init__.py (new file, 0 additions)
compose/metrics/client.py (new file, 64 additions)
@@ -0,0 +1,64 @@
import os
from enum import Enum

import requests
from docker import ContextAPI
from docker.transport import UnixHTTPAdapter

from compose.const import IS_WINDOWS_PLATFORM

if IS_WINDOWS_PLATFORM:
from docker.transport import NpipeHTTPAdapter


class Status(Enum):
SUCCESS = "success"
FAILURE = "failure"
CANCELED = "canceled"


class MetricsSource:
CLI = "docker-compose"


if IS_WINDOWS_PLATFORM:
METRICS_SOCKET_FILE = 'npipe://\\\\.\\pipe\\docker_cli'
else:
METRICS_SOCKET_FILE = 'http+unix:///var/run/docker-cli.sock'


class MetricsCommand(requests.Session):
"""
Representation of a command in the metrics.
"""

def __init__(self, command,
context_type=None, status=Status.SUCCESS,
source=MetricsSource.CLI, uri=None):
super().__init__()
self.command = ("compose " + command).strip() if command else "compose --help"
self.context = context_type or ContextAPI.get_current_context().context_type or 'moby'
self.source = source
self.status = status.value
self.uri = uri or os.environ.get("METRICS_SOCKET_FILE", METRICS_SOCKET_FILE)
if IS_WINDOWS_PLATFORM:
self.mount("http+unix://", NpipeHTTPAdapter(self.uri))
else:
self.mount("http+unix://", UnixHTTPAdapter(self.uri))

def send_metrics(self):
try:
return self.post("http+unix://localhost/usage",
json=self.to_map(),
timeout=.05,
headers={'Content-Type': 'application/json'})
except Exception as e:
return e

def to_map(self):
return {
'command': self.command,
'context': self.context,
'source': self.source,
'status': self.status,
}
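
Usage is fire-and-forget: the event is posted to the local Docker CLI socket with a 50 ms timeout, and send_metrics returns the exception rather than raising, so a missing socket never breaks a command. A usage sketch, assuming the class above is importable:

    # Posts {"command": "compose up", "context": "moby",
    # "source": "docker-compose", "status": "success"} to
    # http+unix://localhost/usage; any error is swallowed and returned.
    result = MetricsCommand('up', context_type='moby').send_metrics()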

compose/metrics/decorator.py (new file, 21 additions)
@@ -0,0 +1,21 @@
import functools

from compose.metrics.client import MetricsCommand
from compose.metrics.client import Status


class metrics:
def __init__(self, command_name=None):
self.command_name = command_name

def __call__(self, fn):
@functools.wraps(fn,
assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES)
def wrapper(*args, **kwargs):
if not self.command_name:
self.command_name = fn.__name__
result = fn(*args, **kwargs)
MetricsCommand(self.command_name, status=Status.SUCCESS).send_metrics()
return result
return wrapper
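
Applied to a command method, the decorator defaults the metric name to the function name. Note that as written it only ever reports Status.SUCCESS: if the wrapped call raises, the exception propagates before send_metrics runs. A usage sketch with hypothetical command functions:

    @metrics()        # reported as "compose ps" (name taken from the function)
    def ps(options):
        return []

    @metrics('down')  # explicit command name, reported as "compose down"
    def down(options):
        return []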
@@ -11,10 +11,12 @@ from threading import Thread
from docker.errors import APIError
from docker.errors import ImageNotFound

from compose.cli.colors import AnsiMode
from compose.cli.colors import green
from compose.cli.colors import red
from compose.cli.signals import ShutdownException
from compose.const import PARALLEL_LIMIT
from compose.errors import CompletedUnsuccessfully
from compose.errors import HealthCheckFailed
from compose.errors import NoHealthCheckConfigured
from compose.errors import OperationFailedError
@@ -60,7 +62,8 @@ def parallel_execute_watch(events, writer, errors, results, msg, get_name, fail_
elif isinstance(exception, APIError):
errors[get_name(obj)] = exception.explanation
writer.write(msg, get_name(obj), 'error', red)
elif isinstance(exception, (OperationFailedError, HealthCheckFailed, NoHealthCheckConfigured)):
elif isinstance(exception, (OperationFailedError, HealthCheckFailed, NoHealthCheckConfigured,
CompletedUnsuccessfully)):
errors[get_name(obj)] = exception.msg
writer.write(msg, get_name(obj), 'error', red)
elif isinstance(exception, UpstreamError):
@@ -83,10 +86,7 @@ def parallel_execute(objects, func, get_name, msg, get_deps=None, limit=None, fa
objects = list(objects)
stream = sys.stderr

if ParallelStreamWriter.instance:
writer = ParallelStreamWriter.instance
else:
writer = ParallelStreamWriter(stream)
writer = ParallelStreamWriter.get_or_assign_instance(ParallelStreamWriter(stream))

for obj in objects:
writer.add_object(msg, get_name(obj))
@@ -243,6 +243,12 @@ def feed_queue(objects, func, get_deps, results, state, limiter):
'not processing'.format(obj)
)
results.put((obj, None, e))
except CompletedUnsuccessfully as e:
log.debug(
'Service(s) upstream of {} did not complete successfully - '
'not processing'.format(obj)
)
results.put((obj, None, e))

if state.is_done():
results.put(STOP)
@@ -259,19 +265,37 @@ class ParallelStreamWriter:
to jump to the correct line, and write over the line.
"""

noansi = False
lock = Lock()
default_ansi_mode = AnsiMode.AUTO
write_lock = Lock()

instance = None
instance_lock = Lock()

@classmethod
def set_noansi(cls, value=True):
cls.noansi = value
def get_instance(cls):
return cls.instance

def __init__(self, stream):
@classmethod
def get_or_assign_instance(cls, writer):
cls.instance_lock.acquire()
try:
if cls.instance is None:
cls.instance = writer
return cls.instance
finally:
cls.instance_lock.release()
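
The acquire/try/finally/release sequence above is equivalent to a with block on the lock; a self-contained sketch of the same first-caller-wins pattern, using a hypothetical WriterRegistry class:

    from threading import Lock

    class WriterRegistry:
        instance = None
        _lock = Lock()

        @classmethod
        def get_or_assign_instance(cls, candidate):
            # First caller installs its candidate; later callers get the
            # existing instance and their candidate is discarded.
            with cls._lock:
                if cls.instance is None:
                    cls.instance = candidate
                return cls.instance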

@classmethod
def set_default_ansi_mode(cls, ansi_mode):
cls.default_ansi_mode = ansi_mode

def __init__(self, stream, ansi_mode=None):
if ansi_mode is None:
ansi_mode = self.default_ansi_mode
self.stream = stream
self.use_ansi_codes = ansi_mode.use_ansi_codes(stream)
self.lines = []
self.width = 0
ParallelStreamWriter.instance = self

def add_object(self, msg, obj_index):
if msg is None:
@@ -285,7 +309,7 @@ class ParallelStreamWriter:
return self._write_noansi(msg, obj_index, '')

def _write_ansi(self, msg, obj_index, status):
self.lock.acquire()
self.write_lock.acquire()
position = self.lines.index(msg + obj_index)
diff = len(self.lines) - position
# move up
@@ -297,7 +321,7 @@ class ParallelStreamWriter:
# move back down
self.stream.write("%c[%dB" % (27, diff))
self.stream.flush()
self.lock.release()
self.write_lock.release()

def _write_noansi(self, msg, obj_index, status):
self.stream.write(
@@ -310,17 +334,10 @@ class ParallelStreamWriter:
def write(self, msg, obj_index, status, color_func):
if msg is None:
return
if self.noansi:
self._write_noansi(msg, obj_index, status)
else:
if self.use_ansi_codes:
self._write_ansi(msg, obj_index, color_func(status))


def get_stream_writer():
instance = ParallelStreamWriter.instance
if instance is None:
raise RuntimeError('ParallelStreamWriter has not yet been instantiated')
return instance
else:
self._write_noansi(msg, obj_index, status)


def parallel_operation(containers, operation, options, message):

@@ -39,6 +39,7 @@ from .service import Service
from .service import ServiceIpcMode
from .service import ServiceNetworkMode
from .service import ServicePidMode
from .utils import filter_attached_for_up
from .utils import microseconds_from_time_nano
from .utils import truncate_string
from .volume import ProjectVolumes
@@ -68,13 +69,15 @@ class Project:
"""
A collection of services.
"""
def __init__(self, name, services, client, networks=None, volumes=None, config_version=None):
def __init__(self, name, services, client, networks=None, volumes=None, config_version=None,
enabled_profiles=None):
self.name = name
self.services = services
self.client = client
self.volumes = volumes or ProjectVolumes({})
self.networks = networks or ProjectNetworks({}, False)
self.config_version = config_version
self.enabled_profiles = enabled_profiles or []

def labels(self, one_off=OneOffFilter.exclude, legacy=False):
name = self.name
@@ -86,7 +89,8 @@ class Project:
return labels

@classmethod
def from_config(cls, name, config_data, client, default_platform=None, extra_labels=None):
def from_config(cls, name, config_data, client, default_platform=None, extra_labels=None,
enabled_profiles=None):
"""
Construct a Project from a config.Config object.
"""
@@ -98,7 +102,7 @@ class Project:
networks,
use_networking)
volumes = ProjectVolumes.from_config(name, config_data, client)
project = cls(name, [], client, project_networks, volumes, config_data.version)
project = cls(name, [], client, project_networks, volumes, config_data.version, enabled_profiles)

for service_dict in config_data.services:
service_dict = dict(service_dict)
@@ -128,7 +132,7 @@ class Project:
config_data.secrets)

service_dict['scale'] = project.get_service_scale(service_dict)

service_dict['device_requests'] = project.get_device_requests(service_dict)
service_dict = translate_credential_spec_to_security_opt(service_dict)
service_dict, ignored_keys = translate_deploy_keys_to_container_config(
service_dict
@@ -185,7 +189,7 @@ class Project:
if name not in valid_names:
raise NoSuchService(name)

def get_services(self, service_names=None, include_deps=False):
def get_services(self, service_names=None, include_deps=False, auto_enable_profiles=True):
"""
Returns a list of this project's services filtered
by the provided list of names, or all services if service_names is None
@@ -198,15 +202,36 @@ class Project:
reordering as needed to resolve dependencies.

Raises NoSuchService if any of the named services do not exist.

Raises ConfigurationError if any service depended on is not enabled by active profiles
"""
# create a copy so we can *locally* add auto-enabled profiles later
enabled_profiles = self.enabled_profiles.copy()

if service_names is None or len(service_names) == 0:
service_names = self.service_names
auto_enable_profiles = False
service_names = [
service.name
for service in self.services
if service.enabled_for_profiles(enabled_profiles)
]

unsorted = [self.get_service(name) for name in service_names]
services = [s for s in self.services if s in unsorted]

if auto_enable_profiles:
# enable profiles of explicitly targeted services
for service in services:
for profile in service.get_profiles():
if profile not in enabled_profiles:
enabled_profiles.append(profile)

if include_deps:
services = reduce(self._inject_deps, services, [])
services = reduce(
lambda acc, s: self._inject_deps(acc, s, enabled_profiles),
services,
[]
)

uniques = []
[uniques.append(s) for s in services if s not in uniques]
@@ -331,6 +356,31 @@ class Project:
max_replicas))
return scale

def get_device_requests(self, service_dict):
deploy_dict = service_dict.get('deploy', None)
if not deploy_dict:
return

resources = deploy_dict.get('resources', None)
if not resources or not resources.get('reservations', None):
return
devices = resources['reservations'].get('devices')
if not devices:
return

for dev in devices:
count = dev.get("count", -1)
if not isinstance(count, int):
if count != "all":
raise ConfigurationError(
'Invalid value "{}" for devices count'.format(dev["count"]),
'(expected integer or "all")')
dev["count"] = -1

if 'capabilities' in dev:
dev['capabilities'] = [dev['capabilities']]
return devices
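
The only normalization applied is to count: the literal string "all" becomes Docker's -1 sentinel (every matching device), anything else must already be an integer, and a bare capabilities list is wrapped once more because the Engine API expects a list of OR-ed AND-lists. A standalone sketch of the count rule:

    def normalize_device_count(dev):
        # "all" -> -1 (every matching device); other strings are rejected.
        count = dev.get('count', -1)
        if not isinstance(count, int):
            if count != 'all':
                raise ValueError('Invalid value "{}" for devices count '
                                 '(expected integer or "all")'.format(count))
            dev['count'] = -1
        return dev

    assert normalize_device_count({'count': 'all'})['count'] == -1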

def start(self, service_names=None, **options):
containers = []

@@ -412,10 +462,12 @@ class Project:
self.remove_images(remove_image_type)

def remove_images(self, remove_image_type):
for service in self.get_services():
for service in self.services:
service.remove_image(remove_image_type)

def restart(self, service_names=None, **options):
# filter service_names by enabled profiles
service_names = [s.name for s in self.get_services(service_names)]
containers = self.containers(service_names, stopped=True)

parallel.parallel_execute(
@@ -438,7 +490,6 @@ class Project:
log.info('%s uses an image, skipping' % service.name)

if cli:
log.warning("Native build is an experimental feature and could change at any time")
if parallel_build:
log.warning("Flag '--parallel' is ignored when building with "
"COMPOSE_DOCKER_CLI_BUILD=1")
@@ -594,12 +645,10 @@ class Project:
silent=False,
cli=False,
one_off=False,
attach_dependencies=False,
override_options=None,
):

if cli:
log.warning("Native build is an experimental feature and could change at any time")

self.initialize()
if not ignore_orphans:
self.find_orphan_containers(remove_orphans)
@@ -620,12 +669,17 @@ class Project:
one_off=service_names if one_off else [],
)

def do(service):
services_to_attach = filter_attached_for_up(
services,
service_names,
attach_dependencies,
lambda service: service.name)

def do(service):
return service.execute_convergence_plan(
plans[service.name],
timeout=timeout,
detached=detached,
detached=detached or (service not in services_to_attach),
scale_override=scale_override.get(service.name),
rescale=rescale,
start=start,
@@ -695,7 +749,7 @@ class Project:

return plans

def pull(self, service_names=None, ignore_pull_failures=False, parallel_pull=False, silent=False,
def pull(self, service_names=None, ignore_pull_failures=False, parallel_pull=True, silent=False,
include_deps=False):
services = self.get_services(service_names, include_deps)

@@ -729,7 +783,9 @@ class Project:
return

try:
writer = parallel.get_stream_writer()
writer = parallel.ParallelStreamWriter.get_instance()
if writer is None:
raise RuntimeError('ParallelStreamWriter has not yet been instantiated')
for event in strm:
if 'status' not in event:
continue
@@ -830,14 +886,26 @@ class Project:
)
)

def _inject_deps(self, acc, service):
def _inject_deps(self, acc, service, enabled_profiles):
dep_names = service.get_dependency_names()

if len(dep_names) > 0:
dep_services = self.get_services(
service_names=list(set(dep_names)),
include_deps=True
include_deps=True,
auto_enable_profiles=False
)

for dep in dep_services:
if not dep.enabled_for_profiles(enabled_profiles):
raise ConfigurationError(
'Service "{dep_name}" was pulled in as a dependency of '
'service "{service_name}" but is not enabled by the '
'active profiles. '
'You may fix this by adding a common profile to '
'"{dep_name}" and "{service_name}".'
.format(dep_name=dep.name, service_name=service.name)
)
else:
dep_services = []

@@ -1,6 +1,5 @@
import enum
import itertools
import json
import logging
import os
import re
@@ -45,6 +44,7 @@ from .const import LABEL_VERSION
from .const import NANOCPUS_SCALE
from .const import WINDOWS_LONGPATH_PREFIX
from .container import Container
from .errors import CompletedUnsuccessfully
from .errors import HealthCheckFailed
from .errors import NoHealthCheckConfigured
from .errors import OperationFailedError
@@ -77,6 +77,7 @@ HOST_CONFIG_KEYS = [
'cpuset',
'device_cgroup_rules',
'devices',
'device_requests',
'dns',
'dns_search',
'dns_opt',
@@ -111,6 +112,7 @@ HOST_CONFIG_KEYS = [

CONDITION_STARTED = 'service_started'
CONDITION_HEALTHY = 'service_healthy'
CONDITION_COMPLETED_SUCCESSFULLY = 'service_completed_successfully'


class BuildError(Exception):
@@ -711,12 +713,13 @@ class Service:
'image_id': image_id(),
'links': self.get_link_names(),
'net': self.network_mode.id,
'ipc_mode': self.ipc_mode.mode,
'networks': self.networks,
'secrets': self.secrets,
'volumes_from': [
(v.source.name, v.mode)
for v in self.volumes_from if isinstance(v.source, Service)
],
]
}

def get_dependency_names(self):
@@ -752,6 +755,8 @@ class Service:
configs[svc] = lambda s: True
elif config['condition'] == CONDITION_HEALTHY:
configs[svc] = lambda s: s.is_healthy()
elif config['condition'] == CONDITION_COMPLETED_SUCCESSFULLY:
configs[svc] = lambda s: s.is_completed_successfully()
else:
# The config schema already prevents this, but it might be
# bypassed if Compose is called programmatically.
@@ -1016,6 +1021,7 @@ class Service:
privileged=options.get('privileged', False),
network_mode=self.network_mode.mode,
devices=options.get('devices'),
device_requests=options.get('device_requests'),
dns=options.get('dns'),
dns_opt=options.get('dns_opt'),
dns_search=options.get('dns_search'),
@@ -1101,8 +1107,9 @@ class Service:
'Impossible to perform platform-targeted builds for API version < 1.35'
)

builder = self.client if not cli else _CLIBuilder(progress)
build_output = builder.build(
builder = _ClientBuilder(self.client) if not cli else _CLIBuilder(progress)
return builder.build(
service=self,
path=path,
tag=self.image_name,
rm=rm,
@@ -1123,30 +1130,7 @@ class Service:
gzip=gzip,
isolation=build_opts.get('isolation', self.options.get('isolation', None)),
platform=self.platform,
)

try:
all_events = list(stream_output(build_output, output_stream))
except StreamOutputError as e:
raise BuildError(self, str(e))

# Ensure the HTTP connection is not reused for another
# streaming command, as the Docker daemon can sometimes
# complain about it
self.client.close()

image_id = None

for event in all_events:
if 'stream' in event:
match = re.search(r'Successfully built ([0-9a-f]+)', event.get('stream', ''))
if match:
image_id = match.group(1)

if image_id is None:
raise BuildError(self, event if all_events else 'Unknown')

return image_id
output_stream=output_stream)

def get_cache_from(self, build_opts):
cache_from = build_opts.get('cache_from', None)
@@ -1302,6 +1286,21 @@ class Service:
raise HealthCheckFailed(ctnr.short_id)
return result

def is_completed_successfully(self):
""" Check that all containers for this service have completed successfully.
Returns False if at least one container has not exited yet, and
raises a CompletedUnsuccessfully exception if at least one container
exited with a non-zero exit code.
"""
result = True
for ctnr in self.containers(stopped=True):
ctnr.inspect()
if ctnr.get('State.Status') != 'exited':
result = False
elif ctnr.exit_code != 0:
raise CompletedUnsuccessfully(ctnr.short_id, ctnr.exit_code)
return result
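
Together with the service_completed_successfully enum value added to the schema and the CONDITION_COMPLETED_SUCCESSFULLY constant above, this gives depends_on its third gate. A self-contained model of the check over (status, exit_code) pairs:

    class CompletedUnsuccessfully(Exception):
        pass

    def completed_successfully(states):
        # True only once every container has exited; a non-zero exit is fatal.
        done = True
        for status, exit_code in states:
            if status != 'exited':
                done = False
            elif exit_code != 0:
                raise CompletedUnsuccessfully(exit_code)
        return done

    assert completed_successfully([('exited', 0), ('exited', 0)]) is True
    assert completed_successfully([('running', None)]) is False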

def _parse_proxy_config(self):
client = self.client
if 'proxies' not in client._general_configs:
@@ -1327,6 +1326,24 @@ class Service:

return result

def get_profiles(self):
if 'profiles' not in self.options:
return []

return self.options.get('profiles')

def enabled_for_profiles(self, enabled_profiles):
# if service has no profiles specified it is always enabled
if 'profiles' not in self.options:
return True

service_profiles = self.options.get('profiles')
for profile in enabled_profiles:
if profile in service_profiles:
return True

return False
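
The rule fits in one line: a service without a profiles key is always enabled, otherwise it needs at least one profile in common with the active set. An equivalent set-based sketch:

    def enabled_for_profiles(service_profiles, active_profiles):
        # No profiles declared -> always enabled; otherwise require an overlap.
        return service_profiles is None or bool(set(service_profiles) & set(active_profiles))

    assert enabled_for_profiles(None, []) is True        # unprofiled services always run
    assert enabled_for_profiles(['debug'], ['debug']) is True
    assert enabled_for_profiles(['debug'], []) is False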


def short_id_alias_exists(container, network):
aliases = container.get(
@@ -1770,20 +1787,77 @@ def rewrite_build_path(path):
return path


class _CLIBuilder:
def __init__(self, progress):
self._progress = progress
class _ClientBuilder:
def __init__(self, client):
self.client = client

def build(self, path, tag=None, quiet=False, fileobj=None,
def build(self, service, path, tag=None, quiet=False, fileobj=None,
nocache=False, rm=False, timeout=None,
custom_context=False, encoding=None, pull=False,
forcerm=False, dockerfile=None, container_limits=None,
decode=False, buildargs=None, gzip=False, shmsize=None,
labels=None, cache_from=None, target=None, network_mode=None,
squash=None, extra_hosts=None, platform=None, isolation=None,
use_config_proxy=True):
use_config_proxy=True, output_stream=sys.stdout):
build_output = self.client.build(
path=path,
tag=tag,
nocache=nocache,
rm=rm,
pull=pull,
forcerm=forcerm,
dockerfile=dockerfile,
labels=labels,
cache_from=cache_from,
buildargs=buildargs,
network_mode=network_mode,
target=target,
shmsize=shmsize,
extra_hosts=extra_hosts,
container_limits=container_limits,
gzip=gzip,
isolation=isolation,
platform=platform)

try:
all_events = list(stream_output(build_output, output_stream))
except StreamOutputError as e:
raise BuildError(service, str(e))

# Ensure the HTTP connection is not reused for another
# streaming command, as the Docker daemon can sometimes
# complain about it
self.client.close()

image_id = None

for event in all_events:
if 'stream' in event:
match = re.search(r'Successfully built ([0-9a-f]+)', event.get('stream', ''))
if match:
image_id = match.group(1)

if image_id is None:
raise BuildError(service, event if all_events else 'Unknown')

return image_id
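
Extracting _ClientBuilder gives the daemon-side build the same build() surface as _CLIBuilder, so the call site in Service.build shown earlier reduces to picking a backend and returning its image id. A hedged sketch of the call shape, with use_cli, client, progress and service assumed to be in scope:

    # One duck-typed interface, two backends; both return the built image id.
    builder = _ClientBuilder(client) if not use_cli else _CLIBuilder(progress)
    image_id = builder.build(service=service, path='.', tag='myapp:latest', rm=True)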


class _CLIBuilder:
def __init__(self, progress):
self._progress = progress

def build(self, service, path, tag=None, quiet=False, fileobj=None,
nocache=False, rm=False, timeout=None,
custom_context=False, encoding=None, pull=False,
forcerm=False, dockerfile=None, container_limits=None,
decode=False, buildargs=None, gzip=False, shmsize=None,
labels=None, cache_from=None, target=None, network_mode=None,
squash=None, extra_hosts=None, platform=None, isolation=None,
use_config_proxy=True, output_stream=sys.stdout):
"""
Args:
service (str): Service to be built
path (str): Path to the directory containing the Dockerfile
buildargs (dict): A dictionary of build arguments
cache_from (:py:class:`list`): A list of images used for build
@@ -1832,10 +1906,11 @@ class _CLIBuilder:
configuration file (``~/.docker/config.json`` by default)
contains a proxy configuration, the corresponding environment
variables will be set in the container being built.
output_stream (writer): stream to use for build logs
Returns:
A generator for the build output.
"""
if dockerfile:
if dockerfile and os.path.isdir(path):
dockerfile = os.path.join(path, dockerfile)
iidfile = tempfile.mktemp()

@@ -1853,35 +1928,29 @@ class _CLIBuilder:
command_builder.add_arg("--tag", tag)
command_builder.add_arg("--target", target)
command_builder.add_arg("--iidfile", iidfile)
command_builder.add_arg("--platform", platform)
command_builder.add_arg("--isolation", isolation)

if extra_hosts:
if isinstance(extra_hosts, dict):
extra_hosts = ["{}:{}".format(host, ip) for host, ip in extra_hosts.items()]
for host in extra_hosts:
command_builder.add_arg("--add-host", "{}".format(host))

args = command_builder.build([path])

magic_word = "Successfully built "
appear = False
with subprocess.Popen(args, stdout=subprocess.PIPE,
with subprocess.Popen(args, stdout=output_stream, stderr=sys.stderr,
universal_newlines=True) as p:
while True:
line = p.stdout.readline()
if not line:
break
if line.startswith(magic_word):
appear = True
yield json.dumps({"stream": line})

p.communicate()
if p.returncode != 0:
raise StreamOutputError()
raise BuildError(service, "Build failed")

with open(iidfile) as f:
line = f.readline()
image_id = line.split(":")[1].strip()
os.remove(iidfile)

# In case of `DOCKER_BUILDKIT=1`
# there is no success message already present in the output.
# Since that's the way `Service::build` gets the `image_id`
# it has to be added `manually`
if not appear:
yield json.dumps({"stream": "{}{}\n".format(magic_word, image_id)})
return image_id


class _CommandBuilder:

@@ -174,3 +174,18 @@ def truncate_string(s, max_chars=35):
if len(s) > max_chars:
return s[:max_chars - 2] + '...'
return s


def filter_attached_for_up(items, service_names, attach_dependencies=False,
item_to_service_name=lambda x: x):
"""This function contains the logic of choosing which services to
attach when doing docker-compose up. It may be used both with containers
and services, and any other entities that map to service names -
this mapping is provided by item_to_service_name."""
if attach_dependencies or not service_names:
return items

return [
item
for item in items if item_to_service_name(item) in service_names
]
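
Both up (over services) and the log printer (over containers) now share this helper; item_to_service_name is what lets the same logic run over either type. A usage sketch with plain dicts standing in for containers, assuming the function above is in scope:

    containers = [{'service': 'web'}, {'service': 'db'}]
    attached = filter_attached_for_up(
        containers, ['web'], attach_dependencies=False,
        item_to_service_name=lambda c: c['service'])
    assert attached == [{'service': 'web'}]
    # With attach_dependencies=True (or an empty service list) everything is kept.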

@@ -138,7 +138,7 @@ _docker_compose_config() {
;;
esac

COMPREPLY=( $( compgen -W "--hash --help --no-interpolate --quiet -q --resolve-image-digests --services --volumes" -- "$cur" ) )
COMPREPLY=( $( compgen -W "--hash --help --no-interpolate --profiles --quiet -q --resolve-image-digests --services --volumes" -- "$cur" ) )
}


@@ -164,10 +164,18 @@ _docker_compose_docker_compose() {
_filedir "y?(a)ml"
return
;;
--ansi)
COMPREPLY=( $( compgen -W "never always auto" -- "$cur" ) )
return
;;
--log-level)
COMPREPLY=( $( compgen -W "debug info warning error critical" -- "$cur" ) )
return
;;
--profile)
COMPREPLY=( $( compgen -W "$(__docker_compose_q config --profiles)" -- "$cur" ) )
return
;;
--project-directory)
_filedir -d
return
@@ -290,7 +298,7 @@ _docker_compose_logs() {

case "$cur" in
-*)
COMPREPLY=( $( compgen -W "--follow -f --help --no-color --tail --timestamps -t" -- "$cur" ) )
COMPREPLY=( $( compgen -W "--follow -f --help --no-color --no-log-prefix --tail --timestamps -t" -- "$cur" ) )
;;
*)
__docker_compose_complete_services
@@ -545,7 +553,7 @@ _docker_compose_up() {

case "$cur" in
-*)
COMPREPLY=( $( compgen -W "--abort-on-container-exit --always-recreate-deps --attach-dependencies --build -d --detach --exit-code-from --force-recreate --help --no-build --no-color --no-deps --no-recreate --no-start --renew-anon-volumes -V --remove-orphans --scale --timeout -t" -- "$cur" ) )
COMPREPLY=( $( compgen -W "--abort-on-container-exit --always-recreate-deps --attach-dependencies --build -d --detach --exit-code-from --force-recreate --help --no-build --no-color --no-deps --no-log-prefix --no-recreate --no-start --renew-anon-volumes -V --remove-orphans --scale --timeout -t" -- "$cur" ) )
;;
*)
__docker_compose_complete_services
@@ -614,9 +622,11 @@ _docker_compose() {
--tlskey
"

# These options are require special treatment when searching the command.
# These options require special treatment when searching the command.
local top_level_options_with_args="
--ansi
--log-level
--profile
"

COMPREPLY=()

@@ -21,5 +21,7 @@ complete -c docker-compose -l tlscert -r -d 'Path to TLS certif
complete -c docker-compose -l tlskey -r -d 'Path to TLS key file'
complete -c docker-compose -l tlsverify -d 'Use TLS and verify the remote'
complete -c docker-compose -l skip-hostname-check -d "Don't check the daemon's hostname against the name specified in the client certificate (for example if your docker host is an IP address)"
complete -c docker-compose -l no-ansi -d 'Do not print ANSI control characters'
complete -c docker-compose -l ansi -a 'never always auto' -d 'Control when to print ANSI control characters'
complete -c docker-compose -s h -l help -d 'Print usage'
complete -c docker-compose -s v -l version -d 'Print version and exit'

@@ -342,6 +342,7 @@ _docker-compose() {
'--verbose[Show more output]' \
'--log-level=[Set log level]:level:(DEBUG INFO WARNING ERROR CRITICAL)' \
'--no-ansi[Do not print ANSI control characters]' \
'--ansi=[Control when to print ANSI control characters]:when:(never always auto)' \
'(-H --host)'{-H,--host}'[Daemon socket to connect to]:host:' \
'--tls[Use TLS; implied by --tlsverify]' \
'--tlscacert=[Trust certs signed only by this CA]:ca path:' \
|
||||
|
||||
@@ -23,8 +23,8 @@ exe = EXE(pyz,
         'DATA'
     ),
     (
-        'compose/config/config_schema_compose_spec.json',
-        'compose/config/config_schema_compose_spec.json',
+        'compose/config/compose_spec.json',
+        'compose/config/compose_spec.json',
         'DATA'
     ),
     (

@@ -32,8 +32,8 @@ coll = COLLECT(exe,
         'DATA'
     ),
     (
-        'compose/config/config_schema_compose_spec.json',
-        'compose/config/config_schema_compose_spec.json',
+        'compose/config/compose_spec.json',
+        'compose/config/compose_spec.json',
         'DATA'
     ),
     (

@@ -1 +1 @@
-pyinstaller==3.6
+pyinstaller==4.1
@@ -1,8 +1,8 @@
 Click==7.1.2
-coverage==5.2.1
+coverage==5.5
 ddt==1.4.1
 flake8==3.8.3
-gitpython==3.1.7
+gitpython==3.1.11
 mock==3.0.5
 pytest==6.0.1; python_version >= '3.5'
 pytest==4.6.5; python_version < '3.5'
@@ -1,19 +1,19 @@
 altgraph==0.17
 appdirs==1.4.4
-attrs==20.1.0
-bcrypt==3.1.7
-cffi==1.14.1
-cryptography==3.0
+attrs==20.3.0
+bcrypt==3.2.0
+cffi==1.14.4
+cryptography==3.3.2
 distlib==0.3.1
 entrypoints==0.3
 filelock==3.0.12
 gitdb2==4.0.2
 mccabe==0.6.1
-more-itertools==8.4.0; python_version >= '3.5'
+more-itertools==8.6.0; python_version >= '3.5'
 more-itertools==5.0.0; python_version < '3.5'
-packaging==20.4
+packaging==20.9
 pluggy==0.13.1
-py==1.9.0
+py==1.10.0
 pycodestyle==2.6.0
 pycparser==2.20
 pyflakes==2.2.0

@@ -23,6 +23,6 @@ pyrsistent==0.16.0
 smmap==3.0.4
 smmap2==3.0.1
 toml==0.10.1
-tox==3.19.0
-virtualenv==20.0.30
+tox==3.21.2
+virtualenv==20.4.0
 wcwidth==0.2.5
@@ -1,10 +1,10 @@
 backports.shutil_get_terminal_size==1.0.0
-cached-property==1.5.1
+cached-property==1.5.1; python_version < '3.8'
 certifi==2020.6.20
 chardet==3.0.4
 colorama==0.4.3; sys_platform == 'win32'
 distro==1.5.0
-docker==4.3.1
+docker==5.0.0
 docker-pycreds==0.4.0
 dockerpty==0.4.1
 docopt==0.6.2

@@ -12,11 +12,10 @@ idna==2.10
 ipaddress==1.0.23
 jsonschema==3.2.0
 paramiko==2.7.1
-pypiwin32==219; sys_platform == 'win32' and python_version < '3.6'
-pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6'
 PySocks==1.7.1
-python-dotenv==0.14.0
-PyYAML==5.3.1
+python-dotenv==0.17.0
+pywin32==227; sys_platform == 'win32'
+PyYAML==5.4.1
 requests==2.24.0
 texttable==1.6.2
 urllib3==1.25.10; python_version == '3.3'
@@ -5,14 +5,12 @@ set -ex
 ./script/clean

 DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
-TAG="docker/compose:tmp-glibc-linux-binary-${DOCKER_COMPOSE_GITSHA}"
-
-docker build -t "${TAG}" . \
-    --build-arg BUILD_PLATFORM=debian \
-    --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}"
-TMP_CONTAINER=$(docker create "${TAG}")
 mkdir -p dist
+docker build . \
+    --target bin \
+    --build-arg DISTRO=debian \
+    --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}" \
+    --output dist/
 ARCH=$(uname -m)
-docker cp "${TMP_CONTAINER}":/usr/local/bin/docker-compose "dist/docker-compose-Linux-${ARCH}"
-docker container rm -f "${TMP_CONTAINER}"
-docker image rm -f "${TAG}"
+# Ensure that we output the binary with the same name as we did before
+mv dist/docker-compose-linux-amd64 "dist/docker-compose-Linux-${ARCH}"
@@ -24,7 +24,7 @@ if [ ! -z "${BUILD_BOOTLOADER}" ]; then
     git clone --single-branch --branch develop https://github.com/pyinstaller/pyinstaller.git /tmp/pyinstaller
     cd /tmp/pyinstaller/bootloader
     # Checkout commit corresponding to version in requirements-build
-    git checkout v3.6
+    git checkout v4.1
     "${VENV}"/bin/python3 ./waf configure --no-lsb all
     "${VENV}"/bin/pip3 install ..
     cd "${CODE_PATH}"
@@ -13,6 +13,6 @@ IMAGE="docker/compose-tests"
 DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
 docker build -t "${IMAGE}:${TAG}" . \
-    --build-arg BUILD_PLATFORM="debian" \
+    --target build \
+    --build-arg DISTRO="debian" \
     --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}"
 docker tag "${IMAGE}":"${TAG}" "${IMAGE}":latest
@@ -6,17 +6,17 @@
 #
 #        http://git-scm.com/download/win
 #
-#   2. Install Python 3.7.x:
+#   2. Install Python 3.9.x:
 #
 #        https://www.python.org/downloads/
 #
-#   3. Append ";C:\Python37;C:\Python37\Scripts" to the "Path" environment variable:
+#   3. Append ";C:\Python39;C:\Python39\Scripts" to the "Path" environment variable:
 #
 #        https://www.microsoft.com/resources/documentation/windows/xp/all/proddocs/en-us/sysdm_advancd_environmnt_addchange_variable.mspx?mfr=true
 #
 #   4. In Powershell, run the following commands:
 #
-#        $ pip install 'virtualenv==20.0.30'
+#        $ pip install 'virtualenv==20.2.2'
 #        $ Set-ExecutionPolicy -Scope CurrentUser RemoteSigned
 #
 #   5. Clone the repository:

@@ -39,7 +39,7 @@ if (Test-Path venv) {
 Get-ChildItem -Recurse -Include *.pyc | foreach ($_) { Remove-Item $_.FullName }

 # Create virtualenv
-virtualenv -p C:\Python37\python.exe .\venv
+virtualenv -p C:\Python39\python.exe .\venv

 # pip and pyinstaller generate lots of warnings, so we need to ignore them
 $ErrorActionPreference = "Continue"
script/release/release.py (0 lines changed): Normal file → Executable file
@@ -15,16 +15,16 @@

 set -e

-VERSION="1.26.1"
+VERSION="1.29.2"
 IMAGE="docker/compose:$VERSION"


 # Setup options for connecting to docker host
 if [ -z "$DOCKER_HOST" ]; then
-    DOCKER_HOST="/var/run/docker.sock"
+    DOCKER_HOST='unix:///var/run/docker.sock'
 fi
-if [ -S "$DOCKER_HOST" ]; then
-    DOCKER_ADDR="-v $DOCKER_HOST:$DOCKER_HOST -e DOCKER_HOST"
+if [ -S "${DOCKER_HOST#unix://}" ]; then
+    DOCKER_ADDR="-v ${DOCKER_HOST#unix://}:${DOCKER_HOST#unix://} -e DOCKER_HOST"
 else
     DOCKER_ADDR="-e DOCKER_HOST -e DOCKER_TLS_VERIFY -e DOCKER_CERT_PATH"
 fi
@@ -44,13 +44,34 @@ fi
 if [ -n "$COMPOSE_PROJECT_NAME" ]; then
     COMPOSE_OPTIONS="-e COMPOSE_PROJECT_NAME $COMPOSE_OPTIONS"
 fi
-# TODO: also check --file argument
 if [ -n "$compose_dir" ]; then
     VOLUMES="$VOLUMES -v $compose_dir:$compose_dir"
 fi
 if [ -n "$HOME" ]; then
     VOLUMES="$VOLUMES -v $HOME:$HOME -e HOME" # Pass in HOME to share docker.config and allow ~/-relative paths to work.
 fi
+i=$#
+while [ $i -gt 0 ]; do
+    arg=$1
+    i=$((i - 1))
+    shift
+
+    case "$arg" in
+        -f|--file)
+            value=$1
+            i=$((i - 1))
+            shift
+            set -- "$@" "$arg" "$value"
+
+            file_dir=$(realpath "$(dirname "$value")")
+            VOLUMES="$VOLUMES -v $file_dir:$file_dir"
+            ;;
+        *) set -- "$@" "$arg" ;;
+    esac
+done
+
+# Setup environment variables for compose config and context
+ENV_OPTIONS=$(printenv | sed -E "/^PATH=.*/d; s/^/-e /g; s/=.*//g; s/\n/ /g")

 # Only allocate tty if we detect one
 if [ -t 0 ] && [ -t 1 ]; then

@@ -67,4 +88,4 @@ if docker info --format '{{json .SecurityOptions}}' 2>/dev/null | grep -q 'name=
 fi

 # shellcheck disable=SC2086
-exec docker run --rm $DOCKER_RUN_OPTIONS $DOCKER_ADDR $COMPOSE_OPTIONS $VOLUMES -w "$(pwd)" $IMAGE "$@"
+exec docker run --rm $DOCKER_RUN_OPTIONS $DOCKER_ADDR $COMPOSE_OPTIONS $ENV_OPTIONS $VOLUMES -w "$(pwd)" $IMAGE "$@"
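The while-loop added above walks the positional parameters, re-appends each one unchanged, and records the parent directory of every `-f/--file` value so it can be bind-mounted into the wrapper container. A Python paraphrase of that bookkeeping — illustrative only; `collect_file_mounts` is a hypothetical name, not in the patch:

    import os

    def collect_file_mounts(argv):
        """Re-build argv unchanged while recording -f/--file parent dirs as -v mounts."""
        rebuilt, volumes = [], []
        args = iter(argv)
        for arg in args:
            rebuilt.append(arg)
            if arg in ('-f', '--file'):
                value = next(args, '')
                rebuilt.append(value)
                # Mount the directory that contains the compose file.
                file_dir = os.path.realpath(os.path.dirname(value) or '.')
                volumes += ['-v', '{0}:{0}'.format(file_dir)]
        return rebuilt, volumes

    # e.g. collect_file_mounts(['-f', 'deploy/docker-compose.yml', 'up', '-d'])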
@@ -13,13 +13,13 @@ if ! [ ${DEPLOYMENT_TARGET} == "$(macos_version)" ]; then
     SDK_SHA1=dd228a335194e3392f1904ce49aff1b1da26ca62
 fi

-OPENSSL_VERSION=1.1.1g
+OPENSSL_VERSION=1.1.1h
 OPENSSL_URL=https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz
-OPENSSL_SHA1=b213a293f2127ec3e323fb3cfc0c9807664fd997
+OPENSSL_SHA1=8d0d099e8973ec851368c8c775e05e1eadca1794

-PYTHON_VERSION=3.7.7
+PYTHON_VERSION=3.9.0
 PYTHON_URL=https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz
-PYTHON_SHA1=8e9968663a214aea29659ba9dfa959e8a7d82b39
+PYTHON_SHA1=5744a10ba989d2badacbab3c00cdcb83c83106c7

 #
 # Install prerequisites.

@@ -36,7 +36,7 @@ if ! [ -x "$(command -v python3)" ]; then
     brew install python3
 fi
 if ! [ -x "$(command -v virtualenv)" ]; then
-    pip3 install virtualenv==20.0.30
+    pip3 install virtualenv==20.2.2
 fi

 #
@@ -21,7 +21,6 @@ elif [ "$DOCKER_VERSIONS" == "all" ]; then
     DOCKER_VERSIONS=$($get_versions -n 2 recent)
 fi

-
 BUILD_NUMBER=${BUILD_NUMBER-$USER}
 PY_TEST_VERSIONS=${PY_TEST_VERSIONS:-py37}

@@ -39,17 +38,23 @@ for version in $DOCKER_VERSIONS; do

     trap "on_exit" EXIT

-    repo="dockerswarm/dind"
-
     docker run \
         -d \
         --name "$daemon_container" \
         --privileged \
         --volume="/var/lib/docker" \
-        "$repo:$version" \
+        -e "DOCKER_TLS_CERTDIR=" \
+        "docker:$version-dind" \
         dockerd -H tcp://0.0.0.0:2375 $DOCKER_DAEMON_ARGS \
         2>&1 | tail -n 10

+    docker exec "$daemon_container" sh -c "apk add --no-cache git"
+
+    # copy docker config from host for authentication with Docker Hub
+    docker exec "$daemon_container" sh -c "mkdir /root/.docker"
+    docker cp /root/.docker/config.json $daemon_container:/root/.docker/config.json
+    docker exec "$daemon_container" sh -c "chmod 644 /root/.docker/config.json"
+
     docker run \
         --rm \
         --tty \
setup.py (6 lines changed)
@@ -25,14 +25,13 @@ def find_version(*file_paths):


 install_requires = [
-    'cached-property >= 1.2.0, < 2',
     'docopt >= 0.6.1, < 1',
     'PyYAML >= 3.10, < 6',
     'requests >= 2.20.0, < 3',
     'texttable >= 0.9.0, < 2',
     'websocket-client >= 0.32.0, < 1',
     'distro >= 1.5.0, < 2',
-    'docker[ssh] >= 4.3.1, < 5',
+    'docker[ssh] >= 5',
     'dockerpty >= 0.4.1, < 1',
     'jsonschema >= 2.5.1, < 4',
     'python-dotenv >= 0.13.0, < 1',

@@ -50,6 +49,7 @@ if sys.version_info[:2] < (3, 4):

 extras_require = {
     ':python_version < "3.5"': ['backports.ssl_match_hostname >= 3.5, < 4'],
+    ':python_version < "3.8"': ['cached-property >= 1.2.0, < 2'],
     ':sys_platform == "win32"': ['colorama >= 0.4, < 1'],
     'socks': ['PySocks >= 1.5.6, != 1.5.7, < 2'],
     'tests': tests_require,

@@ -102,5 +102,7 @@ setup(
         'Programming Language :: Python :: 3.4',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
     ],
 )
@@ -58,13 +58,16 @@ COMPOSE_COMPATIBILITY_DICT = {
 }


-def start_process(base_dir, options):
+def start_process(base_dir, options, executable=None, env=None):
+    executable = executable or DOCKER_COMPOSE_EXECUTABLE
     proc = subprocess.Popen(
-        [DOCKER_COMPOSE_EXECUTABLE] + options,
+        [executable] + options,
         stdin=subprocess.PIPE,
         stdout=subprocess.PIPE,
         stderr=subprocess.PIPE,
-        cwd=base_dir)
+        cwd=base_dir,
+        env=env,
+    )
     print("Running process: %s" % proc.pid)
     return proc

@@ -78,9 +81,10 @@ def wait_on_process(proc, returncode=0, stdin=None):
     return ProcessResult(stdout.decode('utf-8'), stderr.decode('utf-8'))


-def dispatch(base_dir, options, project_options=None, returncode=0, stdin=None):
+def dispatch(base_dir, options,
+             project_options=None, returncode=0, stdin=None, executable=None, env=None):
     project_options = project_options or []
-    proc = start_process(base_dir, project_options + options)
+    proc = start_process(base_dir, project_options + options, executable=executable, env=env)
     return wait_on_process(proc, returncode=returncode, stdin=stdin)
@@ -233,6 +237,11 @@ class CLITestCase(DockerClientTestCase):
         result = self.dispatch(['-H=tcp://doesnotexist:8000', 'ps'], returncode=1)
         assert "Couldn't connect to Docker daemon" in result.stderr

+    def test_config_list_profiles(self):
+        self.base_dir = 'tests/fixtures/config-profiles'
+        result = self.dispatch(['config', '--profiles'])
+        assert set(result.stdout.rstrip().split('\n')) == {'debug', 'frontend', 'gui'}
+
     def test_config_list_services(self):
         self.base_dir = 'tests/fixtures/v2-full'
         result = self.dispatch(['config', '--services'])
@@ -359,7 +368,7 @@ services:
             'web': {
                 'command': 'true',
                 'image': 'alpine:latest',
-                'ports': ['5643/tcp', '9999/tcp']
+                'ports': [{'target': 5643}, {'target': 9999}]
             }
         }
     }

@@ -374,7 +383,7 @@ services:
             'web': {
                 'command': 'false',
                 'image': 'alpine:latest',
-                'ports': ['5644/tcp', '9998/tcp']
+                'ports': [{'target': 5644}, {'target': 9998}]
             }
         }
     }

@@ -389,7 +398,7 @@ services:
             'web': {
                 'command': 'echo uwu',
                 'image': 'alpine:3.10.1',
-                'ports': ['3341/tcp', '4449/tcp']
+                'ports': [{'target': 3341}, {'target': 4449}]
             }
         }
     }
@@ -783,7 +792,11 @@ services:
         assert BUILD_CACHE_TEXT not in result.stdout
         assert BUILD_PULL_TEXT in result.stdout

+    @mock.patch.dict(os.environ)
     def test_build_log_level(self):
+        os.environ['COMPOSE_DOCKER_CLI_BUILD'] = '0'
+        os.environ['DOCKER_BUILDKIT'] = '0'
+        self.test_env_file_relative_to_compose_file()
         self.base_dir = 'tests/fixtures/simple-dockerfile'
         result = self.dispatch(['--log-level', 'warning', 'build', 'simple'])
         assert result.stderr == ''
@@ -845,13 +858,17 @@ services:
         for c in self.project.client.containers(all=True):
             self.addCleanup(self.project.client.remove_container, c, force=True)

+    @mock.patch.dict(os.environ)
     def test_build_shm_size_build_option(self):
+        os.environ['COMPOSE_DOCKER_CLI_BUILD'] = '0'
         pull_busybox(self.client)
         self.base_dir = 'tests/fixtures/build-shm-size'
         result = self.dispatch(['build', '--no-cache'], None)
         assert 'shm_size: 96' in result.stdout

+    @mock.patch.dict(os.environ)
     def test_build_memory_build_option(self):
+        os.environ['COMPOSE_DOCKER_CLI_BUILD'] = '0'
         pull_busybox(self.client)
         self.base_dir = 'tests/fixtures/build-memory'
         result = self.dispatch(['build', '--no-cache', '--memory', '96m', 'service'], None)
@@ -1719,6 +1736,98 @@ services:
         shareable_mode_container = self.project.get_service('shareable').containers()[0]
         assert shareable_mode_container.get('HostConfig.IpcMode') == 'shareable'

+    def test_profiles_up_with_no_profile(self):
+        self.base_dir = 'tests/fixtures/profiles'
+        self.dispatch(['up'])
+
+        containers = self.project.containers(stopped=True)
+        service_names = [c.service for c in containers]
+
+        assert 'foo' in service_names
+        assert len(containers) == 1
+
+    def test_profiles_up_with_profile(self):
+        self.base_dir = 'tests/fixtures/profiles'
+        self.dispatch(['--profile', 'test', 'up'])
+
+        containers = self.project.containers(stopped=True)
+        service_names = [c.service for c in containers]
+
+        assert 'foo' in service_names
+        assert 'bar' in service_names
+        assert 'baz' in service_names
+        assert len(containers) == 3
+
+    def test_profiles_up_invalid_dependency(self):
+        self.base_dir = 'tests/fixtures/profiles'
+        result = self.dispatch(['--profile', 'debug', 'up'], returncode=1)
+
+        assert ('Service "bar" was pulled in as a dependency of service "zot" '
+                'but is not enabled by the active profiles.') in result.stderr
+
+    def test_profiles_up_with_multiple_profiles(self):
+        self.base_dir = 'tests/fixtures/profiles'
+        self.dispatch(['--profile', 'debug', '--profile', 'test', 'up'])
+
+        containers = self.project.containers(stopped=True)
+        service_names = [c.service for c in containers]
+
+        assert 'foo' in service_names
+        assert 'bar' in service_names
+        assert 'baz' in service_names
+        assert 'zot' in service_names
+        assert len(containers) == 4
+
+    def test_profiles_up_with_profile_enabled_by_service(self):
+        self.base_dir = 'tests/fixtures/profiles'
+        self.dispatch(['up', 'bar'])
+
+        containers = self.project.containers(stopped=True)
+        service_names = [c.service for c in containers]
+
+        assert 'bar' in service_names
+        assert len(containers) == 1
+
+    def test_profiles_up_with_dependency_and_profile_enabled_by_service(self):
+        self.base_dir = 'tests/fixtures/profiles'
+        self.dispatch(['up', 'baz'])
+
+        containers = self.project.containers(stopped=True)
+        service_names = [c.service for c in containers]
+
+        assert 'bar' in service_names
+        assert 'baz' in service_names
+        assert len(containers) == 2
+
+    def test_profiles_up_with_invalid_dependency_for_target_service(self):
+        self.base_dir = 'tests/fixtures/profiles'
+        result = self.dispatch(['up', 'zot'], returncode=1)
+
+        assert ('Service "bar" was pulled in as a dependency of service "zot" '
+                'but is not enabled by the active profiles.') in result.stderr
+
+    def test_profiles_up_with_profile_for_dependency(self):
+        self.base_dir = 'tests/fixtures/profiles'
+        self.dispatch(['--profile', 'test', 'up', 'zot'])
+
+        containers = self.project.containers(stopped=True)
+        service_names = [c.service for c in containers]
+
+        assert 'bar' in service_names
+        assert 'zot' in service_names
+        assert len(containers) == 2
+
+    def test_profiles_up_with_merged_profiles(self):
+        self.base_dir = 'tests/fixtures/profiles'
+        self.dispatch(['-f', 'docker-compose.yml', '-f', 'merge-profiles.yml', 'up', 'zot'])
+
+        containers = self.project.containers(stopped=True)
+        service_names = [c.service for c in containers]
+
+        assert 'bar' in service_names
+        assert 'zot' in service_names
+        assert len(containers) == 2
+
     def test_exec_without_tty(self):
         self.base_dir = 'tests/fixtures/links-composefile'
         self.dispatch(['up', '-d', 'console'])

@@ -3034,3 +3143,12 @@ services:
         another = self.project.get_service('--log-service')
         assert len(service.containers()) == 1
         assert len(another.containers()) == 1
+
+    def test_up_no_log_prefix(self):
+        self.base_dir = 'tests/fixtures/echo-services'
+        result = self.dispatch(['up', '--no-log-prefix'])
+
+        assert 'simple' in result.stdout
+        assert 'another' in result.stdout
+        assert 'exited with code 0' in result.stdout
+        assert 'exited with code 0' in result.stdout
tests/fixtures/config-profiles/docker-compose.yml (15 lines, vendored): Normal file
@@ -0,0 +1,15 @@
version: '3.8'
services:
  frontend:
    image: frontend
    profiles: ["frontend", "gui"]
  phpmyadmin:
    image: phpmyadmin
    depends_on:
      - db
    profiles:
      - debug
  backend:
    image: backend
  db:
    image: mysql
tests/fixtures/env-file-override/.env (1 line, vendored): Normal file
@@ -0,0 +1 @@
WHEREAMI=default
tests/fixtures/profiles/docker-compose.yml (20 lines, vendored): Normal file
@@ -0,0 +1,20 @@
version: "3"
services:
  foo:
    image: busybox:1.31.0-uclibc
  bar:
    image: busybox:1.31.0-uclibc
    profiles:
      - test
  baz:
    image: busybox:1.31.0-uclibc
    depends_on:
      - bar
    profiles:
      - test
  zot:
    image: busybox:1.31.0-uclibc
    depends_on:
      - bar
    profiles:
      - debug
tests/fixtures/profiles/merge-profiles.yml (5 lines, vendored): Normal file
@@ -0,0 +1,5 @@
version: "3"
services:
  bar:
    profiles:
      - debug
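Taken together, the two fixtures above exercise one rule: a service with no `profiles` key is always enabled, while a service that declares profiles needs at least one of them active (and `merge-profiles.yml` adds `debug` to `bar`'s list rather than replacing it). A small sketch of that activation rule — my paraphrase for illustration, not Compose's actual implementation, and it deliberately ignores the separate dependency check the tests assert on:

    def enabled_services(services, active_profiles):
        """Services without profiles are always enabled; others need a match."""
        active = set(active_profiles)
        return {
            name: cfg for name, cfg in services.items()
            if not cfg.get('profiles') or active.intersection(cfg['profiles'])
        }

    services = {
        'foo': {},
        'bar': {'profiles': ['test']},
        'baz': {'profiles': ['test'], 'depends_on': ['bar']},
        'zot': {'profiles': ['debug'], 'depends_on': ['bar']},
    }
    assert set(enabled_services(services, [])) == {'foo'}
    assert set(enabled_services(services, ['test'])) == {'foo', 'bar', 'baz'}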
@@ -1,5 +1,6 @@
 import tempfile

+import pytest
 from ddt import data
 from ddt import ddt

@@ -8,6 +9,7 @@ from ..acceptance.cli_test import dispatch
 from compose.cli.command import get_project
 from compose.cli.command import project_from_options
 from compose.config.environment import Environment
+from compose.config.errors import EnvFileNotFound
 from tests.integration.testcases import DockerClientTestCase


@@ -55,13 +57,36 @@ services:
 class EnvironmentOverrideFileTest(DockerClientTestCase):
     def test_env_file_override(self):
         base_dir = 'tests/fixtures/env-file-override'
+        # '--env-file' are relative to the current working dir
+        env = Environment.from_env_file(base_dir, base_dir+'/.env.override')
         dispatch(base_dir, ['--env-file', '.env.override', 'up'])
         project = get_project(project_dir=base_dir,
                               config_path=['docker-compose.yml'],
-                              environment=Environment.from_env_file(base_dir, '.env.override'),
+                              environment=env,
                               override_dir=base_dir)
         containers = project.containers(stopped=True)
         assert len(containers) == 1
         assert "WHEREAMI=override" in containers[0].get('Config.Env')
+        assert "DEFAULT_CONF_LOADED=true" in containers[0].get('Config.Env')
         dispatch(base_dir, ['--env-file', '.env.override', 'down'], None)

+    def test_env_file_not_found_error(self):
+        base_dir = 'tests/fixtures/env-file-override'
+        with pytest.raises(EnvFileNotFound) as excinfo:
+            Environment.from_env_file(base_dir, '.env.override')
+
+        assert "Couldn't find env file" in excinfo.exconly()
+
+    def test_dot_env_file(self):
+        base_dir = 'tests/fixtures/env-file-override'
+        # '.env' is relative to the project_dir (base_dir)
+        env = Environment.from_env_file(base_dir, None)
+        dispatch(base_dir, ['up'])
+        project = get_project(project_dir=base_dir,
+                              config_path=['docker-compose.yml'],
+                              environment=env,
+                              override_dir=base_dir)
+        containers = project.containers(stopped=True)
+        assert len(containers) == 1
+        assert "WHEREAMI=default" in containers[0].get('Config.Env')
+        dispatch(base_dir, ['down'], None)
tests/integration/metrics_test.py (125 lines): Normal file
@@ -0,0 +1,125 @@
import logging
import os
import socket
from http.server import BaseHTTPRequestHandler
from http.server import HTTPServer
from threading import Thread

import requests
from docker.transport import UnixHTTPAdapter

from tests.acceptance.cli_test import dispatch
from tests.integration.testcases import DockerClientTestCase


TEST_SOCKET_FILE = '/tmp/test-metrics-docker-cli.sock'


class MetricsTest(DockerClientTestCase):
    test_session = requests.sessions.Session()
    test_env = None
    base_dir = 'tests/fixtures/v3-full'

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        MetricsTest.test_session.mount("http+unix://", UnixHTTPAdapter(TEST_SOCKET_FILE))
        MetricsTest.test_env = os.environ.copy()
        MetricsTest.test_env['METRICS_SOCKET_FILE'] = TEST_SOCKET_FILE
        MetricsServer().start()

    @classmethod
    def test_metrics_help(cls):
        # root `docker-compose` command is considered as a `--help`
        dispatch(cls.base_dir, [], env=MetricsTest.test_env)
        assert cls.get_content() == \
            b'{"command": "compose --help", "context": "moby", ' \
            b'"source": "docker-compose", "status": "success"}'
        dispatch(cls.base_dir, ['help', 'run'], env=MetricsTest.test_env)
        assert cls.get_content() == \
            b'{"command": "compose help", "context": "moby", ' \
            b'"source": "docker-compose", "status": "success"}'
        dispatch(cls.base_dir, ['--help'], env=MetricsTest.test_env)
        assert cls.get_content() == \
            b'{"command": "compose --help", "context": "moby", ' \
            b'"source": "docker-compose", "status": "success"}'
        dispatch(cls.base_dir, ['run', '--help'], env=MetricsTest.test_env)
        assert cls.get_content() == \
            b'{"command": "compose --help run", "context": "moby", ' \
            b'"source": "docker-compose", "status": "success"}'
        dispatch(cls.base_dir, ['up', '--help', 'extra_args'], env=MetricsTest.test_env)
        assert cls.get_content() == \
            b'{"command": "compose --help up", "context": "moby", ' \
            b'"source": "docker-compose", "status": "success"}'

    @classmethod
    def test_metrics_simple_commands(cls):
        dispatch(cls.base_dir, ['ps'], env=MetricsTest.test_env)
        assert cls.get_content() == \
            b'{"command": "compose ps", "context": "moby", ' \
            b'"source": "docker-compose", "status": "success"}'
        dispatch(cls.base_dir, ['version'], env=MetricsTest.test_env)
        assert cls.get_content() == \
            b'{"command": "compose version", "context": "moby", ' \
            b'"source": "docker-compose", "status": "success"}'
        dispatch(cls.base_dir, ['version', '--yyy'], env=MetricsTest.test_env)
        assert cls.get_content() == \
            b'{"command": "compose version", "context": "moby", ' \
            b'"source": "docker-compose", "status": "failure"}'

    @staticmethod
    def get_content():
        resp = MetricsTest.test_session.get("http+unix://localhost")
        print(resp.content)
        return resp.content


def start_server(uri=TEST_SOCKET_FILE):
    try:
        os.remove(uri)
    except OSError:
        pass
    httpd = HTTPServer(uri, MetricsHTTPRequestHandler, False)
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.bind(TEST_SOCKET_FILE)
    sock.listen(0)
    httpd.socket = sock
    print('Serving on ', uri)
    httpd.serve_forever()
    sock.shutdown(socket.SHUT_RDWR)
    sock.close()
    os.remove(uri)


class MetricsServer:
    @classmethod
    def start(cls):
        t = Thread(target=start_server, daemon=True)
        t.start()


class MetricsHTTPRequestHandler(BaseHTTPRequestHandler):
    usages = []

    def do_GET(self):
        self.client_address = ('',)  # avoid exception in BaseHTTPServer.py log_message()
        self.send_response(200)
        self.end_headers()
        for u in MetricsHTTPRequestHandler.usages:
            self.wfile.write(u)
        MetricsHTTPRequestHandler.usages = []

    def do_POST(self):
        self.client_address = ('',)  # avoid exception in BaseHTTPServer.py log_message()
        content_length = int(self.headers['Content-Length'])
        body = self.rfile.read(content_length)
        print(body)
        MetricsHTTPRequestHandler.usages.append(body)
        self.send_response(200)
        self.end_headers()


if __name__ == '__main__':
    logging.getLogger("urllib3").propagate = False
    logging.getLogger("requests").propagate = False
    start_server()
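For reference, the test server above speaks plain HTTP over a Unix socket. The snippet below shows the same round-trip `get_content()` performs, using the `UnixHTTPAdapter` the file already imports — illustrative usage only, assuming the server thread is running:

    import requests
    from docker.transport import UnixHTTPAdapter

    TEST_SOCKET_FILE = '/tmp/test-metrics-docker-cli.sock'
    session = requests.Session()
    session.mount("http+unix://", UnixHTTPAdapter(TEST_SOCKET_FILE))

    # POST a fake usage record, then drain it back the way get_content() does.
    session.post("http+unix://localhost", data=b'{"command": "compose ps"}')
    assert b'compose ps' in session.get("http+unix://localhost").content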
@@ -25,6 +25,7 @@ from compose.const import COMPOSE_SPEC as VERSION
 from compose.const import LABEL_PROJECT
 from compose.const import LABEL_SERVICE
 from compose.container import Container
+from compose.errors import CompletedUnsuccessfully
 from compose.errors import HealthCheckFailed
 from compose.errors import NoHealthCheckConfigured
 from compose.project import Project

@@ -37,6 +38,7 @@ from tests.integration.testcases import no_cluster

 def build_config(**kwargs):
     return config.Config(
+        config_version=kwargs.get('version', VERSION),
         version=kwargs.get('version', VERSION),
         services=kwargs.get('services'),
         volumes=kwargs.get('volumes'),
@@ -1898,6 +1900,106 @@ class ProjectTest(DockerClientTestCase):
         with pytest.raises(NoHealthCheckConfigured):
             svc1.is_healthy()

+    def test_project_up_completed_successfully_dependency(self):
+        config_dict = {
+            'version': '2.1',
+            'services': {
+                'svc1': {
+                    'image': BUSYBOX_IMAGE_WITH_TAG,
+                    'command': 'true'
+                },
+                'svc2': {
+                    'image': BUSYBOX_IMAGE_WITH_TAG,
+                    'command': 'top',
+                    'depends_on': {
+                        'svc1': {'condition': 'service_completed_successfully'},
+                    }
+                }
+            }
+        }
+        config_data = load_config(config_dict)
+        project = Project.from_config(
+            name='composetest', config_data=config_data, client=self.client
+        )
+        project.up()
+
+        svc1 = project.get_service('svc1')
+        svc2 = project.get_service('svc2')
+
+        assert 'svc1' in svc2.get_dependency_names()
+        assert svc2.containers()[0].is_running
+        assert len(svc1.containers()) == 0
+        assert svc1.is_completed_successfully()
+
+    def test_project_up_completed_unsuccessfully_dependency(self):
+        config_dict = {
+            'version': '2.1',
+            'services': {
+                'svc1': {
+                    'image': BUSYBOX_IMAGE_WITH_TAG,
+                    'command': 'false'
+                },
+                'svc2': {
+                    'image': BUSYBOX_IMAGE_WITH_TAG,
+                    'command': 'top',
+                    'depends_on': {
+                        'svc1': {'condition': 'service_completed_successfully'},
+                    }
+                }
+            }
+        }
+        config_data = load_config(config_dict)
+        project = Project.from_config(
+            name='composetest', config_data=config_data, client=self.client
+        )
+        with pytest.raises(ProjectError):
+            project.up()
+
+        svc1 = project.get_service('svc1')
+        svc2 = project.get_service('svc2')
+        assert 'svc1' in svc2.get_dependency_names()
+        assert len(svc2.containers()) == 0
+        with pytest.raises(CompletedUnsuccessfully):
+            svc1.is_completed_successfully()
+
+    def test_project_up_completed_differently_dependencies(self):
+        config_dict = {
+            'version': '2.1',
+            'services': {
+                'svc1': {
+                    'image': BUSYBOX_IMAGE_WITH_TAG,
+                    'command': 'true'
+                },
+                'svc2': {
+                    'image': BUSYBOX_IMAGE_WITH_TAG,
+                    'command': 'false'
+                },
+                'svc3': {
+                    'image': BUSYBOX_IMAGE_WITH_TAG,
+                    'command': 'top',
+                    'depends_on': {
+                        'svc1': {'condition': 'service_completed_successfully'},
+                        'svc2': {'condition': 'service_completed_successfully'},
+                    }
+                }
+            }
+        }
+        config_data = load_config(config_dict)
+        project = Project.from_config(
+            name='composetest', config_data=config_data, client=self.client
+        )
+        with pytest.raises(ProjectError):
+            project.up()
+
+        svc1 = project.get_service('svc1')
+        svc2 = project.get_service('svc2')
+        svc3 = project.get_service('svc3')
+        assert ['svc1', 'svc2'] == svc3.get_dependency_names()
+        assert svc1.is_completed_successfully()
+        assert len(svc3.containers()) == 0
+        with pytest.raises(CompletedUnsuccessfully):
+            svc2.is_completed_successfully()
+
     def test_project_up_seccomp_profile(self):
         seccomp_data = {
             'defaultAction': 'SCMP_ACT_ALLOW',
@@ -948,7 +948,12 @@ class ServiceTest(DockerClientTestCase):
         with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
             f.write("FROM busybox\n")

-        service = self.create_service('web', build={'context': base_dir})
+        service = self.create_service('web',
+                                      build={'context': base_dir},
+                                      environment={
+                                          'COMPOSE_DOCKER_CLI_BUILD': '0',
+                                          'DOCKER_BUILDKIT': '0',
+                                      })
         service.build()
         self.addCleanup(self.client.remove_image, service.image_name)

@@ -964,7 +969,6 @@ class ServiceTest(DockerClientTestCase):
         service = self.create_service('web',
                                       build={'context': base_dir},
                                       environment={
-                                          'COMPOSE_DOCKER_CLI_BUILD': '1',
                                           'DOCKER_BUILDKIT': '1',
                                       })
         service.build(cli=True)

@@ -1015,7 +1019,6 @@ class ServiceTest(DockerClientTestCase):
         web = self.create_service('web',
                                   build={'context': base_dir},
                                   environment={
-                                      'COMPOSE_DOCKER_CLI_BUILD': '1',
                                       'DOCKER_BUILDKIT': '1',
                                   })
         project = Project('composetest', [web], self.client)
@@ -61,6 +61,7 @@ class DockerClientTestCase(unittest.TestCase):

     @classmethod
     def tearDownClass(cls):
+        cls.client.close()
         del cls.client

     def tearDown(self):
tests/unit/cli/colors_test.py (56 lines): Normal file
@@ -0,0 +1,56 @@
import os

import pytest

from compose.cli.colors import AnsiMode
from tests import mock


@pytest.fixture
def tty_stream():
    stream = mock.Mock()
    stream.isatty.return_value = True
    return stream


@pytest.fixture
def non_tty_stream():
    stream = mock.Mock()
    stream.isatty.return_value = False
    return stream


class TestAnsiModeTestCase:

    @mock.patch.dict(os.environ)
    def test_ansi_mode_never(self, tty_stream, non_tty_stream):
        if "CLICOLOR" in os.environ:
            del os.environ["CLICOLOR"]
        assert not AnsiMode.NEVER.use_ansi_codes(tty_stream)
        assert not AnsiMode.NEVER.use_ansi_codes(non_tty_stream)

        os.environ["CLICOLOR"] = "0"
        assert not AnsiMode.NEVER.use_ansi_codes(tty_stream)
        assert not AnsiMode.NEVER.use_ansi_codes(non_tty_stream)

    @mock.patch.dict(os.environ)
    def test_ansi_mode_always(self, tty_stream, non_tty_stream):
        if "CLICOLOR" in os.environ:
            del os.environ["CLICOLOR"]
        assert AnsiMode.ALWAYS.use_ansi_codes(tty_stream)
        assert AnsiMode.ALWAYS.use_ansi_codes(non_tty_stream)

        os.environ["CLICOLOR"] = "0"
        assert AnsiMode.ALWAYS.use_ansi_codes(tty_stream)
        assert AnsiMode.ALWAYS.use_ansi_codes(non_tty_stream)

    @mock.patch.dict(os.environ)
    def test_ansi_mode_auto(self, tty_stream, non_tty_stream):
        if "CLICOLOR" in os.environ:
            del os.environ["CLICOLOR"]
        assert AnsiMode.AUTO.use_ansi_codes(tty_stream)
        assert not AnsiMode.AUTO.use_ansi_codes(non_tty_stream)

        os.environ["CLICOLOR"] = "0"
        assert not AnsiMode.AUTO.use_ansi_codes(tty_stream)
        assert not AnsiMode.AUTO.use_ansi_codes(non_tty_stream)
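The three tests pin down the full truth table: NEVER is always off, ALWAYS is always on, and AUTO requires a TTY and respects CLICOLOR=0. A minimal sketch consistent with them — an assumed reconstruction of `compose.cli.colors.AnsiMode`, not a copy of the actual source:

    import os
    from enum import Enum

    class AnsiMode(Enum):
        NEVER = 'never'
        ALWAYS = 'always'
        AUTO = 'auto'

        def use_ansi_codes(self, stream):
            # CLICOLOR=0 disables colors unless explicitly forced.
            if self is AnsiMode.ALWAYS:
                return True
            if self is AnsiMode.NEVER or os.environ.get('CLICOLOR') == '0':
                return False
            return stream.isatty()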
@@ -14,49 +14,41 @@ class TestGetConfigPathFromOptions:
         paths = ['one.yml', 'two.yml']
         opts = {'--file': paths}
         environment = Environment.from_env_file('.')
-        assert get_config_path_from_options('.', opts, environment) == paths
+        assert get_config_path_from_options(opts, environment) == paths

     def test_single_path_from_env(self):
         with mock.patch.dict(os.environ):
             os.environ['COMPOSE_FILE'] = 'one.yml'
             environment = Environment.from_env_file('.')
-            assert get_config_path_from_options('.', {}, environment) == ['one.yml']
+            assert get_config_path_from_options({}, environment) == ['one.yml']

     @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='posix separator')
     def test_multiple_path_from_env(self):
         with mock.patch.dict(os.environ):
             os.environ['COMPOSE_FILE'] = 'one.yml:two.yml'
             environment = Environment.from_env_file('.')
-            assert get_config_path_from_options(
-                '.', {}, environment
-            ) == ['one.yml', 'two.yml']
+            assert get_config_path_from_options({}, environment) == ['one.yml', 'two.yml']

     @pytest.mark.skipif(not IS_WINDOWS_PLATFORM, reason='windows separator')
     def test_multiple_path_from_env_windows(self):
         with mock.patch.dict(os.environ):
             os.environ['COMPOSE_FILE'] = 'one.yml;two.yml'
             environment = Environment.from_env_file('.')
-            assert get_config_path_from_options(
-                '.', {}, environment
-            ) == ['one.yml', 'two.yml']
+            assert get_config_path_from_options({}, environment) == ['one.yml', 'two.yml']

     def test_multiple_path_from_env_custom_separator(self):
         with mock.patch.dict(os.environ):
             os.environ['COMPOSE_PATH_SEPARATOR'] = '^'
             os.environ['COMPOSE_FILE'] = 'c:\\one.yml^.\\semi;colon.yml'
             environment = Environment.from_env_file('.')
-            assert get_config_path_from_options(
-                '.', {}, environment
-            ) == ['c:\\one.yml', '.\\semi;colon.yml']
+            assert get_config_path_from_options({}, environment) == ['c:\\one.yml', '.\\semi;colon.yml']

     def test_no_path(self):
         environment = Environment.from_env_file('.')
-        assert not get_config_path_from_options('.', {}, environment)
+        assert not get_config_path_from_options({}, environment)

     def test_unicode_path_from_options(self):
         paths = [b'\xe5\xb0\xb1\xe5\x90\x83\xe9\xa5\xad/docker-compose.yml']
         opts = {'--file': paths}
         environment = Environment.from_env_file('.')
-        assert get_config_path_from_options(
-            '.', opts, environment
-        ) == ['就吃饭/docker-compose.yml']
+        assert get_config_path_from_options(opts, environment) == ['就吃饭/docker-compose.yml']
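The tests above pin down how COMPOSE_FILE is split: on ':' under POSIX, ';' on Windows, or on whatever COMPOSE_PATH_SEPARATOR overrides. A standalone sketch of that lookup — my own paraphrase under those assumptions, not the actual `compose.cli.command` code:

    import os

    def config_paths_from_env(environ=os.environ):
        """Split COMPOSE_FILE on COMPOSE_PATH_SEPARATOR (default: os.pathsep)."""
        compose_file = environ.get('COMPOSE_FILE')
        if not compose_file:
            return None
        separator = environ.get('COMPOSE_PATH_SEPARATOR', os.pathsep)
        return compose_file.split(separator)

    # A custom separator lets a path itself contain ';' or ':'.
    assert config_paths_from_env(
        {'COMPOSE_FILE': 'c:\\one.yml^.\\semi;colon.yml', 'COMPOSE_PATH_SEPARATOR': '^'}
    ) == ['c:\\one.yml', '.\\semi;colon.yml']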
@@ -8,7 +8,6 @@ from docker.errors import APIError

 from compose.cli.log_printer import build_log_generator
 from compose.cli.log_printer import build_log_presenters
-from compose.cli.log_printer import build_no_log_generator
 from compose.cli.log_printer import consume_queue
 from compose.cli.log_printer import QueueItem
 from compose.cli.log_printer import wait_on_exit

@@ -75,14 +74,6 @@ def test_wait_on_exit_raises():
     assert expected in wait_on_exit(mock_container)


-def test_build_no_log_generator(mock_container):
-    mock_container.has_api_logs = False
-    mock_container.log_driver = 'none'
-    output, = build_no_log_generator(mock_container, None)
-    assert "WARNING: no logs are available with the 'none' log driver\n" in output
-    assert "exited with code" not in output
-
-
 class TestBuildLogGenerator:

     def test_no_log_stream(self, mock_container):
@@ -137,21 +137,20 @@ class TestCLIMainTestCase:

 class TestSetupConsoleHandlerTestCase:

-    def test_with_tty_verbose(self, logging_handler):
+    def test_with_console_formatter_verbose(self, logging_handler):
         setup_console_handler(logging_handler, True)
         assert type(logging_handler.formatter) == ConsoleWarningFormatter
         assert '%(name)s' in logging_handler.formatter._fmt
         assert '%(funcName)s' in logging_handler.formatter._fmt

-    def test_with_tty_not_verbose(self, logging_handler):
+    def test_with_console_formatter_not_verbose(self, logging_handler):
         setup_console_handler(logging_handler, False)
         assert type(logging_handler.formatter) == ConsoleWarningFormatter
         assert '%(name)s' not in logging_handler.formatter._fmt
         assert '%(funcName)s' not in logging_handler.formatter._fmt

-    def test_with_not_a_tty(self, logging_handler):
-        logging_handler.stream.isatty.return_value = False
-        setup_console_handler(logging_handler, False)
+    def test_without_console_formatter(self, logging_handler):
+        setup_console_handler(logging_handler, False, use_console_formatter=False)
         assert type(logging_handler.formatter) == logging.Formatter

@@ -168,12 +168,14 @@ class ConfigTest(unittest.TestCase):
                 }
             })
         )
+        assert cfg.config_version == VERSION
         assert cfg.version == VERSION

         for version in ['2', '2.0', '2.1', '2.2', '2.3',
                         '3', '3.0', '3.1', '3.2', '3.3', '3.4', '3.5', '3.6', '3.7', '3.8']:
             cfg = config.load(build_config_details({'version': version}))
-            assert cfg.version == version
+            assert cfg.config_version == version
+            assert cfg.version == VERSION

     def test_v1_file_version(self):
         cfg = config.load(build_config_details({'web': {'image': 'busybox'}}))
@@ -236,7 +238,9 @@ class ConfigTest(unittest.TestCase):
             )
         )

-        assert 'Invalid top-level property "web"' in excinfo.exconly()
+        assert "compose.config.errors.ConfigurationError: " \
+               "The Compose file 'filename.yml' is invalid because:\n" \
+               "'web' does not match any of the regexes: '^x-'" in excinfo.exconly()
         assert VERSION_EXPLANATION in excinfo.exconly()

     def test_named_volume_config_empty(self):
@@ -665,7 +669,7 @@ class ConfigTest(unittest.TestCase):

         assert 'Invalid service name \'mong\\o\'' in excinfo.exconly()

-    def test_config_duplicate_cache_from_values_validation_error(self):
+    def test_config_duplicate_cache_from_values_no_validation_error(self):
         with pytest.raises(ConfigurationError) as exc:
             config.load(
                 build_config_details({

@@ -677,7 +681,7 @@ class ConfigTest(unittest.TestCase):
                 })
             )

-        assert 'build.cache_from contains non-unique items' in exc.exconly()
+        assert 'build.cache_from contains non-unique items' not in exc.exconly()

     def test_load_with_multiple_files_v1(self):
         base_file = config.ConfigFile(
@@ -2393,7 +2397,8 @@ web:
             'image': 'busybox',
             'depends_on': {
                 'app1': {'condition': 'service_started'},
-                'app2': {'condition': 'service_healthy'}
+                'app2': {'condition': 'service_healthy'},
+                'app3': {'condition': 'service_completed_successfully'}
             }
         }
         override = {}

@@ -2405,11 +2410,12 @@ web:
             'image': 'busybox',
             'depends_on': {
                 'app1': {'condition': 'service_started'},
-                'app2': {'condition': 'service_healthy'}
+                'app2': {'condition': 'service_healthy'},
+                'app3': {'condition': 'service_completed_successfully'}
             }
         }
         override = {
-            'depends_on': ['app3']
+            'depends_on': ['app4']
         }

         actual = config.merge_service_dicts(base, override, VERSION)

@@ -2418,7 +2424,8 @@ web:
             'depends_on': {
                 'app1': {'condition': 'service_started'},
                 'app2': {'condition': 'service_healthy'},
-                'app3': {'condition': 'service_started'}
+                'app3': {'condition': 'service_completed_successfully'},
+                'app4': {'condition': 'service_started'},
             }
         }
@@ -3563,9 +3570,11 @@ class InterpolationTest(unittest.TestCase):
     @mock.patch.dict(os.environ)
     def test_config_file_with_options_environment_file(self):
         project_dir = 'tests/fixtures/default-env-file'
+        # env-file is relative to current working dir
+        env = Environment.from_env_file(project_dir, project_dir + '/.env2')
         service_dicts = config.load(
             config.find(
-                project_dir, None, Environment.from_env_file(project_dir, '.env2')
+                project_dir, None, env
             )
         ).services
@@ -5229,6 +5238,8 @@ class GetDefaultConfigFilesTestCase(unittest.TestCase):
     files = [
         'docker-compose.yml',
         'docker-compose.yaml',
+        'compose.yml',
+        'compose.yaml',
     ]

     def test_get_config_path_default_file_in_basedir(self):

@@ -5262,8 +5273,10 @@ def get_config_filename_for_files(filenames, subdir=None):
             base_dir = tempfile.mkdtemp(dir=project_dir)
         else:
             base_dir = project_dir
-        filename, = config.get_default_config_files(base_dir)
-        return os.path.basename(filename)
+        filenames = config.get_default_config_files(base_dir)
+        if not filenames:
+            raise config.ComposeFileNotFound(config.SUPPORTED_FILENAMES)
+        return os.path.basename(filenames[0])
     finally:
         shutil.rmtree(project_dir)
@@ -5369,7 +5382,7 @@ class SerializeTest(unittest.TestCase):
         assert serialized_config['secrets']['two'] == {'external': True, 'name': 'two'}

     def test_serialize_ports(self):
-        config_dict = config.Config(version=VERSION, services=[
+        config_dict = config.Config(config_version=VERSION, version=VERSION, services=[
             {
                 'ports': [types.ServicePort('80', '8080', None, None, None)],
                 'image': 'alpine',

@@ -5380,8 +5393,20 @@ class SerializeTest(unittest.TestCase):
         serialized_config = yaml.safe_load(serialize_config(config_dict))
         assert [{'published': 8080, 'target': 80}] == serialized_config['services']['web']['ports']

+    def test_serialize_ports_v1(self):
+        config_dict = config.Config(config_version=V1, version=V1, services=[
+            {
+                'ports': [types.ServicePort('80', '8080', None, None, None)],
+                'image': 'alpine',
+                'name': 'web'
+            }
+        ], volumes={}, networks={}, secrets={}, configs={})
+
+        serialized_config = yaml.safe_load(serialize_config(config_dict))
+        assert ['8080:80/tcp'] == serialized_config['services']['web']['ports']
+
     def test_serialize_ports_with_ext_ip(self):
-        config_dict = config.Config(version=VERSION, services=[
+        config_dict = config.Config(config_version=VERSION, version=VERSION, services=[
             {
                 'ports': [types.ServicePort('80', '8080', None, None, '127.0.0.1')],
                 'image': 'alpine',
@@ -416,7 +416,7 @@ def test_interpolate_mandatory_no_err_msg(defaults_interpolator):
     with pytest.raises(UnsetRequiredSubstitution) as e:
         defaults_interpolator("not ok ${BAZ?}")

-    assert e.value.err == ''
+    assert e.value.err == 'BAZ'


 def test_interpolate_mixed_separators(defaults_interpolator):
@@ -221,34 +221,6 @@ class ContainerTest(unittest.TestCase):
         container = Container(None, self.container_dict, has_been_inspected=True)
         assert container.short_id == self.container_id[:12]

-    def test_has_api_logs(self):
-        container_dict = {
-            'HostConfig': {
-                'LogConfig': {
-                    'Type': 'json-file'
-                }
-            }
-        }
-
-        container = Container(None, container_dict, has_been_inspected=True)
-        assert container.has_api_logs is True
-
-        container_dict['HostConfig']['LogConfig']['Type'] = 'none'
-        container = Container(None, container_dict, has_been_inspected=True)
-        assert container.has_api_logs is False
-
-        container_dict['HostConfig']['LogConfig']['Type'] = 'syslog'
-        container = Container(None, container_dict, has_been_inspected=True)
-        assert container.has_api_logs is False
-
-        container_dict['HostConfig']['LogConfig']['Type'] = 'journald'
-        container = Container(None, container_dict, has_been_inspected=True)
-        assert container.has_api_logs is True
-
-        container_dict['HostConfig']['LogConfig']['Type'] = 'foobar'
-        container = Container(None, container_dict, has_been_inspected=True)
-        assert container.has_api_logs is False
-

 class GetContainerNameTestCase(unittest.TestCase):

tests/unit/metrics/__init__.py (0 lines): Normal file
tests/unit/metrics/metrics_test.py (36 lines): Normal file
@@ -0,0 +1,36 @@
import unittest

from compose.metrics.client import MetricsCommand
from compose.metrics.client import Status


class MetricsTest(unittest.TestCase):
    @classmethod
    def test_metrics(cls):
        assert MetricsCommand('up', 'moby').to_map() == {
            'command': 'compose up',
            'context': 'moby',
            'status': 'success',
            'source': 'docker-compose',
        }

        assert MetricsCommand('down', 'local').to_map() == {
            'command': 'compose down',
            'context': 'local',
            'status': 'success',
            'source': 'docker-compose',
        }

        assert MetricsCommand('help', 'aci', Status.FAILURE).to_map() == {
            'command': 'compose help',
            'context': 'aci',
            'status': 'failure',
            'source': 'docker-compose',
        }

        assert MetricsCommand('run', 'ecs').to_map() == {
            'command': 'compose run',
            'context': 'ecs',
            'status': 'success',
            'source': 'docker-compose',
        }
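A stand-in that satisfies every assertion above — an assumed shape for `compose.metrics.client`, written only to make the expected payload concrete, not the library's actual code:

    from enum import Enum

    class Status(Enum):
        SUCCESS = 'success'
        FAILURE = 'failure'

    class MetricsCommand:
        def __init__(self, command, context, status=Status.SUCCESS):
            self.command, self.context, self.status = command, context, status

        def to_map(self):
            # Subcommands are reported namespaced under "compose".
            return {
                'command': 'compose ' + self.command,
                'context': self.context,
                'status': self.status.value,
                'source': 'docker-compose',
            }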
@@ -3,6 +3,7 @@ from threading import Lock

 from docker.errors import APIError

+from compose.cli.colors import AnsiMode
 from compose.parallel import GlobalLimit
 from compose.parallel import parallel_execute
 from compose.parallel import parallel_execute_iter

@@ -156,7 +157,7 @@ def test_parallel_execute_alignment(capsys):

 def test_parallel_execute_ansi(capsys):
     ParallelStreamWriter.instance = None
-    ParallelStreamWriter.set_noansi(value=False)
+    ParallelStreamWriter.set_default_ansi_mode(AnsiMode.ALWAYS)
     results, errors = parallel_execute(
         objects=["something", "something more"],
         func=lambda x: x,

@@ -172,7 +173,7 @@ def test_parallel_execute_noansi(capsys):

 def test_parallel_execute_noansi(capsys):
     ParallelStreamWriter.instance = None
-    ParallelStreamWriter.set_noansi()
+    ParallelStreamWriter.set_default_ansi_mode(AnsiMode.NEVER)
     results, errors = parallel_execute(
         objects=["something", "something more"],
         func=lambda x: x,
@@ -28,6 +28,7 @@ from compose.service import Service

 def build_config(**kwargs):
     return Config(
+        config_version=kwargs.get('config_version', VERSION),
         version=kwargs.get('version', VERSION),
         services=kwargs.get('services'),
         volumes=kwargs.get('volumes'),
@@ -330,7 +330,7 @@ class ServiceTest(unittest.TestCase):
         assert service.options['environment'] == environment

         assert opts['labels'][LABEL_CONFIG_HASH] == \
-            '689149e6041a85f6fb4945a2146a497ed43c8a5cbd8991753d875b165f1b4de4'
+            '6da0f3ec0d5adf901de304bdc7e0ee44ec5dd7adb08aebc20fe0dd791d4ee5a8'
         assert opts['environment'] == ['also=real']

     def test_get_container_create_options_sets_affinity_with_binds(self):

@@ -700,6 +700,7 @@ class ServiceTest(unittest.TestCase):
         config_dict = service.config_dict()
         expected = {
             'image_id': 'abcd',
+            'ipc_mode': None,
             'options': {'image': 'example.com/foo'},
             'links': [('one', 'one')],
             'net': 'other',

@@ -723,6 +724,7 @@ class ServiceTest(unittest.TestCase):
         config_dict = service.config_dict()
         expected = {
             'image_id': 'abcd',
+            'ipc_mode': None,
             'options': {'image': 'example.com/foo'},
             'links': [],
             'networks': {},