Mirror of https://github.com/docker/compose.git (synced 2026-02-17 22:12:35 +08:00)

Compare commits: release...1.14.0-rc1 (36 commits)
| Author | SHA1 | Date |
|---|---|---|
| | c18a7ad946 | |
| | 79858443f3 | |
| | f32b5e2dd1 | |
| | 89efeccbf6 | |
| | 242e10841f | |
| | 296ed14a47 | |
| | 21c47cedac | |
| | 5b3e8ab53e | |
| | 92381b10f3 | |
| | 0f80986638 | |
| | 067bab88fa | |
| | 9e6226306b | |
| | 4b3b4fd280 | |
| | 4fed4ef89c | |
| | f2f79cd9db | |
| | ca8a5ff278 | |
| | 62ca4eb6e2 | |
| | 615716db25 | |
| | 0d535c0458 | |
| | 42abe07341 | |
| | 2392c1a10f | |
| | 5ce9c7df63 | |
| | cd674b2e20 | |
| | 4356f75782 | |
| | 96a014e634 | |
| | eeecfd516b | |
| | 33acbcbe0a | |
| | 2169ecbb92 | |
| | effe518810 | |
| | af8197e926 | |
| | 612838d767 | |
| | 0d6843a0bc | |
| | ef4331d06e | |
| | e9ee69e813 | |
| | 11ce96e8de | |
| | a0b8472991 | |
CHANGELOG.md (49 changed lines)
@@ -1,6 +1,55 @@
Change log
==========

1.14.0 (2017-06-06)
-------------------

### New features

#### Compose file version 3.3

- Introduced version 3.3 of the `docker-compose.yml` specification.
  This version requires Docker Engine 17.06.0 or above.
  Note: the `credential_spec` key only applies to Swarm services and will
  be ignored by Compose.

#### Compose file version 2.2

- Added the following parameters in service definitions: `cpu_count`,
  `cpu_percent`, `cpus`.

#### Compose file version 2.1

- Added support for build labels. This feature is also available in the
  2.2 and 3.3 formats.

#### All formats

- Added the shorthand `-u` for the `--user` flag in `docker-compose exec`.

- Differences in labels between the Compose file and the remote network
  will now print a warning instead of preventing redeployment.

### Bugfixes

- Fixed a bug where a service's dependencies were rescaled to their
  default scale when running a `docker-compose run` command.

- Fixed a bug where `docker-compose rm` with the `--stop` flag was not
  behaving properly when provided with a list of services to remove.

- Fixed a bug where `cache_from` in the build section would be ignored when
  using more than one Compose file.

- Fixed a bug where override files would not be picked up by Compose if they
  had the `.yaml` extension.

- Fixed a bug on Windows Engine where networks would be incorrectly flagged
  for recreation.

- Fixed a bug where services declaring ports would cause crashes on some
  versions of Python 3.

1.13.0 (2017-05-02)
-------------------
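As a quick illustration of the new 2.2-format CPU parameters listed above, here is a hedged, minimal sketch of a service passing through Compose's own config loader; the 'web' service values are invented for the example, while ConfigFile, ConfigDetails and load are the real entry points that appear elsewhere in this diff.

# Minimal sketch: a 2.2-format service using the new CPU keys, loaded through
# compose.config. The service definition below is illustrative only.
from compose.config.config import ConfigDetails, ConfigFile, load

config_file = ConfigFile(
    'docker-compose.yml',
    {
        'version': '2.2',
        'services': {
            'web': {
                'image': 'busybox:latest',
                'cpu_count': 2,      # new in 2.2
                'cpu_percent': 50,   # new in 2.2 (Windows Engine at runtime)
                'cpus': 0.5,         # new in 2.2, converted to NanoCpus later
            },
        },
    },
)

config_data = load(ConfigDetails('.', [config_file]))
print(config_data.services[0]['cpus'])  # 0.5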
@@ -17,7 +17,7 @@ Using Compose is basically a three-step process.
 1. Define your app's environment with a `Dockerfile` so it can be
    reproduced anywhere.
 2. Define the services that make up your app in `docker-compose.yml` so
-   they can be run together in an isolated environment:
+   they can be run together in an isolated environment.
 3. Lastly, run `docker-compose up` and Compose will start and run your entire app.

 A `docker-compose.yml` looks like this:
@@ -1,4 +1,4 @@
 from __future__ import absolute_import
 from __future__ import unicode_literals

-__version__ = '1.13.0'
+__version__ = '1.14.0-rc1'
@@ -171,12 +171,12 @@ class TopLevelCommand(object):
                                  in the client certificate (for example if your docker host
                                  is an IP address)
       --project-directory PATH  Specify an alternate working directory
-                                 (default: the path of the compose file)
+                                 (default: the path of the Compose file)

     Commands:
       build              Build or rebuild services
       bundle             Generate a Docker bundle from the Compose file
-      config             Validate and view the compose file
+      config             Validate and view the Compose file
       create             Create services
       down               Stop and remove containers, networks, images, and volumes
       events             Receive real time events from containers
@@ -273,7 +273,7 @@ class TopLevelCommand(object):

     def config(self, config_options, options):
         """
-        Validate and view the compose file.
+        Validate and view the Compose file.

         Usage: config [options]
@@ -391,7 +391,7 @@ class TopLevelCommand(object):
         Options:
             -d                Detached mode: Run command in the background.
             --privileged      Give extended privileges to the process.
-            --user USER       Run the command as this user.
+            -u, --user USER   Run the command as this user.
             -T                Disable pseudo-tty allocation. By default `docker-compose exec`
                               allocates a TTY.
             --index=index     index of the container if there are multiple
@@ -627,7 +627,7 @@ class TopLevelCommand(object):

     def pull(self, options):
         """
-        Pulls images for services.
+        Pulls images for services defined in a Compose file, but does not start the containers.

         Usage: pull [options] [SERVICE...]
@@ -680,13 +680,7 @@ class TopLevelCommand(object):
             one_off = OneOffFilter.include

         if options.get('--stop'):
-            running_containers = self.project.containers(
-                service_names=options['SERVICE'], stopped=False, one_off=one_off
-            )
-            self.project.stop(
-                service_names=running_containers,
-                one_off=one_off
-            )
+            self.project.stop(service_names=options['SERVICE'], one_off=one_off)

         all_containers = self.project.containers(
             service_names=options['SERVICE'], stopped=True, one_off=one_off
@@ -764,6 +758,9 @@ class TopLevelCommand(object):

             $ docker-compose scale web=2 worker=3

+        This command is deprecated. Use the up command with the `--scale` flag
+        instead.
+
         Usage: scale [options] [SERVICE=NUM...]

         Options:
@@ -777,6 +774,11 @@ class TopLevelCommand(object):
                 'The scale command is incompatible with the v2.2 format. '
                 'Use the up command with the --scale flag instead.'
             )
+        else:
+            log.warn(
+                'The scale command is deprecated. '
+                'Use the up command with the --scale flag instead.'
+            )

         for service_name, num in parse_scale_args(options['SERVICE=NUM']).items():
             self.project.get_service(service_name).scale(num, timeout=timeout)
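For reference, the SERVICE=NUM pairs consumed by the loop above are parsed into a plain dict; a small sketch, assuming parse_scale_args is importable from compose.cli.main as its use in the hunk suggests:

# Sketch of the SERVICE=NUM parsing the scale loop relies on; the same pairs
# are what `up --scale` accepts as the replacement for `scale`.
from compose.cli.main import parse_scale_args

print(parse_scale_args(['web=2', 'worker=3']))
# {'web': 2, 'worker': 3}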
@@ -1130,7 +1132,9 @@ def run_one_off_container(container_options, project, service, options):
         project.up(
             service_names=deps,
             start_deps=True,
-            strategy=ConvergenceStrategy.never)
+            strategy=ConvergenceStrategy.never,
+            rescale=False
+        )

     project.initialize()
@@ -24,6 +24,7 @@ from .environment import split_env
 from .errors import CircularReference
 from .errors import ComposeFileNotFound
 from .errors import ConfigurationError
+from .errors import DuplicateOverrideFileFound
 from .errors import VERSION_EXPLANATION
 from .interpolation import interpolate_environment_variables
 from .sort_services import get_container_name_from_network_mode
@@ -38,6 +39,7 @@ from .types import VolumeSpec
 from .validation import match_named_volumes
 from .validation import validate_against_config_schema
 from .validation import validate_config_section
+from .validation import validate_cpu
 from .validation import validate_depends_on
 from .validation import validate_extends_file_path
 from .validation import validate_links
@@ -52,8 +54,11 @@ DOCKER_CONFIG_KEYS = [
     'cap_drop',
     'cgroup_parent',
     'command',
+    'cpu_count',
+    'cpu_percent',
     'cpu_quota',
     'cpu_shares',
+    'cpus',
     'cpuset',
     'detach',
     'devices',
@@ -103,6 +108,7 @@ DOCKER_CONFIG_KEYS = [
 ALLOWED_KEYS = DOCKER_CONFIG_KEYS + [
     'build',
     'container_name',
+    'credential_spec',
     'dockerfile',
     'log_driver',
     'log_opt',
@@ -124,7 +130,7 @@ SUPPORTED_FILENAMES = [
     'docker-compose.yaml',
 ]

-DEFAULT_OVERRIDE_FILENAME = 'docker-compose.override.yml'
+DEFAULT_OVERRIDE_FILENAMES = ('docker-compose.override.yml', 'docker-compose.override.yaml')


 log = logging.getLogger(__name__)
@@ -288,8 +294,12 @@ def get_default_config_files(base_dir):


 def get_default_override_file(path):
-    override_filename = os.path.join(path, DEFAULT_OVERRIDE_FILENAME)
-    return [override_filename] if os.path.exists(override_filename) else []
+    override_files_in_path = [os.path.join(path, override_filename) for override_filename
+                              in DEFAULT_OVERRIDE_FILENAMES
+                              if os.path.exists(os.path.join(path, override_filename))]
+    if len(override_files_in_path) > 1:
+        raise DuplicateOverrideFileFound(override_files_in_path)
+    return override_files_in_path


 def find_candidates_in_parent_dirs(filenames, path):
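A small runnable sketch of what this buys (not part of the diff): a lone .yaml override is now found, while having both extensions side by side is rejected instead of one being picked silently. The temporary directory and empty files exist only for the example.

# Hedged illustration of the new override-file lookup.
import os
import tempfile

from compose.config.config import get_default_override_file
from compose.config.errors import DuplicateOverrideFileFound

base_dir = tempfile.mkdtemp()
open(os.path.join(base_dir, 'docker-compose.override.yaml'), 'w').close()
print(get_default_override_file(base_dir))   # the single .yaml override is returned

open(os.path.join(base_dir, 'docker-compose.override.yml'), 'w').close()
try:
    get_default_override_file(base_dir)
except DuplicateOverrideFileFound as exc:
    print(exc)                               # Multiple override files found: ...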
@@ -311,6 +321,27 @@ def find_candidates_in_parent_dirs(filenames, path):
     return (candidates, path)


+def check_swarm_only_config(service_dicts):
+    warning_template = (
+        "Some services ({services}) use the '{key}' key, which will be ignored. "
+        "Compose does not support '{key}' configuration - use "
+        "`docker stack deploy` to deploy to a swarm."
+    )
+
+    def check_swarm_only_key(service_dicts, key):
+        services = [s for s in service_dicts if s.get(key)]
+        if services:
+            log.warn(
+                warning_template.format(
+                    services=", ".join(sorted(s['name'] for s in services)),
+                    key=key
+                )
+            )
+
+    check_swarm_only_key(service_dicts, 'deploy')
+    check_swarm_only_key(service_dicts, 'credential_spec')
+
+
 def load(config_details):
     """Load the configuration from a working directory and a list of
     configuration files. Files are loaded in order, and merged on top
@@ -340,13 +371,7 @@ def load(config_details):
     for service_dict in service_dicts:
         match_named_volumes(service_dict, volumes)

-    services_using_deploy = [s for s in service_dicts if s.get('deploy')]
-    if services_using_deploy:
-        log.warn(
-            "Some services ({}) use the 'deploy' key, which will be ignored. "
-            "Compose does not support deploy configuration - use "
-            "`docker stack deploy` to deploy to a swarm."
-            .format(", ".join(sorted(s['name'] for s in services_using_deploy))))
+    check_swarm_only_config(service_dicts)

     return Config(main_file.version, service_dicts, volumes, networks, secrets)
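A runnable sketch of the consolidated warning wired in above; the service dicts are invented, and only the name and swarm-only keys matter for the check.

# Illustrative only: services using swarm-only keys now trigger one warning per
# key instead of the single hard-coded 'deploy' message.
import logging

from compose.config.config import check_swarm_only_config

logging.basicConfig(level=logging.WARNING)

service_dicts = [
    {'name': 'web', 'image': 'busybox', 'deploy': {'replicas': 3}},
    {'name': 'db', 'image': 'busybox', 'credential_spec': {'file': 'spec.json'}},
]
check_swarm_only_config(service_dicts)
# -> warns that 'web' uses 'deploy' and that 'db' uses 'credential_spec'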
@@ -640,6 +665,7 @@ def validate_service(service_config, service_names, config_file):
     validate_service_constraints(service_dict, service_name, config_file)
     validate_paths(service_dict)

+    validate_cpu(service_config)
     validate_ulimits(service_config)
     validate_network_mode(service_config, service_names)
     validate_depends_on(service_config, service_names)
@@ -874,7 +900,7 @@ def merge_service_dicts(base, override, version):

     md.merge_mapping('environment', parse_environment)
     md.merge_mapping('labels', parse_labels)
-    md.merge_mapping('ulimits', parse_ulimits)
+    md.merge_mapping('ulimits', parse_flat_dict)
     md.merge_mapping('networks', parse_networks)
     md.merge_mapping('sysctls', parse_sysctls)
     md.merge_mapping('depends_on', parse_depends_on)
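A hedged, runnable sketch of what the generic flat-dict merge does when two files both set ulimits (the dictionaries are invented; merging is per top-level key, with the override winning):

# Illustrative merge of ulimits across two Compose files; only the keys present
# in the override replace the corresponding base entries.
from compose.config.config import merge_service_dicts
from compose.const import COMPOSEFILE_V2_2 as V2_2

base = {'image': 'busybox', 'ulimits': {'nproc': 65535, 'nofile': 20000}}
override = {'ulimits': {'nofile': {'soft': 10000, 'hard': 40000}}}

merged = merge_service_dicts(base, override, V2_2)
print(merged['ulimits'])
# {'nproc': 65535, 'nofile': {'soft': 10000, 'hard': 40000}}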
@@ -942,6 +968,8 @@ def merge_build(output, base, override):
     md.merge_scalar('context')
     md.merge_scalar('dockerfile')
     md.merge_mapping('args', parse_build_arguments)
+    md.merge_field('cache_from', merge_unique_items_lists, default=[])
+    md.merge_mapping('labels', parse_labels)
     return dict(md)


@@ -1008,12 +1036,14 @@ parse_depends_on = functools.partial(
 parse_deploy = functools.partial(parse_dict_or_list, split_kv, 'deploy')


-def parse_ulimits(ulimits):
-    if not ulimits:
+def parse_flat_dict(d):
+    if not d:
         return {}

-    if isinstance(ulimits, dict):
-        return dict(ulimits)
+    if isinstance(d, dict):
+        return dict(d)

     raise ConfigurationError("Invalid type: expected mapping")


 def resolve_env_var(key, val, environment):
@@ -58,7 +58,8 @@
             "properties": {
                 "context": {"type": "string"},
                 "dockerfile": {"type": "string"},
-                "args": {"$ref": "#/definitions/list_or_dict"}
+                "args": {"$ref": "#/definitions/list_or_dict"},
+                "labels": {"$ref": "#/definitions/list_or_dict"}
             },
             "additionalProperties": false
         }

@@ -58,7 +58,9 @@
             "properties": {
                 "context": {"type": "string"},
                 "dockerfile": {"type": "string"},
-                "args": {"$ref": "#/definitions/list_or_dict"}
+                "args": {"$ref": "#/definitions/list_or_dict"},
+                "labels": {"$ref": "#/definitions/list_or_dict"},
+                "cache_from": {"$ref": "#/definitions/list_of_strings"}
             },
             "additionalProperties": false
         }
@@ -74,8 +76,11 @@
             ]
         },
         "container_name": {"type": "string"},
+        "cpu_count": {"type": "integer", "minimum": 0},
+        "cpu_percent": {"type": "integer", "minimum": 0, "maximum": 100},
         "cpu_shares": {"type": ["number", "string"]},
         "cpu_quota": {"type": ["number", "string"]},
+        "cpus": {"type": "number", "minimum": 0},
         "cpuset": {"type": "string"},
         "depends_on": {
             "oneOf": [
compose/config/config_schema_v3.3.json (new file, 534 lines)
@@ -0,0 +1,534 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"id": "config_schema_v3.3.json",
|
||||
"type": "object",
|
||||
"required": ["version"],
|
||||
|
||||
"properties": {
|
||||
"version": {
|
||||
"type": "string"
|
||||
},
|
||||
|
||||
"services": {
|
||||
"id": "#/properties/services",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"$ref": "#/definitions/service"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"networks": {
|
||||
"id": "#/properties/networks",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"$ref": "#/definitions/network"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"volumes": {
|
||||
"id": "#/properties/volumes",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"$ref": "#/definitions/volume"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"secrets": {
|
||||
"id": "#/properties/secrets",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"$ref": "#/definitions/secret"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"configs": {
|
||||
"id": "#/properties/configs",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"$ref": "#/definitions/config"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
|
||||
"additionalProperties": false,
|
||||
|
||||
"definitions": {
|
||||
|
||||
"service": {
|
||||
"id": "#/definitions/service",
|
||||
"type": "object",
|
||||
|
||||
"properties": {
|
||||
"deploy": {"$ref": "#/definitions/deployment"},
|
||||
"build": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"context": {"type": "string"},
|
||||
"dockerfile": {"type": "string"},
|
||||
"args": {"$ref": "#/definitions/list_or_dict"},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"},
|
||||
"cache_from": {"$ref": "#/definitions/list_of_strings"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
"cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
"cgroup_parent": {"type": "string"},
|
||||
"command": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"type": "array", "items": {"type": "string"}}
|
||||
]
|
||||
},
|
||||
"configs": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"source": {"type": "string"},
|
||||
"target": {"type": "string"},
|
||||
"uid": {"type": "string"},
|
||||
"gid": {"type": "string"},
|
||||
"mode": {"type": "number"}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"container_name": {"type": "string"},
|
||||
"credential_spec": {"type": "object", "properties": {
|
||||
"file": {"type": "string"},
|
||||
"registry": {"type": "string"}
|
||||
}},
|
||||
"depends_on": {"$ref": "#/definitions/list_of_strings"},
|
||||
"devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
"dns": {"$ref": "#/definitions/string_or_list"},
|
||||
"dns_search": {"$ref": "#/definitions/string_or_list"},
|
||||
"domainname": {"type": "string"},
|
||||
"entrypoint": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"type": "array", "items": {"type": "string"}}
|
||||
]
|
||||
},
|
||||
"env_file": {"$ref": "#/definitions/string_or_list"},
|
||||
"environment": {"$ref": "#/definitions/list_or_dict"},
|
||||
|
||||
"expose": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": ["string", "number"],
|
||||
"format": "expose"
|
||||
},
|
||||
"uniqueItems": true
|
||||
},
|
||||
|
||||
"external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
"extra_hosts": {"$ref": "#/definitions/list_or_dict"},
|
||||
"healthcheck": {"$ref": "#/definitions/healthcheck"},
|
||||
"hostname": {"type": "string"},
|
||||
"image": {"type": "string"},
|
||||
"ipc": {"type": "string"},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"},
|
||||
"links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
|
||||
"logging": {
|
||||
"type": "object",
|
||||
|
||||
"properties": {
|
||||
"driver": {"type": "string"},
|
||||
"options": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^.+$": {"type": ["string", "number", "null"]}
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"mac_address": {"type": "string"},
|
||||
"network_mode": {"type": "string"},
|
||||
|
||||
"networks": {
|
||||
"oneOf": [
|
||||
{"$ref": "#/definitions/list_of_strings"},
|
||||
{
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"aliases": {"$ref": "#/definitions/list_of_strings"},
|
||||
"ipv4_address": {"type": "string"},
|
||||
"ipv6_address": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
{"type": "null"}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"pid": {"type": ["string", "null"]},
|
||||
|
||||
"ports": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{"type": "number", "format": "ports"},
|
||||
{"type": "string", "format": "ports"},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"mode": {"type": "string"},
|
||||
"target": {"type": "integer"},
|
||||
"published": {"type": "integer"},
|
||||
"protocol": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"uniqueItems": true
|
||||
},
|
||||
|
||||
"privileged": {"type": "boolean"},
|
||||
"read_only": {"type": "boolean"},
|
||||
"restart": {"type": "string"},
|
||||
"security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
"shm_size": {"type": ["number", "string"]},
|
||||
"secrets": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"source": {"type": "string"},
|
||||
"target": {"type": "string"},
|
||||
"uid": {"type": "string"},
|
||||
"gid": {"type": "string"},
|
||||
"mode": {"type": "number"}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"sysctls": {"$ref": "#/definitions/list_or_dict"},
|
||||
"stdin_open": {"type": "boolean"},
|
||||
"stop_grace_period": {"type": "string", "format": "duration"},
|
||||
"stop_signal": {"type": "string"},
|
||||
"tmpfs": {"$ref": "#/definitions/string_or_list"},
|
||||
"tty": {"type": "boolean"},
|
||||
"ulimits": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-z]+$": {
|
||||
"oneOf": [
|
||||
{"type": "integer"},
|
||||
{
|
||||
"type":"object",
|
||||
"properties": {
|
||||
"hard": {"type": "integer"},
|
||||
"soft": {"type": "integer"}
|
||||
},
|
||||
"required": ["soft", "hard"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"user": {"type": "string"},
|
||||
"userns_mode": {"type": "string"},
|
||||
"volumes": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"type": "object",
|
||||
"required": ["type"],
|
||||
"properties": {
|
||||
"type": {"type": "string"},
|
||||
"source": {"type": "string"},
|
||||
"target": {"type": "string"},
|
||||
"read_only": {"type": "boolean"},
|
||||
"consistency": {"type": "string"},
|
||||
"bind": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"propagation": {"type": "string"}
|
||||
}
|
||||
},
|
||||
"volume": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"nocopy": {"type": "boolean"}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"uniqueItems": true
|
||||
}
|
||||
},
|
||||
"working_dir": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"healthcheck": {
|
||||
"id": "#/definitions/healthcheck",
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"disable": {"type": "boolean"},
|
||||
"interval": {"type": "string"},
|
||||
"retries": {"type": "number"},
|
||||
"test": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"type": "array", "items": {"type": "string"}}
|
||||
]
|
||||
},
|
||||
"timeout": {"type": "string"}
|
||||
}
|
||||
},
|
||||
"deployment": {
|
||||
"id": "#/definitions/deployment",
|
||||
"type": ["object", "null"],
|
||||
"properties": {
|
||||
"mode": {"type": "string"},
|
||||
"endpoint_mode": {"type": "string"},
|
||||
"replicas": {"type": "integer"},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"},
|
||||
"update_config": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"parallelism": {"type": "integer"},
|
||||
"delay": {"type": "string", "format": "duration"},
|
||||
"failure_action": {"type": "string"},
|
||||
"monitor": {"type": "string", "format": "duration"},
|
||||
"max_failure_ratio": {"type": "number"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"resources": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"limits": {"$ref": "#/definitions/resource"},
|
||||
"reservations": {"$ref": "#/definitions/resource"}
|
||||
}
|
||||
},
|
||||
"restart_policy": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"condition": {"type": "string"},
|
||||
"delay": {"type": "string", "format": "duration"},
|
||||
"max_attempts": {"type": "integer"},
|
||||
"window": {"type": "string", "format": "duration"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"placement": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"constraints": {"type": "array", "items": {"type": "string"}},
|
||||
"preferences": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"spread": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"resource": {
|
||||
"id": "#/definitions/resource",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"cpus": {"type": "string"},
|
||||
"memory": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"network": {
|
||||
"id": "#/definitions/network",
|
||||
"type": ["object", "null"],
|
||||
"properties": {
|
||||
"driver": {"type": "string"},
|
||||
"driver_opts": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^.+$": {"type": ["string", "number"]}
|
||||
}
|
||||
},
|
||||
"ipam": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"driver": {"type": "string"},
|
||||
"config": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"subnet": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"external": {
|
||||
"type": ["boolean", "object"],
|
||||
"properties": {
|
||||
"name": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"internal": {"type": "boolean"},
|
||||
"attachable": {"type": "boolean"},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"volume": {
|
||||
"id": "#/definitions/volume",
|
||||
"type": ["object", "null"],
|
||||
"properties": {
|
||||
"driver": {"type": "string"},
|
||||
"driver_opts": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^.+$": {"type": ["string", "number"]}
|
||||
}
|
||||
},
|
||||
"external": {
|
||||
"type": ["boolean", "object"],
|
||||
"properties": {
|
||||
"name": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"secret": {
|
||||
"id": "#/definitions/secret",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"file": {"type": "string"},
|
||||
"external": {
|
||||
"type": ["boolean", "object"],
|
||||
"properties": {
|
||||
"name": {"type": "string"}
|
||||
}
|
||||
},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"config": {
|
||||
"id": "#/definitions/config",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"file": {"type": "string"},
|
||||
"external": {
|
||||
"type": ["boolean", "object"],
|
||||
"properties": {
|
||||
"name": {"type": "string"}
|
||||
}
|
||||
},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"string_or_list": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"$ref": "#/definitions/list_of_strings"}
|
||||
]
|
||||
},
|
||||
|
||||
"list_of_strings": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"},
|
||||
"uniqueItems": true
|
||||
},
|
||||
|
||||
"list_or_dict": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
".+": {
|
||||
"type": ["string", "number", "null"]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
{"type": "array", "items": {"type": "string"}, "uniqueItems": true}
|
||||
]
|
||||
},
|
||||
|
||||
"constraints": {
|
||||
"service": {
|
||||
"id": "#/definitions/constraints/service",
|
||||
"anyOf": [
|
||||
{"required": ["build"]},
|
||||
{"required": ["image"]}
|
||||
],
|
||||
"properties": {
|
||||
"build": {
|
||||
"required": ["context"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -44,3 +44,12 @@ class ComposeFileNotFound(ConfigurationError):

 Supported filenames: %s
 """ % ", ".join(supported_filenames))
+
+
+class DuplicateOverrideFileFound(ConfigurationError):
+    def __init__(self, override_filenames):
+        self.override_filenames = override_filenames
+        super(DuplicateOverrideFileFound, self).__init__(
+            "Multiple override files found: {}. You may only use a single "
+            "override file.".format(", ".join(override_filenames))
+        )
@@ -10,6 +10,7 @@ from compose.const import COMPOSEFILE_V2_1 as V2_1
 from compose.const import COMPOSEFILE_V2_2 as V2_2
 from compose.const import COMPOSEFILE_V3_1 as V3_1
 from compose.const import COMPOSEFILE_V3_2 as V3_2
+from compose.const import COMPOSEFILE_V3_3 as V3_3


 def serialize_config_type(dumper, data):
@@ -50,7 +51,7 @@ def denormalize_config(config, image_digests=None):
         if 'external_name' in vol_conf:
             del vol_conf['external_name']

-    if config.version in (V3_1, V3_2):
+    if config.version in (V3_1, V3_2, V3_3):
         result['secrets'] = config.secrets.copy()
         for secret_name, secret_conf in result['secrets'].items():
             if 'external_name' in secret_conf:
@@ -114,7 +115,7 @@ def denormalize_service_dict(service_dict, version, image_digest=None):
             service_dict['healthcheck']['timeout']
         )

-    if 'ports' in service_dict and version not in (V3_2,):
+    if 'ports' in service_dict and version not in (V3_2, V3_3):
         service_dict['ports'] = [
             p.legacy_repr() if isinstance(p, types.ServicePort) else p
             for p in service_dict['ports']
@@ -258,7 +258,7 @@ class ServiceSecret(namedtuple('_ServiceSecret', 'source target uid gid mode')):

     def repr(self):
         return dict(
-            [(k, v) for k, v in self._asdict().items() if v is not None]
+            [(k, v) for k, v in zip(self._fields, self) if v is not None]
         )


@@ -306,7 +306,7 @@ class ServicePort(namedtuple('_ServicePort', 'target published protocol mode ext

     def repr(self):
         return dict(
-            [(k, v) for k, v in self._asdict().items() if v is not None]
+            [(k, v) for k, v in zip(self._fields, self) if v is not None]
         )

     def legacy_repr(self):
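The switch from _asdict() to zip(self._fields, self) is what fixes the Python 3 port crash noted in the changelog; a small sketch of the resulting repr(), assuming ServicePort.parse accepts a port string and returns a list as it does elsewhere in the codebase:

# Illustrative use of ServicePort.repr(); parse() returns a list because one
# port string can expand to several ports.
from compose.config.types import ServicePort

port = ServicePort.parse('8080:80/tcp')[0]
print(port.repr())   # a dict containing only the non-None fields
                     # (target, published, protocol)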
@@ -15,6 +15,7 @@ from jsonschema import RefResolver
 from jsonschema import ValidationError

 from ..const import COMPOSEFILE_V1 as V1
+from ..const import NANOCPUS_SCALE
 from .errors import ConfigurationError
 from .errors import VERSION_EXPLANATION
 from .sort_services import get_service_name_from_network_mode
@@ -387,6 +388,16 @@ def validate_service_constraints(config, service_name, config_file):
     handle_errors(validator.iter_errors(config), handler, None)


+def validate_cpu(service_config):
+    cpus = service_config.config.get('cpus')
+    if not cpus:
+        return
+    nano_cpus = cpus * NANOCPUS_SCALE
+    if isinstance(nano_cpus, float) and not nano_cpus.is_integer():
+        raise ConfigurationError(
+            "cpus must have nine or less digits after decimal point")
+
+
 def get_schema_path():
     return os.path.dirname(os.path.abspath(__file__))
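Restating the check above as a standalone snippet makes the "nine digits" limit concrete: cpus is converted to nano-CPUs, so anything finer than one nano-CPU cannot be represented. This is a restatement for illustration, not an import of validate_cpu itself.

# Standalone restatement of the validate_cpu rule.
NANOCPUS_SCALE = 1000000000

def cpus_is_representable(cpus):
    nano_cpus = cpus * NANOCPUS_SCALE
    return not (isinstance(nano_cpus, float) and not nano_cpus.is_integer())

print(cpus_is_representable(0.5))           # True  - 500000000 nano-CPUs
print(cpus_is_representable(0.000000001))   # True  - exactly one nano-CPU
print(cpus_is_representable(0.0000000001))  # False - finer than one nano-CPU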
@@ -15,6 +15,7 @@ LABEL_NETWORK = 'com.docker.compose.network'
 LABEL_VERSION = 'com.docker.compose.version'
 LABEL_VOLUME = 'com.docker.compose.volume'
 LABEL_CONFIG_HASH = 'com.docker.compose.config-hash'
+NANOCPUS_SCALE = 1000000000

 SECRETS_PATH = '/run/secrets'

@@ -26,6 +27,7 @@ COMPOSEFILE_V2_2 = '2.2'
 COMPOSEFILE_V3_0 = '3.0'
 COMPOSEFILE_V3_1 = '3.1'
 COMPOSEFILE_V3_2 = '3.2'
+COMPOSEFILE_V3_3 = '3.3'

 API_VERSIONS = {
     COMPOSEFILE_V1: '1.21',
@@ -35,6 +37,7 @@ API_VERSIONS = {
     COMPOSEFILE_V3_0: '1.25',
     COMPOSEFILE_V3_1: '1.25',
     COMPOSEFILE_V3_2: '1.25',
+    COMPOSEFILE_V3_3: '1.30',
 }

 API_VERSION_TO_ENGINE_VERSION = {
@@ -45,4 +48,5 @@ API_VERSION_TO_ENGINE_VERSION = {
     API_VERSIONS[COMPOSEFILE_V3_0]: '1.13.0',
     API_VERSIONS[COMPOSEFILE_V3_1]: '1.13.0',
     API_VERSIONS[COMPOSEFILE_V3_2]: '1.13.0',
+    API_VERSIONS[COMPOSEFILE_V3_3]: '17.06.0',
 }
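These two tables are where the changelog's Engine requirement comes from; a short runnable lookup using the constants added above:

# The 3.3 format maps to Docker API 1.30, which in turn maps to Engine 17.06.0.
from compose.const import API_VERSION_TO_ENGINE_VERSION, API_VERSIONS, COMPOSEFILE_V3_3

api_version = API_VERSIONS[COMPOSEFILE_V3_3]
print(api_version)                                 # 1.30
print(API_VERSION_TO_ENGINE_VERSION[api_version])  # 17.06.0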
@@ -18,6 +18,7 @@ log = logging.getLogger(__name__)

 OPTS_EXCEPTIONS = [
     'com.docker.network.driver.overlay.vxlanid_list',
+    'com.docker.network.windowsshim.hnsid'
 ]


@@ -187,10 +188,13 @@ def check_remote_network_config(remote, local):
     local_labels = local.labels or {}
     remote_labels = remote.get('Labels', {})
     for k in set.union(set(remote_labels.keys()), set(local_labels.keys())):
-        if k.startswith('com.docker.compose.'):  # We are only interested in user-specified labels
+        if k.startswith('com.docker.'):  # We are only interested in user-specified labels
             continue
         if remote_labels.get(k) != local_labels.get(k):
-            raise NetworkConfigChangedError(local.full_name, 'label "{}"'.format(k))
+            log.warn(
+                'Network {}: label "{}" has changed. It may need to be'
+                ' recreated.'.format(local.full_name, k)
+            )


 def build_networks(name, config_data, client):
@@ -382,7 +382,8 @@ class Project(object):
            timeout=None,
            detached=False,
            remove_orphans=False,
-           scale_override=None):
+           scale_override=None,
+           rescale=True):

         warn_for_swarm_mode(self.client)

@@ -405,7 +406,8 @@ class Project(object):
                 plans[service.name],
                 timeout=timeout,
                 detached=detached,
-                scale_override=scale_override.get(service.name)
+                scale_override=scale_override.get(service.name),
+                rescale=rescale
             )

         def get_deps(service):
@@ -34,6 +34,7 @@ from .const import LABEL_ONE_OFF
 from .const import LABEL_PROJECT
 from .const import LABEL_SERVICE
 from .const import LABEL_VERSION
+from .const import NANOCPUS_SCALE
 from .container import Container
 from .errors import HealthCheckFailed
 from .errors import NoHealthCheckConfigured
@@ -52,7 +53,10 @@ HOST_CONFIG_KEYS = [
     'cap_add',
     'cap_drop',
     'cgroup_parent',
+    'cpu_count',
+    'cpu_percent',
     'cpu_quota',
+    'cpus',
     'devices',
     'dns',
     'dns_search',
@@ -390,7 +394,7 @@ class Service(object):
         return containers

     def _execute_convergence_recreate(self, containers, scale, timeout, detached, start):
-        if len(containers) > scale:
+        if scale is not None and len(containers) > scale:
             self._downscale(containers[scale:], timeout)
             containers = containers[:scale]

@@ -408,14 +412,14 @@ class Service(object):
             for error in errors.values():
                 raise OperationFailedError(error)

-        if len(containers) < scale:
+        if scale is not None and len(containers) < scale:
             containers.extend(self._execute_convergence_create(
                 scale - len(containers), detached, start
             ))
         return containers

     def _execute_convergence_start(self, containers, scale, timeout, detached, start):
-        if len(containers) > scale:
+        if scale is not None and len(containers) > scale:
             self._downscale(containers[scale:], timeout)
             containers = containers[:scale]
         if start:
@@ -429,7 +433,7 @@ class Service(object):
             for error in errors.values():
                 raise OperationFailedError(error)

-        if len(containers) < scale:
+        if scale is not None and len(containers) < scale:
             containers.extend(self._execute_convergence_create(
                 scale - len(containers), detached, start
             ))
@@ -448,7 +452,7 @@ class Service(object):
         )

     def execute_convergence_plan(self, plan, timeout=None, detached=False,
-                                 start=True, scale_override=None):
+                                 start=True, scale_override=None, rescale=True):
         (action, containers) = plan
         scale = scale_override if scale_override is not None else self.scale_num
         containers = sorted(containers, key=attrgetter('number'))
@@ -460,6 +464,11 @@ class Service(object):
                 scale, detached, start
             )

+        # The create action always needs an initial scale, but otherwise
+        # we set scale to None in no-rescale scenarios (`run` dependencies)
+        if not rescale:
+            scale = None
+
         if action == 'recreate':
             return self._execute_convergence_recreate(
                 containers, scale, timeout, detached, start
@@ -793,6 +802,10 @@ class Service(object):
         init_path = options.get('init')
         options['init'] = True

+        nano_cpus = None
+        if 'cpus' in options:
+            nano_cpus = int(options.get('cpus') * NANOCPUS_SCALE)
+
         return self.client.create_host_config(
             links=self._get_links(link_to_self=one_off),
             port_bindings=build_port_bindings(
@@ -832,6 +845,9 @@ class Service(object):
             init=options.get('init', None),
             init_path=init_path,
             isolation=options.get('isolation'),
+            cpu_count=options.get('cpu_count'),
+            cpu_percent=options.get('cpu_percent'),
+            nano_cpus=nano_cpus,
         )

     def get_secret_volumes(self):
@@ -868,6 +884,7 @@ class Service(object):
             nocache=no_cache,
             dockerfile=build_opts.get('dockerfile', None),
             cache_from=build_opts.get('cache_from', None),
+            labels=build_opts.get('labels', None),
             buildargs=build_args
         )
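The cpus-to-nano_cpus conversion added in the host-config hunk above is a straight scaling; restated as a standalone snippet (the options dict is illustrative):

# Standalone restatement of the conversion performed before create_host_config:
# a fractional `cpus` value becomes an integer NanoCpus count for the Engine API.
NANOCPUS_SCALE = 1000000000

options = {'cpus': 1.5}      # illustrative service options
nano_cpus = None
if 'cpus' in options:
    nano_cpus = int(options.get('cpus') * NANOCPUS_SCALE)

print(nano_cpus)             # 1500000000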
@@ -224,14 +224,14 @@ _docker_compose_events() {
|
||||
|
||||
_docker_compose_exec() {
|
||||
case "$prev" in
|
||||
--index|--user)
|
||||
--index|--user|-u)
|
||||
return
|
||||
;;
|
||||
esac
|
||||
|
||||
case "$cur" in
|
||||
-*)
|
||||
COMPREPLY=( $( compgen -W "-d --help --index --privileged -T --user" -- "$cur" ) )
|
||||
COMPREPLY=( $( compgen -W "-d --help --index --privileged -T --user -u" -- "$cur" ) )
|
||||
;;
|
||||
*)
|
||||
__docker_compose_services_running
|
||||
|
||||
@@ -241,7 +241,7 @@ __docker-compose_subcommand() {
|
||||
$opts_help \
|
||||
'-d[Detached mode: Run command in the background.]' \
|
||||
'--privileged[Give extended privileges to the process.]' \
|
||||
'--user=[Run the command as this user.]:username:_users' \
|
||||
'(-u --user)'{-u,--user=}'[Run the command as this user.]:username:_users' \
|
||||
'-T[Disable pseudo-tty allocation. By default `docker-compose exec` allocates a TTY.]' \
|
||||
'--index=[Index of the container if there are multiple instances of a service \[default: 1\]]:index: ' \
|
||||
'(-):running services:__docker-compose_runningservices' \
|
||||
|
||||
@@ -52,6 +52,11 @@ exe = EXE(pyz,
|
||||
'compose/config/config_schema_v3.2.json',
|
||||
'DATA'
|
||||
),
|
||||
(
|
||||
'compose/config/config_schema_v3.3.json',
|
||||
'compose/config/config_schema_v3.3.json',
|
||||
'DATA'
|
||||
),
|
||||
(
|
||||
'compose/GITSHA',
|
||||
'compose/GITSHA',
|
||||
|
||||
32
script/release/download-binaries
Executable file
32
script/release/download-binaries
Executable file
@@ -0,0 +1,32 @@
|
||||
#!/bin/bash
|
||||
|
||||
function usage() {
|
||||
>&2 cat << EOM
|
||||
Download Linux, Mac OS and Windows binaries from remote endpoints
|
||||
|
||||
Usage:
|
||||
|
||||
$0 <version>
|
||||
|
||||
Options:
|
||||
|
||||
version version string for the release (ex: 1.6.0)
|
||||
|
||||
EOM
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
[ -n "$1" ] || usage
|
||||
VERSION=$1
|
||||
BASE_BINTRAY_URL=https://dl.bintray.com/docker-compose/bump-$VERSION/
|
||||
DESTINATION=binaries-$VERSION
|
||||
APPVEYOR_URL=https://ci.appveyor.com/api/projects/docker/compose/\
|
||||
artifacts/dist%2Fdocker-compose-Windows-x86_64.exe?branch=bump-$VERSION
|
||||
|
||||
mkdir $DESTINATION
|
||||
|
||||
|
||||
wget -O $DESTINATION/docker-compose-Darwin-x86_64 $BASE_BINTRAY_URL/docker-compose-Darwin-x86_64
|
||||
wget -O $DESTINATION/docker-compose-Linux-x86_64 $BASE_BINTRAY_URL/docker-compose-Linux-x86_64
|
||||
wget -O $DESTINATION/docker-compose-Windows-x86_64.exe $APPVEYOR_URL
|
||||
@@ -15,7 +15,7 @@

 set -e

-VERSION="1.13.0"
+VERSION="1.14.0-rc1"
 IMAGE="docker/compose:$VERSION"


@@ -35,6 +35,7 @@ if [ "$(pwd)" != '/' ]; then
     VOLUMES="-v $(pwd):$(pwd)"
 fi
 if [ -n "$COMPOSE_FILE" ]; then
     COMPOSE_OPTIONS="$COMPOSE_OPTIONS -e COMPOSE_FILE=$COMPOSE_FILE"
+    compose_dir=$(realpath $(dirname $COMPOSE_FILE))
 fi
 # TODO: also check --file argument
setup.py (2 changed lines)
@@ -37,7 +37,7 @@ install_requires = [
     'requests >= 2.6.1, != 2.11.0, < 2.12',
     'texttable >= 0.8.1, < 0.9',
     'websocket-client >= 0.32.0, < 1.0',
-    'docker >= 2.2.1, < 3.0',
+    'docker >= 2.3.0, < 3.0',
     'dockerpty >= 0.4.1, < 0.5',
     'six >= 1.3.0, < 2',
     'jsonschema >= 2.5.1, < 3',
@@ -21,6 +21,7 @@ from docker import errors
|
||||
from .. import mock
|
||||
from ..helpers import create_host_file
|
||||
from compose.cli.command import get_project
|
||||
from compose.config.errors import DuplicateOverrideFileFound
|
||||
from compose.container import Container
|
||||
from compose.project import OneOffFilter
|
||||
from compose.utils import nanoseconds_from_time_seconds
|
||||
@@ -31,7 +32,6 @@ from tests.integration.testcases import v2_1_only
|
||||
from tests.integration.testcases import v2_only
|
||||
from tests.integration.testcases import v3_only
|
||||
|
||||
|
||||
ProcessResult = namedtuple('ProcessResult', 'stdout stderr')
|
||||
|
||||
|
||||
@@ -1211,6 +1211,17 @@ class CLITestCase(DockerClientTestCase):
|
||||
self.assertEqual(len(db.containers()), 1)
|
||||
self.assertEqual(len(console.containers()), 0)
|
||||
|
||||
def test_run_service_with_scaled_dependencies(self):
|
||||
self.base_dir = 'tests/fixtures/v2-dependencies'
|
||||
self.dispatch(['up', '-d', '--scale', 'db=2', '--scale', 'console=0'])
|
||||
db = self.project.get_service('db')
|
||||
console = self.project.get_service('console')
|
||||
assert len(db.containers()) == 2
|
||||
assert len(console.containers()) == 0
|
||||
self.dispatch(['run', 'web', '/bin/true'], None)
|
||||
assert len(db.containers()) == 2
|
||||
assert len(console.containers()) == 0
|
||||
|
||||
def test_run_with_no_deps(self):
|
||||
self.base_dir = 'tests/fixtures/links-composefile'
|
||||
self.dispatch(['run', '--no-deps', 'web', '/bin/true'])
|
||||
@@ -1616,8 +1627,24 @@ class CLITestCase(DockerClientTestCase):
|
||||
service = self.project.get_service('simple')
|
||||
service.create_container()
|
||||
self.dispatch(['rm', '-fs'], None)
|
||||
self.assertEqual(len(service.containers(stopped=True)), 0)
|
||||
|
||||
def test_rm_stop(self):
|
||||
self.dispatch(['up', '-d'], None)
|
||||
simple = self.project.get_service('simple')
|
||||
self.assertEqual(len(simple.containers()), 0)
|
||||
another = self.project.get_service('another')
|
||||
assert len(simple.containers()) == 1
|
||||
assert len(another.containers()) == 1
|
||||
self.dispatch(['rm', '-fs'], None)
|
||||
assert len(simple.containers(stopped=True)) == 0
|
||||
assert len(another.containers(stopped=True)) == 0
|
||||
|
||||
self.dispatch(['up', '-d'], None)
|
||||
assert len(simple.containers()) == 1
|
||||
assert len(another.containers()) == 1
|
||||
self.dispatch(['rm', '-fs', 'another'], None)
|
||||
assert len(simple.containers()) == 1
|
||||
assert len(another.containers(stopped=True)) == 0
|
||||
|
||||
def test_rm_all(self):
|
||||
service = self.project.get_service('simple')
|
||||
@@ -2138,3 +2165,25 @@ class CLITestCase(DockerClientTestCase):
|
||||
assert 'busybox' in result.stdout
|
||||
assert 'multiplecomposefiles_another_1' in result.stdout
|
||||
assert 'multiplecomposefiles_simple_1' in result.stdout
|
||||
|
||||
def test_up_with_override_yaml(self):
|
||||
self.base_dir = 'tests/fixtures/override-yaml-files'
|
||||
self._project = get_project(self.base_dir, [])
|
||||
self.dispatch(
|
||||
[
|
||||
'up', '-d',
|
||||
],
|
||||
None)
|
||||
|
||||
containers = self.project.containers()
|
||||
self.assertEqual(len(containers), 2)
|
||||
|
||||
web, db = containers
|
||||
self.assertEqual(web.human_readable_command, 'sleep 100')
|
||||
self.assertEqual(db.human_readable_command, 'top')
|
||||
|
||||
def test_up_with_duplicate_override_yaml_files(self):
|
||||
self.base_dir = 'tests/fixtures/duplicate-override-yaml-files'
|
||||
with self.assertRaises(DuplicateOverrideFileFound):
|
||||
get_project(self.base_dir, [])
|
||||
self.base_dir = None
|
||||
|
||||
3
tests/fixtures/duplicate-override-yaml-files/docker-compose.override.yaml
vendored
Normal file
3
tests/fixtures/duplicate-override-yaml-files/docker-compose.override.yaml
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
|
||||
db:
|
||||
command: "top"
|
||||
3
tests/fixtures/duplicate-override-yaml-files/docker-compose.override.yml
vendored
Normal file
3
tests/fixtures/duplicate-override-yaml-files/docker-compose.override.yml
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
|
||||
db:
|
||||
command: "sleep 300"
|
||||
10
tests/fixtures/duplicate-override-yaml-files/docker-compose.yml
vendored
Normal file
10
tests/fixtures/duplicate-override-yaml-files/docker-compose.yml
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
|
||||
web:
|
||||
image: busybox:latest
|
||||
command: "sleep 100"
|
||||
links:
|
||||
- db
|
||||
|
||||
db:
|
||||
image: busybox:latest
|
||||
command: "sleep 200"
|
||||
3
tests/fixtures/override-yaml-files/docker-compose.override.yaml
vendored
Normal file
3
tests/fixtures/override-yaml-files/docker-compose.override.yaml
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
|
||||
db:
|
||||
command: "top"
|
||||
10
tests/fixtures/override-yaml-files/docker-compose.yml
vendored
Normal file
10
tests/fixtures/override-yaml-files/docker-compose.yml
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
|
||||
web:
|
||||
image: busybox:latest
|
||||
command: "sleep 100"
|
||||
links:
|
||||
- db
|
||||
|
||||
db:
|
||||
image: busybox:latest
|
||||
command: "sleep 200"
|
||||
@@ -19,6 +19,7 @@ from .testcases import pull_busybox
|
||||
from compose import __version__
|
||||
from compose.config.types import VolumeFromSpec
|
||||
from compose.config.types import VolumeSpec
|
||||
from compose.const import IS_WINDOWS_PLATFORM
|
||||
from compose.const import LABEL_CONFIG_HASH
|
||||
from compose.const import LABEL_CONTAINER_NUMBER
|
||||
from compose.const import LABEL_ONE_OFF
|
||||
@@ -33,6 +34,7 @@ from compose.service import ConvergenceStrategy
|
||||
from compose.service import NetworkMode
|
||||
from compose.service import Service
|
||||
from tests.integration.testcases import v2_1_only
|
||||
from tests.integration.testcases import v2_2_only
|
||||
from tests.integration.testcases import v2_only
|
||||
from tests.integration.testcases import v3_only
|
||||
|
||||
@@ -110,6 +112,31 @@ class ServiceTest(DockerClientTestCase):
|
||||
container.start()
|
||||
self.assertEqual(container.get('HostConfig.CpuQuota'), 40000)
|
||||
|
||||
@v2_2_only()
|
||||
def test_create_container_with_cpu_count(self):
|
||||
self.require_api_version('1.25')
|
||||
service = self.create_service('db', cpu_count=2)
|
||||
container = service.create_container()
|
||||
service.start_container(container)
|
||||
self.assertEqual(container.get('HostConfig.CpuCount'), 2)
|
||||
|
||||
@v2_2_only()
|
||||
@pytest.mark.skipif(not IS_WINDOWS_PLATFORM, reason='cpu_percent is not supported for Linux')
|
||||
def test_create_container_with_cpu_percent(self):
|
||||
self.require_api_version('1.25')
|
||||
service = self.create_service('db', cpu_percent=12)
|
||||
container = service.create_container()
|
||||
service.start_container(container)
|
||||
self.assertEqual(container.get('HostConfig.CpuPercent'), 12)
|
||||
|
||||
@v2_2_only()
|
||||
def test_create_container_with_cpus(self):
|
||||
self.require_api_version('1.25')
|
||||
service = self.create_service('db', cpus=1)
|
||||
container = service.create_container()
|
||||
service.start_container(container)
|
||||
self.assertEqual(container.get('HostConfig.NanoCpus'), 1000000000)
|
||||
|
||||
def test_create_container_with_shm_size(self):
|
||||
self.require_api_version('1.22')
|
||||
service = self.create_service('db', shm_size=67108864)
|
||||
@@ -639,6 +666,21 @@ class ServiceTest(DockerClientTestCase):
|
||||
assert service.image()
|
||||
assert "build_version=2" in service.image()['ContainerConfig']['Cmd']
|
||||
|
||||
def test_build_with_build_labels(self):
|
||||
base_dir = tempfile.mkdtemp()
|
||||
self.addCleanup(shutil.rmtree, base_dir)
|
||||
|
||||
with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
|
||||
f.write('FROM busybox\n')
|
||||
|
||||
service = self.create_service('buildlabels', build={
|
||||
'context': text_type(base_dir),
|
||||
'labels': {'com.docker.compose.test': 'true'}
|
||||
})
|
||||
service.build()
|
||||
assert service.image()
|
||||
assert service.image()['Config']['Labels']['com.docker.compose.test'] == 'true'
|
||||
|
||||
def test_start_container_stays_unprivileged(self):
|
||||
service = self.create_service('web')
|
||||
container = create_and_start_container(service).inspect()
|
||||
|
||||
@@ -15,6 +15,7 @@ from compose.const import API_VERSIONS
|
||||
from compose.const import COMPOSEFILE_V1 as V1
|
||||
from compose.const import COMPOSEFILE_V2_0 as V2_0
|
||||
from compose.const import COMPOSEFILE_V2_0 as V2_1
|
||||
from compose.const import COMPOSEFILE_V2_2 as V2_2
|
||||
from compose.const import COMPOSEFILE_V3_2 as V3_2
|
||||
from compose.const import LABEL_PROJECT
|
||||
from compose.progress_stream import stream_output
|
||||
@@ -70,10 +71,14 @@ def v2_1_only():
|
||||
return build_version_required_decorator((V1, V2_0))
|
||||
|
||||
|
||||
def v3_only():
|
||||
def v2_2_only():
|
||||
return build_version_required_decorator((V1, V2_0, V2_1))
|
||||
|
||||
|
||||
def v3_only():
|
||||
return build_version_required_decorator((V1, V2_0, V2_1, V2_2))
|
||||
|
||||
|
||||
class DockerClientTestCase(unittest.TestCase):
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
|
||||
@@ -27,9 +27,11 @@ from compose.config.types import VolumeSpec
|
||||
from compose.const import COMPOSEFILE_V1 as V1
|
||||
from compose.const import COMPOSEFILE_V2_0 as V2_0
|
||||
from compose.const import COMPOSEFILE_V2_1 as V2_1
|
||||
from compose.const import COMPOSEFILE_V2_2 as V2_2
|
||||
from compose.const import COMPOSEFILE_V3_0 as V3_0
|
||||
from compose.const import COMPOSEFILE_V3_1 as V3_1
|
||||
from compose.const import COMPOSEFILE_V3_2 as V3_2
|
||||
from compose.const import COMPOSEFILE_V3_3 as V3_3
|
||||
from compose.const import IS_WINDOWS_PLATFORM
|
||||
from compose.utils import nanoseconds_from_time_seconds
|
||||
from tests import mock
|
||||
@@ -174,6 +176,9 @@ class ConfigTest(unittest.TestCase):
|
||||
cfg = config.load(build_config_details({'version': '2.1'}))
|
||||
assert cfg.version == V2_1
|
||||
|
||||
cfg = config.load(build_config_details({'version': '2.2'}))
|
||||
assert cfg.version == V2_2
|
||||
|
||||
for version in ['3', '3.0']:
|
||||
cfg = config.load(build_config_details({'version': version}))
|
||||
assert cfg.version == V3_0
|
||||
@@ -821,6 +826,33 @@ class ConfigTest(unittest.TestCase):
|
||||
assert service['build']['args']['opt1'] == '42'
|
||||
assert service['build']['args']['opt2'] == 'foobar'
|
||||
|
||||
def test_load_with_build_labels(self):
|
||||
service = config.load(
|
||||
build_config_details(
|
||||
{
|
||||
'version': V3_3,
|
||||
'services': {
|
||||
'web': {
|
||||
'build': {
|
||||
'context': '.',
|
||||
'dockerfile': 'Dockerfile-alt',
|
||||
'labels': {
|
||||
'label1': 42,
|
||||
'label2': 'foobar'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'tests/fixtures/extends',
|
||||
'filename.yml'
|
||||
)
|
||||
).services[0]
|
||||
assert 'labels' in service['build']
|
||||
assert 'label1' in service['build']['labels']
|
||||
assert service['build']['labels']['label1'] == 42
|
||||
assert service['build']['labels']['label2'] == 'foobar'
|
||||
|
||||
def test_build_args_allow_empty_properties(self):
|
||||
service = config.load(
|
||||
build_config_details(
|
||||
@@ -2001,6 +2033,23 @@ class ConfigTest(unittest.TestCase):
|
||||
}
|
||||
}
|
||||
|
||||
def test_merge_credential_spec(self):
|
||||
base = {
|
||||
'image': 'bb',
|
||||
'credential_spec': {
|
||||
'file': '/hello-world',
|
||||
}
|
||||
}
|
||||
|
||||
override = {
|
||||
'credential_spec': {
|
||||
'registry': 'revolution.com',
|
||||
}
|
||||
}
|
||||
|
||||
actual = config.merge_service_dicts(base, override, V3_3)
|
||||
assert actual['credential_spec'] == override['credential_spec']
|
||||
|
||||
def test_external_volume_config(self):
|
||||
config_details = build_config_details({
|
||||
'version': '2',
|
||||
@@ -2815,6 +2864,74 @@ class MergeLabelsTest(unittest.TestCase):
|
||||
assert service_dict['labels'] == {'foo': '1', 'bar': ''}
|
||||
|
||||
|
||||
class MergeBuildTest(unittest.TestCase):
|
||||
def test_full(self):
|
||||
base = {
|
||||
'context': '.',
|
||||
'dockerfile': 'Dockerfile',
|
||||
'args': {
|
||||
'x': '1',
|
||||
'y': '2',
|
||||
},
|
||||
'cache_from': ['ubuntu'],
|
||||
'labels': ['com.docker.compose.test=true']
|
||||
}
|
||||
|
||||
override = {
|
||||
'context': './prod',
|
||||
'dockerfile': 'Dockerfile.prod',
|
||||
'args': ['x=12'],
|
||||
'cache_from': ['debian'],
|
||||
'labels': {
|
||||
'com.docker.compose.test': 'false',
|
||||
'com.docker.compose.prod': 'true',
|
||||
}
|
||||
}
|
||||
|
||||
result = config.merge_build(None, {'build': base}, {'build': override})
|
||||
assert result['context'] == override['context']
|
||||
assert result['dockerfile'] == override['dockerfile']
|
||||
assert result['args'] == {'x': '12', 'y': '2'}
|
||||
assert set(result['cache_from']) == set(['ubuntu', 'debian'])
|
||||
assert result['labels'] == override['labels']
|
||||
|
||||
def test_empty_override(self):
|
||||
base = {
|
||||
'context': '.',
|
||||
'dockerfile': 'Dockerfile',
|
||||
'args': {
|
||||
'x': '1',
|
||||
'y': '2',
|
||||
},
|
||||
'cache_from': ['ubuntu'],
|
||||
'labels': {
|
||||
'com.docker.compose.test': 'true'
|
||||
}
|
||||
}
|
||||
|
||||
override = {}
|
||||
|
||||
result = config.merge_build(None, {'build': base}, {'build': override})
|
||||
assert result == base
|
||||
|
||||
def test_empty_base(self):
|
||||
base = {}
|
||||
|
||||
override = {
|
||||
'context': './prod',
|
||||
'dockerfile': 'Dockerfile.prod',
|
||||
'args': {'x': '12'},
|
||||
'cache_from': ['debian'],
|
||||
'labels': {
|
||||
'com.docker.compose.test': 'false',
|
||||
'com.docker.compose.prod': 'true',
|
||||
}
|
||||
}
|
||||
|
||||
result = config.merge_build(None, {'build': base}, {'build': override})
|
||||
assert result == override
|
||||
|
||||
|
||||
class MemoryOptionsTest(unittest.TestCase):
|
||||
|
||||
def test_validation_fails_with_just_memswap_limit(self):
|
||||
|
||||
@@ -3,6 +3,7 @@ from __future__ import unicode_literals
|
||||
|
||||
import pytest
|
||||
|
||||
from .. import mock
|
||||
from .. import unittest
|
||||
from compose.network import check_remote_network_config
|
||||
from compose.network import Network
|
||||
@@ -66,7 +67,8 @@ class NetworkTest(unittest.TestCase):
|
||||
options = {'com.docker.network.driver.foo': 'bar'}
|
||||
remote_options = {
|
||||
'com.docker.network.driver.overlay.vxlanid_list': '257',
|
||||
'com.docker.network.driver.foo': 'bar'
|
||||
'com.docker.network.driver.foo': 'bar',
|
||||
'com.docker.network.windowsshim.hnsid': 'aac3fd4887daaec1e3b',
|
||||
}
|
||||
net = Network(
|
||||
None, 'compose_test', 'net1', 'overlay',
|
||||
@@ -151,7 +153,9 @@ class NetworkTest(unittest.TestCase):
|
||||
'com.project.touhou.character': 'marisa.kirisame',
|
||||
}
|
||||
}
|
||||
with pytest.raises(NetworkConfigChangedError) as e:
|
||||
with mock.patch('compose.network.log') as mock_log:
|
||||
check_remote_network_config(remote, net)
|
||||
|
||||
assert 'label "com.project.touhou.character" has changed' in str(e.value)
|
||||
mock_log.warn.assert_called_once_with(mock.ANY)
|
||||
_, args, kwargs = mock_log.warn.mock_calls[0]
|
||||
assert 'label "com.project.touhou.character" has changed' in args[0]
|
||||
|
||||
@@ -471,6 +471,7 @@ class ServiceTest(unittest.TestCase):
|
||||
nocache=False,
|
||||
rm=True,
|
||||
buildargs={},
|
||||
labels=None,
|
||||
cache_from=None,
|
||||
)
|
||||
|
||||
@@ -508,6 +509,7 @@ class ServiceTest(unittest.TestCase):
|
||||
nocache=False,
|
||||
rm=True,
|
||||
buildargs={},
|
||||
labels=None,
|
||||
cache_from=None,
|
||||
)
|
||||
|
||||
|
||||