Mirror of https://github.com/docker/compose.git (synced 2026-02-16 21:42:33 +08:00)

Compare commits: 92 commits
SHA1:
e20d808ed2, f46880fe9a, cda827cbfc, 8c0411910d, d9545a5909, cb1b88c4f8, 5985d046e3, 6817b533a8,
15718810c0, 969525c190, bdd7d47640, e7de1bc3c9, 1fb5039585, e8af19daa3, 156ce2bc1d, 709ba0975d,
429b1c8b3c, 7f0734ca3c, 80322cfa5b, c187d3c39f, f0674be578, a728ff6a59, 9cb1a07c66, c584ad67fc,
13d8cf413e, 7a19b7548f, b9cccf2efc, 70574efd5b, 025fb7f860, 706164accd, e245fb04cf, cc62764c12,
7846f6e2a0, c15c79ed2f, 263e939125, d5ebc73482, f368b4846f, 1cf1217ecb, c3bb958865, ddcd5c9fe9,
5aafa54667, 05638ab5ea, 31a4ceeab0, e6aedb1ce0, 5eb3f4b32f, bc03441550, bb44d06f07, 90c89e34f1,
948ce555da, d469113b37, c2355175ea, aecc0de28f, 9f42fac2bb, 6e09e37114, faa532c315, 3b2ce82fa1,
c1657dc46a, fa3acbeb8d, 3cf58705b7, fa6d837b49, 299ce6ad00, 4dece7fcb2, aa66338f39, 0578a58471,
7536c331e0, 62fc24eb27, eba67910f3, a752208621, 6b83a651f6, 2b5ad06e00, b06bc3cdea, 8511570764,
e7086091be, c49eca41a0, a120759c9d, e9f6abf8f4, 599456378b, 6a71040514, ae6dd8a93c, b1c831c54a,
fc923c3580, 12b68572ef, 3f85c4291b, 7078c8740a, d898b0cee4, 27447d9144, ca396bac6d, 20a9ae50b0,
6234cc8343, 8356576a9a, 2975f06ca2, 7aa51a18ff
.gitignore (vendored): 1 addition

@@ -12,3 +12,4 @@ compose/GITSHA
 *.swo
 *.swp
 .DS_Store
+.cache
CHANGELOG.md: 54 additions
@@ -1,6 +1,60 @@
Change log
==========

1.22.0 (2018-07-17)
-------------------

### Features

#### Compose format version 3.7

- Introduced version 3.7 of the `docker-compose.yml` specification.
  This version requires Docker Engine 18.06.0 or above.

- Added support for `rollback_config` in the deploy configuration

- Added support for the `init` parameter in service configurations

- Added support for extension fields in service, network, volume, secret,
  and config configurations

#### Compose format version 2.4

- Added support for extension fields in service, network,
  and volume configurations

### Bugfixes

- Fixed a bug that prevented deployment with some Compose files when
  `DOCKER_DEFAULT_PLATFORM` was set

- Compose will no longer try to create containers or volumes with
  invalid starting characters

- Fixed several bugs that prevented Compose commands from working properly
  with containers created with an older version of Compose

- Fixed an issue with the output of `docker-compose config` with the
  `--compatibility-mode` flag enabled when the source file contains
  attachable networks

- Fixed a bug that prevented the `gcloud` credential store from working
  properly when used with the Compose binary on UNIX

- Fixed a bug that caused connection errors when trying to operate
  over a non-HTTPS TCP connection on Windows

- Fixed a bug that caused builds to fail on Windows if the Dockerfile
  was located in a subdirectory of the build context

- Fixed an issue that prevented proper parsing of UTF-8 BOM encoded
  Compose files on Windows

- Fixed an issue with handling of the double-wildcard (`**`) pattern in
  `.dockerignore` files when using `docker-compose build`

- Fixed a bug that caused auth values in legacy `.dockercfg` files to be ignored

- `docker-compose build` will no longer attempt to create image names
  starting with an invalid character

1.21.2 (2018-05-03)
-------------------
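To make the 3.7 additions concrete, here is a hedged sketch that validates a minimal service definition using `init`, `rollback_config`, and an `x-` extension field against the schema file added later in this changeset. The data is illustrative and it assumes the `jsonschema` package is installed and the repository checkout is the working directory.

```python
import json
from jsonschema import Draft4Validator

# Schema file introduced by this changeset (draft-04).
with open('compose/config/config_schema_v3.7.json') as fh:
    schema = json.load(fh)

config = {
    'version': '3.7',
    'services': {
        'web': {
            'image': 'nginx:alpine',
            'init': True,  # new service-level option in 3.7
            'deploy': {'rollback_config': {'parallelism': 1, 'order': 'stop-first'}},
            'x-notes': 'extension fields are allowed on services in 3.7',
        },
    },
}

# Raises jsonschema.ValidationError if the config does not match the schema.
Draft4Validator(schema).validate(config)
```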
@@ -1,4 +1,4 @@
 from __future__ import absolute_import
 from __future__ import unicode_literals

-__version__ = '1.21.2'
+__version__ = '1.22.0'
@@ -117,6 +117,13 @@ def docker_client(environment, version=None, tls_config=None, host=None,

     kwargs['user_agent'] = generate_user_agent()

+    # Workaround for
+    # https://pyinstaller.readthedocs.io/en/v3.3.1/runtime-information.html#ld-library-path-libpath-considerations
+    if 'LD_LIBRARY_PATH_ORIG' in environment:
+        kwargs['credstore_env'] = {
+            'LD_LIBRARY_PATH': environment.get('LD_LIBRARY_PATH_ORIG'),
+        }
+
     client = APIClient(**kwargs)
     client._original_base_url = kwargs.get('base_url')
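The block above matters for the frozen (PyInstaller) binary, where `LD_LIBRARY_PATH` is rewritten and credential-store helpers such as `docker-credential-gcloud` can no longer find system libraries. A minimal sketch of the same idea, assuming docker-py >= 3.4 (which accepts a `credstore_env` keyword on `APIClient`):

```python
import os
from docker import APIClient

def client_kwargs(environment):
    kwargs = {}
    # PyInstaller's bootloader preserves the original value in LD_LIBRARY_PATH_ORIG;
    # hand it back to the credential helper so it runs against the system libraries.
    if 'LD_LIBRARY_PATH_ORIG' in environment:
        kwargs['credstore_env'] = {
            'LD_LIBRARY_PATH': environment['LD_LIBRARY_PATH_ORIG'],
        }
    return kwargs

client = APIClient(**client_kwargs(dict(os.environ)))
```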
@@ -918,12 +918,17 @@ def convert_restart_policy(name):


 def translate_deploy_keys_to_container_config(service_dict):
+    if 'credential_spec' in service_dict:
+        del service_dict['credential_spec']
+    if 'configs' in service_dict:
+        del service_dict['configs']
+
     if 'deploy' not in service_dict:
         return service_dict, []

     deploy_dict = service_dict['deploy']
     ignored_keys = [
-        k for k in ['endpoint_mode', 'labels', 'update_config', 'placement']
+        k for k in ['endpoint_mode', 'labels', 'update_config', 'rollback_config', 'placement']
         if k in deploy_dict
     ]

@@ -946,10 +951,6 @@ def translate_deploy_keys_to_container_config(service_dict):
     )

     del service_dict['deploy']
-    if 'credential_spec' in service_dict:
-        del service_dict['credential_spec']
-    if 'configs' in service_dict:
-        del service_dict['configs']

     return service_dict, ignored_keys
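As a rough illustration (a hypothetical helper, not the Compose implementation), the effect of this change is that `rollback_config` joins the set of swarm-only deploy keys that are reported as ignored when running outside swarm mode:

```python
# Deploy sub-keys that only make sense when deploying to a swarm.
SWARM_ONLY_KEYS = ['endpoint_mode', 'labels', 'update_config', 'rollback_config', 'placement']

def split_deploy(service_dict):
    deploy = service_dict.pop('deploy', {})
    ignored = [k for k in SWARM_ONLY_KEYS if k in deploy]
    # Keys such as replicas, resources and restart_policy are translated into
    # container-level options instead; that part is elided in this sketch.
    return service_dict, ignored

svc, ignored = split_deploy({'image': 'busybox',
                             'deploy': {'rollback_config': {'parallelism': 1}}})
assert ignored == ['rollback_config']
```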
@@ -1135,6 +1136,7 @@ def merge_deploy(base, override):
     md.merge_scalar('replicas')
     md.merge_mapping('labels', parse_labels)
     md.merge_mapping('update_config')
+    md.merge_mapping('rollback_config')
     md.merge_mapping('restart_policy')
     if md.needs_merge('resources'):
         resources_md = MergeDict(md.base.get('resources') or {}, md.override.get('resources') or {})
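A small, hypothetical illustration of what merging `rollback_config` as a mapping means when both a base file and an override file define it: keys from the override win, keys only present in the base are kept.

```python
def merge_mapping(base, override):
    # Shallow merge, mirroring the behaviour of merging a deploy sub-mapping.
    merged = dict(base or {})
    merged.update(override or {})
    return merged

base = {'parallelism': 2, 'order': 'stop-first'}
override = {'order': 'start-first'}
assert merge_mapping(base, override) == {'parallelism': 2, 'order': 'start-first'}
```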
@@ -1434,15 +1436,15 @@ def has_uppercase(name):
     return any(char in string.ascii_uppercase for char in name)


-def load_yaml(filename, encoding=None):
+def load_yaml(filename, encoding=None, binary=True):
     try:
-        with io.open(filename, 'r', encoding=encoding) as fh:
+        with io.open(filename, 'rb' if binary else 'r', encoding=encoding) as fh:
             return yaml.safe_load(fh)
     except (IOError, yaml.YAMLError, UnicodeDecodeError) as e:
         if encoding is None:
             # Sometimes the user's locale sets an encoding that doesn't match
             # the YAML files. In such cases, retry once with the "default"
             # UTF-8 encoding
-            return load_yaml(filename, encoding='utf-8')
+            return load_yaml(filename, encoding='utf-8-sig', binary=False)
         error_name = getattr(e, '__module__', '') + '.' + e.__class__.__name__
         raise ConfigurationError(u"{}: {}".format(error_name, e))
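A short repro of the problem the retry addresses: a Windows editor saves `docker-compose.yml` as UTF-8 with a byte-order mark, and plain `utf-8` decoding leaves `\ufeff` in front of the first key, while `utf-8-sig` strips it. This is an illustrative sketch (it writes a temporary file), not Compose's own code.

```python
import io
import yaml

with io.open('bom.yml', 'w', encoding='utf-8-sig') as fh:   # writes a BOM
    fh.write('version: "3.7"\n')

with io.open('bom.yml', 'r', encoding='utf-8') as fh:
    assert fh.read().startswith('\ufeff')                    # stray BOM character

with io.open('bom.yml', 'r', encoding='utf-8-sig') as fh:
    assert yaml.safe_load(fh) == {'version': '3.7'}          # BOM stripped, parses cleanly
```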
@@ -346,6 +346,7 @@
         "dependencies": {
             "memswap_limit": ["mem_limit"]
         },
+        "patternProperties": {"^x-": {}},
         "additionalProperties": false
     },

@@ -409,6 +410,7 @@
         "labels": {"$ref": "#/definitions/labels"},
         "name": {"type": "string"}
     },
+    "patternProperties": {"^x-": {}},
     "additionalProperties": false
 },

@@ -451,6 +453,7 @@
         "labels": {"$ref": "#/definitions/labels"},
         "name": {"type": "string"}
     },
+    "patternProperties": {"^x-": {}},
     "additionalProperties": false
 },
compose/config/config_schema_v3.7.json (new file, 602 lines)
@@ -0,0 +1,602 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"id": "config_schema_v3.7.json",
|
||||
"type": "object",
|
||||
"required": ["version"],
|
||||
|
||||
"properties": {
|
||||
"version": {
|
||||
"type": "string"
|
||||
},
|
||||
|
||||
"services": {
|
||||
"id": "#/properties/services",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"$ref": "#/definitions/service"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"networks": {
|
||||
"id": "#/properties/networks",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"$ref": "#/definitions/network"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"volumes": {
|
||||
"id": "#/properties/volumes",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"$ref": "#/definitions/volume"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"secrets": {
|
||||
"id": "#/properties/secrets",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"$ref": "#/definitions/secret"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"configs": {
|
||||
"id": "#/properties/configs",
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"$ref": "#/definitions/config"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
|
||||
"patternProperties": {"^x-": {}},
|
||||
"additionalProperties": false,
|
||||
|
||||
"definitions": {
|
||||
|
||||
"service": {
|
||||
"id": "#/definitions/service",
|
||||
"type": "object",
|
||||
|
||||
"properties": {
|
||||
"deploy": {"$ref": "#/definitions/deployment"},
|
||||
"build": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"context": {"type": "string"},
|
||||
"dockerfile": {"type": "string"},
|
||||
"args": {"$ref": "#/definitions/list_or_dict"},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"},
|
||||
"cache_from": {"$ref": "#/definitions/list_of_strings"},
|
||||
"network": {"type": "string"},
|
||||
"target": {"type": "string"},
|
||||
"shm_size": {"type": ["integer", "string"]}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
"cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
"cgroup_parent": {"type": "string"},
|
||||
"command": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"type": "array", "items": {"type": "string"}}
|
||||
]
|
||||
},
|
||||
"configs": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"source": {"type": "string"},
|
||||
"target": {"type": "string"},
|
||||
"uid": {"type": "string"},
|
||||
"gid": {"type": "string"},
|
||||
"mode": {"type": "number"}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"container_name": {"type": "string"},
|
||||
"credential_spec": {"type": "object", "properties": {
|
||||
"file": {"type": "string"},
|
||||
"registry": {"type": "string"}
|
||||
}},
|
||||
"depends_on": {"$ref": "#/definitions/list_of_strings"},
|
||||
"devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
"dns": {"$ref": "#/definitions/string_or_list"},
|
||||
"dns_search": {"$ref": "#/definitions/string_or_list"},
|
||||
"domainname": {"type": "string"},
|
||||
"entrypoint": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"type": "array", "items": {"type": "string"}}
|
||||
]
|
||||
},
|
||||
"env_file": {"$ref": "#/definitions/string_or_list"},
|
||||
"environment": {"$ref": "#/definitions/list_or_dict"},
|
||||
|
||||
"expose": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": ["string", "number"],
|
||||
"format": "expose"
|
||||
},
|
||||
"uniqueItems": true
|
||||
},
|
||||
|
||||
"external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
"extra_hosts": {"$ref": "#/definitions/list_or_dict"},
|
||||
"healthcheck": {"$ref": "#/definitions/healthcheck"},
|
||||
"hostname": {"type": "string"},
|
||||
"image": {"type": "string"},
|
||||
"init": {"type": "boolean"},
|
||||
"ipc": {"type": "string"},
|
||||
"isolation": {"type": "string"},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"},
|
||||
"links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
|
||||
"logging": {
|
||||
"type": "object",
|
||||
|
||||
"properties": {
|
||||
"driver": {"type": "string"},
|
||||
"options": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^.+$": {"type": ["string", "number", "null"]}
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"mac_address": {"type": "string"},
|
||||
"network_mode": {"type": "string"},
|
||||
|
||||
"networks": {
|
||||
"oneOf": [
|
||||
{"$ref": "#/definitions/list_of_strings"},
|
||||
{
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-zA-Z0-9._-]+$": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"aliases": {"$ref": "#/definitions/list_of_strings"},
|
||||
"ipv4_address": {"type": "string"},
|
||||
"ipv6_address": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
{"type": "null"}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"pid": {"type": ["string", "null"]},
|
||||
|
||||
"ports": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{"type": "number", "format": "ports"},
|
||||
{"type": "string", "format": "ports"},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"mode": {"type": "string"},
|
||||
"target": {"type": "integer"},
|
||||
"published": {"type": "integer"},
|
||||
"protocol": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"uniqueItems": true
|
||||
},
|
||||
|
||||
"privileged": {"type": "boolean"},
|
||||
"read_only": {"type": "boolean"},
|
||||
"restart": {"type": "string"},
|
||||
"security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
|
||||
"shm_size": {"type": ["number", "string"]},
|
||||
"secrets": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"source": {"type": "string"},
|
||||
"target": {"type": "string"},
|
||||
"uid": {"type": "string"},
|
||||
"gid": {"type": "string"},
|
||||
"mode": {"type": "number"}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"sysctls": {"$ref": "#/definitions/list_or_dict"},
|
||||
"stdin_open": {"type": "boolean"},
|
||||
"stop_grace_period": {"type": "string", "format": "duration"},
|
||||
"stop_signal": {"type": "string"},
|
||||
"tmpfs": {"$ref": "#/definitions/string_or_list"},
|
||||
"tty": {"type": "boolean"},
|
||||
"ulimits": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-z]+$": {
|
||||
"oneOf": [
|
||||
{"type": "integer"},
|
||||
{
|
||||
"type":"object",
|
||||
"properties": {
|
||||
"hard": {"type": "integer"},
|
||||
"soft": {"type": "integer"}
|
||||
},
|
||||
"required": ["soft", "hard"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"user": {"type": "string"},
|
||||
"userns_mode": {"type": "string"},
|
||||
"volumes": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"type": "object",
|
||||
"required": ["type"],
|
||||
"properties": {
|
||||
"type": {"type": "string"},
|
||||
"source": {"type": "string"},
|
||||
"target": {"type": "string"},
|
||||
"read_only": {"type": "boolean"},
|
||||
"consistency": {"type": "string"},
|
||||
"bind": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"propagation": {"type": "string"}
|
||||
}
|
||||
},
|
||||
"volume": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"nocopy": {"type": "boolean"}
|
||||
}
|
||||
},
|
||||
"tmpfs": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"size": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
],
|
||||
"uniqueItems": true
|
||||
}
|
||||
},
|
||||
"working_dir": {"type": "string"}
|
||||
},
|
||||
"patternProperties": {"^x-": {}},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"healthcheck": {
|
||||
"id": "#/definitions/healthcheck",
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"disable": {"type": "boolean"},
|
||||
"interval": {"type": "string", "format": "duration"},
|
||||
"retries": {"type": "number"},
|
||||
"test": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"type": "array", "items": {"type": "string"}}
|
||||
]
|
||||
},
|
||||
"timeout": {"type": "string", "format": "duration"},
|
||||
"start_period": {"type": "string", "format": "duration"}
|
||||
}
|
||||
},
|
||||
"deployment": {
|
||||
"id": "#/definitions/deployment",
|
||||
"type": ["object", "null"],
|
||||
"properties": {
|
||||
"mode": {"type": "string"},
|
||||
"endpoint_mode": {"type": "string"},
|
||||
"replicas": {"type": "integer"},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"},
|
||||
"rollback_config": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"parallelism": {"type": "integer"},
|
||||
"delay": {"type": "string", "format": "duration"},
|
||||
"failure_action": {"type": "string"},
|
||||
"monitor": {"type": "string", "format": "duration"},
|
||||
"max_failure_ratio": {"type": "number"},
|
||||
"order": {"type": "string", "enum": [
|
||||
"start-first", "stop-first"
|
||||
]}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"update_config": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"parallelism": {"type": "integer"},
|
||||
"delay": {"type": "string", "format": "duration"},
|
||||
"failure_action": {"type": "string"},
|
||||
"monitor": {"type": "string", "format": "duration"},
|
||||
"max_failure_ratio": {"type": "number"},
|
||||
"order": {"type": "string", "enum": [
|
||||
"start-first", "stop-first"
|
||||
]}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"resources": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"limits": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"cpus": {"type": "string"},
|
||||
"memory": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"reservations": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"cpus": {"type": "string"},
|
||||
"memory": {"type": "string"},
|
||||
"generic_resources": {"$ref": "#/definitions/generic_resources"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"restart_policy": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"condition": {"type": "string"},
|
||||
"delay": {"type": "string", "format": "duration"},
|
||||
"max_attempts": {"type": "integer"},
|
||||
"window": {"type": "string", "format": "duration"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"placement": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"constraints": {"type": "array", "items": {"type": "string"}},
|
||||
"preferences": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"spread": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"generic_resources": {
|
||||
"id": "#/definitions/generic_resources",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"discrete_resource_spec": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"kind": {"type": "string"},
|
||||
"value": {"type": "number"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
|
||||
"network": {
|
||||
"id": "#/definitions/network",
|
||||
"type": ["object", "null"],
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"driver": {"type": "string"},
|
||||
"driver_opts": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^.+$": {"type": ["string", "number"]}
|
||||
}
|
||||
},
|
||||
"ipam": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"driver": {"type": "string"},
|
||||
"config": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"subnet": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"external": {
|
||||
"type": ["boolean", "object"],
|
||||
"properties": {
|
||||
"name": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"internal": {"type": "boolean"},
|
||||
"attachable": {"type": "boolean"},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"}
|
||||
},
|
||||
"patternProperties": {"^x-": {}},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"volume": {
|
||||
"id": "#/definitions/volume",
|
||||
"type": ["object", "null"],
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"driver": {"type": "string"},
|
||||
"driver_opts": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^.+$": {"type": ["string", "number"]}
|
||||
}
|
||||
},
|
||||
"external": {
|
||||
"type": ["boolean", "object"],
|
||||
"properties": {
|
||||
"name": {"type": "string"}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"}
|
||||
},
|
||||
"patternProperties": {"^x-": {}},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"secret": {
|
||||
"id": "#/definitions/secret",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"file": {"type": "string"},
|
||||
"external": {
|
||||
"type": ["boolean", "object"],
|
||||
"properties": {
|
||||
"name": {"type": "string"}
|
||||
}
|
||||
},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"}
|
||||
},
|
||||
"patternProperties": {"^x-": {}},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"config": {
|
||||
"id": "#/definitions/config",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"file": {"type": "string"},
|
||||
"external": {
|
||||
"type": ["boolean", "object"],
|
||||
"properties": {
|
||||
"name": {"type": "string"}
|
||||
}
|
||||
},
|
||||
"labels": {"$ref": "#/definitions/list_or_dict"}
|
||||
},
|
||||
"patternProperties": {"^x-": {}},
|
||||
"additionalProperties": false
|
||||
},
|
||||
|
||||
"string_or_list": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"$ref": "#/definitions/list_of_strings"}
|
||||
]
|
||||
},
|
||||
|
||||
"list_of_strings": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"},
|
||||
"uniqueItems": true
|
||||
},
|
||||
|
||||
"list_or_dict": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
".+": {
|
||||
"type": ["string", "number", "null"]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
{"type": "array", "items": {"type": "string"}, "uniqueItems": true}
|
||||
]
|
||||
},
|
||||
|
||||
"constraints": {
|
||||
"service": {
|
||||
"id": "#/definitions/constraints/service",
|
||||
"anyOf": [
|
||||
{"required": ["build"]},
|
||||
{"required": ["image"]}
|
||||
],
|
||||
"properties": {
|
||||
"build": {
|
||||
"required": ["context"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -248,6 +248,8 @@ class ConversionMap(object):
         service_path('deploy', 'replicas'): to_int,
         service_path('deploy', 'update_config', 'parallelism'): to_int,
         service_path('deploy', 'update_config', 'max_failure_ratio'): to_float,
+        service_path('deploy', 'rollback_config', 'parallelism'): to_int,
+        service_path('deploy', 'rollback_config', 'max_failure_ratio'): to_float,
         service_path('deploy', 'restart_policy', 'max_attempts'): to_int,
         service_path('mem_swappiness'): to_int,
         service_path('labels', FULL_JOKER): to_str,
@@ -80,6 +80,10 @@ def denormalize_config(config, image_digests=None):
         elif 'external' in conf:
             conf['external'] = True

+        if 'attachable' in conf and config.version < V3_2:
+            # For compatibility mode, this option is invalid in v2
+            del conf['attachable']
+
     return result
@@ -36,6 +36,7 @@ COMPOSEFILE_V3_3 = ComposeVersion('3.3')
 COMPOSEFILE_V3_4 = ComposeVersion('3.4')
 COMPOSEFILE_V3_5 = ComposeVersion('3.5')
 COMPOSEFILE_V3_6 = ComposeVersion('3.6')
+COMPOSEFILE_V3_7 = ComposeVersion('3.7')

 API_VERSIONS = {
     COMPOSEFILE_V1: '1.21',
@@ -51,6 +52,7 @@ API_VERSIONS = {
     COMPOSEFILE_V3_4: '1.30',
     COMPOSEFILE_V3_5: '1.30',
     COMPOSEFILE_V3_6: '1.36',
+    COMPOSEFILE_V3_7: '1.38',
 }

 API_VERSION_TO_ENGINE_VERSION = {
@@ -67,4 +69,5 @@ API_VERSION_TO_ENGINE_VERSION = {
     API_VERSIONS[COMPOSEFILE_V3_4]: '17.06.0',
     API_VERSIONS[COMPOSEFILE_V3_5]: '17.06.0',
     API_VERSIONS[COMPOSEFILE_V3_6]: '18.02.0',
+    API_VERSIONS[COMPOSEFILE_V3_7]: '18.06.0',
 }
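The two tables compose into a simple lookup. The sketch below (hypothetical helper, values taken from the entries above) shows the chain from compose-file version to minimum API version and Engine version that the new 3.7 rows establish:

```python
API_VERSIONS = {'3.6': '1.36', '3.7': '1.38'}
API_VERSION_TO_ENGINE_VERSION = {'1.36': '18.02.0', '1.38': '18.06.0'}

def minimum_engine_for(compose_version):
    # Compose-file version -> required API version -> minimum Engine release.
    api = API_VERSIONS[compose_version]
    return api, API_VERSION_TO_ENGINE_VERSION[api]

assert minimum_engine_for('3.7') == ('1.38', '18.06.0')
```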
@@ -9,6 +9,8 @@ from docker.errors import ImageNotFound
 from .const import LABEL_CONTAINER_NUMBER
 from .const import LABEL_PROJECT
 from .const import LABEL_SERVICE
+from .const import LABEL_VERSION
+from .version import ComposeVersion


 class Container(object):
@@ -283,6 +285,12 @@ class Container(object):
     def attach(self, *args, **kwargs):
         return self.client.attach(self.id, *args, **kwargs)

+    def has_legacy_proj_name(self, project_name):
+        return (
+            ComposeVersion(self.labels.get(LABEL_VERSION)) < ComposeVersion('1.21.0') and
+            self.project != project_name
+        )
+
     def __repr__(self):
         return '<Container: %s (%s)>' % (self.name, self.id[:6])
@@ -4,6 +4,7 @@ from __future__ import unicode_literals
 import datetime
 import logging
 import operator
+import re
 from functools import reduce

 import enum
@@ -70,8 +71,11 @@ class Project(object):
         self.networks = networks or ProjectNetworks({}, False)
         self.config_version = config_version

-    def labels(self, one_off=OneOffFilter.exclude):
-        labels = ['{0}={1}'.format(LABEL_PROJECT, self.name)]
+    def labels(self, one_off=OneOffFilter.exclude, legacy=False):
+        name = self.name
+        if legacy:
+            name = re.sub(r'[_-]', '', name)
+        labels = ['{0}={1}'.format(LABEL_PROJECT, name)]

         OneOffFilter.update_labels(one_off, labels)
         return labels
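A sketch of the fallback this enables: containers created by Compose releases before 1.21 carry project labels with `_` and `-` stripped from the project name, so a second query built with that legacy form can still find them. The label key is shown for illustration.

```python
import re

def project_label(name, legacy=False):
    # Older Compose releases normalised project names by dropping '_' and '-'.
    if legacy:
        name = re.sub(r'[_-]', '', name)
    return 'com.docker.compose.project={}'.format(name)

assert project_label('my_app') == 'com.docker.compose.project=my_app'
assert project_label('my_app', legacy=True) == 'com.docker.compose.project=myapp'
```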
@@ -128,7 +132,8 @@ class Project(object):
                 volumes_from=volumes_from,
                 secrets=secrets,
                 pid_mode=pid_mode,
-                platform=service_dict.pop('platform', default_platform),
+                platform=service_dict.pop('platform', None),
+                default_platform=default_platform,
                 **service_dict)
             )
@@ -570,12 +575,21 @@ class Project(object):
             service.push(ignore_push_failures)

     def _labeled_containers(self, stopped=False, one_off=OneOffFilter.exclude):
-        return list(filter(None, [
+        ctnrs = list(filter(None, [
             Container.from_ps(self.client, container)
             for container in self.client.containers(
                 all=stopped,
                 filters={'label': self.labels(one_off=one_off)})])
         )
+        if ctnrs:
+            return ctnrs
+
+        return list(filter(lambda c: c.has_legacy_proj_name(self.name), filter(None, [
+            Container.from_ps(self.client, container)
+            for container in self.client.containers(
+                all=stopped,
+                filters={'label': self.labels(one_off=one_off, legacy=True)})])
+        ))

     def containers(self, service_names=None, stopped=False, one_off=OneOffFilter.exclude):
         if service_names:
@@ -1,6 +1,7 @@
 from __future__ import absolute_import
 from __future__ import unicode_literals

+import itertools
 import logging
 import os
 import re
@@ -51,7 +52,6 @@ from .progress_stream import StreamOutputError
 from .utils import json_hash
 from .utils import parse_bytes
 from .utils import parse_seconds_float
-from .version import ComposeVersion


 log = logging.getLogger(__name__)
@@ -172,6 +172,7 @@ class Service(object):
         secrets=None,
         scale=None,
         pid_mode=None,
+        default_platform=None,
         **options
     ):
         self.name = name
@@ -185,13 +186,14 @@ class Service(object):
         self.networks = networks or {}
         self.secrets = secrets or []
         self.scale_num = scale or 1
+        self.default_platform = default_platform
         self.options = options

     def __repr__(self):
         return '<Service: {}>'.format(self.name)

-    def containers(self, stopped=False, one_off=False, filters={}):
-        filters.update({'label': self.labels(one_off=one_off)})
+    def containers(self, stopped=False, one_off=False, filters={}, labels=None):
+        filters.update({'label': self.labels(one_off=one_off) + (labels or [])})

         result = list(filter(None, [
             Container.from_ps(self.client, container)
@@ -202,10 +204,10 @@ class Service(object):
         if result:
             return result

-        filters.update({'label': self.labels(one_off=one_off, legacy=True)})
+        filters.update({'label': self.labels(one_off=one_off, legacy=True) + (labels or [])})
         return list(
             filter(
-                self.has_legacy_proj_name, filter(None, [
+                lambda c: c.has_legacy_proj_name(self.project), filter(None, [
                     Container.from_ps(self.client, container)
                     for container in self.client.containers(
                         all=stopped,
@@ -217,9 +219,9 @@ class Service(object):
         """Return a :class:`compose.container.Container` for this service. The
         container must be active, and match `number`.
         """
-        labels = self.labels() + ['{0}={1}'.format(LABEL_CONTAINER_NUMBER, number)]
-        for container in self.client.containers(filters={'label': labels}):
-            return Container.from_ps(self.client, container)
+        for container in self.containers(labels=['{0}={1}'.format(LABEL_CONTAINER_NUMBER, number)]):
+            return container

         raise ValueError("No container found for %s_%s" % (self.name, number))
@@ -256,6 +258,11 @@ class Service(object):

         running_containers = self.containers(stopped=False)
         num_running = len(running_containers)
+        for c in running_containers:
+            if not c.has_legacy_proj_name(self.project):
+                continue
+            log.info('Recreating container with legacy name %s' % c.name)
+            self.recreate_container(c, timeout, start_new_container=False)

         if desired_num == num_running:
             # do nothing as we already have the desired number
@@ -356,7 +363,16 @@ class Service(object):

     @property
     def image_name(self):
-        return self.options.get('image', '{s.project}_{s.name}'.format(s=self))
+        return self.options.get('image', '{project}_{s.name}'.format(
+            s=self, project=self.project.lstrip('_-')
+        ))
+
+    @property
+    def platform(self):
+        platform = self.options.get('platform')
+        if not platform and version_gte(self.client.api_version, '1.35'):
+            platform = self.default_platform
+        return platform

     def convergence_plan(self, strategy=ConvergenceStrategy.changed):
         containers = self.containers(stopped=True)
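A hedged sketch of the precedence the new `platform` property encodes: an explicit service-level platform always wins, otherwise the project-wide default (derived from `DOCKER_DEFAULT_PLATFORM`) is used, and only when the Engine API is new enough to accept it (1.35 or later). The helper and version comparison below are illustrative, not Compose's own utilities.

```python
def effective_platform(service_platform, default_platform, api_version):
    def gte(a, b):
        return tuple(int(x) for x in a.split('.')) >= tuple(int(x) for x in b.split('.'))
    if service_platform:
        return service_platform          # explicit `platform:` in the service
    if default_platform and gte(api_version, '1.35'):
        return default_platform          # DOCKER_DEFAULT_PLATFORM fallback
    return None                          # too old an API: ignore the default

assert effective_platform('linux/arm', 'windows', '1.35') == 'linux/arm'
assert effective_platform(None, 'windows', '1.35') == 'windows'
assert effective_platform(None, 'windows', '1.32') is None
```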
@@ -395,7 +411,7 @@ class Service(object):
         has_diverged = False

         for c in containers:
-            if self.has_legacy_proj_name(c):
+            if c.has_legacy_proj_name(self.project):
                 log.debug('%s has diverged: Legacy project name' % c.name)
                 has_diverged = True
                 continue
@@ -704,9 +720,14 @@ class Service(object):
     # TODO: this would benefit from github.com/docker/docker/pull/14699
     # to remove the need to inspect every container
     def _next_container_number(self, one_off=False):
-        containers = self._fetch_containers(
-            all=True,
-            filters={'label': self.labels(one_off=one_off)}
+        containers = itertools.chain(
+            self._fetch_containers(
+                all=True,
+                filters={'label': self.labels(one_off=one_off)}
+            ), self._fetch_containers(
+                all=True,
+                filters={'label': self.labels(one_off=one_off, legacy=True)}
+            )
         )
         numbers = [c.number for c in containers]
         return 1 if not numbers else max(numbers) + 1
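A simplified illustration (hypothetical data) of why both the current and the legacy label query feed the numbering: the next container number must not collide with containers created by an older Compose release.

```python
import itertools

current = [1, 2]   # numbers of containers labelled with the current project name
legacy = [3]       # numbers of containers created by an older Compose release

numbers = list(itertools.chain(current, legacy))
next_number = 1 if not numbers else max(numbers) + 1
assert next_number == 4
```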
@@ -1018,8 +1039,7 @@ class Service(object):
         if not six.PY3 and not IS_WINDOWS_PLATFORM:
             path = path.encode('utf8')

-        platform = self.options.get('platform')
-        if platform and version_lt(self.client.api_version, '1.35'):
+        if self.platform and version_lt(self.client.api_version, '1.35'):
             raise OperationFailedError(
                 'Impossible to perform platform-targeted builds for API version < 1.35'
             )
@@ -1044,7 +1064,7 @@ class Service(object):
             },
             gzip=gzip,
             isolation=build_opts.get('isolation', self.options.get('isolation', None)),
-            platform=platform,
+            platform=self.platform,
         )

         try:
@@ -1150,14 +1170,14 @@ class Service(object):
         kwargs = {
             'tag': tag or 'latest',
             'stream': True,
-            'platform': self.options.get('platform'),
+            'platform': self.platform,
         }
         if not silent:
             log.info('Pulling %s (%s%s%s)...' % (self.name, repo, separator, tag))

         if kwargs['platform'] and version_lt(self.client.api_version, '1.35'):
             raise OperationFailedError(
-                'Impossible to perform platform-targeted builds for API version < 1.35'
+                'Impossible to perform platform-targeted pulls for API version < 1.35'
             )
         try:
             output = self.client.pull(repo, **kwargs)
@@ -1235,12 +1255,6 @@ class Service(object):

         return result

-    def has_legacy_proj_name(self, ctnr):
-        return (
-            ComposeVersion(ctnr.labels.get(LABEL_VERSION)) < ComposeVersion('1.21.0') and
-            ctnr.project != self.project
-        )
-

 def short_id_alias_exists(container, network):
     aliases = container.get(
@@ -1347,7 +1361,7 @@ class ServiceNetworkMode(object):


 def build_container_name(project, service, number, one_off=False):
-    bits = [project, service]
+    bits = [project.lstrip('-_'), service]
     if one_off:
         bits.append('run')
     return '_'.join(bits + [str(number)])
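A sketch of the naming rule the changelog describes: leading `-` and `_` are stripped from the project name before building container (and, below, volume) names, because Docker rejects object names that start with those characters. The function mirrors the one in the hunk above; the expected names match the integration tests later in this diff.

```python
def build_container_name(project, service, number, one_off=False):
    # Drop leading '-' / '_' so the generated name starts with an alphanumeric character.
    bits = [project.lstrip('-_'), service]
    if one_off:
        bits.append('run')
    return '_'.join(bits + [str(number)])

assert build_container_name('_underscoretest', 'svc1', 1) == 'underscoretest_svc1_1'
assert build_container_name('-dashtest', 'svc1', 1, one_off=True) == 'dashtest_svc1_run_1'
```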
@@ -60,7 +60,7 @@ class Volume(object):
     def full_name(self):
         if self.custom_name:
             return self.name
-        return '{0}_{1}'.format(self.project, self.name)
+        return '{0}_{1}'.format(self.project.lstrip('-_'), self.name)

     @property
     def legacy_full_name(self):
@@ -98,7 +98,7 @@ __docker_compose_complete_services() {

 # The services for which at least one running container exists
 __docker_compose_complete_running_services() {
-    local names=$(__docker_compose_complete_services --filter status=running)
+    local names=$(__docker_compose_services --filter status=running)
     COMPREPLY=( $(compgen -W "$names" -- "$cur") )
 }
@@ -82,6 +82,11 @@ exe = EXE(pyz,
                 'compose/config/config_schema_v3.6.json',
                 'DATA'
             ),
+            (
+                'compose/config/config_schema_v3.7.json',
+                'compose/config/config_schema_v3.7.json',
+                'DATA'
+            ),
             (
                 'compose/GITSHA',
                 'compose/GITSHA',
@@ -2,8 +2,8 @@ backports.ssl-match-hostname==3.5.0.1; python_version < '3'
 cached-property==1.3.0
 certifi==2017.4.17
 chardet==3.0.4
-docker==3.3.0
-docker-pycreds==0.2.3
+docker==3.4.1
+docker-pycreds==0.3.0
 dockerpty==0.4.1
 docopt==0.6.2
 enum34==1.1.6; python_version < '3.4'
@@ -4,6 +4,7 @@ from __future__ import unicode_literals

 import argparse
 import os
+import shutil
 import sys
 import time
 from distutils.core import run_setup
@@ -27,6 +28,7 @@ from release.utils import ScriptError
 from release.utils import update_init_py_version
 from release.utils import update_run_sh_version
 from release.utils import yesno
+from requests.exceptions import HTTPError
 from twine.commands.upload import main as twine_upload
@@ -125,13 +127,60 @@ def print_final_instructions(args):
|
||||
"You're almost done! Please verify that everything is in order and "
|
||||
"you are ready to make the release public, then run the following "
|
||||
"command:\n{exe} -b {user} finalize {version}".format(
|
||||
exe=sys.argv[0], user=args.bintray_user, version=args.release
|
||||
exe='./script/release/release.sh', user=args.bintray_user, version=args.release
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def distclean():
|
||||
print('Running distclean...')
|
||||
dirs = [
|
||||
os.path.join(REPO_ROOT, 'build'), os.path.join(REPO_ROOT, 'dist'),
|
||||
os.path.join(REPO_ROOT, 'docker-compose.egg-info')
|
||||
]
|
||||
files = []
|
||||
for base, dirnames, fnames in os.walk(REPO_ROOT):
|
||||
for fname in fnames:
|
||||
path = os.path.normpath(os.path.join(base, fname))
|
||||
if fname.endswith('.pyc'):
|
||||
files.append(path)
|
||||
elif fname.startswith('.coverage.'):
|
||||
files.append(path)
|
||||
for dirname in dirnames:
|
||||
path = os.path.normpath(os.path.join(base, dirname))
|
||||
if dirname == '__pycache__':
|
||||
dirs.append(path)
|
||||
elif dirname == '.coverage-binfiles':
|
||||
dirs.append(path)
|
||||
|
||||
for file in files:
|
||||
os.unlink(file)
|
||||
|
||||
for folder in dirs:
|
||||
shutil.rmtree(folder, ignore_errors=True)
|
||||
|
||||
|
||||
def pypi_upload(args):
|
||||
print('Uploading to PyPi')
|
||||
try:
|
||||
twine_upload([
|
||||
'dist/docker_compose-{}*.whl'.format(args.release),
|
||||
'dist/docker-compose-{}*.tar.gz'.format(args.release)
|
||||
])
|
||||
except HTTPError as e:
|
||||
if e.response.status_code == 400 and 'File already exists' in e.message:
|
||||
if not args.finalize_resume:
|
||||
raise ScriptError(
|
||||
'Package already uploaded on PyPi.'
|
||||
)
|
||||
print('Skipping PyPi upload - package already uploaded')
|
||||
else:
|
||||
raise ScriptError('Unexpected HTTP error uploading package to PyPi: {}'.format(e))
|
||||
|
||||
|
||||
def resume(args):
|
||||
try:
|
||||
distclean()
|
||||
repository = Repository(REPO_ROOT, args.repo)
|
||||
br_name = branch_name(args.release)
|
||||
if not repository.branch_exists(br_name):
|
||||
@@ -183,6 +232,7 @@ def cancel(args):
|
||||
bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], args.bintray_user)
|
||||
print('Removing Bintray data repository for {}'.format(args.release))
|
||||
bintray_api.delete_repository(args.bintray_org, branch_name(args.release))
|
||||
distclean()
|
||||
except ScriptError as e:
|
||||
print(e)
|
||||
return 1
|
||||
@@ -191,6 +241,7 @@ def cancel(args):
|
||||
|
||||
|
||||
def start(args):
|
||||
distclean()
|
||||
try:
|
||||
repository = Repository(REPO_ROOT, args.repo)
|
||||
create_initial_branch(repository, args)
|
||||
@@ -213,6 +264,7 @@ def start(args):
|
||||
|
||||
|
||||
def finalize(args):
|
||||
distclean()
|
||||
try:
|
||||
repository = Repository(REPO_ROOT, args.repo)
|
||||
img_manager = ImageManager(args.release)
|
||||
@@ -238,10 +290,13 @@ def finalize(args):
|
||||
run_setup(os.path.join(REPO_ROOT, 'setup.py'), script_args=['sdist', 'bdist_wheel'])
|
||||
|
||||
merge_status = pr_data.merge()
|
||||
if not merge_status.merged:
|
||||
raise ScriptError('Unable to merge PR #{}: {}'.format(pr_data.number, merge_status.message))
|
||||
print('Uploading to PyPi')
|
||||
twine_upload(['dist/*'])
|
||||
if not merge_status.merged and not args.finalize_resume:
|
||||
raise ScriptError(
|
||||
'Unable to merge PR #{}: {}'.format(pr_data.number, merge_status.message)
|
||||
)
|
||||
|
||||
pypi_upload(args)
|
||||
|
||||
img_manager.push_images()
|
||||
repository.publish_release(gh_release)
|
||||
except ScriptError as e:
|
||||
@@ -260,13 +315,13 @@ ACTIONS = [

 EPILOG = '''Example uses:
 * Start a new feature release (includes all changes currently in master)
-        release.py -b user start 1.23.0
+        release.sh -b user start 1.23.0
 * Start a new patch release
-        release.py -b user --patch 1.21.0 start 1.21.1
+        release.sh -b user --patch 1.21.0 start 1.21.1
 * Cancel / rollback an existing release draft
-        release.py -b user cancel 1.23.0
+        release.sh -b user cancel 1.23.0
 * Restart a previously aborted patch release
-        release.py -b user -p 1.21.0 resume 1.21.1
+        release.sh -b user -p 1.21.0 resume 1.21.1
 '''
@@ -316,6 +371,10 @@ def main():
         '--skip-ci-checks', dest='skip_ci', action='store_true',
         help='If set, the program will not wait for CI jobs to complete'
     )
+    parser.add_argument(
+        '--finalize-resume', dest='finalize_resume', action='store_true',
+        help='If set, finalize will continue through steps that have already been completed.'
+    )
     args = parser.parse_args()

     if args.action == 'start':
|
||||
exit 1
|
||||
fi
|
||||
|
||||
docker run -e GITHUB_TOKEN=$GITHUB_TOKEN -e BINTRAY_TOKEN=$BINTRAY_TOKEN -it \
|
||||
docker run -e GITHUB_TOKEN=$GITHUB_TOKEN -e BINTRAY_TOKEN=$BINTRAY_TOKEN -e SSH_AUTH_SOCK=$SSH_AUTH_SOCK -it \
|
||||
--mount type=bind,source=$(pwd),target=/src \
|
||||
--mount type=bind,source=$(pwd)/.git,target=/src/.git \
|
||||
--mount type=bind,source=$HOME/.docker,target=/root/.docker \
|
||||
--mount type=bind,source=$HOME/.gitconfig,target=/root/.gitconfig \
|
||||
--mount type=bind,source=/var/run/docker.sock,target=/var/run/docker.sock \
|
||||
--mount type=bind,source=$HOME/.ssh,target=/root/.ssh \
|
||||
--mount type=bind,source=/tmp,target=/tmp \
|
||||
-v $HOME/.pypirc:/root/.pypirc \
|
||||
compose/release-tool $*
|
||||
|
||||
@@ -48,7 +48,7 @@ class ImageManager(object):
|
||||
container = docker_client.create_container(
|
||||
'docker-compose-tests:tmp', entrypoint='tox'
|
||||
)
|
||||
docker_client.commit(container, 'docker/compose-tests:latest')
|
||||
docker_client.commit(container, 'docker/compose-tests', 'latest')
|
||||
docker_client.tag('docker/compose-tests:latest', 'docker/compose-tests:{}'.format(self.version))
|
||||
docker_client.remove_container(container, force=True)
|
||||
docker_client.remove_image('docker-compose-tests:tmp', force=True)
|
||||
@@ -81,3 +81,7 @@ class ImageManager(object):
|
||||
for chunk in logstream:
|
||||
if 'status' in chunk:
|
||||
print(chunk['status'])
|
||||
if 'error' in chunk:
|
||||
raise ScriptError(
|
||||
'Error pushing {name}: {err}'.format(name=name, err=chunk['error'])
|
||||
)
|
||||
|
||||
@@ -15,7 +15,7 @@

 set -e

-VERSION="1.21.2"
+VERSION="1.22.0"
 IMAGE="docker/compose:$VERSION"
setup.py: 2 lines changed
@@ -36,7 +36,7 @@ install_requires = [
     'requests >= 2.6.1, != 2.11.0, != 2.12.2, != 2.18.0, < 2.19',
     'texttable >= 0.9.0, < 0.10',
     'websocket-client >= 0.32.0, < 1.0',
-    'docker >= 3.3.0, < 4.0',
+    'docker >= 3.4.1, < 4.0',
     'dockerpty >= 0.4.1, < 0.5',
     'six >= 1.3.0, < 2',
     'jsonschema >= 2.5.1, < 3',
@@ -481,6 +481,7 @@ class CLITestCase(DockerClientTestCase):
|
||||
assert yaml.load(result.stdout) == {
|
||||
'version': '2.3',
|
||||
'volumes': {'foo': {'driver': 'default'}},
|
||||
'networks': {'bar': {}},
|
||||
'services': {
|
||||
'foo': {
|
||||
'command': '/bin/true',
|
||||
@@ -490,9 +491,10 @@ class CLITestCase(DockerClientTestCase):
|
||||
'mem_limit': '300M',
|
||||
'mem_reservation': '100M',
|
||||
'cpus': 0.7,
|
||||
'volumes': ['foo:/bar:rw']
|
||||
'volumes': ['foo:/bar:rw'],
|
||||
'networks': {'bar': None},
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
def test_ps(self):
|
||||
|
||||
@@ -16,7 +16,13 @@ services:
|
||||
memory: 100M
|
||||
volumes:
|
||||
- foo:/bar
|
||||
networks:
|
||||
- bar
|
||||
|
||||
volumes:
|
||||
foo:
|
||||
driver: default
|
||||
|
||||
networks:
|
||||
bar:
|
||||
attachable: true
|
||||
|
||||
@@ -1915,3 +1915,65 @@ class ProjectTest(DockerClientTestCase):
|
||||
assert len(remote_secopts) == 1
|
||||
assert remote_secopts[0].startswith('seccomp=')
|
||||
assert json.loads(remote_secopts[0].lstrip('seccomp=')) == seccomp_data
|
||||
|
||||
@no_cluster('inspect volume by name defect on Swarm Classic')
|
||||
def test_project_up_name_starts_with_illegal_char(self):
|
||||
config_dict = {
|
||||
'version': '2.3',
|
||||
'services': {
|
||||
'svc1': {
|
||||
'image': 'busybox:latest',
|
||||
'command': 'ls',
|
||||
'volumes': ['foo:/foo:rw'],
|
||||
'networks': ['bar'],
|
||||
},
|
||||
},
|
||||
'volumes': {
|
||||
'foo': {},
|
||||
},
|
||||
'networks': {
|
||||
'bar': {},
|
||||
}
|
||||
}
|
||||
config_data = load_config(config_dict)
|
||||
project = Project.from_config(
|
||||
name='_underscoretest', config_data=config_data, client=self.client
|
||||
)
|
||||
project.up()
|
||||
self.addCleanup(project.down, None, True)
|
||||
|
||||
containers = project.containers(stopped=True)
|
||||
assert len(containers) == 1
|
||||
assert containers[0].name == 'underscoretest_svc1_1'
|
||||
assert containers[0].project == '_underscoretest'
|
||||
|
||||
full_vol_name = 'underscoretest_foo'
|
||||
vol_data = self.get_volume_data(full_vol_name)
|
||||
assert vol_data
|
||||
assert vol_data['Labels'][LABEL_PROJECT] == '_underscoretest'
|
||||
|
||||
full_net_name = '_underscoretest_bar'
|
||||
net_data = self.client.inspect_network(full_net_name)
|
||||
assert net_data
|
||||
assert net_data['Labels'][LABEL_PROJECT] == '_underscoretest'
|
||||
|
||||
project2 = Project.from_config(
|
||||
name='-dashtest', config_data=config_data, client=self.client
|
||||
)
|
||||
project2.up()
|
||||
self.addCleanup(project2.down, None, True)
|
||||
|
||||
containers = project2.containers(stopped=True)
|
||||
assert len(containers) == 1
|
||||
assert containers[0].name == 'dashtest_svc1_1'
|
||||
assert containers[0].project == '-dashtest'
|
||||
|
||||
full_vol_name = 'dashtest_foo'
|
||||
vol_data = self.get_volume_data(full_vol_name)
|
||||
assert vol_data
|
||||
assert vol_data['Labels'][LABEL_PROJECT] == '-dashtest'
|
||||
|
||||
full_net_name = '-dashtest_bar'
|
||||
net_data = self.client.inspect_network(full_net_name)
|
||||
assert net_data
|
||||
assert net_data['Labels'][LABEL_PROJECT] == '-dashtest'
|
||||
|
||||
@@ -1137,6 +1137,21 @@ class ServiceTest(DockerClientTestCase):
|
||||
service.build()
|
||||
assert service.image()
|
||||
|
||||
def test_build_with_illegal_leading_chars(self):
|
||||
base_dir = tempfile.mkdtemp()
|
||||
self.addCleanup(shutil.rmtree, base_dir)
|
||||
with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
|
||||
f.write('FROM busybox\nRUN echo "Embodiment of Scarlet Devil"\n')
|
||||
service = Service(
|
||||
'build_leading_slug', client=self.client,
|
||||
project='___-composetest', build={
|
||||
'context': text_type(base_dir)
|
||||
}
|
||||
)
|
||||
assert service.image_name == 'composetest_build_leading_slug'
|
||||
service.build()
|
||||
assert service.image()
|
||||
|
||||
def test_start_container_stays_unprivileged(self):
|
||||
service = self.create_service('web')
|
||||
container = create_and_start_container(service).inspect()
|
||||
|
||||
@@ -29,6 +29,7 @@ class ProjectTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.mock_client = mock.create_autospec(docker.APIClient)
|
||||
self.mock_client._general_configs = {}
|
||||
self.mock_client.api_version = docker.constants.DEFAULT_DOCKER_API_VERSION
|
||||
|
||||
def test_from_config_v1(self):
|
||||
config = Config(
|
||||
@@ -578,21 +579,21 @@ class ProjectTest(unittest.TestCase):
|
||||
)
|
||||
|
||||
project = Project.from_config(name='test', client=self.mock_client, config_data=config_data)
|
||||
assert project.get_service('web').options.get('platform') is None
|
||||
assert project.get_service('web').platform is None
|
||||
|
||||
project = Project.from_config(
|
||||
name='test', client=self.mock_client, config_data=config_data, default_platform='windows'
|
||||
)
|
||||
assert project.get_service('web').options.get('platform') == 'windows'
|
||||
assert project.get_service('web').platform == 'windows'
|
||||
|
||||
service_config['platform'] = 'linux/s390x'
|
||||
project = Project.from_config(name='test', client=self.mock_client, config_data=config_data)
|
||||
assert project.get_service('web').options.get('platform') == 'linux/s390x'
|
||||
assert project.get_service('web').platform == 'linux/s390x'
|
||||
|
||||
project = Project.from_config(
|
||||
name='test', client=self.mock_client, config_data=config_data, default_platform='windows'
|
||||
)
|
||||
assert project.get_service('web').options.get('platform') == 'linux/s390x'
|
||||
assert project.get_service('web').platform == 'linux/s390x'
|
||||
|
||||
@mock.patch('compose.parallel.ParallelStreamWriter._write_noansi')
|
||||
def test_error_parallel_pull(self, mock_write):
|
||||
|
||||
@@ -446,6 +446,20 @@ class ServiceTest(unittest.TestCase):
|
||||
with pytest.raises(OperationFailedError):
|
||||
service.pull()
|
||||
|
||||
def test_pull_image_with_default_platform(self):
|
||||
self.mock_client.api_version = '1.35'
|
||||
|
||||
service = Service(
|
||||
'foo', client=self.mock_client, image='someimage:sometag',
|
||||
default_platform='linux'
|
||||
)
|
||||
assert service.platform == 'linux'
|
||||
service.pull()
|
||||
|
||||
assert self.mock_client.pull.call_count == 1
|
||||
call_args = self.mock_client.pull.call_args
|
||||
assert call_args[1]['platform'] == 'linux'
|
||||
|
||||
@mock.patch('compose.service.Container', autospec=True)
|
||||
def test_recreate_container(self, _):
|
||||
mock_container = mock.create_autospec(Container)
|
||||
@@ -538,7 +552,7 @@ class ServiceTest(unittest.TestCase):
|
||||
assert self.mock_client.build.call_count == 1
|
||||
assert not self.mock_client.build.call_args[1]['pull']
|
||||
|
||||
def test_build_does_with_platform(self):
|
||||
def test_build_with_platform(self):
|
||||
self.mock_client.api_version = '1.35'
|
||||
self.mock_client.build.return_value = [
|
||||
b'{"stream": "Successfully built 12345"}',
|
||||
@@ -551,6 +565,47 @@ class ServiceTest(unittest.TestCase):
|
||||
call_args = self.mock_client.build.call_args
|
||||
assert call_args[1]['platform'] == 'linux'
|
||||
|
||||
def test_build_with_default_platform(self):
|
||||
self.mock_client.api_version = '1.35'
|
||||
self.mock_client.build.return_value = [
|
||||
b'{"stream": "Successfully built 12345"}',
|
||||
]
|
||||
|
||||
service = Service(
|
||||
'foo', client=self.mock_client, build={'context': '.'},
|
||||
default_platform='linux'
|
||||
)
|
||||
assert service.platform == 'linux'
|
||||
service.build()
|
||||
|
||||
assert self.mock_client.build.call_count == 1
|
||||
call_args = self.mock_client.build.call_args
|
||||
assert call_args[1]['platform'] == 'linux'
|
||||
|
||||
def test_service_platform_precedence(self):
|
||||
self.mock_client.api_version = '1.35'
|
||||
|
||||
service = Service(
|
||||
'foo', client=self.mock_client, platform='linux/arm',
|
||||
default_platform='osx'
|
||||
)
|
||||
assert service.platform == 'linux/arm'
|
||||
|
||||
def test_service_ignore_default_platform_with_unsupported_api(self):
|
||||
self.mock_client.api_version = '1.32'
|
||||
self.mock_client.build.return_value = [
|
||||
b'{"stream": "Successfully built 12345"}',
|
||||
]
|
||||
|
||||
service = Service(
|
||||
'foo', client=self.mock_client, default_platform='windows', build={'context': '.'}
|
||||
)
|
||||
assert service.platform is None
|
||||
service.build()
|
||||
assert self.mock_client.build.call_count == 1
|
||||
call_args = self.mock_client.build.call_args
|
||||
assert call_args[1]['platform'] is None
|
||||
|
||||
def test_build_with_override_build_args(self):
|
||||
self.mock_client.build.return_value = [
|
||||
b'{"stream": "Successfully built 12345"}',
|
||||