Mirror of https://github.com/docker/compose.git
Compare commits
29 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 5becea4ca9 | |
| | 3ecf001f96 | |
| | 96a8e3b3c5 | |
| | aca6edd631 | |
| | 39ab3aee50 | |
| | 39c2d42a0e | |
| | aa45dedb3d | |
| | afb36f236b | |
| | f01e535cf4 | |
| | 853efbfb54 | |
| | 6ea24001fa | |
| | c3d8e558a2 | |
| | 8f9f1111f3 | |
| | 1b42ecba14 | |
| | c34c88b217 | |
| | 575d67618e | |
| | adae403b27 | |
| | 0bea52b18d | |
| | 2d0c9366ff | |
| | 8a6dc88f9e | |
| | d79745a2cd | |
| | 1250bb7481 | |
| | 3ba41b98d3 | |
| | 7f7370b811 | |
| | a9e8ae190f | |
| | 3eee3e093a | |
| | ad770b272c | |
| | e5cab3ced5 | |
| | 7984767db2 | |
CHANGELOG.md (26 changed lines)
@@ -1,6 +1,32 @@
Change log
==========

1.29.2 (2021-05-10)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/59?closed=1)

### Miscellaneous

- Remove advertisement for `docker compose` in the `up` command to avoid annoyance
- Bump `py` to `1.10.0` in `requirements-indirect.txt`

1.29.1 (2021-04-13)
-------------------

[List of PRs / issues for this release](https://github.com/docker/compose/milestone/58?closed=1)

### Bugs

- Fix for invalid handler warning on Windows builds
- Fix config hash to trigger container recreation on IPC mode updates
- Fix conversion map for `placement.max_replicas_per_node`
- Remove extra scan suggestion on build

1.29.0 (2021-04-06)
-------------------
Jenkinsfile (vendored, 2 changed lines)
@@ -87,7 +87,7 @@ def buildImage(baseImage) {
def runTests(dockerVersion, pythonVersion, baseImage) {
    return {
        stage("python=${pythonVersion} docker=${dockerVersion} ${baseImage}") {
            node("ubuntu && amd64 && !zfs") {
            node("ubuntu-2004 && amd64 && !zfs && cgroup1") {
                def scmvar = checkout(scm)
                def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
                def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim()
@@ -1 +1 @@
__version__ = '1.29.0'
__version__ = '1.29.2'
@@ -82,7 +82,7 @@ def main(): # noqa: C901
        if not IS_LINUX_PLATFORM and command == 'help':
            print("\nDocker Compose is now in the Docker CLI, try `docker compose` help")
    except (KeyboardInterrupt, signals.ShutdownException):
        exit_with_metrics(command, "Aborting.", status=Status.FAILURE)
        exit_with_metrics(command, "Aborting.", status=Status.CANCELED)
    except (UserError, NoSuchService, ConfigurationError,
            ProjectError, OperationFailedError) as e:
        exit_with_metrics(command, e.msg, status=Status.FAILURE)

@@ -103,7 +103,8 @@ def main(): # noqa: C901
        commands = "\n".join(parse_doc_section("commands:", getdoc(e.supercommand)))
        if not IS_LINUX_PLATFORM:
            commands += "\n\nDocker Compose is now in the Docker CLI, try `docker compose`"
        exit_with_metrics(e.command, "No such command: {}\n\n{}".format(e.command, commands))
        exit_with_metrics("", log_msg="No such command: {}\n\n{}".format(
            e.command, commands), status=Status.FAILURE)
    except (errors.ConnectionError, StreamParseError):
        exit_with_metrics(command, status=Status.FAILURE)
    except SystemExit as e:

@@ -1143,9 +1144,6 @@ class TopLevelCommand:
        attach_dependencies = options.get('--attach-dependencies')
        keep_prefix = not options.get('--no-log-prefix')

        if not IS_LINUX_PLATFORM:
            print('Docker Compose is now in the Docker CLI, try `docker compose up`\n')

        if detached and (cascade_stop or exit_value_from or attach_dependencies):
            raise UserError(
                "-d cannot be combined with --abort-on-container-exit or --attach-dependencies.")
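The first hunk above changes the metrics status recorded when a run is interrupted from FAILURE to CANCELED. A minimal sketch of the pattern, with a simplified `Status` enum and an `exit_with_metrics` stand-in for the real helpers in `compose/cli` (the surrounding dispatch is assumed, not the actual implementation):

```python
import enum
import sys


class Status(enum.Enum):
    SUCCESS = "success"
    FAILURE = "failure"
    CANCELED = "canceled"


def exit_with_metrics(command, log_msg=None, status=Status.SUCCESS):
    # Stand-in for the real helper: record the command and status, then exit.
    if log_msg:
        print(log_msg, file=sys.stderr)
    print("metric: command={!r} status={}".format(command, status.value))
    sys.exit(0 if status is Status.SUCCESS else 1)


def main(run_command):
    try:
        run_command()
    except KeyboardInterrupt:
        # An interrupted run is now reported as CANCELED rather than
        # FAILURE, so user aborts no longer inflate failure metrics.
        exit_with_metrics("up", "Aborting.", status=Status.CANCELED)
    except Exception as e:
        exit_with_metrics("up", str(e), status=Status.FAILURE)
    else:
        exit_with_metrics("up", status=Status.SUCCESS)
```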
@@ -1,85 +0,0 @@
import json
import logging
import os
from distutils.util import strtobool

from docker.constants import IS_WINDOWS_PLATFORM
from docker.utils.config import find_config_file


SCAN_BINARY_NAME = "docker-scan" + (".exe" if IS_WINDOWS_PLATFORM else "")

log = logging.getLogger(__name__)


class ScanConfig:
    def __init__(self, d):
        self.optin = False
        vars(self).update(d)


def display_scan_suggest_msg():
    if environment_scan_avoid_suggest() or \
            scan_available() is None or \
            scan_already_invoked():
        return
    log.info("Use 'docker scan' to run Snyk tests against images to find vulnerabilities "
             "and learn how to fix them")


def environment_scan_avoid_suggest():
    return os.getenv('DOCKER_SCAN_SUGGEST', 'true').lower() == 'false'


def scan_already_invoked():
    docker_folder = docker_config_folder()
    if docker_folder is None:
        return False

    scan_config_file = os.path.join(docker_folder, 'scan', "config.json")
    if not os.path.exists(scan_config_file):
        return False

    try:
        data = ''
        with open(scan_config_file) as f:
            data = f.read()
        scan_config = json.loads(data, object_hook=ScanConfig)
        return scan_config.optin if isinstance(scan_config.optin, bool) else strtobool(scan_config.optin)
    except Exception:  # pylint:disable=broad-except
        return True


def scan_available():
    docker_folder = docker_config_folder()
    if docker_folder:
        home_scan_bin = os.path.join(docker_folder, 'cli-plugins', SCAN_BINARY_NAME)
        if os.path.isfile(home_scan_bin) or os.path.islink(home_scan_bin):
            return home_scan_bin

    if IS_WINDOWS_PLATFORM:
        program_data_scan_bin = os.path.join('C:\\', 'ProgramData', 'Docker', 'cli-plugins',
                                             SCAN_BINARY_NAME)
        if os.path.isfile(program_data_scan_bin) or os.path.islink(program_data_scan_bin):
            return program_data_scan_bin
    else:
        lib_scan_bin = os.path.join('/usr', 'local', 'lib', 'docker', 'cli-plugins', SCAN_BINARY_NAME)
        if os.path.isfile(lib_scan_bin) or os.path.islink(lib_scan_bin):
            return lib_scan_bin
        lib_exec_scan_bin = os.path.join('/usr', 'local', 'libexec', 'docker', 'cli-plugins',
                                         SCAN_BINARY_NAME)
        if os.path.isfile(lib_exec_scan_bin) or os.path.islink(lib_exec_scan_bin):
            return lib_exec_scan_bin
        lib_scan_bin = os.path.join('/usr', 'lib', 'docker', 'cli-plugins', SCAN_BINARY_NAME)
        if os.path.isfile(lib_scan_bin) or os.path.islink(lib_scan_bin):
            return lib_scan_bin
        lib_exec_scan_bin = os.path.join('/usr', 'libexec', 'docker', 'cli-plugins', SCAN_BINARY_NAME)
        if os.path.isfile(lib_exec_scan_bin) or os.path.islink(lib_exec_scan_bin):
            return lib_exec_scan_bin
    return None


def docker_config_folder():
    docker_config_file = find_config_file()
    return None if not docker_config_file \
        else os.path.dirname(os.path.abspath(docker_config_file))
@@ -243,6 +243,7 @@ class ConversionMap:
        service_path('healthcheck', 'disable'): to_boolean,
        service_path('deploy', 'labels', PATH_JOKER): to_str,
        service_path('deploy', 'replicas'): to_int,
        service_path('deploy', 'placement', 'max_replicas_per_node'): to_int,
        service_path('deploy', 'resources', 'limits', "cpus"): to_float,
        service_path('deploy', 'update_config', 'parallelism'): to_int,
        service_path('deploy', 'update_config', 'max_failure_ratio'): to_float,
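The single added line registers an integer conversion for `deploy.placement.max_replicas_per_node`, matching the changelog's "fix conversion map" entry. A standalone sketch of the pattern (a local `to_int` and a path-keyed map written for illustration, not the real `compose.config.types` module): values that come back as strings are coerced to the type the field expects before being used.

```python
def to_int(value):
    # Coerce a serialized string value back to an integer.
    return int(value)


# Hypothetical, simplified conversion map keyed by config path.
CONVERSION_MAP = {
    ('deploy', 'replicas'): to_int,
    ('deploy', 'placement', 'max_replicas_per_node'): to_int,  # the added entry
}


def convert(path, value):
    converter = CONVERSION_MAP.get(path)
    return converter(value) if converter else value


if __name__ == '__main__':
    # "2" from a serialized config becomes the integer 2.
    print(convert(('deploy', 'placement', 'max_replicas_per_node'), "2"))
```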
@@ -36,7 +36,7 @@ class MetricsCommand(requests.Session):
                 context_type=None, status=Status.SUCCESS,
                 source=MetricsSource.CLI, uri=None):
        super().__init__()
        self.command = "compose " + command if command else "compose --help"
        self.command = ("compose " + command).strip() if command else "compose --help"
        self.context = context_type or ContextAPI.get_current_context().context_type or 'moby'
        self.source = source
        self.status = status.value
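The only change here is the added `.strip()`: after the subcommand is concatenated onto `"compose "`, any leading or trailing whitespace is removed before the command name is recorded as a metric. A tiny illustration of the mechanical effect (plain Python, no compose imports, function name chosen for the example):

```python
def recorded_command(command):
    # Mirrors the updated expression in MetricsCommand.__init__.
    return ("compose " + command).strip() if command else "compose --help"


print(recorded_command("up"))     # "compose up"
print(recorded_command("up  "))   # "compose up"  (no stray trailing spaces recorded)
print(recorded_command(""))       # "compose --help"
```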
@@ -13,7 +13,6 @@ from docker.utils import version_lt

from . import parallel
from .cli.errors import UserError
from .cli.scan_suggest import display_scan_suggest_msg
from .config import ConfigurationError
from .config.config import V1
from .config.sort_services import get_container_name_from_network_mode

@@ -519,9 +518,6 @@ class Project:
        for service in services:
            build_service(service)

        if services:
            display_scan_suggest_msg()

    def create(
        self,
        service_names=None,

@@ -664,15 +660,8 @@ class Project:
            service_names,
            include_deps=start_deps)

        must_build = False
        for svc in services:
            if svc.must_build(do_build=do_build):
                must_build = True
            svc.ensure_image_exists(do_build=do_build, silent=silent, cli=cli)

        if must_build:
            display_scan_suggest_msg()

        plans = self._get_convergence_plans(
            services,
            strategy,
@@ -1,6 +1,5 @@
import enum
import itertools
import json
import logging
import os
import re
@@ -368,24 +367,6 @@ class Service:
                "rebuild this image you must use `docker-compose build` or "
                "`docker-compose up --build`.".format(self.name))

    def must_build(self, do_build=BuildAction.none):
        if self.can_be_built() and do_build == BuildAction.force:
            return True

        try:
            self.image()
            return False
        except NoSuchImageError:
            pass

        if not self.can_be_built():
            return False

        if do_build == BuildAction.skip:
            return False

        return True

    def get_image_registry_data(self):
        try:
            return self.client.inspect_distribution(self.image_name)
@@ -732,6 +713,7 @@ class Service:
            'image_id': image_id(),
            'links': self.get_link_names(),
            'net': self.network_mode.id,
            'ipc_mode': self.ipc_mode.mode,
            'networks': self.networks,
            'secrets': self.secrets,
            'volumes_from': [
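This added `ipc_mode` key is the changelog's "fix config hash to trigger container recreation on IPC mode updates": the config hash that decides whether a container must be recreated is computed from this dictionary, so a field left out of it can change without the hash changing. A minimal sketch of that mechanism, assuming the hash is a SHA-256 over the JSON-serialized dict (the helper below is an illustration, not the real `json_hash`):

```python
import hashlib
import json


def config_hash(config_dict):
    # Hash the canonical JSON form of the service configuration.
    dump = json.dumps(config_dict, sort_keys=True, separators=(',', ':'))
    return hashlib.sha256(dump.encode('utf-8')).hexdigest()


base = {'image_id': 'abcd', 'net': 'other', 'links': []}

# Before the fix: ipc_mode was not part of the dict, so changing the IPC
# setting left the hash, and therefore the running container, untouched.
print(config_hash(base))

# After the fix: the hash changes whenever ipc_mode changes, which is
# what forces the container to be recreated on the next `up`.
print(config_hash({**base, 'ipc_mode': None}))
print(config_hash({**base, 'ipc_mode': 'shareable'}))
```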
@@ -1125,8 +1107,9 @@ class Service:
                'Impossible to perform platform-targeted builds for API version < 1.35'
            )

        builder = self.client if not cli else _CLIBuilder(progress)
        build_output = builder.build(
        builder = _ClientBuilder(self.client) if not cli else _CLIBuilder(progress)
        return builder.build(
            service=self,
            path=path,
            tag=self.image_name,
            rm=rm,
@@ -1147,30 +1130,7 @@ class Service:
            gzip=gzip,
            isolation=build_opts.get('isolation', self.options.get('isolation', None)),
            platform=self.platform,
        )

        try:
            all_events = list(stream_output(build_output, output_stream))
        except StreamOutputError as e:
            raise BuildError(self, str(e))

        # Ensure the HTTP connection is not reused for another
        # streaming command, as the Docker daemon can sometimes
        # complain about it
        self.client.close()

        image_id = None

        for event in all_events:
            if 'stream' in event:
                match = re.search(r'Successfully built ([0-9a-f]+)', event.get('stream', ''))
                if match:
                    image_id = match.group(1)

        if image_id is None:
            raise BuildError(self, event if all_events else 'Unknown')

        return image_id
            output_stream=output_stream)
@@ -1827,20 +1787,77 @@ def rewrite_build_path(path):
    return path


class _CLIBuilder:
    def __init__(self, progress):
        self._progress = progress
class _ClientBuilder:
    def __init__(self, client):
        self.client = client

    def build(self, path, tag=None, quiet=False, fileobj=None,
    def build(self, service, path, tag=None, quiet=False, fileobj=None,
              nocache=False, rm=False, timeout=None,
              custom_context=False, encoding=None, pull=False,
              forcerm=False, dockerfile=None, container_limits=None,
              decode=False, buildargs=None, gzip=False, shmsize=None,
              labels=None, cache_from=None, target=None, network_mode=None,
              squash=None, extra_hosts=None, platform=None, isolation=None,
              use_config_proxy=True):
              use_config_proxy=True, output_stream=sys.stdout):
        build_output = self.client.build(
            path=path,
            tag=tag,
            nocache=nocache,
            rm=rm,
            pull=pull,
            forcerm=forcerm,
            dockerfile=dockerfile,
            labels=labels,
            cache_from=cache_from,
            buildargs=buildargs,
            network_mode=network_mode,
            target=target,
            shmsize=shmsize,
            extra_hosts=extra_hosts,
            container_limits=container_limits,
            gzip=gzip,
            isolation=isolation,
            platform=platform)

        try:
            all_events = list(stream_output(build_output, output_stream))
        except StreamOutputError as e:
            raise BuildError(service, str(e))

        # Ensure the HTTP connection is not reused for another
        # streaming command, as the Docker daemon can sometimes
        # complain about it
        self.client.close()

        image_id = None

        for event in all_events:
            if 'stream' in event:
                match = re.search(r'Successfully built ([0-9a-f]+)', event.get('stream', ''))
                if match:
                    image_id = match.group(1)

        if image_id is None:
            raise BuildError(service, event if all_events else 'Unknown')

        return image_id


class _CLIBuilder:
    def __init__(self, progress):
        self._progress = progress

    def build(self, service, path, tag=None, quiet=False, fileobj=None,
              nocache=False, rm=False, timeout=None,
              custom_context=False, encoding=None, pull=False,
              forcerm=False, dockerfile=None, container_limits=None,
              decode=False, buildargs=None, gzip=False, shmsize=None,
              labels=None, cache_from=None, target=None, network_mode=None,
              squash=None, extra_hosts=None, platform=None, isolation=None,
              use_config_proxy=True, output_stream=sys.stdout):
        """
        Args:
            service (str): Service to be built
            path (str): Path to the directory containing the Dockerfile
            buildargs (dict): A dictionary of build arguments
            cache_from (:py:class:`list`): A list of images used for build
@@ -1889,6 +1906,7 @@ class _CLIBuilder:
            configuration file (``~/.docker/config.json`` by default)
            contains a proxy configuration, the corresponding environment
            variables will be set in the container being built.
            output_stream (writer): stream to use for build logs
        Returns:
            A generator for the build output.
        """
@@ -1921,33 +1939,18 @@ class _CLIBuilder:

        args = command_builder.build([path])

        magic_word = "Successfully built "
        appear = False
        with subprocess.Popen(args, stdout=subprocess.PIPE,
        with subprocess.Popen(args, stdout=output_stream, stderr=sys.stderr,
                              universal_newlines=True) as p:
            while True:
                line = p.stdout.readline()
                if not line:
                    break
                if line.startswith(magic_word):
                    appear = True
                yield json.dumps({"stream": line})

            p.communicate()
            if p.returncode != 0:
                raise StreamOutputError()
                raise BuildError(service, "Build failed")

        with open(iidfile) as f:
            line = f.readline()
            image_id = line.split(":")[1].strip()
        os.remove(iidfile)

        # In case of `DOCKER_BUILDKIT=1`
        # there is no success message already present in the output.
        # Since that's the way `Service::build` gets the `image_id`
        # it has to be added `manually`
        if not appear:
            yield json.dumps({"stream": "{}{}\n".format(magic_word, image_id)})
        return image_id


class _CommandBuilder:
@@ -13,7 +13,7 @@ more-itertools==8.6.0; python_version >= '3.5'
more-itertools==5.0.0; python_version < '3.5'
packaging==20.9
pluggy==0.13.1
py==1.9.0
py==1.10.0
pycodestyle==2.6.0
pycparser==2.20
pyflakes==2.2.0
@@ -15,7 +15,7 @@

set -e

VERSION="1.29.0"
VERSION="1.29.2"
IMAGE="docker/compose:$VERSION"
@@ -43,7 +43,6 @@ for version in $DOCKER_VERSIONS; do
        --name "$daemon_container" \
        --privileged \
        --volume="/var/lib/docker" \
        -v $DOCKER_CONFIG/config.json:/root/.docker/config.json \
        -e "DOCKER_TLS_CERTDIR=" \
        "docker:$version-dind" \
        dockerd -H tcp://0.0.0.0:2375 $DOCKER_DAEMON_ARGS \

@@ -51,6 +50,11 @@ for version in $DOCKER_VERSIONS; do

    docker exec "$daemon_container" sh -c "apk add --no-cache git"

    # copy docker config from host for authentication with Docker Hub
    docker exec "$daemon_container" sh -c "mkdir /root/.docker"
    docker cp /root/.docker/config.json $daemon_container:/root/.docker/config.json
    docker exec "$daemon_container" sh -c "chmod 644 /root/.docker/config.json"

    docker run \
        --rm \
        --tty \
@@ -1955,12 +1955,10 @@ class ProjectTest(DockerClientTestCase):
        with pytest.raises(ProjectError):
            project.up()

        containers = project.containers()
        assert len(containers) == 0

        svc1 = project.get_service('svc1')
        svc2 = project.get_service('svc2')
        assert 'svc1' in svc2.get_dependency_names()
        assert len(svc2.containers()) == 0
        with pytest.raises(CompletedUnsuccessfully):
            svc1.is_completed_successfully()

@@ -1993,14 +1991,12 @@ class ProjectTest(DockerClientTestCase):
        with pytest.raises(ProjectError):
            project.up()

        containers = project.containers()
        assert len(containers) == 0

        svc1 = project.get_service('svc1')
        svc2 = project.get_service('svc2')
        svc3 = project.get_service('svc3')
        assert ['svc1', 'svc2'] == svc3.get_dependency_names()
        assert svc1.is_completed_successfully()
        assert len(svc3.containers()) == 0
        with pytest.raises(CompletedUnsuccessfully):
            svc2.is_completed_successfully()
@@ -330,7 +330,7 @@ class ServiceTest(unittest.TestCase):
        assert service.options['environment'] == environment

        assert opts['labels'][LABEL_CONFIG_HASH] == \
            '689149e6041a85f6fb4945a2146a497ed43c8a5cbd8991753d875b165f1b4de4'
            '6da0f3ec0d5adf901de304bdc7e0ee44ec5dd7adb08aebc20fe0dd791d4ee5a8'
        assert opts['environment'] == ['also=real']

    def test_get_container_create_options_sets_affinity_with_binds(self):

@@ -700,6 +700,7 @@ class ServiceTest(unittest.TestCase):
        config_dict = service.config_dict()
        expected = {
            'image_id': 'abcd',
            'ipc_mode': None,
            'options': {'image': 'example.com/foo'},
            'links': [('one', 'one')],
            'net': 'other',

@@ -723,6 +724,7 @@ class ServiceTest(unittest.TestCase):
        config_dict = service.config_dict()
        expected = {
            'image_id': 'abcd',
            'ipc_mode': None,
            'options': {'image': 'example.com/foo'},
            'links': [],
            'networks': {},