
Commit

Merge branch 'master' into cachefrom
Signed-off-by: Thomas Schaaf <[email protected]>
thomaschaaf committed Feb 6, 2017
2 parents 275d851 + 76d4f5b commit 811c1f1
Showing 24 changed files with 398 additions and 50 deletions.
48 changes: 48 additions & 0 deletions CHANGELOG.md
@@ -1,6 +1,54 @@
Change log
==========

1.10.0 (2017-01-18)
-------------------

### New Features

#### Compose file version 3.0

- Introduced version 3.0 of the `docker-compose.yml` specification. This
version must be used with Docker Engine 1.13 or above and is
specifically designed to work with the `docker stack` commands.

#### Compose file version 2.1 and up

- Healthcheck configuration can now be done in the service definition using
the `healthcheck` parameter.

- Container dependencies can now be set up to wait on successful healthchecks
when declared using `depends_on`. See the documentation for the updated
syntax.
**Note:** This feature will not be ported to version 3 Compose files.

- Added support for the `sysctls` parameter in service definitions.

- Added support for the `userns_mode` parameter in service definitions.

- Compose now adds identifying labels to networks and volumes it creates.

#### Compose file version 2.0 and up

- Added support for the `stop_grace_period` option in service definitions.

### Bugfixes

- Colored output now works properly on Windows.

- Fixed a bug where docker-compose run would fail to set up link aliases
in interactive mode on Windows.

- Networks created by Compose are now always made attachable
(Compose files v2.1 and up).

- Fixed a bug where falsy values of `COMPOSE_CONVERT_WINDOWS_PATHS`
(`0`, `false`, empty value) were being interpreted as true.

- Fixed a bug where forward slashes in some .dockerignore patterns weren't
being parsed correctly on Windows.


1.9.0 (2016-11-16)
-----------------

5 changes: 3 additions & 2 deletions Dockerfile.run
@@ -1,13 +1,14 @@

FROM alpine:3.4
ARG version
RUN apk -U add \
python \
py-pip

COPY requirements.txt /code/requirements.txt
RUN pip install -r /code/requirements.txt

ADD dist/docker-compose-release.tar.gz /code/docker-compose
RUN pip install --no-deps /code/docker-compose/docker-compose-*
COPY dist/docker_compose-${version}-py2.py3-none-any.whl /code/
RUN pip install --no-deps /code/docker_compose-${version}-py2.py3-none-any.whl

ENTRYPOINT ["/usr/bin/docker-compose"]
2 changes: 1 addition & 1 deletion compose/__init__.py
@@ -1,4 +1,4 @@
from __future__ import absolute_import
from __future__ import unicode_literals

__version__ = '1.10.0dev'
__version__ = '1.11.0dev'
24 changes: 24 additions & 0 deletions compose/cli/main.py
@@ -14,6 +14,30 @@
from inspect import getdoc
from operator import attrgetter


# Attempt to detect https://github.com/docker/compose/issues/4344
try:
# A regular import statement causes PyInstaller to freak out while
# trying to load pip. This way it is simply ignored.
pip = __import__('pip')
pip_packages = pip.get_installed_distributions()
if 'docker-py' in [pkg.project_name for pkg in pip_packages]:
from .colors import red
print(
red('ERROR:'),
"Dependency conflict: an older version of the 'docker-py' package "
"is polluting the namespace. "
"Run the following command to remedy the issue:\n"
"pip uninstall docker docker-py; pip install docker",
file=sys.stderr
)
sys.exit(1)
except ImportError:
# pip is not available, which indicates it's probably the binary
# distribution of Compose which is not affected
pass


from . import errors
from . import signals
from .. import __version__
8 changes: 6 additions & 2 deletions compose/config/config.py
@@ -713,7 +713,7 @@ def finalize_service(service_config, service_names, version, environment):
if 'volumes' in service_dict:
service_dict['volumes'] = [
VolumeSpec.parse(
v, environment.get('COMPOSE_CONVERT_WINDOWS_PATHS')
v, environment.get_boolean('COMPOSE_CONVERT_WINDOWS_PATHS')
) for v in service_dict['volumes']
]

@@ -819,14 +819,15 @@ def merge_service_dicts(base, override, version):
md.merge_mapping('ulimits', parse_ulimits)
md.merge_mapping('networks', parse_networks)
md.merge_mapping('sysctls', parse_sysctls)
md.merge_mapping('depends_on', parse_depends_on)
md.merge_sequence('links', ServiceLink.parse)

for field in ['volumes', 'devices']:
md.merge_field(field, merge_path_mappings)

for field in [
'ports', 'cap_add', 'cap_drop', 'expose', 'external_links',
'security_opt', 'volumes_from', 'depends_on',
'security_opt', 'volumes_from',
]:
md.merge_field(field, merge_unique_items_lists, default=[])

@@ -921,6 +922,9 @@ def parse_dict_or_list(split_func, type_name, arguments):
parse_labels = functools.partial(parse_dict_or_list, split_kv, 'labels')
parse_networks = functools.partial(parse_dict_or_list, lambda k: (k, None), 'networks')
parse_sysctls = functools.partial(parse_dict_or_list, split_kv, 'sysctls')
parse_depends_on = functools.partial(
parse_dict_or_list, lambda k: (k, {'condition': 'service_started'}), 'depends_on'
)


def parse_ulimits(ulimits):
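
The new `parse_depends_on` helper above reuses `parse_dict_or_list` to normalize the short list form of `depends_on` into the dict form expected by the healthcheck-aware dependency handling. A hypothetical illustration of the resulting shape (standalone sketch, not Compose's own code path):

```python
# Short (list) form as written in a Compose file:
short_form = ['db', 'redis']

# parse_depends_on maps each bare name to a default condition entry:
normalized = {name: {'condition': 'service_started'} for name in short_form}

print(normalized)
# {'db': {'condition': 'service_started'},
#  'redis': {'condition': 'service_started'}}
```
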
5 changes: 3 additions & 2 deletions compose/config/config_schema_v3.0.json
@@ -309,6 +309,7 @@
},
"additionalProperties": false
},
"internal": {"type": "boolean"},
"labels": {"$ref": "#/definitions/list_or_dict"}
},
"additionalProperties": false
@@ -331,9 +332,9 @@
"name": {"type": "string"}
},
"additionalProperties": false
}
},
"labels": {"$ref": "#/definitions/list_or_dict"}
},
"labels": {"$ref": "#/definitions/list_or_dict"},
"additionalProperties": false
},

11 changes: 11 additions & 0 deletions compose/config/environment.py
@@ -105,3 +105,14 @@ def get(self, key, *args, **kwargs):
super(Environment, self).get(key.upper(), *args, **kwargs)
)
return super(Environment, self).get(key, *args, **kwargs)

def get_boolean(self, key):
# Convert a value to a boolean using "common sense" rules.
# Unset, empty, "0" and "false" (i-case) yield False.
# All other values yield True.
value = self.get(key)
if not value:
return False
if value.lower() in ['0', 'false']:
return False
return True
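
A standalone sketch of the `get_boolean` rules above, showing how the falsy spellings of `COMPOSE_CONVERT_WINDOWS_PATHS` called out in the changelog are now treated (assumed example, not part of the commit):

```python
def env_boolean(value):
    # Same "common sense" rules as Environment.get_boolean:
    # unset, empty, '0' and 'false' (any case) are False.
    if not value:
        return False
    return value.lower() not in ('0', 'false')

assert env_boolean(None) is False
assert env_boolean('') is False
assert env_boolean('0') is False
assert env_boolean('False') is False
assert env_boolean('1') is True
assert env_boolean('yes') is True
```
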
45 changes: 43 additions & 2 deletions compose/config/serialize.py
@@ -32,6 +32,11 @@ def denormalize_config(config):
if 'external_name' in net_conf:
del net_conf['external_name']

volumes = config.volumes.copy()
for vol_name, vol_conf in volumes.items():
if 'external_name' in vol_conf:
del vol_conf['external_name']

version = config.version
if version == V1:
version = V2_1
@@ -40,7 +45,7 @@
'version': version,
'services': services,
'networks': networks,
'volumes': config.volumes,
'volumes': volumes,
}


@@ -52,13 +57,49 @@ def serialize_config(config):
width=80)


def serialize_ns_time_value(value):
result = (value, 'ns')
table = [
(1000., 'us'),
(1000., 'ms'),
(1000., 's'),
(60., 'm'),
(60., 'h')
]
for stage in table:
tmp = value / stage[0]
if tmp == int(value / stage[0]):
value = tmp
result = (int(value), stage[1])
else:
break
return '{0}{1}'.format(*result)


def denormalize_service_dict(service_dict, version):
service_dict = service_dict.copy()

if 'restart' in service_dict:
service_dict['restart'] = types.serialize_restart_spec(service_dict['restart'])
service_dict['restart'] = types.serialize_restart_spec(
service_dict['restart']
)

if version == V1 and 'network_mode' not in service_dict:
service_dict['network_mode'] = 'bridge'

if 'depends_on' in service_dict and version != V2_1:
service_dict['depends_on'] = sorted([
svc for svc in service_dict['depends_on'].keys()
])

if 'healthcheck' in service_dict:
if 'interval' in service_dict['healthcheck']:
service_dict['healthcheck']['interval'] = serialize_ns_time_value(
service_dict['healthcheck']['interval']
)
if 'timeout' in service_dict['healthcheck']:
service_dict['healthcheck']['timeout'] = serialize_ns_time_value(
service_dict['healthcheck']['timeout']
)

return service_dict
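
A worked example of `serialize_ns_time_value` above: healthcheck `interval` and `timeout` values are held internally as nanosecond integers, and the serializer keeps promoting to the next larger unit while the value divides evenly, stopping at the first non-integer step (restated here as a standalone sketch for illustration):

```python
def serialize_ns_time_value(value):
    # Same algorithm as the function in the diff above.
    result = (value, 'ns')
    for divisor, unit in [(1000., 'us'), (1000., 'ms'), (1000., 's'),
                          (60., 'm'), (60., 'h')]:
        quotient = value / divisor
        if quotient != int(quotient):
            break
        value = quotient
        result = (int(value), unit)
    return '{0}{1}'.format(*result)

assert serialize_ns_time_value(1500000) == '1500us'      # not a whole millisecond
assert serialize_ns_time_value(5000000000) == '5s'
assert serialize_ns_time_value(90000000000) == '90s'     # 1.5 minutes stays in seconds
assert serialize_ns_time_value(3600000000000) == '1h'
```
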
40 changes: 24 additions & 16 deletions compose/parallel.py
@@ -12,6 +12,8 @@
from six.moves.queue import Queue

from compose.cli.signals import ShutdownException
from compose.errors import HealthCheckFailed
from compose.errors import NoHealthCheckConfigured
from compose.errors import OperationFailedError
from compose.utils import get_output_stream

@@ -48,7 +50,7 @@ def parallel_execute(objects, func, get_name, msg, get_deps=None):
elif isinstance(exception, APIError):
errors[get_name(obj)] = exception.explanation
writer.write(get_name(obj), 'error')
elif isinstance(exception, OperationFailedError):
elif isinstance(exception, (OperationFailedError, HealthCheckFailed, NoHealthCheckConfigured)):
errors[get_name(obj)] = exception.msg
writer.write(get_name(obj), 'error')
elif isinstance(exception, UpstreamError):
@@ -164,21 +166,27 @@ def feed_queue(objects, func, get_deps, results, state):

for obj in pending:
deps = get_deps(obj)

if any(dep[0] in state.failed for dep in deps):
log.debug('{} has upstream errors - not processing'.format(obj))
results.put((obj, None, UpstreamError()))
state.failed.add(obj)
elif all(
dep not in objects or (
dep in state.finished and (not ready_check or ready_check(dep))
) for dep, ready_check in deps
):
log.debug('Starting producer thread for {}'.format(obj))
t = Thread(target=producer, args=(obj, func, results))
t.daemon = True
t.start()
state.started.add(obj)
try:
if any(dep[0] in state.failed for dep in deps):
log.debug('{} has upstream errors - not processing'.format(obj))
results.put((obj, None, UpstreamError()))
state.failed.add(obj)
elif all(
dep not in objects or (
dep in state.finished and (not ready_check or ready_check(dep))
) for dep, ready_check in deps
):
log.debug('Starting producer thread for {}'.format(obj))
t = Thread(target=producer, args=(obj, func, results))
t.daemon = True
t.start()
state.started.add(obj)
except (HealthCheckFailed, NoHealthCheckConfigured) as e:
log.debug(
'Healthcheck for service(s) upstream of {} failed - '
'not processing'.format(obj)
)
results.put((obj, None, e))

if state.is_done():
results.put(STOP)
15 changes: 7 additions & 8 deletions compose/service.py
@@ -10,6 +10,7 @@
import enum
import six
from docker.errors import APIError
from docker.errors import ImageNotFound
from docker.errors import NotFound
from docker.types import LogConfig
from docker.utils.ports import build_port_bindings
@@ -21,6 +22,7 @@
from .config import merge_environment
from .config.types import VolumeSpec
from .const import DEFAULT_TIMEOUT
from .const import IS_WINDOWS_PLATFORM
from .const import LABEL_CONFIG_HASH
from .const import LABEL_CONTAINER_NUMBER
from .const import LABEL_ONE_OFF
@@ -323,11 +325,8 @@ def ensure_image_exists(self, do_build=BuildAction.none):
def image(self):
try:
return self.client.inspect_image(self.image_name)
except APIError as e:
if e.response.status_code == 404 and e.explanation and 'No such image' in str(e.explanation):
raise NoSuchImageError("Image '{}' not found".format(self.image_name))
else:
raise
except ImageNotFound:
raise NoSuchImageError("Image '{}' not found".format(self.image_name))

@property
def image_name(self):
@@ -771,9 +770,9 @@ def build(self, no_cache=False, pull=False, force_rm=False):

build_opts = self.options.get('build', {})
path = build_opts.get('context')
# python2 os.path() doesn't support unicode, so we need to encode it to
# a byte string
if not six.PY3:
# python2 os.stat() doesn't support unicode on some UNIX, so we
# encode it to a bytestring to be safe
if not six.PY3 and not IS_WINDOWS_PLATFORM:
path = path.encode('utf8')

build_output = self.client.build(
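
Prior to this change, `Service.image()` string-matched the 404 explanation of an `APIError`; with the `docker` SDK 2.x pinned in requirements.txt it can catch the typed `ImageNotFound` exception instead. A minimal standalone sketch of the same pattern (hypothetical helper, assumes the `docker==2.0.1` package and a reachable Docker daemon):

```python
import docker
from docker.errors import ImageNotFound

client = docker.APIClient()  # talks to the local daemon's default socket

def inspect_or_none(image_name):
    # Mirrors the updated Service.image(): rely on the SDK's typed
    # exception instead of string-matching the 404 explanation.
    try:
        return client.inspect_image(image_name)
    except ImageNotFound:
        return None
```
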
2 changes: 1 addition & 1 deletion requirements.txt
@@ -2,7 +2,7 @@ PyYAML==3.11
backports.ssl-match-hostname==3.5.0.1; python_version < '3'
cached-property==1.2.0
colorama==0.3.7
docker==2.0.0
docker==2.0.1
dockerpty==0.4.1
docopt==0.6.1
enum34==1.0.4; python_version < '3.4'
5 changes: 2 additions & 3 deletions script/build/image
@@ -11,6 +11,5 @@ TAG=$1
VERSION="$(python setup.py --version)"

./script/build/write-git-sha
python setup.py sdist
cp dist/docker-compose-$VERSION.tar.gz dist/docker-compose-release.tar.gz
docker build -t docker/compose:$TAG -f Dockerfile.run .
python setup.py sdist bdist_wheel
docker build --build-arg version=$VERSION -t docker/compose:$TAG -f Dockerfile.run .
7 changes: 4 additions & 3 deletions script/release/push-release
@@ -54,18 +54,19 @@ git push $GITHUB_REPO $VERSION
echo "Uploading the docker image"
docker push docker/compose:$VERSION

echo "Uploading sdist to PyPI"
echo "Uploading package to PyPI"
pandoc -f markdown -t rst README.md -o README.rst
sed -i -e 's/logo.png?raw=true/https:\/\/github.com\/docker\/compose\/raw\/master\/logo.png?raw=true/' README.rst
./script/build/write-git-sha
python setup.py sdist
python setup.py sdist bdist_wheel
if [ "$(command -v twine 2> /dev/null)" ]; then
twine upload ./dist/docker-compose-${VERSION/-/}.tar.gz
twine upload ./dist/docker-compose-${VERSION/-/}.tar.gz ./dist/docker_compose-${VERSION/-/}-py2.py3-none-any.whl
else
python setup.py upload
fi

echo "Testing pip package"
deactivate || true
virtualenv venv-test
source venv-test/bin/activate
pip install docker-compose==$VERSION