diff --git a/.github/.pre-commit-config.yaml b/.github/.pre-commit-config.yaml
index f1c18967..22285e70 100644
--- a/.github/.pre-commit-config.yaml
+++ b/.github/.pre-commit-config.yaml
@@ -5,3 +5,30 @@ repos:
       - id: hadolint-docker
         always_run: true
         args: ["Dockerfile"]
+  # Specify `local` to use local pip versions of the pre-commit libs.
+  - repo: local
+    hooks:
+      - id: linter
+        name: Linter
+        description: https://github.com/charliermarsh/ruff
+        entry: .venv/bin/ruff
+        language: python
+        files: \.py$
+        # Ignore lambda functions in `appcli/models/configuration.py::Hooks`.
+        args: [check, --fix, --ignore, E731]
+      - id: formatter
+        name: Formatter
+        description: https://github.com/charliermarsh/ruff
+        entry: .venv/bin/ruff
+        language: python
+        files: \.py$
+        args: [format]
+      # NOTE: Disabling this for now as there is currently no way to exit gracefully when no tests are run.
+      # See https://github.com/pytest-dev/pytest/issues/2393
+      # - id: pytest
+      #   name: PyTest
+      #   description: https://docs.pytest.org/en/latest/
+      #   entry: .venv/bin/pytest
+      #   language: python
+      #   files: \.py$
+      #   args: []
\ No newline at end of file
diff --git a/.github/workflows/build_python.yml b/.github/workflows/build_python.yml
index 7ae3f5e7..0dc619cc 100644
--- a/.github/workflows/build_python.yml
+++ b/.github/workflows/build_python.yml
@@ -1,6 +1,8 @@
 name: Test Python
 
-on: [push]
+on:
+  push:
+  workflow_call:
 
 jobs:
   build:
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql_analysis.yml
similarity index 99%
rename from .github/workflows/codeql-analysis.yml
rename to .github/workflows/codeql_analysis.yml
index a4e99922..06dbf368 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql_analysis.yml
@@ -19,6 +19,7 @@ on:
     branches: [develop]
   schedule:
     - cron: "36 21 * * 4"
+  workflow_call:
 
 jobs:
   analyze:
diff --git a/.github/workflows/dependabot_test_and_auto_merge.yml b/.github/workflows/dependabot_test_and_auto_merge.yml
new file mode 100644
index 00000000..dc7f844a
--- /dev/null
+++ b/.github/workflows/dependabot_test_and_auto_merge.yml
@@ -0,0 +1,18 @@
+name: Dependabot test and auto-merge
+on: pull_request
+
+jobs:
+  # Merge pre-conditions.
+  test-build-python:
+    uses: ./.github/workflows/build_python.yml
+  test-codeql-analysis:
+    uses: ./.github/workflows/codeql_analysis.yml
+
+  # Merge job.
+  dependabot-auto-merge-workflow:
+    permissions:
+      contents: write
+      pull-requests: write
+    needs: [test-build-python, test-codeql-analysis]
+    uses: brightsparklabs/github-actions/.github/workflows/dependabot-auto-merge.yml@master
+    secrets: inherit
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5b8f26a8..6eb02284 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -12,9 +12,41 @@ The changelog is applicable from version `1.0.0` onwards.
 
 ### Added
 
-- APPCLI-133: Add fix for Git repository ownership issue to the AppCli Dockerfile.
+### Fixed
+
+---
+
+## [2.5.0] - 2024-01-11
+
+### Added
+
+- APPCLI-115: Enable automerge for dependabot PRs.
+- APPCLI-112: Auto-validate config files when a matching `.schema.json` file is provided.
+- APPCLI-116: Set Ruff as the linter and formatter.
+- AF-209: Build the docker-compose plugin into the appcli image.
+- DIS-395: Limit the number of retained backups to 10 by default.
+- AF-210: Stream stdout and stderr.
+
+---
+
+## [2.4.0] - 2023-10-03
+
+### Added
+
 - APED-67: Add support for running `NullOrchestrator` apps on Windows OS.
 
+---
+
+## [2.3.0] - 2023-08-14
+
+### Added
+
+- APPCLI-133: Add fix for Git repository ownership issue to the appcli Dockerfile.
+
+---
+
+## [2.2.1] - 2023-07-24
+
 ### Fixed
 
 - APED-37: Prevent quoted arguments with spaces splitting into multiple arguments in the launcher script.
@@ -284,3 +316,15 @@ and methods, any python referring to the library will need to use full module pa
 ## [1.0.0] - 2020-10-07
 
 _No changelog for this release._
+
+---
+
+# Template
+
+## [Unreleased]
+
+### Added
+
+### Fixed
+
+---
diff --git a/Dockerfile b/Dockerfile
index 3f84fef4..26ab5f29 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -10,16 +10,24 @@ FROM alpine:3.15.0 AS docker-binary-download
 
 WORKDIR /tmp
 
-# Download and extract the static docker binary
-RUN \
-    wget -q https://download.docker.com/linux/static/stable/x86_64/docker-20.10.6.tgz \
-    && tar xf docker-20.10.6.tgz
+# List required versions for docker and compose.
+ENV DOCKER_VERSION=24.0.7
+ENV DOCKER_COMPOSE_VERSION=2.23.3
+
+# Download docker and compose.
+RUN wget -q https://download.docker.com/linux/static/stable/x86_64/docker-${DOCKER_VERSION}.tgz \
+    && tar xf docker-${DOCKER_VERSION}.tgz \
+    && wget -q https://github.com/docker/compose/releases/download/v${DOCKER_COMPOSE_VERSION}/docker-compose-linux-x86_64 \
+    && wget -q https://github.com/docker/compose/releases/download/v${DOCKER_COMPOSE_VERSION}/docker-compose-linux-x86_64.sha256 \
+    && sha256sum -c docker-compose-linux-x86_64.sha256 \
+    && chmod +x docker-compose-linux-x86_64
 
 FROM python:3.10.2-slim-bullseye
 
 ENV LANG=C.UTF-8
 
 COPY --from=docker-binary-download /tmp/docker/docker /usr/bin
+COPY --from=docker-binary-download /tmp/docker-compose-linux-x86_64 /usr/local/lib/docker/cli-plugins/docker-compose
 
 RUN \
     # set timezone to UTC by default
diff --git a/Makefile b/Makefile
index 7c1456c3..e2f5ced8 100644
--- a/Makefile
+++ b/Makefile
@@ -12,33 +12,36 @@ VENV_NAME?=.venv
 VENV_ACTIVATE=. $(VENV_NAME)/bin/activate
 PYTHON=${VENV_NAME}/bin/python
 APP_VERSION=$(shell git describe --always --dirty)
+# Linter rules to ignore.
+# See https://docs.astral.sh/ruff/rules/
+RULES=E731
 
 .DEFAULT: help
 help:
 	@echo "make test"
 	@echo "       run tests"
 	@echo "make lint"
-	@echo "       run flake8"
-	@echo "make isort"
-	@echo "       run + apply isort"
-	@echo "make isort-check"
-	@echo "       check with isort"
+	@echo "       run ruff check"
+	@echo "make lint-check"
+	@echo "       dry-run ruff check"
 	@echo "make format"
-	@echo "       run + apply black"
+	@echo "       run ruff format"
 	@echo "make format-check"
-	@echo "       check with black"
+	@echo "       dry-run ruff format"
 	@echo "make build-wheel"
 	@echo "       build wheel"
 	@echo "make publish-wheel"
 	@echo "       publish wheel"
 	@echo "make all"
-	@echo "       run format + isort + lint + test"
+	@echo "       run format + lint + test"
 	@echo "make docker"
 	@echo "       build docker image"
 	@echo "make docker-publish"
 	@echo "       publish docker image"
 	@echo "make check"
-	@echo "       run format-check + isort-check + lint + test"
+	@echo "       run format-check + lint-check + test"
+	@echo "make precommit"
+	@echo "       manually trigger precommit hooks"
 
 # Requirements are in setup.py, so whenever setup.py is changed, re-run installation of dependencies.
 venv: $(VENV_NAME)/bin/activate
@@ -54,19 +57,17 @@ test: venv
 	${PYTHON} -m pytest
 
 lint: venv
-	${PYTHON} -m flake8 --ignore=E501,W503 --exclude=appcli/__init__.py appcli tests
+# Ignore lambda functions in `appcli/models/configuration.py::Hooks`.
+	${PYTHON} -m ruff check --fix --ignore ${RULES} .
 
-isort: venv
-	${PYTHON} -m isort .
-
-isort-check: venv
-	${PYTHON} -m isort . --diff --check-only
+lint-check: venv
+	${PYTHON} -m ruff check --ignore ${RULES} .
 
 format: venv
-	${PYTHON} -m black .
+	${PYTHON} -m ruff format .
 
 format-check: venv
-	${PYTHON} -m black . --diff --check
+	${PYTHON} -m ruff format --check .
 
 clean:
 	rm -rf build/ dist/ bsl_appcli.egg-info/
@@ -90,6 +91,9 @@ docker-publish: docker
 	docker push brightsparklabs/appcli:${APP_VERSION}
 	docker push brightsparklabs/appcli:latest
 
-all: format isort lint test
+all: format lint test
+
+check: format-check lint-check test
 
-check: format-check isort-check lint test
+precommit: venv
+	$(VENV_NAME)/bin/pre-commit run -c .github/.pre-commit-config.yaml
\ No newline at end of file
diff --git a/README.md b/README.md
index f9b1a61a..285bb4e1 100644
--- a/README.md
+++ b/README.md
@@ -228,6 +228,32 @@ def main():
   - `resources/templates/baseline` - for templates which the end user **is not** expected to modify.
   - `resources/templates/configurable` - for templates which the end user is expected to modify.
 
+#### Schema validation
+
+Configuration files will be automatically validated against provided schema files whenever
+`configure apply` is run.
+Validation is done with [jsonschema](https://json-schema.org/) and is only available for `yaml/yml`
+and `json/jsn` files.
+The JSON schema file must match the name of the file to validate, with a suffix of `.schema.json`,
+and must be placed in the same directory as the file to validate.
+The `settings.yml` and `stack-settings.yml` files, and any files under the `resources/templates` or
+`resources/overrides` directories, can be validated.
+
+```yaml
+# resources/templates/configurable/my-config.yml
+foobar: 5
+```
+
+```json
+# resources/templates/configurable/my-config.yml.schema.json
+{
+    "type": "object",
+    "properties" : {
+        "foobar" : {"type": "number"}
+    }
+}
+```
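+
+As a sketch of the failure mode (reusing the example schema above): if `foobar` were set to a
+string rather than a number, validation would fail and the CLI would exit with an error
+identifying the offending config file.
+
+```yaml
+# resources/templates/configurable/my-config.yml - fails validation against the schema above.
+foobar: "5"
+```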
+
 #### Application context files
 
 Template files are templated with Jinja2. The 'data' passed into the templating engine
diff --git a/appcli/commands/configure_cli.py b/appcli/commands/configure_cli.py
index fc6c88ae..d8e647f2 100644
--- a/appcli/commands/configure_cli.py
+++ b/appcli/commands/configure_cli.py
@@ -74,6 +74,12 @@ def init(ctx):
             logger.debug("Running pre-configure init hook")
             hooks.pre_configure_init(ctx)
 
+            # Validate the configuration schema.
+            logger.debug("Validating configuration files")
+            ConfigurationManager(
+                cli_context, self.cli_configuration
+            ).validate_configuration()
+
             # Initialise configuration directory
             logger.debug("Initialising configuration directory")
             ConfigurationManager(
diff --git a/appcli/configuration_manager.py b/appcli/configuration_manager.py
index 7d5aa9d9..3a0a29c6 100644
--- a/appcli/configuration_manager.py
+++ b/appcli/configuration_manager.py
@@ -2,7 +2,7 @@
 # # -*- coding: utf-8 -*-
 
 """
-Manages configuration.
+Manages the configuration of the application.
 ________________________________________________________________________________
 
 Created by brightSPARK Labs
@@ -19,12 +19,14 @@
 from pathlib import Path
 from typing import Iterable
 
+import jsonschema
+import yaml
+
 # vendor libraries
 from jinja2 import StrictUndefined, Template
 
-from appcli.crypto import crypto
-
 # local libraries
+from appcli.crypto import crypto
 from appcli.crypto.crypto import decrypt_values_in_file
 from appcli.functions import error_and_exit, print_header
 from appcli.git_repositories.git_repositories import (
@@ -32,7 +34,7 @@
     GeneratedConfigurationGitRepository,
 )
 from appcli.logger import logger
-from appcli.models.cli_context import CliContext
+from appcli.models.cli_context import SCHEMA_SUFFIX, CliContext
 from appcli.models.configuration import Configuration
 from appcli.variables_manager import VariablesManager
 
@@ -43,6 +45,14 @@
 METADATA_FILE_NAME = "metadata-configure-apply.json"
 """ Name of the file holding metadata from running a configure (relative to the generated configuration directory) """
 
+FILETYPE_LOADERS = {
+    ".json": json.load,
+    ".jsn": json.load,
+    ".yaml": yaml.safe_load,
+    ".yml": yaml.safe_load,
+}
+""" The supported filetypes for the validator. """
+
 # ------------------------------------------------------------------------------
 # PUBLIC CLASSES
 # ------------------------------------------------------------------------------
@@ -59,6 +69,39 @@ def __init__(self, cli_context: CliContext, configuration: Configuration):
         self.cli_configuration: Configuration = configuration
         self.variables_manager: VariablesManager = cli_context.get_variables_manager()
 
+    def validate_configuration(self):
+        """Validates all settings files that have associated schema files."""
+
+        # Define all the config directories and files.
+        settings_schema: Path = self.cli_context.get_app_configuration_file_schema()
+        stack_settings_schema: Path = (
+            self.cli_context.get_stack_configuration_file_schema()
+        )
+        overrides_dir: Path = self.cli_context.get_baseline_template_overrides_dir()
+        templates_dir: Path = self.cli_context.get_configurable_templates_dir()
+
+        # Parse the directories to get the schema files.
+        schema_files = []
+        if settings_schema.is_file():
+            schema_files.append(settings_schema)
+        if stack_settings_schema.is_file():
+            schema_files.append(stack_settings_schema)
+        schema_files.extend(overrides_dir.glob(f"**/*{SCHEMA_SUFFIX}"))
+        schema_files.extend(templates_dir.glob(f"**/*{SCHEMA_SUFFIX}"))
+
+        for schema_file in schema_files:
+            # Take out the `schema` suffix to get the original config file.
+            # NOTE: As only the trailing schema suffix needs removing, it is easier to convert to a
+            # string and strip the suffix than to manipulate the `Path` suffixes.
+            config_file: Path = Path(str(schema_file).removesuffix(SCHEMA_SUFFIX))
+            if not config_file.exists():
+                logger.warning(f"Found {schema_file} but no matching config file.")
+                continue
+
+            # Load and validate the config/schema.
+            logger.debug(f"Found schema for {config_file}. Validating...")
+            self.__validate_schema(config_file, schema_file)
+
     def initialise_configuration(self):
         """Initialises the configuration repository"""
 
@@ -206,6 +249,37 @@ def set_variable(self, variable: str, value: any):
     def get_stack_variable(self, variable: str):
         return self.variables_manager.get_stack_variable(variable)
 
+    def __validate_schema(self, config_file: Path, schema_file: Path) -> None:
+        """Attempt to validate a config file against a provided schema file.
+        The function will try and determine the file content based off the extension.
+        It will throw if the extension is unknown or the contents do not match the expected format.
+
+        Args:
+            config_file (Path): Path to the config file to validate.
+            schema_file (Path): Path to the schema file.
+        """
+        # Check we actually have a loader for the filetype.
+        try:
+            loader = FILETYPE_LOADERS[config_file.suffix]
+        except KeyError:
+            error_and_exit(
+                f"The `{config_file}` file does not have an associated loader function."
+                f"Check that the suffix is one of the supported types: {[k for k in FILETYPE_LOADERS.keys()]}"
+            )
+
+        # Load the files.
+        with open(config_file, "r") as config_fp, open(schema_file, "r") as schema_fp:
+            data = loader(config_fp)
+            schema = json.load(schema_fp)
+
+        # Validate the config.
+        try:
+            jsonschema.validate(instance=data, schema=schema)
+            logger.debug(
+                f"The configuration file `{config_file}` matched the provided schema."
+            )
+        except jsonschema.exceptions.ValidationError as e:
+            error_and_exit(f"Validation of `{config_file}` failed at:\n{e}")
+
     def __create_new_configuration_branch_and_files(self):
         app_version: str = self.cli_context.app_version
         app_version_branch: str = self.config_repo.generate_branch_name(app_version)
@@ -256,11 +330,11 @@ def __seed_configuration_dir(self):
         os.makedirs(target_app_configuration_file.parent, exist_ok=True)
         shutil.copy2(seed_app_configuration_file, target_app_configuration_file)
 
+        # Copy in the stack configuration file.
         stack_configuration_file = self.cli_configuration.stack_configuration_file
         target_stack_configuration_file = (
             self.cli_context.get_stack_configuration_file()
         )
-        # Copy in the stack configuration file
         if stack_configuration_file.is_file():
             shutil.copy2(stack_configuration_file, target_stack_configuration_file)
 
@@ -535,7 +609,10 @@ def __generate_configuration_metadata_file(self):
         logger.debug("Configuration record written to [%s]", configuration_record_file)
 
     def __backup_and_create_new_directory(
-        self, source_dir: Path, additional_filename_descriptor: str = "backup"
+        self,
+        source_dir: Path,
+        additional_filename_descriptor: str = "backup",
+        maximum_backups: int = 10,
     ) -> Path:
         """Backs up a directory to a tar gzipped file with the current datetimestamp,
         deletes the existing directory, and creates a new empty directory in its place
@@ -544,9 +621,11 @@ def __backup_and_create_new_directory(
             source_dir (Path): Path to the directory to backup and delete
             additional_filename_descriptor (str, optional): an additional identifier to put into
                 the new tgz filename. If not supplied, defaults to 'backup'.
+            maximum_backups (int): Maximum number of backup archives to retain (default 10).
+                The oldest archives (by timestamp in the filename) are deleted first.
         """
 
-        if os.path.exists(source_dir) and os.listdir(source_dir):
+        if source_dir.exists() and os.listdir(source_dir):
             # The datetime is accurate to seconds (microseconds was overkill), and we remove
             # colon (:) because `tar tvf` doesn't like filenames with colons
             current_datetime = (
@@ -557,25 +636,29 @@ def __backup_and_create_new_directory(
             clean_additional_filename_descriptor = (
                 additional_filename_descriptor.replace("/", "-")
             )
-            basename = os.path.basename(source_dir)
-            output_filename = os.path.join(
-                os.path.dirname(source_dir),
-                Path(".generated-archive/"),
-                f"{basename}_{clean_additional_filename_descriptor}_{current_datetime}.tgz",
-            )
+
+            backup_dir = source_dir.parent / ".generated-archive"
+            backup_filename = f"{source_dir.name}_{clean_additional_filename_descriptor}_{current_datetime}.tgz"
+            backup_file = backup_dir / backup_filename
+
+            # Remove old backups, keeping only the `maximum_backups` most recent archives.
+            logger.debug(f"Removing old backups from [{backup_dir}]")
+            old_backups = backup_dir.glob("*.tgz")
+            old_backups = sorted(old_backups, reverse=True)
+            for archive in old_backups[maximum_backups:]:
+                archive.unlink()
 
             # Create the backup
-            logger.debug(f"Backing up directory [{source_dir}] to [{output_filename}]")
-            output_dir = os.path.dirname(output_filename)
-            if not os.path.exists(output_dir):
-                os.makedirs(output_dir)
-            with tarfile.open(output_filename, "w:gz") as tar:
-                tar.add(source_dir, arcname=os.path.basename(source_dir))
+            logger.debug(f"Backing up directory [{source_dir}] to [{backup_file}]")
+            if not backup_dir.exists():
+                backup_dir.mkdir()
+            with tarfile.open(backup_file, "w:gz") as tar:
+                tar.add(source_dir, arcname=source_dir.name)
 
             # Ensure the backup has been successfully created before deleting the existing generated configuration directory
-            if not os.path.exists(output_filename):
+            if not backup_file.exists():
                 error_and_exit(
-                    f"Current generated configuration directory backup failed. Could not write out file [{output_filename}]."
+                    f"Current generated configuration directory backup failed. Could not write out file [{backup_file}]."
                 )
 
             # Remove the existing directory
diff --git a/appcli/models/cli_context.py b/appcli/models/cli_context.py
index 70bc02c7..87d4b9b1 100644
--- a/appcli/models/cli_context.py
+++ b/appcli/models/cli_context.py
@@ -1,6 +1,14 @@
 #!/usr/bin/env python3
 # # -*- coding: utf-8 -*-
 
+"""
+Outlines the CLI context.
+________________________________________________________________________________
+
+Created by brightSPARK Labs
+www.brightsparklabs.com
+"""
+
 # standard libraries
 from pathlib import Path
 from typing import Dict, Iterable, NamedTuple, Tuple
@@ -13,6 +21,18 @@
 from appcli.logger import logger
 from appcli.variables_manager import VariablesManager
 
+# ------------------------------------------------------------------------------
+# CONSTANTS
+# ------------------------------------------------------------------------------
+
+SCHEMA_SUFFIX = ".schema.json"
+""" The suffix for the validation schema files. """
+
+
+# ------------------------------------------------------------------------------
+# PUBLIC CLASSES
+# ------------------------------------------------------------------------------
+
 
 class CliContext(NamedTuple):
     """Shared context from a run of the CLI."""
@@ -131,6 +151,14 @@ def get_app_configuration_file(self) -> Path:
         """
         return self.configuration_dir.joinpath("settings.yml")
 
+    def get_app_configuration_file_schema(self) -> Path:
+        """Get the location of the configuration schema file
+
+        Returns:
+            Path: location of the configuration schema file
+        """
+        return self.configuration_dir.joinpath("settings.yml", SCHEMA_SUFFIX)
+
     def get_stack_configuration_file(self) -> Path:
         """Get the location of the configuration file
 
@@ -139,6 +167,14 @@ def get_stack_configuration_file(self) -> Path:
         """
         return self.configuration_dir.joinpath("stack-settings.yml")
 
+    def get_stack_configuration_file_schema(self) -> Path:
+        """Get the location of the configuration schema file
+
+        Returns:
+            Path: location of the configuration schema file
+        """
+        return self.configuration_dir.joinpath("stack-settings.yml", SCHEMA_SUFFIX)
+
     def get_baseline_template_overrides_dir(self) -> Path:
         """Get the directory of the configuration template overrides
 
diff --git a/appcli/models/configuration.py b/appcli/models/configuration.py
index fa1f28f0..a778fd7a 100644
--- a/appcli/models/configuration.py
+++ b/appcli/models/configuration.py
@@ -1,6 +1,14 @@
 #!/usr/bin/env python3
 # # -*- coding: utf-8 -*-
 
+"""
+Configuration for building the CLI.
+________________________________________________________________________________
+
+Created by brightSPARK Labs
+www.brightsparklabs.com
+"""
+
 # standard libraries
 import inspect
 import os
@@ -20,16 +28,16 @@
 class Hooks(NamedTuple):
     """Hooks to run before/after stages"""
 
-    migrate_variables: Callable[
-        [click.Context, Dict, str, Dict], Dict
-    ] = lambda w, x, y, z: x
+    migrate_variables: Callable[[click.Context, Dict, str, Dict], Dict] = (
+        lambda w, x, y, z: x
+    )
     """ Optional. Delegate function to run during a migration, which converts variables between application versions.
      Args are: CLI context, [Dict of variables to transform], [version of the current variables], and [Dict of clean variables
      at the new application version]. Returns [transformed Dict of variables]. If no function provided, identity
      function is used."""
-    is_valid_variables: Callable[
-        [click.Context, Dict, Dict], bool
-    ] = lambda w, x, y: True
+    is_valid_variables: Callable[[click.Context, Dict, Dict], bool] = (
+        lambda w, x, y: True
+    )
     """ Validate a Dict of variables are valid for use in the current application version.
       Args are: CLI context, [Dict of the variables to validate], and [Dict of the current version's clean variables]. Returns
       True if the Dict to validate is valid for the application at the current version.
diff --git a/appcli/orchestrators.py b/appcli/orchestrators.py
index 69549281..b6205090 100644
--- a/appcli/orchestrators.py
+++ b/appcli/orchestrators.py
@@ -15,7 +15,6 @@
 import os
 import subprocess
 import sys
-import textwrap
 from pathlib import Path
 from subprocess import CompletedProcess
 from tempfile import NamedTemporaryFile
@@ -116,6 +115,7 @@ def exec(
         service_name: str,
         command: Iterable[str],
         stdin_input: str = None,
+        capture_output: bool = False,
     ) -> CompletedProcess:
         """
         Executes a command in a running container.
@@ -125,6 +125,7 @@ def exec(
             service_name (str): Name of the container to be acted upon.
             command (str): The command to be executed, along with any arguments.
             stdin_input (str): Optional - defaults to None. String passed through to the stdin of the exec command.
+            capture_output (bool): Optional - defaults to False. True to capture stdout/stderr for the run command.
 
         Returns:
             CompletedProcess: Result of the orchestrator command.
@@ -273,6 +274,7 @@ def exec(
         service_name: str,
         command: Iterable[str],
         stdin_input: str = None,
+        capture_output: bool = False,
     ) -> CompletedProcess:
         cmd = ["exec"]  # Command is: exec SERVICE COMMAND
         # If there's stdin_input being piped to the command, we need to provide
@@ -281,7 +283,7 @@ def exec(
             cmd.append("-T")
         cmd.append(service_name)
         cmd.extend(list(command))
-        return self.__compose_service(cli_context, cmd, stdin_input)
+        return self.__compose_service(cli_context, cmd, stdin_input, capture_output)
 
     def verify_service_names(
         self, cli_context: CliContext, service_names: tuple[str, ...]
@@ -289,7 +291,7 @@ def verify_service_names(
         if service_names is None or len(service_names) == 0:
             return True
         command = ["config", "--services"]
-        result = self.__compose_service(cli_context, command)
+        result = self.__compose_service(cli_context, command, capture_output=True)
         if result.returncode != 0:
             error_msg = result.stderr.decode()
             logger.error(
@@ -359,6 +361,7 @@ def __compose_service(
         cli_context: CliContext,
         command: Iterable[str],
         stdin_input: str = None,
+        capture_output: bool = False,
     ):
         return execute_compose(
             cli_context,
@@ -366,6 +369,7 @@ def __compose_service(
             self.docker_compose_file,
             self.docker_compose_override_directory,
             stdin_input=stdin_input,
+            capture_output=capture_output,
         )
 
     def __compose_task(
@@ -373,6 +377,7 @@ def __compose_task(
         cli_context: CliContext,
         command: Iterable[str],
         stdin_input: str = None,
+        capture_output: bool = False,
     ):
         return execute_compose(
             cli_context,
@@ -380,6 +385,7 @@ def __compose_task(
             self.docker_compose_task_file,
             self.docker_compose_task_override_directory,
             stdin_input=stdin_input,
+            capture_output=capture_output,
         )
 
 
@@ -492,6 +498,7 @@ def exec(
         service_name: str,
         command: Iterable[str],
         stdin_input: str = None,
+        capture_output: bool = False,
     ) -> CompletedProcess:
         # Running 'docker exec' on containers in a docker swarm is non-trivial
         # due to the distributed nature of docker swarm, and the fact there could
@@ -504,7 +511,7 @@ def verify_service_names(
         if service_names is None or len(service_names) == 0:
             return True
         subcommand = ["config", "--services"]
-        result = self.__docker_stack(cli_context, subcommand)
+        result = self.__docker_stack(cli_context, subcommand, capture_output=True)
         if result.returncode != 0:
             error_msg = result.stderr.decode()
             logger.error(
@@ -575,6 +582,7 @@ def __compose_task(
         cli_context: CliContext,
         command: Iterable[str],
         stdin_input: str = None,
+        capture_output: bool = False,
     ):
         return execute_compose(
             cli_context,
@@ -582,11 +590,12 @@ def __compose_task(
             self.docker_compose_task_file,
             self.docker_compose_task_override_directory,
             stdin_input=stdin_input,
+            capture_output=capture_output,
         )
 
     def __exec_command(self, command: Iterable[str]) -> CompletedProcess:
         logger.debug("Running [%s]", " ".join(command))
-        return subprocess.run(command, capture_output=True)
+        return subprocess.run(command, capture_output=False)
 
 
 class NullOrchestrator(Orchestrator):
@@ -629,6 +638,7 @@ def exec(
         service_name: str,
         command: Iterable[str],
         stdin_input: str = None,
+        capture_output: bool = False,
     ) -> CompletedProcess:
         logger.info(
             "NullOrchestrator has no running containers to execute commands in."
@@ -772,6 +782,7 @@ def execute_compose(
     docker_compose_file_relative_path: Path,
     docker_compose_override_directory_relative_path: Path,
     stdin_input: str = None,
+    capture_output: bool = False,
 ) -> CompletedProcess:
     """Builds and executes a docker-compose command.
 
@@ -783,12 +794,14 @@ def execute_compose(
         docker_compose_override_directory_relative_path (Path): The relative path to a directory containing
             docker-compose override files. Path is relative to the generated configuration directory.
         stdin_input (str): Optional - defaults to None. String passed through to the subprocess via stdin.
+        capture_output (bool): Optional - defaults to False. True to capture stdout/stderr for the run command.
 
     Returns:
         CompletedProcess: The completed process and its exit code.
     """
     docker_compose_command = [
-        "docker-compose",
+        "docker",
+        "compose",
         "--project-name",
         cli_context.get_project_name(),
     ]
@@ -819,26 +832,8 @@ def execute_compose(
     logger.debug("Encoded input: [%s]", encoded_input)
     result = subprocess.run(
         docker_compose_command,
-        capture_output=True,
+        capture_output=capture_output,
         input=encoded_input,
     )
-    # For failures, error log both stdout/stderr if present.
-    if result.returncode != 0:
-        if result.stdout:
-            logger.error(
-                "Command failed - stdout:\n%s",
-                textwrap.indent(result.stdout.decode("utf-8"), "    "),
-            )
-        if result.stderr:
-            logger.error(
-                "Command failed - stderr:\n%s",
-                textwrap.indent(result.stderr.decode("utf-8"), "    "),
-            )
-    # For normal exits, just debug log the stdout if present.
-    elif result.stdout:
-        logger.debug(
-            "Command output:\n%s",
-            textwrap.indent(result.stdout.decode("utf-8"), "    "),
-        )
 
     return result
diff --git a/quickstart.md b/quickstart.md
index 4abf3728..cd647e71 100644
--- a/quickstart.md
+++ b/quickstart.md
@@ -81,14 +81,11 @@ EOF
 
 ```bash
 cat <<EOF >Dockerfile
-FROM brightsparklabs/appcli
+FROM brightsparklabs/appcli:<version>
 
 ENTRYPOINT ["./myapp.py"]
 WORKDIR /app
 
-# Install docker-compose if using it as the orchestrator.
-RUN pip install docker-compose
-
 COPY requirements.txt .
 RUN pip install --requirement requirements.txt
 COPY src .
diff --git a/setup.py b/setup.py
index 97deae7b..fd66fb3b 100644
--- a/setup.py
+++ b/setup.py
@@ -57,23 +57,24 @@ def get_version():
     packages=find_namespace_packages(exclude=["contrib", "docs", "tests"]),
     include_package_data=True,
     install_requires=[
-        "boto3==1.28.40",
+        "boto3==1.34.14",
         "click==8.1.7",
         "coloredlogs==15.0.1",
         "cronex==0.1.3.1",
         "dataclasses-json==0.5.7",
-        "deepdiff==6.5.0",
-        "GitPython==3.1.35",
+        "deepdiff==6.7.1",
+        "GitPython==3.1.41",
+        "jsonschema==4.20.0",
         "jinja2==3.1.2",
-        "pre-commit==3.3.3",
-        "pycryptodome==3.18.0",
-        "pydantic==2.3.0",
-        "pyfiglet==0.8.post1",
+        "pre-commit==3.6.0",
+        "pycryptodome==3.19.1",
+        "pydantic==2.5.3",
+        "pyfiglet==1.0.2",
         "python-keycloak==0.22.0",
         "python-slugify==8.0.1",
-        "ruamel-yaml==0.17.32",
+        "ruamel-yaml==0.18.5",
         "tabulate==0.9.0",
-        "wheel==0.41.2",
+        "wheel==0.42.0",
     ],
-    extras_require={"dev": ["black", "flake8", "isort", "pytest"]},
+    extras_require={"dev": ["ruff", "pytest"]},
 )
diff --git a/tests/commands/test_commands.py b/tests/commands/test_commands.py
index b65ce294..3ffdb863 100644
--- a/tests/commands/test_commands.py
+++ b/tests/commands/test_commands.py
@@ -416,7 +416,7 @@ def _create_cli_context(self, tmpdir, config) -> CliContext:
             subcommand_args=None,
             debug=True,
             is_dev_mode=False,
-            app_name_slug=APP_NAME,
+            app_name_slug=APP_NAME_SLUG,
             app_version="0.0.0",
             commands=ConfigureCli(config).commands,
         )
diff --git a/tests/configuration_manager/invalid_resources/settings.yml b/tests/configuration_manager/invalid_resources/settings.yml
new file mode 100644
index 00000000..73b314ff
--- /dev/null
+++ b/tests/configuration_manager/invalid_resources/settings.yml
@@ -0,0 +1 @@
+---
\ No newline at end of file
diff --git a/tests/configuration_manager/invalid_resources/templates/configurable/invalid.yml b/tests/configuration_manager/invalid_resources/templates/configurable/invalid.yml
new file mode 100644
index 00000000..02a418a8
--- /dev/null
+++ b/tests/configuration_manager/invalid_resources/templates/configurable/invalid.yml
@@ -0,0 +1 @@
+foobar: "5"  #   <- This should be int.
\ No newline at end of file
diff --git a/tests/configuration_manager/invalid_resources/templates/configurable/invalid.yml.schema.json b/tests/configuration_manager/invalid_resources/templates/configurable/invalid.yml.schema.json
new file mode 100644
index 00000000..8a4d306a
--- /dev/null
+++ b/tests/configuration_manager/invalid_resources/templates/configurable/invalid.yml.schema.json
@@ -0,0 +1,6 @@
+{
+    "type": "object",
+    "properties" : {
+        "foobar" : {"type": "number"}
+    }
+}
\ No newline at end of file
diff --git a/tests/configuration_manager/invalid_resources/templates/stack_settings.yml b/tests/configuration_manager/invalid_resources/templates/stack_settings.yml
new file mode 100644
index 00000000..73b314ff
--- /dev/null
+++ b/tests/configuration_manager/invalid_resources/templates/stack_settings.yml
@@ -0,0 +1 @@
+---
\ No newline at end of file
diff --git a/tests/configuration_manager/resources/templates/configurable/valid_json.json b/tests/configuration_manager/resources/templates/configurable/valid_json.json
new file mode 100644
index 00000000..37586328
--- /dev/null
+++ b/tests/configuration_manager/resources/templates/configurable/valid_json.json
@@ -0,0 +1,6 @@
+{
+    "alphabet": {
+        "a": 1,
+        "b": 2
+    }
+}
\ No newline at end of file
diff --git a/tests/configuration_manager/resources/templates/configurable/valid_json.json.schema.json b/tests/configuration_manager/resources/templates/configurable/valid_json.json.schema.json
new file mode 100644
index 00000000..4589417d
--- /dev/null
+++ b/tests/configuration_manager/resources/templates/configurable/valid_json.json.schema.json
@@ -0,0 +1,12 @@
+{
+    "type": "object",
+    "properties" : {
+        "alphabet" : {
+            "type": "object",
+            "properties" : {
+                "a": {"type": "number"},
+                "b": {"type": "number"}
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/tests/configuration_manager/resources/templates/configurable/valid_yaml.yml b/tests/configuration_manager/resources/templates/configurable/valid_yaml.yml
new file mode 100644
index 00000000..53c0f509
--- /dev/null
+++ b/tests/configuration_manager/resources/templates/configurable/valid_yaml.yml
@@ -0,0 +1,3 @@
+alphabet:
+  a: 1
+  b: 2
\ No newline at end of file
diff --git a/tests/configuration_manager/resources/templates/configurable/valid_yaml.yml.schema.json b/tests/configuration_manager/resources/templates/configurable/valid_yaml.yml.schema.json
new file mode 100644
index 00000000..4589417d
--- /dev/null
+++ b/tests/configuration_manager/resources/templates/configurable/valid_yaml.yml.schema.json
@@ -0,0 +1,12 @@
+{
+    "type": "object",
+    "properties" : {
+        "alphabet" : {
+            "type": "object",
+            "properties" : {
+                "a": {"type": "number"},
+                "b": {"type": "number"}
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/tests/configuration_manager/test_configuration_manager.py b/tests/configuration_manager/test_configuration_manager.py
index 6fe4f34a..cc871d34 100644
--- a/tests/configuration_manager/test_configuration_manager.py
+++ b/tests/configuration_manager/test_configuration_manager.py
@@ -209,6 +209,36 @@ def test_migration_maintains_stack_settings(tmpdir):
     assert migrated_stack_settings_content == new_stack_settings_content
 
 
+def test_validation_valid(tmpdir):
+    cli_context = create_cli_context(tmpdir, app_version="1.0.0")
+    conf_manager = create_conf_manager(tmpdir, cli_context)
+
+    conf_manager.initialise_configuration()
+    conf_manager.validate_configuration()
+
+
+def test_validation_invalid(tmpdir):
+    cli_context = create_cli_context(tmpdir, app_version="1.0.0")
+    conf_manager = create_conf_manager_invalid_resources(tmpdir, cli_context)
+
+    conf_manager.initialise_configuration()
+    with pytest.raises(SystemExit) as ex:
+        conf_manager.validate_configuration()
+    # NOTE: Confirm the captured exception is the expected `SystemExit`.
+    assert ex.type == SystemExit
+
+
+def test_validation_unknown_suffix(tmpdir):
+    cli_context = create_cli_context(tmpdir, app_version="1.0.0")
+    conf_manager = create_conf_manager_unknown_suffix(tmpdir, cli_context)
+
+    conf_manager.initialise_configuration()
+    with pytest.raises(SystemExit) as ex:
+        conf_manager.validate_configuration()
+    # NOTE: Confirm the captured exception is the expected `SystemExit`.
+    assert ex.type == SystemExit
+
+
 # TODO: Test where conf/data directories don't exist
 # TODO: Test deliberately failing migrations with migration function hooks
 
@@ -260,3 +290,49 @@ def create_conf_manager(tmpdir, cli_context: CliContext = None) -> Configuration
     )
 
     return ConfigurationManager(cli_context, configuration)
+
+
+def create_conf_manager_invalid_resources(
+    tmpdir, cli_context: CliContext = None
+) -> ConfigurationManager:
+    # If not supplied, create default CliContext.
+    if cli_context is None:
+        cli_context = create_cli_context(tmpdir)
+
+    configuration = Configuration(
+        app_name=APP_NAME,
+        docker_image="invalid-image-name",
+        seed_app_configuration_file=Path(BASE_DIR, "invalid_resources/settings.yml"),
+        application_context_files_dir=APP_CONTEXT_FILES_DIR,
+        baseline_templates_dir=Path(BASE_DIR, "invalid_resources/templates/baseline"),
+        configurable_templates_dir=Path(
+            BASE_DIR, "invalid_resources/templates/configurable"
+        ),
+        orchestrator=DockerComposeOrchestrator("cli/docker-compose.yml", []),
+        stack_configuration_file=STACK_CONFIGURATION_FILE,
+    )
+
+    return ConfigurationManager(cli_context, configuration)
+
+
+def create_conf_manager_unknown_suffix(
+    tmpdir, cli_context: CliContext = None
+) -> ConfigurationManager:
+    # If not supplied, create default CliContext.
+    if cli_context is None:
+        cli_context = create_cli_context(tmpdir)
+
+    configuration = Configuration(
+        app_name=APP_NAME,
+        docker_image="invalid-image-name",
+        seed_app_configuration_file=Path(BASE_DIR, "unknown_suffix/settings.yml"),
+        application_context_files_dir=APP_CONTEXT_FILES_DIR,
+        baseline_templates_dir=Path(BASE_DIR, "unknown_suffix/templates/baseline"),
+        configurable_templates_dir=Path(
+            BASE_DIR, "unknown_suffix/templates/configurable"
+        ),
+        orchestrator=DockerComposeOrchestrator("cli/docker-compose.yml", []),
+        stack_configuration_file=STACK_CONFIGURATION_FILE,
+    )
+
+    return ConfigurationManager(cli_context, configuration)
diff --git a/tests/configuration_manager/unknown_suffix/settings.yml b/tests/configuration_manager/unknown_suffix/settings.yml
new file mode 100644
index 00000000..73b314ff
--- /dev/null
+++ b/tests/configuration_manager/unknown_suffix/settings.yml
@@ -0,0 +1 @@
+---
\ No newline at end of file
diff --git a/tests/configuration_manager/unknown_suffix/templates/configurable/unknown_suffix.toml b/tests/configuration_manager/unknown_suffix/templates/configurable/unknown_suffix.toml
new file mode 100644
index 00000000..a33d6912
--- /dev/null
+++ b/tests/configuration_manager/unknown_suffix/templates/configurable/unknown_suffix.toml
@@ -0,0 +1,2 @@
+# This will fail as we do not have a TOML loader.
+foobar = 5
\ No newline at end of file
diff --git a/tests/configuration_manager/unknown_suffix/templates/configurable/unknown_suffix.toml.schema.json b/tests/configuration_manager/unknown_suffix/templates/configurable/unknown_suffix.toml.schema.json
new file mode 100644
index 00000000..8a4d306a
--- /dev/null
+++ b/tests/configuration_manager/unknown_suffix/templates/configurable/unknown_suffix.toml.schema.json
@@ -0,0 +1,6 @@
+{
+    "type": "object",
+    "properties" : {
+        "foobar" : {"type": "number"}
+    }
+}
\ No newline at end of file
diff --git a/tests/configuration_manager/unknown_suffix/templates/stack_settings.yml b/tests/configuration_manager/unknown_suffix/templates/stack_settings.yml
new file mode 100644
index 00000000..73b314ff
--- /dev/null
+++ b/tests/configuration_manager/unknown_suffix/templates/stack_settings.yml
@@ -0,0 +1 @@
+---
\ No newline at end of file