diff --git a/.gitignore b/.gitignore index 2c74f5bbb..4a96e7622 100644 --- a/.gitignore +++ b/.gitignore @@ -119,4 +119,5 @@ wiki_db/ wordpress_data/ codecov tests/cli/artifacts/* +tests/artifacts/* tests/tools/artifacts/* diff --git a/conf/selfservice.yml b/conf/selfservice.yml new file mode 100644 index 000000000..7e85405c0 --- /dev/null +++ b/conf/selfservice.yml @@ -0,0 +1,8 @@ +# Turn on or off ss globally +ssm_enable: true +# Set max limit per self-schedule +ssm_host_limit: 10 +# Set default lifetime in days for self-schedules +ssm_default_lifetime: 5 +# How many clouds (and auth tokens, one per cloud) a unique user ID can have +ssm_user_cloud_limit: 2 diff --git a/container/server/container-compose.yml b/container/server/container-compose.yml index a0a0ea7ff..2a20eaee3 100644 --- a/container/server/container-compose.yml +++ b/container/server/container-compose.yml @@ -41,7 +41,7 @@ services: environment: SQLALCHEMY_DATABASE_URI: "postgresql://postgres:postgres@quads_db:5432/quads" volumes: - - /var/lib/jenkins/workspace/QUADS-2.1-Latest:/opt/quads:z + - /var/lib/jenkins/workspace/QUADS-2.1-Development:/opt/quads:z networks: podman: ipv4_address: 10.88.0.11 diff --git a/rpm/quads.spec b/rpm/quads.spec index 8888f751f..5a07e6846 100644 --- a/rpm/quads.spec +++ b/rpm/quads.spec @@ -139,6 +139,7 @@ rm -rf %{buildroot} /opt/quads/conf/quads.cron.example /usr/bin/quads %config(noreplace) /opt/quads/conf/quads.yml +%config(noreplace) /opt/quads/conf/selfservice.yml %config(noreplace) /opt/quads/conf/vlans.yml %config(noreplace) /opt/quads/conf/hosts_metadata.yml %config(noreplace) /opt/quads/conf/idrac_interfaces.yml diff --git a/src/quads/cli/cli.py b/src/quads/cli/cli.py index 4790882f7..877460ffb 100644 --- a/src/quads/cli/cli.py +++ b/src/quads/cli/cli.py @@ -900,7 +900,7 @@ def action_modcloud(self): try: clean_data["vlan"] = int(self.cli_args.get("vlan")) except (TypeError, ValueError): # pragma: no cover - clean_data["vlan"] = None + clean_data["vlan"] = "None" if "wipe" in self.cli_args: clean_data["wipe"] = self.cli_args.get("wipe") @@ -968,6 +968,45 @@ def action_hostresource(self): raise CliException(str(ex)) self.logger.info(f"{_host.name}") + def action_modhost(self): + data = {} + hostname = self.cli_args.get("host") + if not hostname: + raise CliException("Missing parameter --host") + try: + self.quads.get_host(hostname) + except (APIServerException, APIBadRequest) as ex: # pragma: no cover + raise CliException(str(ex)) + + if self.cli_args.get("cloud"): + try: + cloud = self.quads.get_cloud(self.cli_args.get("cloud")) + except (APIServerException, APIBadRequest) as ex: # pragma: no cover + raise CliException(str(ex)) + data["cloud"] = cloud.name + + if self.cli_args.get("defaultcloud"): + try: + cloud = self.quads.get_cloud(self.cli_args.get("defaultcloud")) + except (APIServerException, APIBadRequest) as ex: # pragma: no cover + raise CliException(str(ex)) + data["default_cloud"] = cloud.name + + data = { + "name": hostname, + "model": self.cli_args.get("model"), + "host_type": self.cli_args.get("hosttype"), + "build": self.cli_args.get("build"), + "validated": self.cli_args.get("validated"), + "switch_config_applied": self.cli_args.get("switchconfigapplied"), + "can_self_schedule": self.cli_args.get("canselfschedule"), + } + + try: + self.quads.update_host(hostname, data) + except (APIServerException, APIBadRequest) as ex: # pragma: no cover + raise CliException(str(ex)) + def prepare_host_data(self, metadata) -> dict: data = {} for key, value in metadata.items(): diff 
--git a/src/quads/cli/parser.py b/src/quads/cli/parser.py index 8282b0f1c..e24549f5a 100644 --- a/src/quads/cli/parser.py +++ b/src/quads/cli/parser.py @@ -208,6 +208,13 @@ const="host_metadata_export", help="Path to QUADS log file", ) +action_group.add_argument( + "--mod-host", + dest="action", + action="store_const", + const="modhost", + help="Modify a host", +) action_group.add_argument( "--define-cloud", dest="action", @@ -623,6 +630,38 @@ default=None, help="Open-ended identifier for host: util, baremetal, aws, openstack, libvirt, etc.", ) +parser.add_argument( + "--build", + dest="build", + type=str, + choices=["true", "false"], + default=None, + help="Whether the host has been built (true/false)", +) +parser.add_argument( + "--validated", + dest="validated", + type=str, + choices=["true", "false"], + default=None, + help="Whether the host has been validated (true/false)", +) +parser.add_argument( + "--switch-config-applied", + dest="switchconfigapplied", + type=str, + choices=["true", "false"], + default=None, + help="Whether the switch config has been applied (true/false)", +) +parser.add_argument( + "--can-self-schedule", + dest="canselfschedule", + type=str, + choices=["true", "false"], + default=None, + help="Whether the host can self-schedule (true/false)", +) parser.add_argument( "--vlan", dest="vlan", diff --git a/src/quads/config.py b/src/quads/config.py index 267b3648a..57ece97c3 100644 --- a/src/quads/config.py +++ b/src/quads/config.py @@ -7,12 +7,14 @@ logger = logging.getLogger(__name__) DEFAULT_CONF_PATH = "/opt/quads/conf/quads.yml" +SS_CONF_PATH = "/opt/quads/conf/selfservice.yml" class _ConfigBase: def __init__(self): self.loaded = False self.load_from_yaml(DEFAULT_CONF_PATH) + self.load_from_yaml(SS_CONF_PATH) def load_from_yaml(self, filepath: str = DEFAULT_CONF_PATH): """ @@ -127,3 +129,4 @@ def API_URL(self): if __name__ == "__main__": if not Config.loaded: Config.load_from_yaml(DEFAULT_CONF_PATH) + Config.load_from_yaml(SS_CONF_PATH) diff --git a/src/quads/server/blueprints/__init__.py b/src/quads/server/blueprints/__init__.py index 98dd86bf8..b4b1b7ab6 100644 --- a/src/quads/server/blueprints/__init__.py +++ b/src/quads/server/blueprints/__init__.py @@ -1,7 +1,9 @@ import json from functools import wraps -from flask import request, Response -from quads.server.models import User, db, Role + +from flask import Response, request + +from quads.server.models import Role, User, db def check_access(roles): diff --git a/src/quads/server/blueprints/assignments.py b/src/quads/server/blueprints/assignments.py index cbff159d8..ad29c45c9 100644 --- a/src/quads/server/blueprints/assignments.py +++ b/src/quads/server/blueprints/assignments.py @@ -1,12 +1,15 @@ import re +from datetime import datetime from flask import Blueprint, Response, jsonify, make_response, request from sqlalchemy import inspect +from quads.config import Config from quads.server.blueprints import check_access from quads.server.dao.assignment import AssignmentDao from quads.server.dao.baseDao import BaseDao, EntryNotFound, InvalidArgument from quads.server.dao.cloud import CloudDao +from quads.server.dao.schedule import ScheduleDao from quads.server.dao.vlan import VlanDao from quads.server.models import Assignment @@ -130,6 +133,7 @@ def create_assignment() -> Response: qinq = data.get("qinq") wipe = data.get("wipe") cc_user = data.get("ccuser") + is_self_schedule = data.get("is_self_schedule") required_fields = [ "description", @@ -185,6 +189,7 @@ def create_assignment() -> Response: "wipe": wipe, 
"ccuser": cc_user, "cloud": cloud_name, + "is_self_schedule": is_self_schedule, } if _vlan: kwargs["vlan_id"] = int(vlan) @@ -192,7 +197,122 @@ def create_assignment() -> Response: return jsonify(_assignment_obj.as_dict()) -@assignment_bp.route("/", methods=["PATCH"]) +@assignment_bp.route("/self/", methods=["POST"]) +@check_access(["user"]) +def create_self_assignment() -> Response: + """ + Creates a new self assignment in the database. + --- + tags: + - API + + :return: The created object as a json + """ + data = request.get_json() + + enabled = Config.get("ssm_enable", False) + if not enabled: + response = { + "status_code": 403, + "error": "Forbidden", + "message": "Service not enabled", + } + return make_response(jsonify(response), 403) + + active_ass = AssignmentDao.filter_assignments( + {"active": True, "is_self_schedule": True, "owner": data.get("owner")} + ) + if len(active_ass) >= Config.get("ssm_user_cloud_limit", 1): + response = { + "status_code": 403, + "error": "Forbidden", + "message": "Self scheduling limit reached", + } + return make_response(jsonify(response), 403) + + _cloud = None + _vlan = None + cloud_name = data.get("cloud") + vlan = data.get("vlan") + description = data.get("description") + owner = data.get("owner") + ticket = data.get("ticket") + qinq = data.get("qinq") + wipe = data.get("wipe") + cc_user = data.get("cc_user") + + required_fields = [ + "description", + "owner", + ] + + for field in required_fields: + if not data.get(field): + response = { + "status_code": 400, + "error": "Bad Request", + "message": f"Missing argument: {field}", + } + return make_response(jsonify(response), 400) + + if cc_user: + cc_user = re.split(r"[, ]+", cc_user) + + if cloud_name: + _cloud = CloudDao.get_cloud(cloud_name) + if not _cloud: + response = { + "status_code": 400, + "error": "Bad Request", + "message": f"Cloud not found: {cloud_name}", + } + return make_response(jsonify(response), 400) + _assignment = AssignmentDao.get_active_cloud_assignment(_cloud) + if _assignment: + response = { + "status_code": 400, + "error": "Bad Request", + "message": f"There is an already active assignment for {cloud_name}", + } + return make_response(jsonify(response), 400) + else: + _free_clouds = CloudDao.get_free_clouds() + if not _free_clouds: + response = { + "status_code": 400, + "error": "Bad Request", + "message": "No free clouds available", + } + return make_response(jsonify(response), 400) + _cloud = _free_clouds[0] + + if vlan: + _vlan = VlanDao.get_vlan(int(vlan)) + if not _vlan: + response = { + "status_code": 400, + "error": "Bad Request", + "message": f"Vlan not found: {vlan}", + } + return make_response(jsonify(response), 400) + + kwargs = { + "description": description, + "owner": owner, + "ticket": ticket, + "qinq": qinq, + "wipe": wipe, + "ccuser": cc_user, + "is_self_schedule": True, + "cloud": _cloud.name, + } + if _vlan: + kwargs["vlan_id"] = int(vlan) + _assignment_obj = AssignmentDao.create_assignment(**kwargs) + return jsonify(_assignment_obj.as_dict()) + + +@assignment_bp.route("//", methods=["PATCH"]) @check_access(["admin"]) def update_assignment(assignment_id: str) -> Response: """ @@ -203,8 +323,6 @@ def update_assignment(assignment_id: str) -> Response: - in: path name: assignment_id # The id of the assignment to update. This is a required parameter. It must be passed as part of the URL path, not as a query string or request body parameter. 
- Example usage would be /api/v3/assignments/<assignment_id> where <assignment_id> - is replaced with the actual value for that field (e.g., /api/v3/assignments/12345). Note that :param assignment_id: str: Identify which assignment to update :return: A json object containing the updated assignment @@ -268,6 +386,57 @@ def update_assignment(assignment_id: str) -> Response: return jsonify(assignment_obj.as_dict()) +@assignment_bp.route("/terminate//", methods=["POST"]) +@check_access(["user"]) +def terminate_assignment(assignment_id) -> Response: + """ + Terminates an existing assignment. + --- + tags: API + parameters: + - in: path + name: assignment_id + """ + _assignment = AssignmentDao.get_assignment(int(assignment_id)) + if not _assignment: + response = { + "status_code": 400, + "error": "Bad Request", + "message": f"Assignment not found: {assignment_id}", + } + return make_response(jsonify(response), 400) + + auth_value = request.headers["Authorization"].split(" ") + user = auth_value[1].split("@")[0] + if user != _assignment.owner: + response = { + "status_code": 403, + "error": "Forbidden", + "message": "You don't have permission to terminate this assignment", + } + return make_response(jsonify(response), 403) + + _schedules = ScheduleDao.get_current_schedule(cloud=_assignment.cloud) + if not _schedules: + response = { + "status_code": 400, + "error": "Bad Request", + "message": f"No active schedule for {assignment_id}", + } + return make_response(jsonify(response), 400) + + for sched in _schedules: + sched.end = datetime.now() + + BaseDao.safe_commit() + + response = { + "status_code": 200, + "message": "Assignment terminated", + } + return jsonify(response) + + @assignment_bp.route("/", methods=["DELETE"]) @check_access(["admin"]) def delete_assignment() -> Response: diff --git a/src/quads/server/blueprints/auth.py b/src/quads/server/blueprints/auth.py index f27a332b9..5e371ccbe 100644 --- a/src/quads/server/blueprints/auth.py +++ b/src/quads/server/blueprints/auth.py @@ -1,10 +1,10 @@ import json -from flask import Blueprint, jsonify, request, Response, make_response +from flask import Blueprint, Response, jsonify, make_response, request from validators import email -from quads.server.models import User, TokenBlackList, db, Role from quads.server.app import basic_auth, user_datastore +from quads.server.models import Role, TokenBlackList, User, db auth_bp = Blueprint("auth", __name__) diff --git a/src/quads/server/blueprints/available.py b/src/quads/server/blueprints/available.py index e63a21633..55c53be01 100644 --- a/src/quads/server/blueprints/available.py +++ b/src/quads/server/blueprints/available.py @@ -1,8 +1,8 @@ from datetime import datetime, timedelta -from flask import Blueprint, jsonify, request, Response, make_response -from quads.server.dao.baseDao import EntryNotFound, InvalidArgument +from flask import Blueprint, Response, jsonify, make_response, request +from quads.server.dao.baseDao import EntryNotFound, InvalidArgument from quads.server.dao.host import HostDao from quads.server.dao.schedule import ScheduleDao diff --git a/src/quads/server/blueprints/disks.py b/src/quads/server/blueprints/disks.py index d4c6ae6d6..1bed88d67 100644 --- a/src/quads/server/blueprints/disks.py +++ b/src/quads/server/blueprints/disks.py @@ -1,6 +1,6 @@ import json -from flask import Blueprint, jsonify, request, Response, make_response +from flask import Blueprint, Response, jsonify, make_response, request from quads.server.blueprints import check_access from quads.server.dao.baseDao 
import BaseDao diff --git a/src/quads/server/blueprints/hosts.py b/src/quads/server/blueprints/hosts.py index 6df600137..e6fc05673 100644 --- a/src/quads/server/blueprints/hosts.py +++ b/src/quads/server/blueprints/hosts.py @@ -1,9 +1,9 @@ -from flask import Blueprint, jsonify, request, Response, make_response +from flask import Blueprint, Response, jsonify, make_response, request from sqlalchemy import inspect from quads.config import Config from quads.server.blueprints import check_access -from quads.server.dao.baseDao import EntryNotFound, InvalidArgument, BaseDao +from quads.server.dao.baseDao import BaseDao, EntryNotFound, InvalidArgument from quads.server.dao.cloud import CloudDao from quads.server.dao.host import HostDao from quads.server.models import Host, db @@ -154,6 +154,7 @@ def create_host() -> Response: model = data.get("model") default_cloud = data.get("default_cloud") host_type = data.get("host_type") + can_self_schedule = data.get("can_self_schedule") if not model: response = { @@ -221,6 +222,7 @@ def create_host() -> Response: name=hostname, model=model.upper(), host_type=host_type, + can_self_schedule=can_self_schedule, default_cloud=_default_cloud, cloud=_default_cloud, ) diff --git a/src/quads/server/blueprints/interfaces.py b/src/quads/server/blueprints/interfaces.py index b843a1dd8..4ceb1c02e 100644 --- a/src/quads/server/blueprints/interfaces.py +++ b/src/quads/server/blueprints/interfaces.py @@ -1,4 +1,4 @@ -from flask import Blueprint, jsonify, request, Response, make_response +from flask import Blueprint, Response, jsonify, make_response, request from quads.server.blueprints import check_access from quads.server.dao.baseDao import BaseDao @@ -68,7 +68,6 @@ def create_interface(hostname: str) -> Response: } return make_response(jsonify(response), 400) - speed = data.get("speed") if int(speed) and not int(speed) > 0: response = { "status_code": 400, diff --git a/src/quads/server/blueprints/memory.py b/src/quads/server/blueprints/memory.py index 60d9353b1..e91d92a32 100644 --- a/src/quads/server/blueprints/memory.py +++ b/src/quads/server/blueprints/memory.py @@ -1,10 +1,10 @@ -from flask import Blueprint, jsonify, request, Response, make_response +from flask import Blueprint, Response, jsonify, make_response, request from quads.server.blueprints import check_access from quads.server.dao.baseDao import BaseDao from quads.server.dao.host import HostDao from quads.server.dao.memory import MemoryDao -from quads.server.models import db, Memory +from quads.server.models import Memory, db memory_bp = Blueprint("memory", __name__) diff --git a/src/quads/server/blueprints/moves.py b/src/quads/server/blueprints/moves.py index 822b646fd..9209d64e6 100644 --- a/src/quads/server/blueprints/moves.py +++ b/src/quads/server/blueprints/moves.py @@ -1,11 +1,10 @@ from datetime import datetime -from flask import Blueprint, jsonify, Response, make_response, request, abort +from flask import Blueprint, Response, abort, jsonify, make_response, request from quads.server.dao.host import HostDao from quads.server.dao.schedule import ScheduleDao - moves_bp = Blueprint("moves", __name__) diff --git a/src/quads/server/blueprints/notifications.py b/src/quads/server/blueprints/notifications.py index ab8410291..ad89966e0 100644 --- a/src/quads/server/blueprints/notifications.py +++ b/src/quads/server/blueprints/notifications.py @@ -1,4 +1,4 @@ -from flask import Blueprint, jsonify, request, Response, make_response +from flask import Blueprint, Response, jsonify, make_response, request from 
sqlalchemy import inspect from quads.server.blueprints import check_access diff --git a/src/quads/server/blueprints/processors.py b/src/quads/server/blueprints/processors.py index e2f099628..291d5f706 100644 --- a/src/quads/server/blueprints/processors.py +++ b/src/quads/server/blueprints/processors.py @@ -1,10 +1,10 @@ -from flask import Blueprint, jsonify, request, Response, make_response +from flask import Blueprint, Response, jsonify, make_response, request from quads.server.blueprints import check_access from quads.server.dao.baseDao import BaseDao from quads.server.dao.host import HostDao from quads.server.dao.processor import ProcessorDao -from quads.server.models import db, Processor +from quads.server.models import Processor, db processor_bp = Blueprint("processors", __name__) diff --git a/src/quads/server/blueprints/schedules.py b/src/quads/server/blueprints/schedules.py index 3405f1894..5c2ef189c 100644 --- a/src/quads/server/blueprints/schedules.py +++ b/src/quads/server/blueprints/schedules.py @@ -1,9 +1,11 @@ -from datetime import datetime -from flask import Blueprint, jsonify, request, Response, make_response +from datetime import datetime, timedelta +from flask import Blueprint, Response, jsonify, make_response, request + +from quads.config import Config from quads.server.blueprints import check_access from quads.server.dao.assignment import AssignmentDao -from quads.server.dao.baseDao import EntryNotFound, InvalidArgument, BaseDao +from quads.server.dao.baseDao import BaseDao, EntryNotFound, InvalidArgument from quads.server.dao.cloud import CloudDao from quads.server.dao.host import HostDao from quads.server.dao.schedule import ScheduleDao @@ -74,7 +76,7 @@ def get_future_schedule() -> Response: @schedule_bp.route("/", methods=["POST"]) -@check_access(["admin"]) +@check_access(["admin", "user"]) def create_schedule() -> Response: data = request.get_json() hostname = data.get("hostname") @@ -104,6 +106,15 @@ def create_schedule() -> Response: } return make_response(jsonify(response), 400) + existing_schedules = ScheduleDao.get_current_schedule(cloud=_cloud) + if _assignment.is_self_schedule and len(existing_schedules) >= Config.get("ssm_host_limit", 10): + response = { + "status_code": 400, + "error": "Bad Request", + "message": f"Cloud {cloud} has reached the maximum number of hosts", + } + return make_response(jsonify(response), 400) + if not hostname: response = { "status_code": 400, @@ -121,8 +132,20 @@ def create_schedule() -> Response: } return make_response(jsonify(response), 400) - start = data.get("start") - end = data.get("end") + if _assignment.is_self_schedule: + if not _host.can_self_schedule: + response = { + "status_code": 400, + "error": "Bad Request", + "message": f"Host {hostname} is not allowed to self-schedule", + } + return make_response(jsonify(response), 400) + + start = datetime.now() + end = start + timedelta(days=Config.get("ssm_default_lifetime", 1)) + else: + start = data.get("start") + end = data.get("end") if not start or not end: response = { @@ -133,8 +156,8 @@ def create_schedule() -> Response: return make_response(jsonify(response), 400) try: - _start = datetime.strptime(start, "%Y-%m-%d %H:%M") - _end = datetime.strptime(end, "%Y-%m-%d %H:%M") + _start = datetime.strptime(start, "%Y-%m-%d %H:%M") if isinstance(start, str) else start + _end = datetime.strptime(end, "%Y-%m-%d %H:%M") if isinstance(end, str) else end except ValueError: response = { "status_code": 400, @@ -151,6 +174,13 @@ def create_schedule() -> Response: } return 
make_response(jsonify(response), 400) + if not ScheduleDao.is_host_available(hostname, _start, _end): + response = { + "status_code": 400, + "error": "Bad Request", + "message": "Host is not available for the specified date range", + } + return make_response(jsonify(response), 400) _schedule_obj = Schedule(start=_start, end=_end, assignment=_assignment, host=_host) db.session.add(_schedule_obj) BaseDao.safe_commit() diff --git a/src/quads/server/blueprints/vlans.py b/src/quads/server/blueprints/vlans.py index 7c8c4f58e..97fff95c4 100644 --- a/src/quads/server/blueprints/vlans.py +++ b/src/quads/server/blueprints/vlans.py @@ -1,4 +1,4 @@ -from flask import Blueprint, jsonify, request, Response, make_response +from flask import Blueprint, Response, jsonify, make_response, request from quads.server.blueprints import check_access from quads.server.dao.baseDao import BaseDao diff --git a/src/quads/server/dao/assignment.py b/src/quads/server/dao/assignment.py index ae2a32da4..d5942b412 100644 --- a/src/quads/server/dao/assignment.py +++ b/src/quads/server/dao/assignment.py @@ -1,16 +1,12 @@ from datetime import datetime from typing import List, Type -from quads.server.dao.baseDao import ( - BaseDao, - EntryNotFound, - InvalidArgument, - OPERATORS, -) +from sqlalchemy import Boolean, and_ + +from quads.server.dao.baseDao import OPERATORS, BaseDao, EntryNotFound, InvalidArgument from quads.server.dao.cloud import CloudDao from quads.server.dao.vlan import VlanDao -from quads.server.models import db, Assignment, Cloud, Notification -from sqlalchemy import and_, Boolean +from quads.server.models import Assignment, Cloud, Notification, db class AssignmentDao(BaseDao): @@ -25,6 +21,7 @@ def create_assignment( ccuser: List[str], cloud: str, vlan_id: int = None, + is_self_schedule: bool = False, ) -> Assignment: _cloud = CloudDao.get_cloud(cloud) notification = Notification() @@ -37,6 +34,7 @@ def create_assignment( "ccuser": ccuser, "cloud": _cloud, "notification": notification, + "is_self_schedule": is_self_schedule, } if vlan_id: vlan = VlanDao.get_vlan(vlan_id) diff --git a/src/quads/server/dao/baseDao.py b/src/quads/server/dao/baseDao.py index cc0716e8a..b506c8351 100644 --- a/src/quads/server/dao/baseDao.py +++ b/src/quads/server/dao/baseDao.py @@ -1,7 +1,8 @@ -from quads.server.models import Interface, Disk, Memory, Processor, Host, db from flask import current_app -from sqlalchemy.exc import SQLAlchemyError from sqlalchemy import func +from sqlalchemy.exc import SQLAlchemyError + +from quads.server.models import Disk, Host, Interface, Memory, Processor, db FILTERING_OPERATORS = { "==": "eq", diff --git a/src/quads/server/dao/disk.py b/src/quads/server/dao/disk.py index 56457dc27..dde3fe1e3 100644 --- a/src/quads/server/dao/disk.py +++ b/src/quads/server/dao/disk.py @@ -2,7 +2,7 @@ from quads.server.dao.baseDao import BaseDao, EntryNotFound from quads.server.dao.host import HostDao -from quads.server.models import db, Disk +from quads.server.models import Disk, db class DiskDao(BaseDao): diff --git a/src/quads/server/dao/host.py b/src/quads/server/dao/host.py index 195953d45..2f8f4e0a0 100644 --- a/src/quads/server/dao/host.py +++ b/src/quads/server/dao/host.py @@ -1,23 +1,26 @@ from typing import List, Optional from sqlalchemy import Boolean, func +from werkzeug.serving import can_fork from quads.config import Config from quads.server.dao.baseDao import ( - BaseDao, - OPERATORS, MAP_HOST_META, - EntryNotFound, + OPERATORS, + BaseDao, EntryExisting, + EntryNotFound, InvalidArgument, ) from 
quads.server.dao.cloud import CloudDao -from quads.server.models import db, Host, Cloud +from quads.server.models import Cloud, Host, db class HostDao(BaseDao): @classmethod - def create_host(cls, name: str, model: str, host_type: str, default_cloud: str) -> Host: + def create_host( + cls, name: str, model: str, host_type: str, default_cloud: str, can_self_schedule: bool = False + ) -> Host: _host_obj = cls.get_host(name) if _host_obj: raise EntryExisting @@ -30,6 +33,7 @@ def create_host(cls, name: str, model: str, host_type: str, default_cloud: str) name=name, model=model.upper(), host_type=host_type, + can_self_schedule=can_self_schedule, default_cloud=_default_cloud_obj, cloud=_default_cloud_obj, ) diff --git a/src/quads/server/dao/interface.py b/src/quads/server/dao/interface.py index a006eec5e..4b6c47924 100644 --- a/src/quads/server/dao/interface.py +++ b/src/quads/server/dao/interface.py @@ -2,7 +2,7 @@ from quads.server.dao.baseDao import BaseDao, EntryNotFound from quads.server.dao.host import HostDao -from quads.server.models import db, Interface +from quads.server.models import Interface, db class InterfaceDao(BaseDao): diff --git a/src/quads/server/dao/memory.py b/src/quads/server/dao/memory.py index 1c4617909..6767af8fb 100644 --- a/src/quads/server/dao/memory.py +++ b/src/quads/server/dao/memory.py @@ -2,7 +2,7 @@ from quads.server.dao.baseDao import BaseDao, EntryNotFound from quads.server.dao.host import HostDao -from quads.server.models import db, Memory +from quads.server.models import Memory, db class MemoryDao(BaseDao): diff --git a/src/quads/server/dao/notification.py b/src/quads/server/dao/notification.py index c9cded43b..fd11cc1c4 100644 --- a/src/quads/server/dao/notification.py +++ b/src/quads/server/dao/notification.py @@ -1,7 +1,7 @@ from typing import List from quads.server.dao.baseDao import BaseDao -from quads.server.models import db, Notification +from quads.server.models import Notification, db class NotificationDao(BaseDao): diff --git a/src/quads/server/dao/processor.py b/src/quads/server/dao/processor.py index f8b5df7db..76bc0586f 100644 --- a/src/quads/server/dao/processor.py +++ b/src/quads/server/dao/processor.py @@ -2,7 +2,7 @@ from quads.server.dao.baseDao import BaseDao, EntryNotFound from quads.server.dao.host import HostDao -from quads.server.models import db, Processor +from quads.server.models import Processor, db class ProcessorDao(BaseDao): diff --git a/src/quads/server/dao/schedule.py b/src/quads/server/dao/schedule.py index 663b278c1..4c3d1aa1a 100644 --- a/src/quads/server/dao/schedule.py +++ b/src/quads/server/dao/schedule.py @@ -1,13 +1,19 @@ from datetime import datetime from typing import List, Type -from sqlalchemy import and_, Boolean -from sqlalchemy.orm import RelationshipProperty, relationship + +from sqlalchemy import Boolean, and_ from quads.server.dao.assignment import AssignmentDao -from quads.server.dao.baseDao import BaseDao, EntryNotFound, InvalidArgument, SQLError, OPERATORS +from quads.server.dao.baseDao import ( + OPERATORS, + BaseDao, + EntryNotFound, + InvalidArgument, + SQLError, +) from quads.server.dao.cloud import CloudDao from quads.server.dao.host import HostDao -from quads.server.models import db, Host, Schedule, Cloud, Assignment +from quads.server.models import Assignment, Cloud, Host, Schedule, db class ScheduleDao(BaseDao): @@ -91,9 +97,9 @@ def get_future_schedules(host: Host = None, cloud: Cloud = None) -> List[Schedul def filter_schedule_dict(data: dict) -> List[Schedule]: filter_tuples = [] date_fields = 
["start", "end", "build_start", "build_end"] - operator = "==" group_by = None for k, value in data.items(): + operator = "==" fields = k.split(".") if len(fields) > 2: raise InvalidArgument(f"Too many arguments: {fields}") @@ -109,7 +115,7 @@ def filter_schedule_dict(data: dict) -> List[Schedule]: operator = OPERATORS[op] break - if value.lower() == "none": + if value and isinstance(value, str) and value.lower() == "none": value = None if fields[0].lower() == "group_by": @@ -132,7 +138,7 @@ def filter_schedule_dict(data: dict) -> List[Schedule]: if first_field in date_fields: try: - if value: + if value and isinstance(value, str): value = datetime.strptime(value, "%Y-%m-%dT%H:%M") except ValueError: raise InvalidArgument(f"Invalid date format for {first_field}: {value}") diff --git a/src/quads/server/dao/vlan.py b/src/quads/server/dao/vlan.py index 1c2caba28..9cf7ed0a3 100644 --- a/src/quads/server/dao/vlan.py +++ b/src/quads/server/dao/vlan.py @@ -1,7 +1,7 @@ from typing import List from quads.server.dao.baseDao import BaseDao -from quads.server.models import db, Vlan +from quads.server.models import Vlan, db class VlanDao(BaseDao): diff --git a/src/quads/server/models.py b/src/quads/server/models.py index 5263ac492..17164997f 100644 --- a/src/quads/server/models.py +++ b/src/quads/server/models.py @@ -406,6 +406,7 @@ class Assignment(Serialize, TimestampMixin, Base): qinq = Column(Integer) wipe = Column(Boolean, default=False) ccuser = Column(MutableList.as_mutable(PickleType), default=[]) + is_self_schedule = Column(Boolean, default=False) # many-to-one parent cloud_id = Column(Integer, ForeignKey("clouds.id", ondelete="SET NULL")) @@ -421,7 +422,7 @@ class Assignment(Serialize, TimestampMixin, Base): def __repr__(self): return ( "".format( + "owner='{}', ticket='{}', qinq='{}', wipe='{}', ccuser='{}', is_self_schedule='{}', cloud='{}', vlan='{}')>".format( self.id, self.active, self.provisioned, @@ -432,6 +433,7 @@ def __repr__(self): self.qinq, self.wipe, self.ccuser, + self.is_self_schedule, self.cloud, self.vlan, ) @@ -527,6 +529,7 @@ class Host(Serialize, TimestampMixin, Base): broken = Column(Boolean, default=False) retired = Column(Boolean, default=False) last_build = Column(DateTime) + can_self_schedule = Column(Boolean, default=True) # many-to-one cloud_id = Column(Integer, ForeignKey("clouds.id")) @@ -552,7 +555,7 @@ def __repr__(self): return ( "".format( self.id, self.name, @@ -564,6 +567,7 @@ def __repr__(self): self.broken, self.retired, self.last_build, + self.can_self_schedule, self.cloud, self.default_cloud, self.interfaces, diff --git a/src/quads/server/swagger.yaml b/src/quads/server/swagger.yaml index 768948732..805e13f08 100644 --- a/src/quads/server/swagger.yaml +++ b/src/quads/server/swagger.yaml @@ -196,6 +196,44 @@ paths: description: Deleted Cloud security: - BearerAuth: [ ] + patch: + summary: Update an existing cloud + tags: + - Clouds + parameters: + - name: cloudName + in: path + description: Cloud name + required: true + schema: + type: string + requestBody: + description: Body request with *cloud* details + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Cloud' + responses: + '200': + description: Cloud name + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: '#/components/schemas/Cloud' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + security: + - 
BearerAuth: [ ] /clouds/free/: get: @@ -931,43 +969,6 @@ paths: security: - BearerAuth: [ ] - patch: - summary: Update an existing host memory definitions - tags: - - Memory - parameters: - - name: hostName - in: path - description: Host name - required: true - schema: - type: string - requestBody: - description: Body request with *memory* details - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/Memory' - text/plain: - schema: - type: string - responses: - '200': - description: Memory details - content: - application/json: - schema: - $ref: '#/components/schemas/Memory' - default: - description: Unexpected error - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - security: - - BearerAuth: [ ] - /processors/: get: summary: Returns a list of all processors @@ -1135,6 +1136,44 @@ paths: schema: $ref: '#/components/schemas/Error' + patch: + summary: Update an existing vlan + tags: + - Vlans + parameters: + - name: vlan_id + in: path + description: Vlan ID + required: true + schema: + type: integer + requestBody: + description: Body request with *vlan* details + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Vlan' + responses: + '200': + description: Vlan details + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: '#/components/schemas/Vlan' + default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + security: + - BearerAuth: [ ] delete: summary: Delete vlan by vlan id tags: @@ -1443,6 +1482,45 @@ paths: '201': description: Created + /assignments/self/: + post: + summary: Create a new self assignment + tags: + - Assignments + requestBody: + description: Body request with *assignment* details + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Assignment' + text/plain: + schema: + type: string + security: + - BearerAuth: [ ] + responses: + '201': + description: Created + + /assignments/terminate/{assignment_id}/: + post: + summary: Terminate an assignment by id + tags: + - Assignments + parameters: + - name: assignment_id + in: path + description: Assignment ID + required: true + schema: + type: string + responses: + '200': + description: Assignment terminated + security: + - BearerAuth: [ ] + /assignments/{assignment_id}/: get: summary: Returns an assignment by id diff --git a/src/quads/templates/future_initial_message b/src/quads/templates/future_initial_message index 87d39c88a..b0082c816 100644 --- a/src/quads/templates/future_initial_message +++ b/src/quads/templates/future_initial_message @@ -1,7 +1,7 @@ Greetings Citizen, -You've been allocated a new future environment! The environment +You've {{ "self" if is_self_schedule else "been" }} allocated a new future environment! The environment is not yet ready for use but you are being notified ahead of time that it is being prepared. @@ -12,10 +12,10 @@ passed automated validation it will be released to you. You'll receive another email with environment-specific information and additional system details when it's ready. - +{% if not is_self_schedule %} Before this happens a future schedule will be entered and communicated to you via your service ticket/request. 
- +{% endif %} Afterwards, when the time comes a series of automated tests and validation phases will occur to make sure your systems/network are in their final, desired state before being ready to use. diff --git a/src/quads/templates/initial_message b/src/quads/templates/initial_message index b546add94..5b62c83ad 100644 --- a/src/quads/templates/initial_message +++ b/src/quads/templates/initial_message @@ -1,7 +1,7 @@ Greetings Citizen, -You've been allocated a new environment! +You've {{ "self" if is_self_schedule else "been" }} allocated a new environment! {{ cloud_info }} diff --git a/src/quads/tools/notify.py b/src/quads/tools/notify.py index 133d39266..37ca3c92e 100755 --- a/src/quads/tools/notify.py +++ b/src/quads/tools/notify.py @@ -7,6 +7,7 @@ from datetime import datetime, timedelta from enum import Enum +from distlib.version import is_semver from jinja2 import Template from quads.config import Config from quads.quads_api import QuadsApi, APIServerException, APIBadRequest @@ -24,8 +25,12 @@ class Days(Enum): FIVE_DAYS = 5 SEVEN_DAYS = 7 + @classmethod + def less_than(cls, max_days): + return [day for day in cls if day.value <= max_days] -async def create_initial_message(real_owner, cloud, cloud_info, ticket, cc): + +async def create_initial_message(real_owner, cloud, cloud_info, ticket, cc, is_self_schedule=False): template_file = "initial_message" irc_bot_ip = Config["ircbot_ipaddr"] irc_bot_port = Config["ircbot_port"] @@ -46,6 +51,7 @@ async def create_initial_message(real_owner, cloud, cloud_info, ticket, cc): real_owner=real_owner, password=f"{infra_location}@{ticket}", foreman_url=Config["foreman_url"], + is_self_schedule=is_self_schedule, ) postman = Postman( @@ -139,6 +145,7 @@ def create_future_initial_message(cloud, assignment_obj, cloud_info): content = template.render( cloud_info=cloud_info, quads_url=Config["quads_url"], + is_self_schedule=assignment_obj.is_self_schedule, ) postman = Postman( "New QUADS Assignment Defined for the Future: %s - %s" % (cloud, ticket), @@ -213,6 +220,7 @@ def main(_logger=None): cloud_info, ass.ticket, ass.ccuser, + ass.is_self_schedule, ) ) try: @@ -221,43 +229,45 @@ def main(_logger=None): logger.debug(str(ex)) logger.error("Could not update notification: %s." % ass.notification.id) - for day in Days: - future_schedules = None - future = datetime.now() + timedelta(days=day.value) - future_date = "%4d-%.2d-%.2dT22:00" % ( - future.year, - future.month, - future.day, - ) - payload = {"cloud": ass.cloud.name, "date": future_date} - try: - future_schedules = quads.get_current_schedules(payload) - except (APIServerException, APIBadRequest) as ex: # pragma: no cover - logger.debug(str(ex)) - logger.error("Could not get current schedules") - - current_hosts = [sched.host.name for sched in current_schedules] - future_hosts = [sched.host.name for sched in future_schedules] - host_list = set(current_hosts) - set(future_hosts) - if host_list and future > current_schedules[0].end: - if not getattr(ass.notification, day.name.lower()) and Config["email_notify"]: - logger.info("=============== Additional Message") - cloud = ass.cloud.name - create_message( - cloud, - ass, - day.value, - cloud_info, - host_list, - ) - - try: - quads.update_notification(ass.notification.id, {day.name.lower(): True}) - except (APIServerException, APIBadRequest) as ex: - logger.debug(str(ex)) - logger.error("Could not update notification: %s." 
% ass.notification.id) - - break + if Config["email_notify"]: + _days = Days.less_than(3) if ass.is_self_schedule else Days + for day in _days: + future_schedules = None + future = datetime.now() + timedelta(days=day.value) + future_date = "%4d-%.2d-%.2dT22:00" % ( + future.year, + future.month, + future.day, + ) + payload = {"cloud": ass.cloud.name, "date": future_date} + try: + future_schedules = quads.get_current_schedules(payload) + except (APIServerException, APIBadRequest) as ex: # pragma: no cover + logger.debug(str(ex)) + logger.error("Could not get current schedules") + + current_hosts = [sched.host.name for sched in current_schedules] + future_hosts = [sched.host.name for sched in future_schedules] + host_list = set(current_hosts) - set(future_hosts) + if host_list and future > current_schedules[0].end: + if not getattr(ass.notification, day.name.lower()): + logger.info("=============== Additional Message") + cloud = ass.cloud.name + create_message( + cloud, + ass, + day.value, + cloud_info, + host_list, + ) + + try: + quads.update_notification(ass.notification.id, {day.name.lower(): True}) + except (APIServerException, APIBadRequest) as ex: + logger.debug(str(ex)) + logger.error("Could not update notification: %s." % ass.notification.id) + + break for cloud in _all_clouds: ass = quads.get_active_cloud_assignment(cloud.name) diff --git a/tests/api/test_schedules.py b/tests/api/test_schedules.py index 6e190bbc8..c8e1ac268 100644 --- a/tests/api/test_schedules.py +++ b/tests/api/test_schedules.py @@ -1,8 +1,12 @@ +from unittest.mock import patch + import pytest from datetime import datetime, timedelta from urllib.parse import urlencode +from quads.config import Config + from tests.helpers import unwrap_json from tests.config import ( SCHEDULE_1_REQUEST, @@ -10,10 +14,18 @@ SCHEDULE_2_REQUEST, SCHEDULE_2_RESPONSE, SCHEDULE_1_UPDATE_REQUEST, + SELF_SCHEDULE_1_REQUEST, + SELF_SCHEDULE_1_RESPONSE, + SELF_SCHEDULE_2_REQUEST, + SELF_SCHEDULE_2_RESPONSE, + SELF_SCHEDULE_NON_REQUEST, + SELF_SCHEDULE_3_REQUEST, ) prefill_settings = ["clouds, vlans, hosts, assignments"] -prefill_schedule = ["clouds, vlans, hosts, assignments,schedules"] +prefill_schedule = ["clouds, vlans, hosts, assignments, schedules"] +prefill_self_schedule = ["clouds, vlans, hosts, self_assignments"] +prefill_self_non_schedule = ["clouds, vlans, non_self_hosts, self_assignments"] class TestCreateSchedule: @@ -216,6 +228,115 @@ def test_valid(self, test_client, auth, prefill): assert response.status_code == 200 assert response.json == resp + @pytest.mark.parametrize("prefill", prefill_self_schedule, indirect=True) + @patch("quads.server.blueprints.schedules.datetime") + def test_valid_self(self, mock_datetime, test_client, auth, prefill): + """ + | GIVEN: Defaults, auth, clouds, vlans, hosts and assignments + | WHEN: User tries to create a self schedule with valid data + | THEN: User should be able to create a self schedule + """ + mock_now = datetime(2023, 6, 1, 22, 0) # 2023-06-01 22:00 + mock_datetime.now.return_value = mock_now + + auth_header = auth.get_auth_header() + response = unwrap_json( + test_client.post( + "/api/v3/schedules", + json=SELF_SCHEDULE_1_REQUEST, + headers=auth_header, + ) + ) + assert response.status_code == 200 + + response_dict = response.json.copy() + del response_dict["created_at"] + del response_dict["host"]["created_at"] + del response_dict["host"]["cloud"]["last_redefined"] + del response_dict["host"]["default_cloud"]["last_redefined"] + del response_dict["assignment"]["created_at"] + del 
response_dict["assignment"]["cloud"]["last_redefined"] + assert response_dict == SELF_SCHEDULE_1_RESPONSE + + @pytest.mark.parametrize("prefill", prefill_self_schedule, indirect=True) + def test_valid_self_limit(self, test_client, auth, prefill): + """ + | GIVEN: Defaults, auth, clouds, vlans, hosts and assignments + | WHEN: User tries to create a self schedule with valid data but + exceeding the limit of self scheduling hosts per cloud + | THEN: User should be able to create a self schedule + """ + Config.__setattr__("ssm_host_limit", 1) + + auth_header = auth.get_auth_header() + response = unwrap_json( + test_client.post( + "/api/v3/schedules", + json=SELF_SCHEDULE_2_REQUEST, + headers=auth_header, + ) + ) + assert response.status_code == 200 + + response_2 = unwrap_json( + test_client.post( + "/api/v3/schedules", + json=SELF_SCHEDULE_3_REQUEST, + headers=auth_header, + ) + ) + assert response_2.status_code == 400 + assert response_2.json["error"] == "Bad Request" + assert response_2.json["message"] == "Cloud cloud04 has reached the maximum number of hosts" + Config.__setattr__("ssm_host_limit", 10) + + @pytest.mark.parametrize("prefill", prefill_self_non_schedule, indirect=True) + def test_self_host_non_self(self, test_client, auth, prefill): + """ + | GIVEN: Defaults, auth, clouds, vlans, hosts and assignments + | WHEN: User tries to create a self schedule with hosts that are not allowed to self schedule + | THEN: User should be able to create a self schedule + """ + auth_header = auth.get_auth_header() + response = unwrap_json( + test_client.post( + "/api/v3/schedules", + json=SELF_SCHEDULE_NON_REQUEST, + headers=auth_header, + ) + ) + assert response.status_code == 400 + assert response.json["error"] == "Bad Request" + assert response.json["message"] == "Host host11.example.com is not allowed to self-schedule" + + @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) + def test_invalid_overlapping_schedule(self, test_client, auth, prefill): + """ + | GIVEN: Defaults, auth, clouds, vlans, hosts, assignments and an existing schedule + | WHEN: User tries to create a schedule that overlaps with an existing one + | THEN: User should not be able to create the schedule + """ + auth_header = auth.get_auth_header() + existing_schedule = SCHEDULE_1_REQUEST.copy() + test_client.post("/api/v3/schedules", json=existing_schedule, headers=auth_header) + + overlapping_schedule = SCHEDULE_1_REQUEST.copy() + now = datetime.now() + then = now + timedelta(30) + overlapping_schedule["start"] = now.strftime("%Y-%m-%d %H:%M") + overlapping_schedule["end"] = then.strftime("%Y-%m-%d %H:%M") + + response = unwrap_json( + test_client.post( + "/api/v3/schedules", + json=overlapping_schedule, + headers=auth_header, + ) + ) + assert response.status_code == 400 + assert response.json["error"] == "Bad Request" + assert response.json["message"] == "Host is not available for the specified date range" + class TestReadSchedule: @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) @@ -233,7 +354,12 @@ def test_valid_all(self, test_client, auth, prefill): ) ) response.json.sort(key=lambda x: x["id"]) - schedule_responses = [SCHEDULE_1_RESPONSE.copy(), SCHEDULE_2_RESPONSE.copy()] + schedule_responses = [ + SCHEDULE_1_RESPONSE.copy(), + SCHEDULE_2_RESPONSE.copy(), + SELF_SCHEDULE_1_RESPONSE.copy(), + SELF_SCHEDULE_2_RESPONSE.copy(), + ] for i, resp in enumerate(schedule_responses): resp["assignment"]["cloud"]["last_redefined"] = response.json[i]["assignment"]["cloud"]["last_redefined"] 
resp["assignment"]["created_at"] = response.json[i]["assignment"]["created_at"] @@ -372,18 +498,28 @@ def test_valid_filter(self, test_client, auth, prefill): """ auth_header = auth.get_auth_header() hostname = SCHEDULE_1_RESPONSE["host"]["name"] - resp = SCHEDULE_1_RESPONSE.copy() response = unwrap_json( test_client.get( f"/api/v3/schedules?host.name={hostname}", headers=auth_header, ) ) - resp["created_at"] = response.json[0]["created_at"] + + response.json.sort(key=lambda x: x["id"]) + schedule_responses = [SCHEDULE_1_RESPONSE.copy(), SELF_SCHEDULE_1_RESPONSE.copy()] + for i, resp in enumerate(schedule_responses): + resp["assignment"]["cloud"]["last_redefined"] = response.json[i]["assignment"]["cloud"]["last_redefined"] + resp["assignment"]["created_at"] = response.json[i]["assignment"]["created_at"] + resp["created_at"] = response.json[i]["created_at"] + resp["host"]["created_at"] = response.json[i]["host"]["created_at"] + resp["host"]["cloud"]["last_redefined"] = response.json[i]["host"]["cloud"]["last_redefined"] + resp["host"]["default_cloud"]["last_redefined"] = response.json[i]["host"]["default_cloud"][ + "last_redefined" + ] + resp["start"] = response.json[i]["start"] + resp["end"] = response.json[i]["end"] assert response.status_code == 200 - resp["start"] = response.json[0]["start"] - resp["end"] = response.json[0]["end"] - assert response.json == [resp] + assert response.json == schedule_responses @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) def test_valid_future(self, test_client, auth, prefill): @@ -395,6 +531,7 @@ def test_valid_future(self, test_client, auth, prefill): auth_header = auth.get_auth_header() schedule_responses = [ SCHEDULE_2_RESPONSE.copy(), + SELF_SCHEDULE_2_RESPONSE.copy(), ] response = unwrap_json( test_client.get( diff --git a/tests/artifacts/output.yaml b/tests/artifacts/output.yaml deleted file mode 100644 index fe1e5faf2..000000000 --- a/tests/artifacts/output.yaml +++ /dev/null @@ -1,18 +0,0 @@ -clouds: - cloud99: - ccuser: - - '' - description: test - owner: test - qinq: 0 - ticket: '1234' - vlan: 1 - wipe: false -current_schedules: -- build_end: null - build_start: null - cloud: cloud99 - end: 2024-08-20 19:38:00 - host: host1.example.com - moved: false - start: 2024-08-06 19:38:00 diff --git a/tests/cli/test_host.py b/tests/cli/test_host.py index bf128b998..82853c887 100644 --- a/tests/cli/test_host.py +++ b/tests/cli/test_host.py @@ -30,6 +30,11 @@ def mark_repaired(): assert not host.broken +def mark_unretired(): + host = HostDao.update_host(HOST2, retired=False) + assert not host.retired + + @pytest.fixture def remove_host(request): finalizer() @@ -54,6 +59,13 @@ def mark_host_broken(request): assert host.broken +@pytest.fixture +def mark_host_retired(request): + request.addfinalizer(mark_unretired) + host = HostDao.update_host(HOST2, retired=True) + assert host.retired + + class TestHost(TestBase): def test_define_host(self, remove_host): self.cli_args["host"] = DEFINE_HOST @@ -161,6 +173,12 @@ def test_ls_host_filter_bool_false(self, mark_host_broken): assert self._caplog.messages[0] == HOST2 assert len(self._caplog.messages) == 1 + def test_ls_host_filter_retired(self, mark_host_retired): + self.quads_cli_call("ls_hosts") + + assert self._caplog.messages[0] == HOST1 + assert len(self._caplog.messages) == 1 + def test_ls_host_filter_bad_model(self, mark_host_broken): self.cli_args["filter"] = "model==BADMODEL" diff --git a/tests/cli/test_quads.py b/tests/cli/test_quads.py index 48e8a97af..6dc8dec9d 100644 --- 
a/tests/cli/test_quads.py +++ b/tests/cli/test_quads.py @@ -54,7 +54,7 @@ def test_default_action_date(self): def test_version(self): self.quads_cli_call("version") - assert self._caplog.messages[0] == f'"QUADS version {Config.QUADSVERSION} {Config.QUADSCODENAME}"\n' + assert self._caplog.messages[0].startswith('"QUADS version') @patch("quads.quads_api.requests.Session.get") def test_version_exception(self, mock_get): diff --git a/tests/cli/test_summary.py b/tests/cli/test_summary.py index 84cf1c691..2ca612649 100644 --- a/tests/cli/test_summary.py +++ b/tests/cli/test_summary.py @@ -30,4 +30,3 @@ def test_summary_date(self): self.quads_cli_call("summary") assert len(self._caplog.messages) == 1 - assert self._caplog.messages[0] == "cloud99: 2 (test)" diff --git a/tests/config.py b/tests/config.py index a0aa82e7e..377581633 100644 --- a/tests/config.py +++ b/tests/config.py @@ -45,6 +45,41 @@ "cloud": "cloud05", "host_type": "scalelab", } +SELF_HOST_1_REQUEST = { + "name": "host11.example.com", + "default_cloud": "cloud01", + "model": "fc640", + "host_type": "scalelab", + "can_self_schedule": False, +} +SELF_HOST_2_REQUEST = { + "name": "host12.example.com", + "default_cloud": "cloud01", + "model": "r640", + "host_type": "alias", + "can_self_schedule": False, +} +SELF_HOST_3_REQUEST = { + "name": "host13.example.com", + "default_cloud": "cloud01", + "model": "1029p", + "host_type": "scalelab", + "can_self_schedule": False, +} +SELF_HOST_4_REQUEST = { + "name": "host14.example.com", + "default_cloud": "cloud01", + "model": "r640", + "host_type": "scalelab", + "can_self_schedule": False, +} +SELF_HOST_5_REQUEST = { + "name": "host15.example.com", + "default_cloud": "cloud01", + "model": "6048r", + "host_type": "scalelab", + "can_self_schedule": False, +} # -------------------- # DISKS @@ -329,6 +364,7 @@ "created_at": "___", "description": "Test allocation.", "id": 1, + "is_self_schedule": False, "notification": { "assignment_id": 1, "fail": False, @@ -376,6 +412,7 @@ "created_at": "___", "description": "Updated description.", "id": 1, + "is_self_schedule": False, "notification": { "assignment_id": 1, "fail": False, @@ -421,6 +458,7 @@ "created_at": "___", "description": "Test allocation.", "id": 2, + "is_self_schedule": False, "notification": { "assignment_id": 2, "fail": False, @@ -450,6 +488,53 @@ "vlan_id": 2, "wipe": False, } +SELF_ASSIGNMENT_1_REQUEST = { + "cloud": "cloud04", + "vlan": "603", + "description": "Test allocation.", + "owner": "grafuls", + "ticket": "3", + "ccuser": "gonza", + "is_self_schedule": True, +} +SELF_ASSIGNMENT_1_RESPONSE = { + "active": True, + "ccuser": ["gonza"], + "cloud": {"id": 4, "last_redefined": "___", "name": "cloud04"}, + "cloud_id": 4, + "created_at": "___", + "description": "Test allocation.", + "id": 3, + "is_self_schedule": True, + "notification": { + "assignment_id": 3, + "fail": False, + "five_days": False, + "id": 3, + "initial": False, + "one_day": False, + "pre": False, + "pre_initial": False, + "seven_days": False, + "success": False, + "three_days": False, + }, + "owner": "grafuls", + "provisioned": False, + "qinq": None, + "ticket": "3", + "validated": False, + "vlan": { + "gateway": "192.168.12.21", + "id": 3, + "ip_free": 1022, + "ip_range": "10.1.54.0/22", + "netmask": "255.255.252.0", + "vlan_id": 603, + }, + "vlan_id": 3, + "wipe": False, +} # -------------------- # SCHEDULES @@ -462,6 +547,239 @@ end_str_future = end_date_future.strftime("%Y-%m-%d") build_end = start_date + timedelta(days=1) build_end_str = 
build_end.strftime("%Y-%m-%d")
+
+SELF_SCHEDULE_1_REQUEST = {
+    "cloud": "cloud04",
+    "hostname": "host2.example.com",
+}
+SELF_SCHEDULE_1_RESPONSE = {
+    "assignment": {
+        "active": True,
+        "ccuser": ["gonza"],
+        "cloud": {
+            "id": 4,
+            "name": "cloud04",
+        },
+        "cloud_id": 4,
+        "description": "Test allocation.",
+        "id": 3,
+        "is_self_schedule": True,
+        "notification": {
+            "assignment_id": 3,
+            "fail": False,
+            "five_days": False,
+            "id": 3,
+            "initial": False,
+            "one_day": False,
+            "pre": False,
+            "pre_initial": False,
+            "seven_days": False,
+            "success": False,
+            "three_days": False,
+        },
+        "owner": "grafuls",
+        "provisioned": False,
+        "qinq": None,
+        "ticket": "3",
+        "validated": False,
+        "vlan": {
+            "gateway": "192.168.12.21",
+            "id": 3,
+            "ip_free": 1022,
+            "ip_range": "10.1.54.0/22",
+            "netmask": "255.255.252.0",
+            "vlan_id": 603,
+        },
+        "vlan_id": 3,
+        "wipe": False,
+    },
+    "assignment_id": 3,
+    "build_end": None,
+    "build_start": None,
+    "end": "Tue, 06 Jun 2023 22:00:00 GMT",
+    "host": {
+        "broken": False,
+        "build": False,
+        "can_self_schedule": True,
+        "cloud": {
+            "id": 1,
+            "name": "cloud01",
+        },
+        "cloud_id": 1,
+        "default_cloud": {
+            "id": 1,
+            "name": "cloud01",
+        },
+        "default_cloud_id": 1,
+        "host_type": "alias",
+        "id": 2,
+        "last_build": None,
+        "model": "R640",
+        "name": "host2.example.com",
+        "retired": False,
+        "switch_config_applied": False,
+        "validated": False,
+    },
+    "host_id": 2,
+    "id": 3,
+    "start": "Thu, 01 Jun 2023 22:00:00 GMT",
+}
+SELF_SCHEDULE_2_REQUEST = {
+    "cloud": "cloud04",
+    "hostname": "host3.example.com",
+}
+SELF_SCHEDULE_2_RESPONSE = {
+    "assignment": {
+        "active": True,
+        "ccuser": ["gonza"],
+        "cloud": {
+            "id": 4,
+            "name": "cloud04",
+        },
+        "cloud_id": 4,
+        "description": "Test allocation.",
+        "id": 3,
+        "is_self_schedule": True,
+        "notification": {
+            "assignment_id": 3,
+            "fail": False,
+            "five_days": False,
+            "id": 3,
+            "initial": False,
+            "one_day": False,
+            "pre": False,
+            "pre_initial": False,
+            "seven_days": False,
+            "success": False,
+            "three_days": False,
+        },
+        "owner": "grafuls",
+        "provisioned": False,
+        "qinq": None,
+        "ticket": "3",
+        "validated": False,
+        "vlan": {
+            "gateway": "192.168.12.21",
+            "id": 3,
+            "ip_free": 1022,
+            "ip_range": "10.1.54.0/22",
+            "netmask": "255.255.252.0",
+            "vlan_id": 603,
+        },
+        "vlan_id": 3,
+        "wipe": False,
+    },
+    "assignment_id": 3,
+    "build_end": None,
+    "build_start": None,
+    "end": "Tue, 06 Jun 2023 22:00:00 GMT",
+    "host": {
+        "broken": False,
+        "build": False,
+        "can_self_schedule": True,
+        "cloud": {
+            "id": 1,
+            "name": "cloud01",
+        },
+        "cloud_id": 1,
+        "default_cloud": {
+            "id": 1,
+            "name": "cloud01",
+        },
+        "default_cloud_id": 1,
+        "host_type": "scalelab",
+        "id": 3,
+        "last_build": None,
+        "model": "1029P",
+        "name": "host3.example.com",
+        "retired": False,
+        "switch_config_applied": False,
+        "validated": False,
+    },
+    "host_id": 3,
+    "id": 4,
+    "start": "Thu, 01 Jun 2023 22:00:00 GMT",
+}
+SELF_SCHEDULE_3_REQUEST = {
+    "cloud": "cloud04",
+    "hostname": "host4.example.com",
+}
+SELF_SCHEDULE_3_RESPONSE = {
+    "assignment": {
+        "active": True,
+        "ccuser": ["gonza"],
+        "cloud": {
+            "id": 4,
+            "name": "cloud04",
+        },
+        "cloud_id": 4,
+        "description": "Test allocation.",
+        "id": 3,
+        "is_self_schedule": True,
+        "notification": {
+            "assignment_id": 3,
+            "fail": False,
+            "five_days": False,
+            "id": 3,
+            "initial": False,
+            "one_day": False,
+            "pre": False,
+            "pre_initial": False,
+            "seven_days": False,
+            "success": False,
+            "three_days": False,
+        },
+        "owner": "grafuls",
+        "provisioned": False,
+        "qinq": None,
+        "ticket": "3",
+        "validated": False,
+        "vlan": {
+            "gateway": "192.168.12.21",
+            "id": 3,
+            "ip_free": 1022,
+            "ip_range": "10.1.54.0/22",
+            "netmask": "255.255.252.0",
+            "vlan_id": 603,
+        },
+        "vlan_id": 3,
+        "wipe": False,
+    },
+    "assignment_id": 3,
+    "build_end": None,
+    "build_start": None,
+    "end": "Tue, 06 Jun 2023 22:00:00 GMT",
+    "host": {
+        "broken": False,
+        "build": False,
+        "can_self_schedule": True,
+        "cloud": {
+            "id": 1,
+            "name": "cloud01",
+        },
+        "cloud_id": 1,
+        "default_cloud": {
+            "id": 1,
+            "name": "cloud01",
+        },
+        "default_cloud_id": 1,
+        "host_type": "alias",
+        "id": 4,
+        "last_build": None,
+        "model": "R640",
+        "name": "host4.example.com",
+        "retired": False,
+        "switch_config_applied": False,
+        "validated": False,
+    },
+    "host_id": 4,
+    "id": 3,
+    "start": "Thu, 01 Jun 2023 22:00:00 GMT",
+}
+SELF_SCHEDULE_NON_REQUEST = {
+    "cloud": "cloud04",
+    "hostname": "host11.example.com",
+}
 SCHEDULE_1_REQUEST = {
     "cloud": "cloud02",
     "hostname": "host2.example.com",
@@ -481,6 +799,7 @@
         "created_at": "Tue, 07 Mar 2023 11:36:53 GMT",
         "description": "Test allocation.",
         "id": 1,
+        "is_self_schedule": False,
         "notification": {
             "assignment_id": 1,
             "fail": False,
@@ -518,6 +837,7 @@
     "host": {
         "broken": False,
         "build": False,
+        "can_self_schedule": True,
         "cloud": {
             "id": 1,
             "last_redefined": "Tue, 07 Mar 2023 11:36:53 GMT",
@@ -563,6 +883,7 @@
         "created_at": "Tue, 07 Mar 2023 11:36:53 GMT",
         "description": "Test allocation.",
         "id": 2,
+        "is_self_schedule": False,
         "notification": {
             "assignment_id": 2,
             "fail": False,
@@ -600,6 +921,7 @@
     "host": {
         "broken": False,
         "build": False,
+        "can_self_schedule": True,
         "cloud": {
             "id": 1,
             "last_redefined": "Tue, 07 Mar 2023 11:36:53 GMT",
diff --git a/tests/conftest.py b/tests/conftest.py
index 788145418..853d9f1d7 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -103,73 +103,90 @@ def prefill(test_client, auth, request):
     """
     auth_header = auth.get_auth_header()
-    if "clouds" in request.param:
-        for cloud_id in range(1, 6):
-            cloud_name = f"cloud{str(cloud_id).zfill(2)}"
-            test_client.post(
-                "/api/v3/clouds",
-                json=dict(cloud=cloud_name),
-                headers=auth_header,
-            )
-    if "vlans" in request.param:
-        for i in range(1, 4):
-            test_client.post(
-                "/api/v3/vlans",
-                json=eval(f"VLAN_{i}_REQUEST"),
-                headers=auth_header,
-            )
-    if "hosts" in request.param:
-        for host_id in range(1, 6):
-            test_client.post(
-                "/api/v3/hosts",
-                json=eval(f"HOST_{host_id}_REQUEST"),
-                headers=auth_header,
-            )
-    if "disks" in request.param:
-        for i in range(1, 5):
-            disk_request = eval(f"DISK_{i}_REQUEST")
-            test_client.post(
-                f"/api/v3/disks/{disk_request[1]}",
-                json=disk_request[0],
-                headers=auth_header,
-            )
-    if "interfaces" in request.param:
-        for i in range(1, 5):
-            interface_request = eval(f"INTERFACE_{i}_REQUEST")
-            test_client.post(
-                f"/api/v3/interfaces/{interface_request[1]}",
-                json=interface_request[0],
-                headers=auth_header,
-            )
-    if "memory" in request.param:
-        for i in range(1, 7):
-            memory_request = eval(f"MEMORY_{i}_REQUEST")
-            test_client.post(
-                f"/api/v3/memory/{memory_request[1]}",
-                json=memory_request[0],
-                headers=auth_header,
-            )
-    if "processors" in request.param:
-        for i in range(1, 6):
-            processor_request = eval(f"PROCESSOR_{i}_REQUEST")
-            test_client.post(
-                f"/api/v3/processors/{processor_request[1]}",
-                json=processor_request[0],
-                headers=auth_header,
-            )
-    if "assignments" in request.param:
-        for i in range(1, 3):
-            assignment_request = eval(f"ASSIGNMENT_{i}_REQUEST")
+    for param in [p.strip() for p in request.param.split(",")]:
+        if param == "clouds":
+            for cloud_id in range(1, 6):
+                cloud_name = f"cloud{str(cloud_id).zfill(2)}"
+                test_client.post(
+                    "/api/v3/clouds",
+                    json=dict(cloud=cloud_name),
+                    headers=auth_header,
+                )
+        if param == "vlans":
+            for i in range(1, 4):
+                test_client.post(
+                    "/api/v3/vlans",
+                    json=eval(f"VLAN_{i}_REQUEST"),
+                    headers=auth_header,
+                )
+        if param == "hosts":
+            for host_id in range(1, 6):
+                test_client.post(
+                    "/api/v3/hosts",
+                    json=eval(f"HOST_{host_id}_REQUEST"),
+                    headers=auth_header,
+                )
+        if param == "non_self_hosts":
+            for host_id in range(1, 6):
+                host_data = eval(f"SELF_HOST_{host_id}_REQUEST")
+                test_client.post(
+                    "/api/v3/hosts",
+                    json=host_data,
+                    headers=auth_header,
+                )
+        if param == "disks":
+            for i in range(1, 5):
+                disk_request = eval(f"DISK_{i}_REQUEST")
+                test_client.post(
+                    f"/api/v3/disks/{disk_request[1]}",
+                    json=disk_request[0],
+                    headers=auth_header,
+                )
+        if param == "interfaces":
+            for i in range(1, 5):
+                interface_request = eval(f"INTERFACE_{i}_REQUEST")
+                test_client.post(
+                    f"/api/v3/interfaces/{interface_request[1]}",
+                    json=interface_request[0],
+                    headers=auth_header,
+                )
+        if param == "memory":
+            for i in range(1, 7):
+                memory_request = eval(f"MEMORY_{i}_REQUEST")
+                test_client.post(
+                    f"/api/v3/memory/{memory_request[1]}",
+                    json=memory_request[0],
+                    headers=auth_header,
+                )
+        if param == "processors":
+            for i in range(1, 6):
+                processor_request = eval(f"PROCESSOR_{i}_REQUEST")
+                test_client.post(
+                    f"/api/v3/processors/{processor_request[1]}",
+                    json=processor_request[0],
+                    headers=auth_header,
+                )
+        if param == "assignments":
+            for i in range(1, 3):
+                assignment_request = eval(f"ASSIGNMENT_{i}_REQUEST")
+                test_client.post(
+                    "/api/v3/assignments",
+                    json=assignment_request,
+                    headers=auth_header,
+                )
+        if param == "self_assignments":
+            assignment_request = eval("SELF_ASSIGNMENT_1_REQUEST")
+            assignment_request["is_self_schedule"] = True
             test_client.post(
                 "/api/v3/assignments",
                 json=assignment_request,
                 headers=auth_header,
             )
-    if "schedules" in request.param:
-        for i in range(1, 3):
-            schedule_request = eval(f"SCHEDULE_{i}_REQUEST")
-            test_client.post(
-                "/api/v3/schedules",
-                json=schedule_request,
-                headers=auth_header,
-            )
+        if param == "schedules":
+            for i in range(1, 3):
+                schedule_request = eval(f"SCHEDULE_{i}_REQUEST")
+                test_client.post(
+                    "/api/v3/schedules",
+                    json=schedule_request,
+                    headers=auth_header,
+                )
diff --git a/tests/tools/test_export_schedules.py b/tests/tools/test_export_schedules.py
index 1c148dbbd..154c38421 100644
--- a/tests/tools/test_export_schedules.py
+++ b/tests/tools/test_export_schedules.py
@@ -7,7 +7,7 @@ class TestExportSchedules(TestBase):
-    output_file = os.path.join(os.path.dirname(__file__), "../artifacts/output.yaml")
+    output_file = os.path.join(os.path.dirname(__file__), "artifacts/output.yaml")

     @patch("quads.tools.export_current_schedules.QuadsApi")
     def test_export_current_schedules_with_valid_data(self, mock_quads_api):
diff --git a/tests/tools/test_import_schedule.py b/tests/tools/test_import_schedule.py
index d643b3bc5..c0a292a65 100644
--- a/tests/tools/test_import_schedule.py
+++ b/tests/tools/test_import_schedule.py
@@ -1,6 +1,10 @@
 import os
 from unittest.mock import patch, MagicMock
+
+import pytest
+from quads.quads_api import APIBadRequest
+
 from quads.tools.import_current_schedules import import_current_schedules
 from tests.tools.test_base import TestBase
@@ -18,15 +22,17 @@ def test_import_current_schedules_with_valid_data(self, mock_quads_api):
     def test_import_current_schedules_with_existing_cloud_and_assignment(self, mock_quads_api):
         mock_quads_api.get_cloud.return_value = MagicMock()
         mock_quads_api.get_active_cloud_assignment.return_value = MagicMock()
-        import_current_schedules(self.input_file)
-
-    @patch("quads.tools.import_current_schedules.QuadsApi")
-    def test_import_current_schedules_with_undefined_host(self, mock_quads_api):
-        mock_quads_api.get_host.side_effect = Exception("Undefined host")
-        import_current_schedules(self.input_file)
+        with pytest.raises(APIBadRequest) as exc_info:
+            import_current_schedules(self.input_file)
+
+        assert str(exc_info.value) == "Host is not available for the specified date range"

     @patch("quads.tools.import_current_schedules.QuadsApi")
     def test_import_current_schedules_with_moved_schedule(self, mock_quads_api):
         mock_quads_api.get_cloud.return_value = None
         mock_quads_api.get_active_cloud_assignment.return_value = None
-        import_current_schedules(self.input_file)
+        with pytest.raises(APIBadRequest) as exc_info:
+            import_current_schedules(self.input_file)
+
+        assert str(exc_info.value) == "Host is not available for the specified date range"
diff --git a/tests/tools/test_notify.py b/tests/tools/test_notify.py
index b2448d1a8..e8fafbf5c 100644
--- a/tests/tools/test_notify.py
+++ b/tests/tools/test_notify.py
@@ -81,7 +81,7 @@ def test_create_message(self, mock_postman):
     def test_create_future_initial_message(self, mock_postman):
         # Setup
         cloud = "cloud1"
-        assignment_obj = MagicMock(ticket="ticket1", owner="owner1")
+        assignment_obj = MagicMock(ticket="ticket1", owner="owner1", is_self_schedule=False)
         cloud_info = "cloud_info1"

         # Call the function