diff --git a/src/quads/cli/cli.py b/src/quads/cli/cli.py index 326d137f..3eff643f 100644 --- a/src/quads/cli/cli.py +++ b/src/quads/cli/cli.py @@ -176,9 +176,7 @@ def _filter_kwargs(self, filter_args): else: if keys[0].strip().lower() == "model": if str(value).upper() not in conf["models"].split(","): - self.logger.warning( - f"Accepted model names are: {conf['models']}" - ) + self.logger.warning(f"Accepted model names are: {conf['models']}") raise CliException("Model type not recognized.") if type(value) == str: @@ -189,13 +187,9 @@ def _filter_kwargs(self, filter_args): if not op_found: self.logger.warning(f"Condition: {condition}") self.logger.warning(f"Accepted operators: {', '.join(ops.keys())}") - raise CliException( - "A filter was defined but not parsed correctly. Check filter operator." - ) + raise CliException("A filter was defined but not parsed correctly. Check filter operator.") if not kwargs: # pragma: no cover - raise CliException( - "A filter was defined but not parsed correctly. Check filter syntax." - ) + raise CliException("A filter was defined but not parsed correctly. Check filter syntax.") return kwargs def _output_json_result(self, request, data): @@ -204,9 +198,7 @@ def _output_json_result(self, request, data): self.logger.info("Successfully removed") else: js = request.json() - self.logger.debug( - "%s %s: %s" % (request.status_code, request.reason, data) - ) + self.logger.debug("%s %s: %s" % (request.status_code, request.reason, data)) if request.request.method == "POST" and request.status_code == 200: self.logger.info("Successful request") if js.get("result"): @@ -271,9 +263,7 @@ def action_ccuser(self): def action_interface(self): hostname = self.cli_args.get("host") if not hostname: - raise CliException( - "Missing option. --host option is required for --ls-interface." - ) + raise CliException("Missing option. --host option is required for --ls-interface.") try: self.quads.get_host(hostname) @@ -299,9 +289,7 @@ def action_interface(self): def action_memory(self): hostname = self.cli_args.get("host") if hostname is None: - raise CliException( - "Missing option. --host option is required for --ls-memory." - ) + raise CliException("Missing option. --host option is required for --ls-memory.") try: host = self.quads.get_host(hostname) @@ -318,9 +306,7 @@ def action_memory(self): def action_disks(self): hostname = self.cli_args.get("host") if hostname is None: - raise CliException( - "Missing option. --host option is required for --ls-disks." - ) + raise CliException("Missing option. --host option is required for --ls-disks.") try: host = self.quads.get_host(hostname) @@ -339,9 +325,7 @@ def action_disks(self): def action_processors(self): hostname = self.cli_args.get("host") if not hostname: - raise CliException( - "Missing option. --host option is required for --ls-processors." - ) + raise CliException("Missing option. 
--host option is required for --ls-processors.") try: host = self.quads.get_host(hostname) @@ -409,9 +393,7 @@ def action_schedule(self): _cloud_name = schedule.assignment.cloud.name start = ":".join(schedule.start.isoformat().split(":")[:-1]) end = ":".join(schedule.end.isoformat().split(":")[:-1]) - self.logger.info( - f"{schedule.id}| start={start}, end={end}, cloud={_cloud_name}" - ) + self.logger.info(f"{schedule.id}| start={start}, end={end}, cloud={_cloud_name}") else: try: _clouds = self.quads.get_clouds() @@ -422,9 +404,7 @@ def action_schedule(self): _kwargs["cloud"] = cloud.name if cloud.name == conf.get("spare_pool_name"): if self.cli_args.get("datearg"): - _date = datetime.strptime( - self.cli_args.get("datearg"), "%Y-%m-%d %H:%M" - ) + _date = datetime.strptime(self.cli_args.get("datearg"), "%Y-%m-%d %H:%M") _date_iso = ":".join(_date.isoformat().split(":")[:-1]) data = { "start": _date_iso, @@ -489,9 +469,7 @@ def action_free_cloud(self): raise CliException(str(ex)) for cloud in _clouds: cloud_reservation_lock = int(conf["cloud_reservation_lock"]) - last_redefined = datetime.strptime( - str(cloud.last_redefined), "%a, %d %b %Y %H:%M:%S %Z" - ) + last_redefined = datetime.strptime(str(cloud.last_redefined), "%a, %d %b %Y %H:%M:%S %Z") lock_release = last_redefined + timedelta(hours=cloud_reservation_lock) cloud_string = f"{cloud.name}" if lock_release > datetime.now(): @@ -510,9 +488,7 @@ def action_available(self): _filter = self.cli_args.get("filter") _schedstart = self.cli_args.get("schedstart") _schedend = self.cli_args.get("schedend") - _start = _end = "T".join( - ":".join(datetime.now().isoformat().split(":")[:-1]).split() - ) + _start = _end = "T".join(":".join(datetime.now().isoformat().split(":")[:-1]).split()) if _filter: filter_args = self._filter_kwargs(_filter) @@ -548,14 +524,11 @@ def action_available(self): # TODO: check return on this below try: if self.quads.is_available(host.name, data): - current_schedule = self.quads.get_current_schedules( - {"host": host.name} - ) + current_schedule = self.quads.get_current_schedules({"host": host.name}) if current_schedule: if ( host.default_cloud.name == conf["spare_pool_name"] - and current_schedule[0].assignment.cloud.name - != omit_cloud_arg + and current_schedule[0].assignment.cloud.name != omit_cloud_arg ): current.append(host.name) else: @@ -625,9 +598,7 @@ def action_extend(self): raise CliException(msg) if not cloud_name and not host_name: - msg = ( - "Missing option. At least one of either --host or --cloud is required." - ) + msg = "Missing option. At least one of either --host or --cloud is required." raise CliException(msg) if weeks: @@ -642,19 +613,18 @@ def action_extend(self): try: dispatched_obj = dispatch[dispatch_key](data_dispatch[dispatch_key]) + if not dispatched_obj: # pragma: no cover + raise CliException(f"{dispatch_key.capitalize()} not found") schedules = self.quads.get_current_schedules(data_dispatch) if not schedules: - self.logger.warning( - f"The selected {dispatch_key} does not have any active schedules" - ) + self.logger.warning(f"The selected {dispatch_key} does not have any active schedules") future_schedules = self.quads.get_future_schedules(data_dispatch) if not future_schedules: return if not self._confirmation_dialog( - "Would you like to extend a future allocation of " - f"{data_dispatch[dispatch_key]}? (y/N): " + "Would you like to extend a future allocation of " f"{data_dispatch[dispatch_key]}? 
(y/N): " ): return schedules = future_schedules @@ -663,11 +633,7 @@ def action_extend(self): non_extendable = [] for schedule in schedules: - end_date = ( - schedule.end + timedelta(weeks=weeks) - if weeks - else datetime.strptime(date_arg, "%Y-%m-%d %H:%M") - ) + end_date = schedule.end + timedelta(weeks=weeks) if weeks else datetime.strptime(date_arg, "%Y-%m-%d %H:%M") data = { "start": ":".join(schedule.end.isoformat().split(":")[:-1]), "end": ":".join(end_date.isoformat().split(":")[:-1]), @@ -696,7 +662,7 @@ def action_extend(self): "seven_days": False, } try: - self.quads.update_assignment(schedules[0].assignment.id, data) + self.quads.update_notification(schedules[0].assignment.notification.id, data) for schedule in schedules: end_date = ( @@ -736,14 +702,10 @@ def action_shrink(self): end_date = None if not weeks and not now and not date_arg: - raise CliException( - "Missing option. Need --weeks, --date or --now when using --shrink" - ) + raise CliException("Missing option. Need --weeks, --date or --now when using --shrink") if not cloud_name and not host_name: - raise CliException( - "Missing option. At least one of either --host or --cloud is required" - ) + raise CliException("Missing option. At least one of either --host or --cloud is required") if weeks: try: @@ -770,16 +732,13 @@ def action_shrink(self): schedules = self.quads.get_current_schedules(data_dispatch) if not schedules: - self.logger.error( - f"The selected {dispatch_key} does not have any active schedules" - ) + self.logger.error(f"The selected {dispatch_key} does not have any active schedules") future_schedules = self.quads.get_future_schedules(data_dispatch) if not future_schedules: return if not self._confirmation_dialog( - "Would you like to shrink a future allocation of" - f" {data_dispatch[dispatch_key]}? (y/N): " + "Would you like to shrink a future allocation of" f" {data_dispatch[dispatch_key]}? (y/N): " ): return schedules = future_schedules @@ -789,11 +748,7 @@ def action_shrink(self): non_shrinkable = [] for schedule in schedules: end_date = schedule.end - timedelta(weeks=weeks) if weeks else _date - if ( - end_date < schedule.start - or end_date > schedule.end - or (not now and end_date < threshold) - ): + if end_date < schedule.start or end_date > schedule.end or (not now and end_date < threshold): non_shrinkable.append(schedule.host) if non_shrinkable: @@ -806,14 +761,9 @@ def action_shrink(self): return if not check: - confirm_msg = ( - f"for {weeks} week[s]? (y/N): " - if weeks - else f"to {str(_date)[:16]}? (y/N): " - ) + confirm_msg = f"for {weeks} week[s]? (y/N): " if weeks else f"to {str(_date)[:16]}? 
(y/N): " if not self._confirmation_dialog( - f"Are you sure you want to shrink {data_dispatch[dispatch_key]} " - + confirm_msg + f"Are you sure you want to shrink {data_dispatch[dispatch_key]} " + confirm_msg ): return @@ -844,9 +794,7 @@ def action_shrink(self): f"{dispatch_key.capitalize()} {data_dispatch[dispatch_key]} can be shrunk to {str(end_date)[:16]}" ) else: - self.logger.info( - f"{dispatch_key.capitalize()} {data_dispatch[dispatch_key]} can be terminated now" - ) + self.logger.info(f"{dispatch_key.capitalize()} {data_dispatch[dispatch_key]} can be terminated now") def action_cloudresource(self): assignment = None @@ -876,16 +824,12 @@ def action_cloudresource(self): if cloud and cloud.name != conf.get("spare_pool_name"): try: - assignment = self.quads.get_active_cloud_assignment( - self.cli_args.get("cloud") - ) + assignment = self.quads.get_active_cloud_assignment(self.cli_args.get("cloud")) except (APIServerException, APIBadRequest) as ex: # pragma: no cover raise CliException(str(ex)) if assignment: - last_redefined = datetime.strptime( - str(cloud.last_redefined), "%a, %d %b %Y %H:%M:%S GMT" - ) + last_redefined = datetime.strptime(str(cloud.last_redefined), "%a, %d %b %Y %H:%M:%S GMT") lock_release = last_redefined + timedelta(hours=cloud_reservation_lock) cloud_string = f"{cloud.name}" if lock_release > datetime.now(): @@ -924,11 +868,7 @@ def action_cloudresource(self): try: self.quads.update_cloud( cloud.name, - { - "last_redefined": ":".join( - datetime.now().isoformat().split(":")[:-1] - ) - }, + {"last_redefined": ":".join(datetime.now().isoformat().split(":")[:-1])}, ) except ( APIServerException, @@ -944,9 +884,7 @@ def action_cloudresource(self): self.logger.warning("No assignment created.") except ConnectionError: # pragma: no cover - raise CliException( - "Could not connect to the quads-server, verify service is up and running." - ) + raise CliException("Could not connect to the quads-server, verify service is up and running.") def action_modcloud(self): data = { @@ -970,16 +908,12 @@ def action_modcloud(self): clean_data["qinq"] = self.cli_args.get("qinq") try: - assignment = self.quads.get_active_cloud_assignment( - self.cli_args.get("cloud") - ) + assignment = self.quads.get_active_cloud_assignment(self.cli_args.get("cloud")) except (APIServerException, APIBadRequest) as ex: # pragma: no cover raise CliException(str(ex)) if not assignment: - raise CliException( - f"No active cloud assignment for {self.cli_args.get('cloud')}" - ) + raise CliException(f"No active cloud assignment for {self.cli_args.get('cloud')}") try: self.quads.update_assignment(assignment.id, clean_data) @@ -1067,9 +1001,7 @@ def action_define_host_metadata(self): hosts_metadata = yaml.safe_load(md) except IOError as ಠ_ಠ: self.logger.debug(ಠ_ಠ, exc_info=ಠ_ಠ) - raise CliException( - f"There was something wrong reading from {self.cli_args['metadata']}" - ) + raise CliException(f"There was something wrong reading from {self.cli_args['metadata']}") for host_md in hosts_metadata: ready_defined = [] @@ -1091,20 +1023,14 @@ def action_define_host_metadata(self): ) as ex: # pragma: no cover raise CliException(str(ex)) else: - self.logger.warning( - f"Host {host_md.get('name')} not found. Skipping." - ) + self.logger.warning(f"Host {host_md.get('name')} not found. 
Skipping.") continue host = self.quads.get_host(host_md.get("name")) data = {} for key, value in host_md.items(): - if ( - key != "name" - and key != "default_cloud" - and getattr(host, key) is not None - ): + if key != "name" and key != "default_cloud" and getattr(host, key) is not None: ready_defined.append(key) if not self.cli_args.get("force"): # pragma: no cover continue @@ -1122,9 +1048,7 @@ def action_define_host_metadata(self): ) as ex: # pragma: no cover raise CliException(str(ex)) else: # pragma: no cover - raise CliException( - f"Invalid key '{key}' on metadata for {host.name}" - ) + raise CliException(f"Invalid key '{key}' on metadata for {host.name}") else: data[key] = value @@ -1226,9 +1150,7 @@ def action_host_metadata_export(self): self.logger.info(f"Metadata successfully exported to {temp.name}.") except Exception as ಠ益ಠ: # pragma: no cover self.logger.debug(ಠ益ಠ, exc_info=ಠ益ಠ) - raise BaseQuadsException( - "There was something wrong writing to file." - ) from ಠ益ಠ + raise BaseQuadsException("There was something wrong writing to file.") from ಠ益ಠ return 0 @@ -1261,9 +1183,7 @@ def action_add_schedule(self): except (APIServerException, APIBadRequest) as ex: # pragma: no cover raise CliException(str(ex)) if host.cloud.name == self.cli_args.get("omitcloud"): - self.logger.info( - "Host is in part of the cloud specified with --omit-cloud. Nothing has been done." - ) + self.logger.info("Host is in part of the cloud specified with --omit-cloud. Nothing has been done.") else: data = { "cloud": self.cli_args.get("schedcloud"), @@ -1282,23 +1202,15 @@ def action_add_schedule(self): with open(self.cli_args.get("host_list")) as _file: host_list_stream = _file.read() except IOError: - raise CliException( - f"Could not read file: {self.cli_args['host_list']}." - ) + raise CliException(f"Could not read file: {self.cli_args['host_list']}.") host_list = host_list_stream.split() non_available = [] - _sched_start = datetime.strptime( - self.cli_args.get("schedstart"), "%Y-%m-%d %H:%M" - ) - _sched_end = datetime.strptime( - self.cli_args.get("schedend"), "%Y-%m-%d %H:%M" - ) + _sched_start = datetime.strptime(self.cli_args.get("schedstart"), "%Y-%m-%d %H:%M") + _sched_end = datetime.strptime(self.cli_args.get("schedend"), "%Y-%m-%d %H:%M") if self.cli_args.get("omitcloud"): - self.logger.info( - f"INFO - All hosts from {self.cli_args['omitcloud']} will be omitted." - ) + self.logger.info(f"INFO - All hosts from {self.cli_args['omitcloud']} will be omitted.") omitted = [] for host in host_list: @@ -1328,9 +1240,7 @@ def action_add_schedule(self): raise CliException(str(ex)) if non_available: - self.logger.error( - "The following hosts are either broken or unavailable:" - ) + self.logger.error("The following hosts are either broken or unavailable:") for host in non_available: self.logger.error(host) @@ -1353,9 +1263,7 @@ def action_add_schedule(self): raise CliException(str(ex)) self.logger.info(f"Schedule created for {host}") except ConnectionError: - raise CliException( - "Could not connect to the quads-server, verify service is up and running." 
- ) + raise CliException("Could not connect to the quads-server, verify service is up and running.") template_file = "jira_ticket_assignment" with open(os.path.join(conf.TEMPLATES_PATH, template_file)) as _file: @@ -1367,9 +1275,7 @@ def action_add_schedule(self): raise CliException(str(ex)) jira_docs_links = conf["jira_docs_links"].split(",") jira_vlans_docs_links = conf["jira_vlans_docs_links"].split(",") - ass = self.quads.get_active_cloud_assignment( - self.cli_args.get("schedcloud") - ) + ass = self.quads.get_active_cloud_assignment(self.cli_args.get("schedcloud")) comment = template.render( schedule_start=self.cli_args.get("schedstart"), schedule_end=self.cli_args.get("schedend"), @@ -1399,9 +1305,7 @@ def action_add_schedule(self): t_name = transition.get("name") if t_name and t_name.lower() == "scheduled": transition_id = transition.get("id") - transition_result = loop.run_until_complete( - jira.post_transition(ass.ticket, transition_id) - ) + transition_result = loop.run_until_complete(jira.post_transition(ass.ticket, transition_id)) break if not transition_result: @@ -1437,11 +1341,7 @@ def action_modschedule(self): value = self.cli_args.get(v) if value: if k in ["start", "end"]: - value = ":".join( - datetime.strptime(value, "%Y-%m-%d %H:%M") - .isoformat() - .split(":")[:-1] - ) + value = ":".join(datetime.strptime(value, "%Y-%m-%d %H:%M").isoformat().split(":")[:-1]) data[k] = value try: schedule = self.quads.get_schedule(self.cli_args.get("schedid")) @@ -1453,9 +1353,7 @@ def action_modschedule(self): "seven_day": False, "pre": False, } - self.quads.update_notification( - schedule.assignment["notification"]["id"], not_data - ) + self.quads.update_notification(schedule.assignment["notification"]["id"], not_data) self.logger.info("Schedule updated successfully.") except (APIServerException, APIBadRequest) as ex: # pragma: no cover raise CliException(str(ex)) @@ -1474,13 +1372,7 @@ def action_addinterface(self): _ifmaintenance = self.cli_args.get("ifmaintenance", False) _force = self.cli_args.get("force", None) _host = self.cli_args.get("host", None) - if ( - _ifmac is None - or _ifname is None - or _ifip is None - or _ifport is None - or _ifport is None - ): + if _ifmac is None or _ifname is None or _ifip is None or _ifport is None or _ifport is None: raise CliException( "Missing option. All these options are required for --add-interface:\n" "\t--host\n" @@ -1518,9 +1410,7 @@ def action_addinterface(self): def action_rminterface(self): if not self.cli_args.get("host") or not self.cli_args.get("ifname"): - raise CliException( - "Missing option. --host and --interface-name options are required for --rm-interface" - ) + raise CliException("Missing option. --host and --interface-name options are required for --rm-interface") data = { "hostname": self.cli_args.get("host"), @@ -1563,9 +1453,7 @@ def action_modinterface(self): _host = self.cli_args.get("host", None) # TODO: fix all if _host is None or _ifname is None: - raise CliException( - "Missing option. --host and --interface-name options are required for --mod-interface:" - ) + raise CliException("Missing option. --host and --interface-name options are required for --mod-interface:") try: host = self.quads.get_host(_host) @@ -1620,15 +1508,11 @@ def action_modinterface(self): def action_movehosts(self): # pragma: no cover if self.cli_args.get("datearg") and not self.cli_args.get("dryrun"): - raise CliException( - "--move-hosts and --date are mutually exclusive unless using --dry-run." 
- ) + raise CliException("--move-hosts and --date are mutually exclusive unless using --dry-run.") date = "" if self.cli_args.get("datearg"): - date = datetime.strptime( - self.cli_args.get("datearg"), "%Y-%m-%d %H:%M" - ).isoformat()[:-3] + date = datetime.strptime(self.cli_args.get("datearg"), "%Y-%m-%d %H:%M").isoformat()[:-3] try: moves = self.quads.get_moves(date) @@ -1669,9 +1553,7 @@ def action_movehosts(self): # pragma: no cover target_assignment = Assignment().from_dict(data=assignment) wipe = target_assignment.wipe if target_assignment else False - self.logger.info( - f"Moving {host} from {current} to {new}, wipe = {wipe}" - ) + self.logger.info(f"Moving {host} from {current} to {new}, wipe = {wipe}") if not self.cli_args.get("dryrun"): try: self.quads.update_host( @@ -1689,16 +1571,10 @@ def action_movehosts(self): # pragma: no cover raise CliException(str(ex)) if new != "cloud01": try: - has_active_schedule = self.quads.get_current_schedules( - {"cloud": f"{cloud.name}"} - ) + has_active_schedule = self.quads.get_current_schedules({"cloud": f"{cloud.name}"}) if has_active_schedule and wipe: - assignment = self.quads.get_active_cloud_assignment( - cloud.name - ) - self.quads.update_assignment( - assignment.id, {"validated": False} - ) + assignment = self.quads.get_active_cloud_assignment(cloud.name) + self.quads.update_assignment(assignment.id, {"validated": False}) except ( APIServerException, APIBadRequest, @@ -1706,25 +1582,15 @@ def action_movehosts(self): # pragma: no cover raise CliException(str(ex)) try: if self.cli_args.get("movecommand") == default_move_command: - fn = functools.partial( - move_and_rebuild, host, new, semaphore, wipe - ) + fn = functools.partial(move_and_rebuild, host, new, semaphore, wipe) tasks.append(fn) omits = conf.get("omit_network_move") omit = False if omits: omits = omits.split(",") - omit = [ - omit - for omit in omits - if omit in host or omit == new - ] + omit = [omit for omit in omits if omit in host or omit == new] if not omit: - switch_tasks.append( - functools.partial( - switch_config, host, current, new - ) - ) + switch_tasks.append(functools.partial(switch_config, host, current, new)) else: if wipe: subprocess.check_call( @@ -1747,27 +1613,19 @@ def action_movehosts(self): # pragma: no cover ) except Exception as ex: self.logger.debug(ex) - self.logger.exception( - "Move command failed for host: %s" % host - ) + self.logger.exception("Move command failed for host: %s" % host) provisioned = False if not self.cli_args.get("dryrun"): try: _old_cloud_obj = self.quads.get_cloud(results[0]["current"]) - old_cloud_schedule = self.quads.get_current_schedules( - {"cloud": _old_cloud_obj.name} - ) + old_cloud_schedule = self.quads.get_current_schedules({"cloud": _old_cloud_obj.name}) if not old_cloud_schedule and _old_cloud_obj.name != "cloud01": - _old_ass_cloud_obj = self.quads.get_active_cloud_assignment( - _old_cloud_obj.name - ) + _old_ass_cloud_obj = self.quads.get_active_cloud_assignment(_old_cloud_obj.name) if _old_ass_cloud_obj: payload = {"active": False} - self.quads.update_assignment( - _old_ass_cloud_obj.id, payload - ) + self.quads.update_assignment(_old_ass_cloud_obj.id, payload) except ( APIServerException, APIBadRequest, @@ -1777,15 +1635,11 @@ def action_movehosts(self): # pragma: no cover done = None loop = asyncio.get_event_loop() loop.set_exception_handler( - lambda _loop, ctx: self.logger.error( - f"Caught exception: {ctx['message']}" - ) + lambda _loop, ctx: self.logger.error(f"Caught exception: {ctx['message']}") ) try: - 
done = loop.run_until_complete( - asyncio.gather(*[task(loop) for task in tasks]) - ) + done = loop.run_until_complete(asyncio.gather(*[task(loop) for task in tasks])) except ( asyncio.CancelledError, SystemExit, @@ -1805,9 +1659,7 @@ def action_movehosts(self): # pragma: no cover continue if not host_obj.switch_config_applied: - self.logger.info( - f"Running switch config for {task.args[0]}" - ) + self.logger.info(f"Running switch config for {task.args[0]}") try: result = task() @@ -1819,9 +1671,7 @@ def action_movehosts(self): # pragma: no cover if result: try: - self.quads.update_host( - task.args[0], {"switch_config_applied": True} - ) + self.quads.update_host(task.args[0], {"switch_config_applied": True}) except ( APIServerException, APIBadRequest, @@ -1829,9 +1679,7 @@ def action_movehosts(self): # pragma: no cover self.logger.exception(str(ex)) continue else: - self.logger.exception( - "There was something wrong configuring the switch." - ) + self.logger.exception("There was something wrong configuring the switch.") if done: for future in done: @@ -1843,9 +1691,7 @@ def action_movehosts(self): # pragma: no cover if provisioned: try: _new_cloud_obj = self.quads.get_cloud(_cloud) - assignment = self.quads.get_active_cloud_assignment( - _new_cloud_obj.name - ) + assignment = self.quads.get_active_cloud_assignment(_new_cloud_obj.name) if assignment: validate = not assignment.wipe self.quads.update_assignment( @@ -1870,9 +1716,7 @@ def action_mark_broken(self): raise CliException(str(ex)) if host.broken: - self.logger.warning( - f"Host {self.cli_args['host']} has already been marked broken" - ) + self.logger.warning(f"Host {self.cli_args['host']} has already been marked broken") else: try: self.quads.update_host(self.cli_args.get("host"), {"broken": True}) @@ -1890,9 +1734,7 @@ def action_mark_repaired(self): raise CliException(str(ex)) if not host.broken: - self.logger.warning( - f"Host {self.cli_args['host']} has already been marked repaired" - ) + self.logger.warning(f"Host {self.cli_args['host']} has already been marked repaired") else: try: self.quads.update_host(self.cli_args.get("host"), {"broken": False}) @@ -1910,9 +1752,7 @@ def action_retire(self): raise CliException(str(ex)) if host.retired: - self.logger.warning( - f"Host {self.cli_args['host']} has already been marked as retired" - ) + self.logger.warning(f"Host {self.cli_args['host']} has already been marked as retired") else: try: self.quads.update_host(self.cli_args.get("host"), {"retired": True}) @@ -1930,9 +1770,7 @@ def action_unretire(self): raise CliException(str(ex)) if not host.retired: - self.logger.warning( - f"Host {self.cli_args['host']} has already been marked unretired" - ) + self.logger.warning(f"Host {self.cli_args['host']} has already been marked unretired") else: try: self.quads.update_host(self.cli_args.get("host"), {"retired": False}) @@ -1948,9 +1786,7 @@ def action_host(self): _kwargs = {"host": host.name} if self.cli_args.get("datearg"): - datetime_obj = datetime.strptime( - self.cli_args.get("datearg"), "%Y-%m-%d %H:%M" - ) + datetime_obj = datetime.strptime(self.cli_args.get("datearg"), "%Y-%m-%d %H:%M") datearg_iso = datetime_obj.isoformat() date_str = ":".join(datearg_iso.split(":")[:-1]) _kwargs["date"] = date_str @@ -1972,9 +1808,7 @@ def action_cloudonly(self): _kwargs = {"cloud": _cloud.name} if self.cli_args.get("datearg"): - _kwargs["date"] = datetime.strptime( - self.cli_args.get("datearg"), "%Y-%m-%d %H:%M" - ).isoformat()[:-3] + _kwargs["date"] = 
datetime.strptime(self.cli_args.get("datearg"), "%Y-%m-%d %H:%M").isoformat()[:-3]
         schedules = self.quads.get_current_schedules(_kwargs)
         if schedules:
             host_kwargs = {"retired": False}
@@ -1998,9 +1832,7 @@ def action_cloudonly(self):
                 available_hosts = self.quads.filter_available(data)
             except (APIServerException, APIBadRequest) as ex:  # pragma: no cover
                 self.logger.debug(str(ex))
-                raise CliException(
-                    "Could not connect to the quads-server, verify service is up and running."
-                )
+                raise CliException("Could not connect to the quads-server, verify service is up and running.")
 
             host_kwargs = {}
             if self.cli_args.get("filter"):
@@ -2027,9 +1859,7 @@ def action_cloudonly(self):
     def action_summary(self):
         _kwargs = {}
         if self.cli_args.get("datearg"):
-            datearg_obj = datetime.strptime(
-                self.cli_args.get("datearg"), "%Y-%m-%d %H:%M"
-            )
+            datearg_obj = datetime.strptime(self.cli_args.get("datearg"), "%Y-%m-%d %H:%M")
             datearg_iso = datearg_obj.isoformat()
             date_str = ":".join(datearg_iso.split(":")[:-1])
             _kwargs["date"] = date_str
@@ -2050,9 +1880,7 @@ def action_summary(self):
                     f"{cloud_name} ({cloud_owner}): {cloud_count} ({cloud_description}) - {cloud_ticket}"
                 )
             else:
-                self.logger.info(
-                    f"{cloud_name}: {cloud_count} ({cloud_description})"
-                )
+                self.logger.info(f"{cloud_name}: {cloud_count} ({cloud_description})")
 
     def action_regen_instack(self):
         regen_instack()
diff --git a/src/quads/server/blueprints/assignments.py b/src/quads/server/blueprints/assignments.py
index e5f7d8c6..cbff159d 100644
--- a/src/quads/server/blueprints/assignments.py
+++ b/src/quads/server/blueprints/assignments.py
@@ -221,39 +221,46 @@ def update_assignment(assignment_id: str) -> Response:
     obj_attrs = inspect(Assignment).mapper.attrs
     update_fields = {}
 
-    for attr in obj_attrs:
-        value = data.get(attr.key)
-        if value is not None or attr.key == "vlan":
-            if attr.key == "ccuser":
-                value = re.split(r"[, ]+", value)
-                value = [user.strip() for user in value]
-            if attr.key == "cloud":
-                _cloud = CloudDao.get_cloud(value)
-                if not _cloud:
-                    response = {
-                        "status_code": 400,
-                        "error": "Bad Request",
-                        "message": f"Cloud not found: {value}",
-                    }
-                    return make_response(jsonify(response), 400)
-                value = _cloud
-            if attr.key == "vlan":
-                if value is None:
-                    update_fields[attr.key] = value
-                    continue
-                _vlan = VlanDao.get_vlan(value)
+    for _key, _value in data.items():
+        value = _value
+        if _key not in [attr.key for attr in obj_attrs]:
+            response = {
+                "status_code": 400,
+                "error": "Bad Request",
+                "message": f"Invalid argument: {_key}",
+            }
+            return make_response(jsonify(response), 400)
+        if _key == "ccuser":
+            value = re.split(r"[, ]+", _value)
+            value = [user.strip() for user in value]
+        if _key == "cloud":
+            _cloud = CloudDao.get_cloud(_value)
+            if not _cloud:
+                response = {
+                    "status_code": 400,
+                    "error": "Bad Request",
+                    "message": f"Cloud not found: {_value}",
+                }
+                return make_response(jsonify(response), 400)
+            value = _cloud
+        if _key == "vlan":
+            kambiz_none_values = ["none", "0", "no", "nada", "clear"]
+            value = None
+            if _value and str(_value).lower() not in kambiz_none_values:
+                _vlan = VlanDao.get_vlan(_value)
                 if not _vlan:
                     response = {
                         "status_code": 400,
                         "error": "Bad Request",
-                        "message": f"Vlan not found: {value}",
+                        "message": f"Vlan not found: {_value}, for clearing use any of: {kambiz_none_values}",
                     }
                     return make_response(jsonify(response), 400)
                 value = _vlan
-        if type(value) is str:
-            if value.lower() in ["true", "false"]:
-                value = eval(value.lower().capitalize())
-        update_fields[attr.key] = value
+        if type(_value) is str:
+            if _value.lower() in ["true", "false"]:
+                value = eval(_value.lower().capitalize())
+        update_fields[_key] = value
+
     for key, value in update_fields.items():
         setattr(assignment_obj, key, value)
 
diff --git a/src/quads/server/dao/cloud.py b/src/quads/server/dao/cloud.py
index e0f9974f..de1137ec 100644
--- a/src/quads/server/dao/cloud.py
+++ b/src/quads/server/dao/cloud.py
@@ -72,27 +72,20 @@ def get_clouds() -> List[Cloud]:
     @staticmethod
     def get_free_clouds() -> List[Cloud]:
-        future_schedule_subquery = (
-            select(Schedule.id)
-            .join(Assignment, Schedule.assignment_id == Assignment.id)
-            .filter(Assignment.cloud_id == Cloud.id, Schedule.start > datetime.now())
-            .exists()
+        active_or_future_schedules = (
+            select(Cloud.id)
+            .join(Assignment)
+            .join(Schedule)
+            .filter(or_(Schedule.end > datetime.now(), Schedule.start > datetime.now()))
+            .distinct()
         )
 
         free_clouds = (
             db.session.query(Cloud)
-            .outerjoin(Assignment, Cloud.id == Assignment.cloud_id)
-            .outerjoin(Schedule, Assignment.id == Schedule.assignment_id)
-            .filter(
-                Cloud.name != Config["spare_pool_name"],
-                or_(
-                    Schedule.end <= datetime.now(),
-                    Assignment.id == None,
-                    Schedule.id == None,
-                ),
-                ~future_schedule_subquery,
-            )
-            .order_by(Cloud.name.asc())
-            .distinct()
+            .outerjoin(Assignment)
+            .outerjoin(Schedule)
+            .filter(~Cloud.id.in_(active_or_future_schedules))
+            .filter(Cloud.name != Config["spare_pool_name"])
+            .order_by(Cloud.name)
             .all()
         )
         return free_clouds
diff --git a/tests/api/test_assignment.py b/tests/api/test_assignment.py
index d2102fae..1ec66821 100644
--- a/tests/api/test_assignment.py
+++ b/tests/api/test_assignment.py
@@ -77,10 +77,7 @@ def test_invalid_cloud_not_found(self, test_client, auth, prefill):
         )
         assert response.status_code == 400
         assert response.json["error"] == "Bad Request"
-        assert (
-            response.json["message"]
-            == f"Cloud not found: {assignment_request['cloud']}"
-        )
+        assert response.json["message"] == f"Cloud not found: {assignment_request['cloud']}"
 
     @pytest.mark.parametrize("prefill", prefill_settings, indirect=True)
     def test_invalid_vlan_not_found(self, test_client, auth, prefill):
@@ -101,9 +98,7 @@ def test_invalid_vlan_not_found(self, test_client, auth, prefill):
         )
         assert response.status_code == 400
         assert response.json["error"] == "Bad Request"
-        assert (
-            response.json["message"] == f"Vlan not found: {assignment_request['vlan']}"
-        )
+        assert response.json["message"] == f"Vlan not found: {assignment_request['vlan']}"
 
     @pytest.mark.parametrize("prefill", prefill_settings, indirect=True)
     def test_valid(self, test_client, auth, prefill):
@@ -125,12 +120,8 @@ def test_valid(self, test_client, auth, prefill):
             )
         )
         assignment_response["created_at"] = response.json["created_at"]
-        assignment_response["cloud"]["last_redefined"] = response.json["cloud"][
-            "last_redefined"
-        ]
-        duration = datetime.utcnow() - datetime.strptime(
-            response.json["created_at"], "%a, %d %b %Y %H:%M:%S GMT"
-        )
+        assignment_response["cloud"]["last_redefined"] = response.json["cloud"]["last_redefined"]
+        duration = datetime.utcnow() - datetime.strptime(response.json["created_at"], "%a, %d %b %Y %H:%M:%S GMT")
         assert duration.total_seconds() < 5
         assert response.status_code == 200
         assert response.json == assignment_response
@@ -152,10 +143,7 @@ def test_invalid_already_assigned_cloud(self, test_client, auth, prefill):
         )
         assert response.status_code == 400
         assert response.json["error"] == "Bad Request"
-        assert (
-            response.json["message"]
-            == f"There is an already active assignment for {ASSIGNMENT_1_REQUEST['cloud']}"
-        )
+        assert response.json["message"] == f"There is an already active assignment for {ASSIGNMENT_1_REQUEST['cloud']}"
 
 
 class TestReadAssignment:
@@ -176,9 +164,7 @@ def test_invalid_assignment_not_found(self, test_client, auth, prefill):
         )
         assert response.status_code == 400
         assert response.json["error"] == "Bad Request"
-        assert (
-            response.json["message"] == f"Assignment not found: {invalid_assignment_id}"
-        )
+        assert response.json["message"] == f"Assignment not found: {invalid_assignment_id}"
 
     @pytest.mark.parametrize("prefill", prefill_settings, indirect=True)
     def test_valid(self, test_client, auth, prefill):
@@ -195,9 +181,7 @@ def test_valid(self, test_client, auth, prefill):
             )
         )
         assignment_response = ASSIGNMENT_1_RESPONSE.copy()
-        assignment_response["cloud"]["last_redefined"] = response.json["cloud"][
-            "last_redefined"
-        ]
+        assignment_response["cloud"]["last_redefined"] = response.json["cloud"]["last_redefined"]
         assignment_response["created_at"] = response.json["created_at"]
         assert response.status_code == 200
         assert response.json == assignment_response
@@ -221,9 +205,7 @@ def test_valid_all(self, test_client, auth, prefill):
             ASSIGNMENT_2_RESPONSE.copy(),
         ]
         for resp, assignment_response in zip(response.json, assignment_responses):
-            assignment_response["cloud"]["last_redefined"] = resp["cloud"][
-                "last_redefined"
-            ]
+            assignment_response["cloud"]["last_redefined"] = resp["cloud"]["last_redefined"]
             assignment_response["created_at"] = resp["created_at"]
         assert response.status_code == 200
         assert response.json == assignment_responses
@@ -243,9 +225,7 @@ def test_valid_cloud(self, test_client, auth, prefill):
             )
         )
         assignment_response = ASSIGNMENT_1_RESPONSE.copy()
-        assignment_response["cloud"]["last_redefined"] = response.json["cloud"][
-            "last_redefined"
-        ]
+        assignment_response["cloud"]["last_redefined"] = response.json["cloud"]["last_redefined"]
         assignment_response["created_at"] = response.json["created_at"]
         assert response.status_code == 200
         assert response.json == assignment_response
@@ -288,9 +268,7 @@ def test_valid_active_all(self, test_client, auth, prefill):
             ASSIGNMENT_2_RESPONSE.copy(),
         ]
         for resp, assignment_response in zip(response.json, assignment_responses):
-            assignment_response["cloud"]["last_redefined"] = resp["cloud"][
-                "last_redefined"
-            ]
+            assignment_response["cloud"]["last_redefined"] = resp["cloud"]["last_redefined"]
             assignment_response["created_at"] = resp["created_at"]
         assert response.status_code == 200
         assert response.json == assignment_responses
@@ -333,9 +311,7 @@ def test_invalid_assignment_not_found(self, test_client, auth, prefill):
         )
         assert response.status_code == 400
         assert response.json["error"] == "Bad Request"
-        assert (
-            response.json["message"] == f"Assignment not found: {invalid_assignment_id}"
-        )
+        assert response.json["message"] == f"Assignment not found: {invalid_assignment_id}"
 
     @pytest.mark.parametrize("prefill", prefill_settings, indirect=True)
     def test_invalid_cloud_not_found(self, test_client, auth, prefill):
@@ -375,7 +351,10 @@ def test_invalid_vlan_not_found(self, test_client, auth, prefill):
         )
         assert response.status_code == 400
         assert response.json["error"] == "Bad Request"
-        assert response.json["message"] == f"Vlan not found: {invalid_vlan_id}"
+        assert (
+            response.json["message"]
+            == f"Vlan not found: {invalid_vlan_id}, for clearing use any of: ['none', '0', 'no', 'nada', 'clear']"
+        )
 
     @pytest.mark.parametrize("prefill", prefill_settings, indirect=True)
     def test_valid(self, test_client, auth, prefill):
@@ -393,9 +372,7 @@ def test_valid(self, test_client, auth, prefill):
             )
         )
         assignment_response = ASSIGNMENT_1_UPDATE_RESPONSE.copy()
-        assignment_response["cloud"]["last_redefined"] = response.json["cloud"][
-            "last_redefined"
-        ]
+        assignment_response["cloud"]["last_redefined"] = response.json["cloud"]["last_redefined"]
         assignment_response["created_at"] = response.json["created_at"]
         assert response.status_code == 200
         assert response.json == assignment_response
@@ -439,9 +416,7 @@ def test_invalid_assignment_not_found(self, test_client, auth, prefill):
         )
         assert response.status_code == 400
         assert response.json["error"] == "Bad Request"
-        assert (
-            response.json["message"] == f"Assignment not found: {invalid_assignment_id}"
-        )
+        assert response.json["message"] == f"Assignment not found: {invalid_assignment_id}"
 
     @pytest.mark.parametrize("prefill", prefill_settings, indirect=True)
     def test_valid(self, test_client, auth, prefill):
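
A minimal usage sketch (not part of the patch itself) of the vlan-clearing behaviour introduced in update_assignment() above. The import paths, the QuadsApi constructor, and the cloud name "cloud04" are assumptions for illustration; the accepted clearing values and the two client calls come from this diff.

    # Sketch only: import paths, constructor and cloud name are placeholders.
    from quads.config import Config as conf
    from quads.quads_api import QuadsApi

    quads = QuadsApi(conf)
    assignment = quads.get_active_cloud_assignment("cloud04")  # hypothetical cloud
    # Any of "none", "0", "no", "nada" or "clear" now clears the vlan instead of
    # failing with "Vlan not found"; unknown payload keys are rejected with a 400.
    quads.update_assignment(assignment.id, {"vlan": "none"})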