From f10aa302feecc98e787c35007c3d0cbd5a2725f7 Mon Sep 17 00:00:00 2001 From: Paras Jain Date: Sun, 6 Mar 2022 11:29:58 -0800 Subject: [PATCH 1/2] Updated plotting with new baseline results (#154) * Updated plotting * Rename file * Partial DataSync data * Format notebook * Add new data for baseline (overlay) * format * Clear output * revert * revert --- ...b => 01-04-2022_plot_udp_throughput.ipynb} | 0 ....ipynb => 02-04-22_baseline_results.ipynb} | 0 nb/02-10-22_baseline_results_replot.ipynb | 443 ++++++++++++++++++ scripts/experiment.sh | 8 +- .../experiments/datasync_random_overlay.sh | 21 + skylark/compute/aws/aws_server.py | 11 +- 6 files changed, 475 insertions(+), 8 deletions(-) rename nb/{01-04-2021_plot_udp_throughput.ipynb => 01-04-2022_plot_udp_throughput.ipynb} (100%) rename nb/{02-04-22-baseline-results.ipynb => 02-04-22_baseline_results.ipynb} (100%) create mode 100644 nb/02-10-22_baseline_results_replot.ipynb create mode 100644 scripts/experiments/datasync_random_overlay.sh diff --git a/nb/01-04-2021_plot_udp_throughput.ipynb b/nb/01-04-2022_plot_udp_throughput.ipynb similarity index 100% rename from nb/01-04-2021_plot_udp_throughput.ipynb rename to nb/01-04-2022_plot_udp_throughput.ipynb diff --git a/nb/02-04-22-baseline-results.ipynb b/nb/02-04-22_baseline_results.ipynb similarity index 100% rename from nb/02-04-22-baseline-results.ipynb rename to nb/02-04-22_baseline_results.ipynb diff --git a/nb/02-10-22_baseline_results_replot.ipynb b/nb/02-10-22_baseline_results_replot.ipynb new file mode 100644 index 000000000..804957bba --- /dev/null +++ b/nb/02-10-22_baseline_results_replot.ipynb @@ -0,0 +1,443 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "\n", + "from pathlib import Path\n", + "\n", + "from skylark import skylark_root\n", + "\n", + "data_dir = skylark_root / \"data\"\n", + "figure_dir = data_dir / \"figures\" / \"baseline_plots\"\n", + "figure_dir.mkdir(exist_ok=True, parents=True)\n", + "\n", + "plt.style.use(\"seaborn-colorblind\")\n", + "plt.set_cmap(\"plasma\")\n", + "\n", + "data = []\n", + "data.append(\n", + " dict(\n", + " fig_idx=0,\n", + " src_provider=\"gcp\",\n", + " src_region=\"europe-north1\",\n", + " dst_provider=\"gcp\",\n", + " dst_region=\"us-west4\",\n", + " gcp_cloudtransfer_time=105,\n", + " skylark_overlay_time=57.4,\n", + " skylark_random_time=12.4,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=0,\n", + " src_provider=\"aws\",\n", + " src_region=\"ap-northeast-2\",\n", + " dst_provider=\"gcp\",\n", + " dst_region=\"us-central1\",\n", + " gcp_cloudtransfer_time=308,\n", + " skylark_overlay_time=61.4,\n", + " skylark_random_time=16.3,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=0,\n", + " src_provider=\"aws\",\n", + " src_region=\"us-east-1\",\n", + " dst_provider=\"gcp\",\n", + " dst_region=\"us-west4\",\n", + " gcp_cloudtransfer_time=284,\n", + " skylark_overlay_time=55.2,\n", + " skylark_random_time=21.1,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=0,\n", + " src_provider=\"azure\",\n", + " src_region=\"koreacentral\",\n", + " dst_provider=\"gcp\",\n", + " dst_region=\"na-northeast2\",\n", + " gcp_cloudtransfer_gbps=2.58,\n", + " skylark_overlay_gbps=8.76,\n", + " skylark_random_gbps=46.1,\n", + " gcp_cloudtransfer_time=70 * 8 / 2.58,\n", + " skylark_overlay_time=70 * 8 / 
8.76,\n", + " skylark_random_time=70 * 8 / 46.1,\n", + " )\n", + ")\n", + "data.append(\n", + " dict(\n", + " fig_idx=1,\n", + " src_provider=\"aws\",\n", + " src_region=\"ap-northeast-2\",\n", + " dst_provider=\"gcp\",\n", + " dst_region=\"us-central1\",\n", + " gcp_cloudtransfer_gbps=1.89,\n", + " skylark_overlay_gbps=9.07,\n", + " skylark_random_gbps=35.1,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=1,\n", + " src_provider=\"gcp\",\n", + " src_region=\"europe-north1\",\n", + " dst_provider=\"gcp\",\n", + " dst_region=\"us-west4\",\n", + " gcp_cloudtransfer_gbps=5.57,\n", + " skylark_overlay_gbps=9.71,\n", + " skylark_random_gbps=46.1,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=1,\n", + " src_provider=\"aws\",\n", + " src_region=\"us-east-1\",\n", + " dst_provider=\"gcp\",\n", + " dst_region=\"us-west4\",\n", + " gcp_cloudtransfer_gbps=2.05,\n", + " skylark_overlay_gbps=10.1,\n", + " skylark_random_gbps=27.4,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=1,\n", + " src_provider=\"gcp\",\n", + " src_region=\"europe-north1\",\n", + " dst_provider=\"gcp\",\n", + " dst_region=\"us-west4\",\n", + " gcp_cloudtransfer_gbps=5.57,\n", + " skylark_overlay_gbps=9.71,\n", + " skylark_random_gbps=46.1,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=1,\n", + " src_provider=\"aws\",\n", + " src_region=\"ap-northeast-2\",\n", + " dst_provider=\"gcp\",\n", + " dst_region=\"us-central1\",\n", + " gcp_cloudtransfer_gbps=1.89,\n", + " skylark_overlay_gbps=9.07,\n", + " skylark_random_gbps=35.1,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=1,\n", + " src_provider=\"aws\",\n", + " src_region=\"us-east-1\",\n", + " dst_provider=\"gcp\",\n", + " dst_region=\"us-west4a\",\n", + " gcp_cloudtransfer_gbps=2.05,\n", + " skylark_overlay_gbps=10.1,\n", + " skylark_random_gbps=27.4,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=2,\n", + " src_provider=\"aws\",\n", + " src_region=\"us-east-1\",\n", + " dst_provider=\"aws\",\n", + " dst_region=\"us-west-2\",\n", + " aws_datasync_gbps=3.80,\n", + " skylark_overlay_gbps=10.1,\n", + " skylark_random_gbps=17.04309619,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=2,\n", + " src_provider=\"aws\",\n", + " src_region=\"eu-north-1\",\n", + " dst_provider=\"aws\",\n", + " dst_region=\"us-west-2\",\n", + " aws_datasync_gbps=68 * 8 / 110,\n", + " skylark_overlay_gbps=8.771801195,\n", + " skylark_random_gbps=14.29612542,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=2,\n", + " src_provider=\"aws\",\n", + " src_region=\"ap-northeast-2\",\n", + " dst_provider=\"aws\",\n", + " dst_region=\"ca-central-1\",\n", + " aws_datasync_gbps=3.05,\n", + " skylark_overlay_gbps=8.783821139,\n", + " skylark_random_gbps=18.54111491,\n", + " )\n", + ")\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=2,\n", + " src_provider=\"aws\",\n", + " src_region=\"ap-northeast-2\",\n", + " dst_provider=\"aws\",\n", + " dst_region=\"us-west-2\",\n", + " aws_datasync_gbps=3.09,\n", + " skylark_overlay_gbps=8.97258181,\n", + " skylark_random_gbps=17.36482038,\n", + " )\n", + ")\n", + "\n", + "# REGION NOT SUPPORTED\n", + "# data.append(dict(\n", + "# fig_idx=2,\n", + "# src_provider='aws',\n", + "# src_region='aws:ap-southeast-2',\n", + "# dst_provider='aws',\n", + "# dst_region='af-south-1',\n", + "# aws_datasync_gbps=-1,\n", + "# 
skylark_overlay_gbps=-1,\n", + "# skylark_random_gbps=-1, # TODO\n", + "# ))\n", + "\n", + "data.append(\n", + " dict(\n", + " fig_idx=2,\n", + " src_provider=\"aws\",\n", + " src_region=\"ap-southeast-2\",\n", + " dst_provider=\"aws\",\n", + " dst_region=\"eu-west-3\",\n", + " aws_datasync_gbps=2.26,\n", + " skylark_overlay_gbps=10.32025699,\n", + " skylark_random_gbps=18.52570443,\n", + " )\n", + ")\n", + "\n", + "\n", + "df = pd.DataFrame(data)\n", + "\n", + "# normalize source provider (aws => AWS, azure => Azure, gcp => GCP)\n", + "map_provider = lambda x: {\"aws\": \"AWS\", \"azure\": \"Azure\", \"gcp\": \"GCP\"}[x]\n", + "df[\"src_provider\"] = df[\"src_provider\"].map(map_provider)\n", + "df[\"dst_provider\"] = df[\"dst_provider\"].map(map_provider)\n", + "bar_label = lambda x: f\"{x['src_provider']} {x['src_region']}\\nto {x['dst_provider']} {x['dst_region']}\"\n", + "df[\"bar_label\"] = df.apply(bar_label, axis=1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df_fig0 = df[df.fig_idx == 0].sort_values(by=[\"gcp_cloudtransfer_time\"], ascending=False)\n", + "\n", + "# colors = plt.rcParams[\"axes.prop_cycle\"].by_key()[\"color\"]\n", + "# colors = [\"#FFB000\", \"#dc267f\", \"#648fff\", \"#785ef0\", \"#fe6100\"]\n", + "cblue = \"#4589ff\"\n", + "cpurple = \"#785ef0\"\n", + "cmagenta = \"#dc267f\"\n", + "corange = \"#fe6100\"\n", + "cyellow = \"#ffb000\"\n", + "cgreen = \"#24a148\"\n", + "\n", + "with plt.style.context({\"figure.figsize\": (6, 5), \"font.size\": 14}):\n", + " fig, ax = plt.subplots()\n", + " # ax.set_ylabel(\"Transfer Regions\", fontsize=14, fontweight='bold')\n", + " ax.set_xlabel(\"Transfer Time (s)\", fontsize=16, fontweight=\"bold\")\n", + " fig.set_facecolor(\"w\")\n", + "\n", + " width = 0.25\n", + " labels = list(reversed([\"GCP Data\\nTransfer\", \"Skylark\", \"Skylark\\n(overlay only)\"]))\n", + " for x, (_, row) in enumerate(df_fig0.iterrows()):\n", + " points = reversed([row[\"gcp_cloudtransfer_time\"], row[\"skylark_overlay_time\"], row[\"skylark_random_time\"]])\n", + " cmap = reversed([cmagenta, cyellow, cblue])\n", + " for idx, (point, label, color) in list(enumerate(zip(points, labels, cmap))):\n", + " if x > 0:\n", + " label = \"_nolegend_\"\n", + " ax.barh(x + idx * width, point, width, label=label, color=color, edgecolor=\"black\", alpha=1)\n", + " ax.text(point + 5, x + idx * width, f\"{int(point)}s\".ljust(4), fontsize=12, ha=\"left\", va=\"center\")\n", + " ax.set_xlim(0, df_fig0[\"gcp_cloudtransfer_time\"].max() + 40)\n", + "\n", + " # render bar_label as x-axis label\n", + " ax.set_yticks(np.arange(len(df_fig0)) + width)\n", + " ax.set_yticklabels(df_fig0[\"bar_label\"], rotation=0)\n", + " # show legend over bar chart with 3 columns\n", + " ax.legend(\n", + " *([x[i] for i in [2, 1, 0]] for x in plt.gca().get_legend_handles_labels()),\n", + " loc=\"upper center\",\n", + " bbox_to_anchor=(0.5, 1.2),\n", + " ncol=3,\n", + " frameon=False,\n", + " )\n", + " fig.show()\n", + " fig.savefig(figure_dir / \"fig0.png\", dpi=300, bbox_inches=\"tight\")\n", + " fig.savefig(figure_dir / \"fig0.pdf\", dpi=300, bbox_inches=\"tight\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df_fig1 = df[df.fig_idx == 1].sort_values(by=[\"gcp_cloudtransfer_time\"], ascending=False)\n", + "df_fig1[\"gcp_cloudtransfer_time\"] = 68 * 8 / df_fig1[\"gcp_cloudtransfer_gbps\"]\n", + "df_fig1[\"skylark_overlay_time\"] = 68 * 8 / 
df_fig1[\"skylark_overlay_gbps\"]\n", + "df_fig1[\"skylark_random_time\"] = 68 * 8 / df_fig1[\"skylark_random_gbps\"]\n", + "\n", + "# colors = plt.rcParams[\"axes.prop_cycle\"].by_key()[\"color\"]\n", + "# colors = [\"#FFB000\", \"#dc267f\", \"#648fff\", \"#785ef0\", \"#fe6100\"]\n", + "cblue = \"#4589ff\"\n", + "cpurple = \"#785ef0\"\n", + "cmagenta = \"#dc267f\"\n", + "corange = \"#fe6100\"\n", + "cyellow = \"#ffb000\"\n", + "cgreen = \"#24a148\"\n", + "\n", + "with plt.style.context({\"figure.figsize\": (6, 5), \"font.size\": 14}):\n", + " fig, ax = plt.subplots()\n", + " # ax.set_ylabel(\"Transfer Regions\", fontsize=14, fontweight='bold')\n", + " ax.set_xlabel(\"Transfer Time (s)\", fontsize=16, fontweight=\"bold\")\n", + " fig.set_facecolor(\"w\")\n", + "\n", + " width = 0.25\n", + " labels = list(reversed([\"GCP Data\\nTransfer\", \"Skylark\", \"Skylark\\n(overlay only)\"]))\n", + " for x, (_, row) in enumerate(df_fig1.iterrows()):\n", + " points = reversed([row[\"gcp_cloudtransfer_time\"], row[\"skylark_overlay_time\"], row[\"skylark_random_time\"]])\n", + " cmap = reversed([cmagenta, cyellow, cblue])\n", + " for idx, (point, label, color) in list(enumerate(zip(points, labels, cmap))):\n", + " if np.isnan(point):\n", + " print(f\"NaN: {row['src_provider']} {row['src_region']} to {row['dst_provider']} {row['dst_region']}\")\n", + " continue\n", + " if x > 0:\n", + " label = \"_nolegend_\"\n", + " ax.barh(x + idx * width, point, width, label=label, color=color, edgecolor=\"black\", alpha=1)\n", + " ax.text(point + 5, x + idx * width, f\"{int(point)}s\".ljust(4), fontsize=12, ha=\"left\", va=\"center\")\n", + " ax.set_xlim(0, df_fig1[\"gcp_cloudtransfer_time\"].max() + 40)\n", + "\n", + " # render bar_label as x-axis label\n", + " ax.set_yticks(np.arange(len(df_fig1)) + width)\n", + " ax.set_yticklabels(df_fig1[\"bar_label\"], rotation=0)\n", + " # show legend over bar chart with 3 columns\n", + " ax.legend(\n", + " *([x[i] for i in [2, 1, 0]] for x in plt.gca().get_legend_handles_labels()),\n", + " loc=\"upper center\",\n", + " bbox_to_anchor=(0.5, 1.2),\n", + " ncol=3,\n", + " frameon=False,\n", + " )\n", + " fig.show()\n", + " fig.savefig(figure_dir / \"fig0.png\", dpi=300, bbox_inches=\"tight\")\n", + " fig.savefig(figure_dir / \"fig0.pdf\", dpi=300, bbox_inches=\"tight\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df_fig2 = df[df.fig_idx == 2].sort_values(by=[\"aws_datasync_gbps\"], ascending=True)\n", + "df_fig2[\"aws_datasync_time\"] = 68 * 8 / df_fig2[\"aws_datasync_gbps\"]\n", + "df_fig2[\"skylark_overlay_time\"] = 68 * 8 / df_fig2[\"skylark_overlay_gbps\"]\n", + "df_fig2[\"skylark_random_time\"] = 68 * 8 / df_fig2[\"skylark_random_gbps\"]\n", + "\n", + "# colors = plt.rcParams[\"axes.prop_cycle\"].by_key()[\"color\"]\n", + "# colors = [\"#FFB000\", \"#dc267f\", \"#648fff\", \"#785ef0\", \"#fe6100\"]\n", + "cblue = \"#4589ff\"\n", + "cpurple = \"#785ef0\"\n", + "cmagenta = \"#dc267f\"\n", + "corange = \"#fe6100\"\n", + "cyellow = \"#ffb000\"\n", + "cgreen = \"#24a148\"\n", + "\n", + "with plt.style.context({\"figure.figsize\": (6, 5), \"font.size\": 14}):\n", + " fig, ax = plt.subplots()\n", + " # ax.set_ylabel(\"Transfer Regions\", fontsize=14, fontweight='bold')\n", + " ax.set_xlabel(\"Transfer Time (s)\", fontsize=16, fontweight=\"bold\")\n", + " fig.set_facecolor(\"w\")\n", + "\n", + " width = 0.25\n", + " labels = list(reversed([\"AWS DataSync\", \"Skylark\", \"Skylark\\n(overlay only)\"]))\n", + " 
for x, (_, row) in enumerate(df_fig2.iterrows()):\n", + " points = reversed([row[\"aws_datasync_time\"], row[\"skylark_overlay_time\"], row[\"skylark_random_time\"]])\n", + " cmap = reversed([cmagenta, cyellow, cblue])\n", + " for idx, (point, label, color) in list(enumerate(zip(points, labels, cmap))):\n", + " if np.isnan(point):\n", + " print(f\"NaN: {row['src_provider']} {row['src_region']} to {row['dst_provider']} {row['dst_region']}\")\n", + " continue\n", + " if x > 0:\n", + " label = \"_nolegend_\"\n", + " ax.barh(x + idx * width, point, width, label=label, color=color, edgecolor=\"black\", alpha=1)\n", + " ax.text(point + 5, x + idx * width, f\"{int(point)}s\".ljust(4), fontsize=12, ha=\"left\", va=\"center\")\n", + " ax.set_xlim(0, df_fig2[\"aws_datasync_time\"].max() + 35)\n", + "\n", + " # render bar_label as x-axis label\n", + " ax.set_yticks(np.arange(len(df_fig2)) + width)\n", + " ax.set_yticklabels(df_fig2[\"bar_label\"], rotation=0)\n", + " # show legend over bar chart with 3 columns\n", + " ax.legend(\n", + " *([x[i] for i in [2, 1, 0]] for x in plt.gca().get_legend_handles_labels()),\n", + " loc=\"upper center\",\n", + " bbox_to_anchor=(0.5, 1.2),\n", + " ncol=3,\n", + " frameon=False,\n", + " )\n", + " fig.show()\n", + " fig.savefig(figure_dir / \"fig2.png\", dpi=300, bbox_inches=\"tight\")\n", + " fig.savefig(figure_dir / \"fig2.pdf\", dpi=300, bbox_inches=\"tight\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "interpreter": { + "hash": "e615813c5489b1590d5f8b2d596a39a5f3baf5ccbb6dde5ecf5546914cc6cb8b" + }, + "kernelspec": { + "display_name": "Python 3.8.12 ('base')", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/scripts/experiment.sh b/scripts/experiment.sh index 930d71caa..3c93094cd 100755 --- a/scripts/experiment.sh +++ b/scripts/experiment.sh @@ -59,10 +59,10 @@ cp ${filename} data/results/${experiment} ## run replication (random) #skylark replicate-json ${filename} \ -# --gcp-project skylark-sarah \ -# --use-random-data \ -# --size-total-mb 73728 \ -# --n-chunks 1152 > data/results/${experiment}/random-logs.txt +# --gcp-project skylark-sarah \ +# --use-random-data \ +# --size-total-mb 73728 \ +# --n-chunks 1152 &> data/results/${experiment}/random-logs.txt #tail -1 data/results/${experiment}/random-logs.txt; # run replication (obj store) diff --git a/scripts/experiments/datasync_random_overlay.sh b/scripts/experiments/datasync_random_overlay.sh new file mode 100644 index 000000000..b03f6ae44 --- /dev/null +++ b/scripts/experiments/datasync_random_overlay.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +set -xe +mkdir -p data/results_datasync_random_overlay + +for pair in "aws:eu-north-1 aws:us-west-2" "aws:ap-northeast-2 aws:ca-central-1" "aws:us-east-1 aws:us-west-2" "aws:ap-northeast-2 aws:us-west-2" "aws:ap-southeast-2 aws:af-south-1" "aws:ap-southeast-2 aws:eu-west-3"; do + src=$(echo ${pair} | cut -d' ' -f1) + dest=$(echo ${pair} | cut -d' ' -f2) + filename=data/results_datasync_random_overlay/plan_${src}_${dest}.json + throughput=25 + max_instance=8 + skylark solver solve-throughput ${src} ${dest} ${throughput} -o ${filename} --max-instances 
${max_instance} + + # run replicate random + skylark replicate-json ${filename} \ + --gcp-project skylark-333700 \ + --use-random-data \ + --size-total-mb 73728 \ + --n-chunks 1152 > data/results_datasync_random_overlay/random_logs_${src}_${dest}.txt + tail -1 data/results_datasync_random_overlay/random_logs_${src}_${dest}.txt +done \ No newline at end of file diff --git a/skylark/compute/aws/aws_server.py b/skylark/compute/aws/aws_server.py index 8055ca3d6..7bed09007 100644 --- a/skylark/compute/aws/aws_server.py +++ b/skylark/compute/aws/aws_server.py @@ -62,15 +62,18 @@ def create_keyfile(): if key_name in keys_in_region: logger.warning(f"Deleting key {key_name} in region {aws_region}") ec2_client.delete_key_pair(KeyName=key_name) - key_pair = ec2.create_key_pair(KeyName=f"skylark-{aws_region}") + key_pair = ec2.create_key_pair(KeyName=f"skylark-{aws_region}", KeyType="rsa") with local_key_file.open("w") as f: - f.write(key_pair.key_material) + key_str = key_pair.key_material + if not key_str.endswith("\n"): + key_str += "\n" + f.write(key_str) f.flush() # sometimes generates keys with zero bytes, so we flush to ensure it's written os.chmod(local_key_file, 0o600) + logger.info(f"Created key file {local_key_file}") if not local_key_file.exists(): create_keyfile() - logger.info(f"Created key file {local_key_file}") return local_key_file @ignore_lru_cache() @@ -113,7 +116,7 @@ def get_ssh_client_impl(self): client.connect( self.public_ip(), username="ec2-user", - key_filename=str(self.local_keyfile), + pkey=paramiko.RSAKey.from_private_key_file(self.local_keyfile), look_for_keys=False, allow_agent=False, banner_timeout=200, From 5d1a748107979cb91b76a0d364f5a48b47d22d53 Mon Sep 17 00:00:00 2001 From: Anton Zabreyko <55911777+antonzabreyko@users.noreply.github.com> Date: Mon, 7 Mar 2022 15:03:45 -0800 Subject: [PATCH 2/2] Added better error handling for disabled regions (#190) --- skylark/compute/aws/aws_cloud_provider.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/skylark/compute/aws/aws_cloud_provider.py b/skylark/compute/aws/aws_cloud_provider.py index 4530e397f..16f820653 100644 --- a/skylark/compute/aws/aws_cloud_provider.py +++ b/skylark/compute/aws/aws_cloud_provider.py @@ -75,7 +75,12 @@ def get_instance_list(self, region: str) -> List[AWSServer]: ec2 = AWSServer.get_boto3_resource("ec2", region) valid_states = ["pending", "running", "stopped", "stopping"] instances = ec2.instances.filter(Filters=[{"Name": "instance-state-name", "Values": valid_states}]) - instance_ids = [i.id for i in instances] + try: + instance_ids = [i.id for i in instances] + except botocore.exceptions.ClientError as e: + logger.error(f"error provisioning in {region}: {e}") + return [] + return [AWSServer(f"aws:{region}", i) for i in instance_ids] def get_security_group(self, region: str, vpc_name="skylark", sg_name="skylark"):
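
For context on the second commit (PATCH 2/2, "Added better error handling for disabled regions"), below is a minimal standalone sketch of the pattern that hunk introduces in aws_cloud_provider.py. boto3 instance collections are evaluated lazily, so a region the account has not enabled only raises once the list comprehension iterates the filtered collection, which is why the try/except wraps the iteration rather than the filter() call. The function name, the example region, and the print-based logging here are illustrative assumptions, not part of the patch.

    import boto3
    import botocore.exceptions

    def list_instance_ids(region: str):
        """Return IDs of non-terminated instances in `region`, or [] if the region is unusable."""
        ec2 = boto3.resource("ec2", region_name=region)
        instances = ec2.instances.filter(
            Filters=[{"Name": "instance-state-name",
                      "Values": ["pending", "running", "stopped", "stopping"]}]
        )
        try:
            # The collection is lazy: a disabled / not-opted-in region only fails here,
            # when iteration issues the underlying DescribeInstances call.
            return [i.id for i in instances]
        except botocore.exceptions.ClientError as exc:
            # Mirrors the patch: log and treat the region as having no instances
            # instead of letting provisioning crash.
            print(f"error listing instances in {region}: {exc}")
            return []

    if __name__ == "__main__":
        # ap-southeast-3 is used purely as an example of an opt-in region.
        print(list_instance_ids("ap-southeast-3"))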