SSH task exit code added to XCOM as 'ssh_exit' key #27370

Merged
merged 10 commits on Dec 4, 2022
14 changes: 10 additions & 4 deletions airflow/providers/ssh/operators/ssh.py
@@ -57,6 +57,9 @@ class SSHOperator(BaseOperator):
        The default is ``False`` but note that `get_pty` is forced to ``True``
        when the `command` starts with ``sudo``.
    :param banner_timeout: timeout to wait for banner from the server in seconds

    If *do_xcom_push* is *True*, the numeric exit code emitted by
    the ssh session is pushed to XCom under key ``ssh_exit``.
    """

    template_fields: Sequence[str] = ("command", "environment", "remote_host")
@@ -134,16 +137,19 @@ def exec_ssh_client_command(self, ssh_client: SSHClient, command: str):
            ssh_client, command, timeout=self.cmd_timeout, environment=self.environment, get_pty=self.get_pty
        )

    def raise_for_status(self, exit_status: int, stderr: bytes) -> None:
    def raise_for_status(self, exit_status: int, stderr: bytes, context=None) -> None:
        if context and self.do_xcom_push:
            ti = context.get("task_instance")
            ti.xcom_push(key="ssh_exit", value=exit_status)
        if exit_status != 0:
            raise AirflowException(f"SSH operator error: exit status = {exit_status}")

    def run_ssh_client_command(self, ssh_client: SSHClient, command: str) -> bytes:
    def run_ssh_client_command(self, ssh_client: SSHClient, command: str, context=None) -> bytes:
        assert self.ssh_hook
        exit_status, agg_stdout, agg_stderr = self.ssh_hook.exec_ssh_client_command(
            ssh_client, command, timeout=self.cmd_timeout, environment=self.environment, get_pty=self.get_pty
        )
        self.raise_for_status(exit_status, agg_stderr)
        self.raise_for_status(exit_status, agg_stderr, context=context)
        return agg_stdout

    def execute(self, context=None) -> bytes | str:
@@ -155,7 +161,7 @@ def execute(self, context=None) -> bytes | str:
        self.get_pty = self.command.startswith("sudo") or self.get_pty

        with self.get_ssh_client() as ssh_client:
            result = self.run_ssh_client_command(ssh_client, self.command)
            result = self.run_ssh_client_command(ssh_client, self.command, context=context)
        enable_pickling = conf.getboolean("core", "enable_xcom_pickling")
        if not enable_pickling:
            result = b64encode(result).decode("utf-8")
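
With this change, a downstream task can inspect the exit code even when the SSH command fails, because raise_for_status pushes to XCom before raising. The DAG below is a minimal, illustrative sketch of that usage against a recent Airflow 2.x environment; the DAG id, task ids, and the my_ssh_conn connection id are assumptions made for the example, not part of this PR.

import pendulum

from airflow import DAG
from airflow.operators.python import PythonOperator
from airflow.providers.ssh.operators.ssh import SSHOperator


def report_exit(ti):
    # "ssh_exit" is the XCom key introduced by this PR; "run_remote" is the
    # illustrative upstream task id used only in this sketch.
    exit_code = ti.xcom_pull(task_ids="run_remote", key="ssh_exit")
    print(f"remote command exited with status {exit_code}")


with DAG(dag_id="ssh_exit_demo", start_date=pendulum.datetime(2022, 12, 1), schedule=None):
    run_remote = SSHOperator(
        task_id="run_remote",
        ssh_conn_id="my_ssh_conn",  # assumed connection id
        command="exit 3",           # a non-zero exit fails the task, but ssh_exit is still pushed
        do_xcom_push=True,
    )
    report = PythonOperator(
        task_id="report_exit",
        python_callable=report_exit,
        trigger_rule="all_done",    # run even if run_remote failed
    )
    run_remote >> report

With trigger_rule="all_done" the reporting task runs regardless of whether the SSH task succeeded, which is the main scenario the new key is useful for.
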
53 changes: 53 additions & 0 deletions tests/providers/ssh/operators/test_ssh.py
@@ -17,15 +17,20 @@
# under the License.
from __future__ import annotations

from random import randrange
from unittest import mock

import pendulum
import pytest
from paramiko.client import SSHClient

from airflow.exceptions import AirflowException
from airflow.models import DAG, DagModel, DagRun, TaskInstance
from airflow.providers.ssh.hooks.ssh import SSHHook
from airflow.providers.ssh.operators.ssh import SSHOperator
from airflow.utils.session import create_session
from airflow.utils.timezone import datetime
from airflow.utils.types import DagRunType
from tests.test_utils.config import conf_vars

TEST_DAG_ID = "unit_tests_ssh_test_op"
@@ -39,6 +44,37 @@
COMMAND_WITH_SUDO = "sudo " + COMMAND


def create_context(task, persist_to_db=False, map_index=None):
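    # Builds a minimal TaskInstance context (optionally persisted to the test database)
    # so operator tests can exercise XCom without running a full scheduler.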
    if task.has_dag():
        dag = task.dag
    else:
        dag = DAG(dag_id="dag", start_date=pendulum.now())
        dag.add_task(task)
    dag_run = DagRun(
        run_id=DagRun.generate_run_id(DagRunType.MANUAL, DEFAULT_DATE),
        run_type=DagRunType.MANUAL,
        dag_id=dag.dag_id,
    )
    task_instance = TaskInstance(task=task, run_id=dag_run.run_id)
    task_instance.dag_run = dag_run
    if map_index is not None:
        task_instance.map_index = map_index
    if persist_to_db:
        with create_session() as session:
            session.add(DagModel(dag_id=dag.dag_id))
            session.add(dag_run)
            session.add(task_instance)
            session.commit()
    return {
        "dag": dag,
        "ts": DEFAULT_DATE.isoformat(),
        "task": task,
        "ti": task_instance,
        "task_instance": task_instance,
        "run_id": "test",
    }


class SSHClientSideEffect:
    def __init__(self, hook):
        self.hook = hook
@@ -198,3 +234,20 @@ def test_command_errored(self):
        self.exec_ssh_client_command.return_value = (1, b"", b"Error here")
        with pytest.raises(AirflowException, match="SSH operator error: exit status = 1"):
            task.execute(None)

    def test_push_ssh_exit_to_xcom(self, context=None):
        # Test pulls the value previously pushed to xcom and checks if it's the same
        command = "not_a_real_command"
        ssh_exit_code = randrange(0, 100)
        task_push = SSHOperator(task_id="test_push", ssh_hook=self.hook, command=command)
        task_context = create_context(task_push, persist_to_db=True)
        self.exec_ssh_client_command.return_value = (ssh_exit_code, b"", b"ssh output")
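        # execute() raises AirflowException whenever the random exit code is non-zero,
        # but raise_for_status pushes "ssh_exit" to XCom before raising, so the
        # assertion in the finally block holds either way.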
        try:
            task_push.execute(context=task_context)
        except AirflowException:
            pass
        finally:
            assert (
                task_push.xcom_pull(key="ssh_exit", context=task_context, task_ids=["test_push"])[0]
                == ssh_exit_code
            )