Fix code docstrings (#28622)
Fix docstrings grammar

Co-authored-by: kazanau <[email protected]>
stamixthereal and kazanau authored Dec 28, 2022
1 parent 2169b39 commit 76186bb
Showing 4 changed files with 23 additions and 23 deletions.
16 changes: 8 additions & 8 deletions airflow/models/baseoperator.py
@@ -413,8 +413,8 @@ def apply_defaults(self: BaseOperator, *args: Any, **kwargs: Any) -> Any:
def __new__(cls, name, bases, namespace, **kwargs):
new_cls = super().__new__(cls, name, bases, namespace, **kwargs)
with contextlib.suppress(KeyError):
-            # Update the partial descriptor with the class method so it call call the actual function (but let
-            # subclasses override it if they need to)
+            # Update the partial descriptor with the class method, so it calls the actual function
+            # (but let subclasses override it if they need to)
partial_desc = vars(new_cls)["partial"]
if isinstance(partial_desc, _PartialDescriptor):
partial_desc.class_method = classmethod(partial)
@@ -448,7 +448,7 @@ class derived from this one results in the creation of a task object,
(e.g. user/person/team/role name) to clarify ownership is recommended.
:param email: the 'to' email address(es) used in email alerts. This can be a
single email or multiple ones. Multiple addresses can be specified as a
-        comma or semi-colon separated string or by passing a list of strings.
+        comma or semicolon separated string or by passing a list of strings.
:param email_on_retry: Indicates whether email alerts should be sent when a
task is retried
:param email_on_failure: Indicates whether email alerts should be sent when
@@ -932,7 +932,7 @@ def __hash__(self):
def __or__(self, other):
"""
Called for [This Operator] | [Operator], The inlets of other
-        will be set to pickup the outlets from this operator. Other will
+        will be set to pick up the outlets from this operator. Other will
be set as a downstream task of this operator.
"""
if isinstance(other, BaseOperator):
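For orientation, a hedged sketch of the behavior that docstring describes, not code from the commit. The operator class, dag id, and outlet URI below are made up, and it assumes the left-hand operator declares lineage support and has outlets defined (without outlets, `|` raises a ValueError):

from datetime import datetime
from airflow.models.baseoperator import BaseOperator
from airflow.models.dag import DAG

class ProduceOperator(BaseOperator):
    supports_lineage = True  # assumption: `|` requires lineage support

    def execute(self, context):
        return "produced"

with DAG(dag_id="lineage_demo", start_date=datetime(2022, 1, 1)):
    produce = ProduceOperator(task_id="produce", outlets=["s3://bucket/raw"])
    consume = ProduceOperator(task_id="consume")
    # consume's inlets are set to pick up produce's outlets, and
    # consume becomes a downstream task of produce.
    produce | consume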
@@ -1124,9 +1124,9 @@ def post_execute(self, context: Any, result: Any = None):

def on_kill(self) -> None:
"""
-        Override this method to cleanup subprocesses when a task instance
+        Override this method to clean up subprocesses when a task instance
gets killed. Any use of the threading, subprocess or multiprocessing
-        module within an operator needs to be cleaned up or it will leave
+        module within an operator needs to be cleaned up, or it will leave
ghost processes behind.
"""

@@ -1172,7 +1172,7 @@ def render_template_fields(
This mutates the attributes in-place and is irreversible.
:param context: Context dict with values to apply on content.
-        :param jinja_env: Jinja environment to use for rendering.
+        :param jinja_env: Jinja's environment to use for rendering.
"""
if not jinja_env:
jinja_env = self.get_template_env()
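A hedged usage sketch for the method documented above (the task id and context values are made up; in normal operation Airflow calls this itself during task execution):

from airflow.operators.bash import BashOperator

op = BashOperator(task_id="t", bash_command="echo {{ ds }}")
# Mutates op's templated attributes in place; this is irreversible.
op.render_template_fields(context={"ds": "2022-12-28"})
# op.bash_command should now read "echo 2022-12-28"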
@@ -1463,7 +1463,7 @@ def inherits_from_empty_operator(self):
"""Used to determine if an Operator is inherited from EmptyOperator"""
# This looks like `isinstance(self, EmptyOperator) would work, but this also
# needs to cope when `self` is a Serialized instance of a EmptyOperator or one
-        # of its sub-classes (which don't inherit from anything but BaseOperator).
+        # of its subclasses (which don't inherit from anything but BaseOperator).
return getattr(self, "_is_empty", False)

def defer(
14 changes: 7 additions & 7 deletions airflow/models/dag.py
@@ -189,7 +189,7 @@ def create_timetable(interval: ScheduleIntervalArg, timezone: Timezone) -> Timet
def get_last_dagrun(dag_id, session, include_externally_triggered=False):
"""
Returns the last dag run for a dag, None if there was none.
-    Last dag run can be any type of run eg. scheduled or backfilled.
+    Last dag run can be any type of run e.g. scheduled or backfilled.
Overridden DagRuns are ignored.
"""
DR = DagRun
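For reference, a hedged usage sketch of this helper (the dag id is made up):

from airflow.models.dag import get_last_dagrun
from airflow.utils.session import create_session

with create_session() as session:
    # None if the dag has never run; scheduled and backfilled runs both count.
    last_run = get_last_dagrun(
        "example_dag", session=session, include_externally_triggered=True
    )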
@@ -276,8 +276,8 @@ class DAG(LoggingMixin):
:param start_date: The timestamp from which the scheduler will
attempt to backfill
:param end_date: A date beyond which your DAG won't run, leave to None
-        for open ended scheduling
-    :param template_searchpath: This list of folders (non relative)
+        for open-ended scheduling
+    :param template_searchpath: This list of folders (non-relative)
defines where jinja will look for your templates. Order matters.
Note that jinja/airflow includes the path of your DAG file by
default
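A minimal sketch of the parameters touched in this hunk (the id, dates, and path are made up; assumes Airflow 2.4+, where `schedule` is accepted):

from datetime import datetime
from airflow.models.dag import DAG

dag = DAG(
    dag_id="example",
    schedule="@daily",
    start_date=datetime(2022, 1, 1),  # scheduler backfills from here
    end_date=None,                    # None keeps scheduling open-ended
    template_searchpath=["/opt/airflow/templates"],  # absolute paths only
)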
@@ -807,7 +807,7 @@ def get_next_data_interval(self, dag_model: DagModel) -> DataInterval | None:
schedule if the run does not have an explicit one set, which is possible
for runs created prior to AIP-39.
-        This function is private to Airflow core and should not be depended as a
+        This function is private to Airflow core and should not be depended on as a
part of the Python API.
:meta private:
@@ -832,7 +832,7 @@ def get_run_data_interval(self, run: DagRun) -> DataInterval:
schedule if the run does not have an explicit one set, which is possible for
runs created prior to AIP-39.
-        This function is private to Airflow core and should not be depended as a
+        This function is private to Airflow core and should not be depended on as a
part of the Python API.
:meta private:
@@ -2808,7 +2808,7 @@ def bulk_write_to_db(

# here we go through dags and tasks to check for dataset references
# if there are now None and previously there were some, we delete them
-        # if there are now *any*, we add them to the above data structures and
+        # if there are now *any*, we add them to the above data structures, and
# later we'll persist them to the database.
for dag in dags:
curr_orm_dag = existing_dags.get(dag.dag_id)
@@ -3498,7 +3498,7 @@ def wrapper(f: Callable) -> Callable[..., DAG]:
@functools.wraps(f)
def factory(*args, **kwargs):
# Generate signature for decorated function and bind the arguments when called
-            # we do this to extract parameters so we can annotate them on the DAG object.
+            # we do this to extract parameters, so we can annotate them on the DAG object.
# In addition, this fails if we are missing any args/kwargs with TypeError as expected.
f_sig = signature(f).bind(*args, **kwargs)
# Apply defaults to capture default values if set.
4 changes: 2 additions & 2 deletions airflow/models/dagbag.py
@@ -154,7 +154,7 @@ def size(self) -> int:

@property
def store_serialized_dags(self) -> bool:
"""Whether or not to read dags from DB"""
"""Whether to read dags from DB"""
warnings.warn(
"The store_serialized_dags property has been deprecated. Use read_dags_from_db instead.",
RemovedInAirflow3Warning,
@@ -176,7 +176,7 @@ def get_dag(self, dag_id, session: Session = None):
"""
Gets the DAG out of the dictionary, and refreshes it if expired
-        :param dag_id: DAG Id
+        :param dag_id: DAG ID
"""
# Avoid circular import
from airflow.models.dag import DagModel
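A hedged usage sketch for get_dag (the dag id is made up; read_dags_from_db replaces the deprecated store_serialized_dags property shown above):

from airflow.models.dagbag import DagBag

bag = DagBag(read_dags_from_db=True)
dag = bag.get_dag("example_dag")  # refreshed from the DB if expired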
12 changes: 6 additions & 6 deletions airflow/models/dagrun.py
@@ -529,7 +529,7 @@ def update_state(
of its TaskInstances.
:param session: Sqlalchemy ORM Session
-        :param execute_callbacks: Should dag callbacks (success/failure, SLA etc) be invoked
+        :param execute_callbacks: Should dag callbacks (success/failure, SLA etc.) be invoked
directly (default: true) or recorded as a pending request in the ``returned_callback`` property
:return: Tuple containing tis that can be scheduled in the current loop & `returned_callback` that
needs to be executed
@@ -713,7 +713,7 @@ def _filter_tis_and_exclude_removed(dag: DAG, tis: list[TI]) -> Iterable[TI]:
session=session,
)

-            # During expansion we may change some tis into non-schedulable
+            # During expansion, we may change some tis into non-schedulable
# states, so we need to re-compute.
if expansion_happened:
changed_tis = True
@@ -829,8 +829,8 @@ def _are_premature_tis(
ignore_in_reschedule_period=True,
finished_tis=finished_tis,
)
-        # there might be runnable tasks that are up for retry and for some reason(retry delay, etc) are
-        # not ready yet so we set the flags to count them in
+        # there might be runnable tasks that are up for retry and for some reason(retry delay, etc.) are
+        # not ready yet, so we set the flags to count them in
return (
any(ut.are_dependencies_met(dep_context=dep_context, session=session) for ut in unfinished_tis),
dep_context.have_changed_ti_states,
@@ -844,7 +844,7 @@ def _emit_true_scheduling_delay_stats_for_finished_state(self, finished_tis: lis
is updated to a completed status (either success or failure). The method will find the first
started task within the DAG and calculate the expected DagRun start time (based on
dag.execution_date & dag.timetable), and minus these two values to get the delay.
-        The emitted data may contains outlier (e.g. when the first task was cleared, so
+        The emitted data may contain outlier (e.g. when the first task was cleared, so
the second task's start_date will be used), but we can get rid of the outliers
on the stats side through the dashboards tooling built.
Note, the stat will only be emitted if the DagRun is a scheduler triggered one
@@ -993,7 +993,7 @@ def _check_for_removed_or_restored_tasks(
)
ti.state = State.REMOVED
else:
-                # Check if the number of mapped literals has changed and we need to mark this TI as removed.
+                # Check if the number of mapped literals has changed, and we need to mark this TI as removed.
if ti.map_index >= num_mapped_tis:
self.log.debug(
"Removing task '%s' as the map_index is longer than the literal mapping list (%s)",
