# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import json
import os
import platform
import re
import subprocess
import sys
from contextlib import ExitStack, suppress
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import TYPE_CHECKING

import pytest
import time_machine
from itsdangerous import URLSafeSerializer

if TYPE_CHECKING:
    from tests._internals.capture_warnings import CaptureWarningsPlugin  # noqa: F401
    from tests._internals.forbidden_warnings import ForbiddenWarningsPlugin  # noqa: F401

# We should set these before loading _any_ of the rest of airflow so that the
# unit test mode config is set as early as possible.
assert "airflow" not in sys.modules, "No airflow module can be imported before these lines"

keep_env_variables = "--keep-env-variables" in sys.argv

if not keep_env_variables:
    # Clear all environment variables that might have side effects,
    # e.g. those defined in /files/airflow-breeze-config/variables.env
    _AIRFLOW_CONFIG_PATTERN = re.compile(r"^AIRFLOW__(.+)__(.+)$")
    _KEEP_CONFIGS_SETTINGS: dict[str, dict[str, set[str]]] = {
        # Always keep these configurations
        "always": {
            "database": {"sql_alchemy_conn"},
            "core": {"sql_alchemy_conn"},
            "celery": {"result_backend", "broker_url"},
        },
        # Keep per enabled integration
        "celery": {"celery": {"*"}, "celery_broker_transport_options": {"*"}},
        "kerberos": {"kerberos": {"*"}},
    }
    if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true":
        _KEEP_CONFIGS_SETTINGS["always"].update(
            {
                "core": {
                    "internal_api_url",
                    "fernet_key",
                    "database_access_isolation",
                    "internal_api_secret_key",
                    "internal_api_clock_grace",
                },
            }
        )
    _ENABLED_INTEGRATIONS = {e.split("_", 1)[-1].lower() for e in os.environ if e.startswith("INTEGRATION_")}
    _KEEP_CONFIGS: dict[str, set[str]] = {}
    for keep_settings_key in ("always", *_ENABLED_INTEGRATIONS):
        if keep_settings := _KEEP_CONFIGS_SETTINGS.get(keep_settings_key):
            for section, options in keep_settings.items():
                if section not in _KEEP_CONFIGS:
                    _KEEP_CONFIGS[section] = options
                else:
                    _KEEP_CONFIGS[section].update(options)
    for env_key in os.environ.copy():
        if m := _AIRFLOW_CONFIG_PATTERN.match(env_key):
            section, option = m.group(1).lower(), m.group(2).lower()
            if not (ko := _KEEP_CONFIGS.get(section)) or not ("*" in ko or option in ko):
                del os.environ[env_key]
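
# For example, when breeze starts the celery integration (INTEGRATION_CELERY=true), the scrubbing
# above keeps AIRFLOW__CELERY__* variables but would drop an unrelated setting such as
# AIRFLOW__SMTP__SMTP_HOST (an illustrative variable, not one this file sets).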

SUPPORTED_DB_BACKENDS = ("sqlite", "postgres", "mysql")

# A bit of a hack - but we need to check args before they are parsed by pytest in order to
# configure the DB before Airflow gets initialized (which happens at airflow import time).
# Using env variables also handles the case when python-xdist is used - python-xdist spawns separate
# processes and does not pass all args to them (that is done via env variables), so we do the
# same here: detect whether `--skip-db-tests` or `--run-db-tests-only` is passed to pytest
# and set env variables so that the processes spawned by python-xdist can read the status from there.
skip_db_tests = "--skip-db-tests" in sys.argv or os.environ.get("_AIRFLOW_SKIP_DB_TESTS") == "true"
run_db_tests_only = (
    "--run-db-tests-only" in sys.argv or os.environ.get("_AIRFLOW_RUN_DB_TESTS_ONLY") == "true"
)

if skip_db_tests:
    if run_db_tests_only:
        raise Exception("You cannot specify both --skip-db-tests and --run-db-tests-only together")
    # Make sure sqlalchemy will not be usable for pure unit tests even if initialized
    os.environ["AIRFLOW__CORE__SQL_ALCHEMY_CONN"] = "bad_schema:///"
    os.environ["AIRFLOW__DATABASE__SQL_ALCHEMY_CONN"] = "bad_schema:///"
    os.environ["_IN_UNIT_TESTS"] = "true"
    # Set it here to pass the flag to python-xdist spawned processes
    os.environ["_AIRFLOW_SKIP_DB_TESTS"] = "true"

if run_db_tests_only:
    # Set it here to pass the flag to python-xdist spawned processes
    os.environ["_AIRFLOW_RUN_DB_TESTS_ONLY"] = "true"

AIRFLOW_TESTS_DIR = Path(os.path.dirname(os.path.realpath(__file__))).resolve()
AIRFLOW_SOURCES_ROOT_DIR = AIRFLOW_TESTS_DIR.parent.parent

os.environ["AIRFLOW__CORE__PLUGINS_FOLDER"] = os.fspath(AIRFLOW_TESTS_DIR / "plugins")
os.environ["AIRFLOW__CORE__DAGS_FOLDER"] = os.fspath(AIRFLOW_TESTS_DIR / "dags")
os.environ["AIRFLOW__CORE__UNIT_TEST_MODE"] = "True"
os.environ["AWS_DEFAULT_REGION"] = os.environ.get("AWS_DEFAULT_REGION") or "us-east-1"
os.environ["CREDENTIALS_DIR"] = os.environ.get("CREDENTIALS_DIR") or "/files/airflow-breeze-config/keys"
os.environ["AIRFLOW_ENABLE_AIP_44"] = os.environ.get("AIRFLOW_ENABLE_AIP_44") or "true"

if platform.system() == "Darwin":
    # mocks from unittest.mock work correctly in subprocesses only if they are created by the "fork"
    # method, but macOS uses "spawn" by default
    os.environ["AIRFLOW__CORE__MP_START_METHOD"] = "fork"

# These files are really test dags, not tests - tell pytest not to collect them
collect_ignore = [
    "tests/dags/subdir1/test_ignore_this.py",
    "tests/dags/test_invalid_dup_task.py",
    "tests/dags_corrupted/test_impersonation_custom.py",
    "tests/test_utils/perf/dags/elastic_dag.py",
]

# https://docs.pytest.org/en/stable/reference/reference.html#stash
capture_warnings_key = pytest.StashKey["CaptureWarningsPlugin"]()
forbidden_warnings_key = pytest.StashKey["ForbiddenWarningsPlugin"]()

@pytest.fixture
def reset_environment():
    """Resets env variables."""
    init_env = os.environ.copy()
    yield
    # Iterate over a snapshot of the keys - we delete from os.environ while restoring it,
    # and mutating a mapping during iteration raises a RuntimeError
    for key in list(os.environ):
        if key not in init_env:
            del os.environ[key]
        else:
            os.environ[key] = init_env[key]
    # Also restore variables the test removed entirely
    for key, value in init_env.items():
        if key not in os.environ:
            os.environ[key] = value
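
# Usage sketch (the variable name is illustrative): any test mutating os.environ can request this
# fixture to get the pre-test environment restored afterwards:
#
#     def test_sets_env(reset_environment):
#         os.environ["MY_TEMP_FLAG"] = "1"  # removed again once the test finishes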

@pytest.fixture
def secret_key() -> str:
    """Return the configured webserver secret key."""
    from airflow.configuration import conf

    the_key = conf.get("webserver", "SECRET_KEY")
    if the_key is None:
        raise RuntimeError(
            "The secret key SHOULD be configured as `[webserver] secret_key` in the "
            "configuration/environment at this stage! "
        )
    return the_key


@pytest.fixture
def url_safe_serializer(secret_key) -> URLSafeSerializer:
    return URLSafeSerializer(secret_key)
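
# A minimal usage sketch (the payload value is illustrative): the serializer signs values with the
# same secret key the webserver uses, so tests can produce tokens the code under test will accept:
#
#     def test_roundtrip(url_safe_serializer):
#         token = url_safe_serializer.dumps("payload")
#         assert url_safe_serializer.loads(token) == "payload"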

@pytest.fixture
def reset_db():
    """Resets Airflow db."""
    from airflow.utils import db

    db.resetdb()


ALLOWED_TRACE_SQL_COLUMNS = ["num", "time", "trace", "sql", "parameters", "count"]

@pytest.fixture(autouse=True)
def trace_sql(request):
    """Displays queries from the tests to console."""
    from tests.test_utils.perf.perf_kit.sqlalchemy import (  # isort: skip
        count_queries,
        trace_queries,
    )

    trace_sql_option = request.config.option.trace_sql
    if not trace_sql_option:
        yield
        return

    terminal_reporter = request.config.pluginmanager.getplugin("terminalreporter")
    # if no terminal reporter plugin is present, nothing we can do here;
    # this can happen when this function executes in a worker node
    # when using pytest-xdist, for example
    if terminal_reporter is None:
        yield
        return

    columns = [col.strip() for col in trace_sql_option.split(",")]

    def pytest_print(text):
        return terminal_reporter.write_line(text)

    with ExitStack() as exit_stack:
        if columns == ["num"]:
            # It is very unlikely that the user wants to display only numbers, but probably
            # the user just wants to count the queries.
            exit_stack.enter_context(count_queries(print_fn=pytest_print))
        elif any(c in columns for c in ["time", "trace", "sql", "parameters"]):
            exit_stack.enter_context(
                trace_queries(
                    display_num="num" in columns,
                    display_time="time" in columns,
                    display_trace="trace" in columns,
                    display_sql="sql" in columns,
                    display_parameters="parameters" in columns,
                    print_fn=pytest_print,
                )
            )
        yield
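
# Example invocation: running with `pytest --trace-sql=num,time,sql` prints each query executed by
# a test with its sequence number, execution time and SQL text; `--trace-sql=num` only counts the
# queries instead of tracing them.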

@pytest.fixture(autouse=True, scope="session")
def set_db_isolation_mode():
    if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true":
        from airflow.api_internal.internal_api_call import InternalApiConfig

        InternalApiConfig.set_use_internal_api("tests", allow_tests_to_use_db=True)


def skip_if_database_isolation_mode(item):
    if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true":
        for _ in item.iter_markers(name="skip_if_database_isolation_mode"):
            pytest.skip("This test is skipped because it is not allowed in database isolation mode.")

def pytest_addoption(parser: pytest.Parser):
    """Add options parser for custom plugins."""
    group = parser.getgroup("airflow")
    group.addoption(
        "--with-db-init",
        action="store_true",
        dest="db_init",
        help="Forces database initialization before tests",
    )
    group.addoption(
        "--integration",
        action="append",
        dest="integration",
        metavar="INTEGRATIONS",
        help="only run tests matching the integration specified: "
        "[cassandra,kerberos,mongo,celery,statsd,trino].",
    )
    group.addoption(
        "--keep-env-variables",
        action="store_true",
        dest="keep_env_variables",
        help="do not clear environment variables that might have side effects while running tests",
    )
    group.addoption(
        "--skip-db-tests",
        action="store_true",
        dest="skip_db_tests",
        help="skip tests that require database",
    )
    group.addoption(
        "--run-db-tests-only",
        action="store_true",
        dest="run_db_tests_only",
        help="only run tests requiring database",
    )
    group.addoption(
        "--backend",
        action="store",
        dest="backend",
        metavar="BACKEND",
        help="only run tests matching the backend: [sqlite,postgres,mysql].",
    )
    group.addoption(
        "--system",
        action="append",
        dest="system",
        metavar="SYSTEMS",
        help="only run tests matching the system specified [google.cloud, google.marketing_platform]",
    )
    group.addoption(
        "--include-long-running",
        action="store_true",
        dest="include_long_running",
        help="Includes long running tests (marked with long_running marker). They are skipped by default.",
    )
    group.addoption(
        "--include-quarantined",
        action="store_true",
        dest="include_quarantined",
        help="Includes quarantined tests (marked with quarantined marker). They are skipped by default.",
    )
    group.addoption(
        "--exclude-virtualenv-operator",
        action="store_true",
        dest="exclude_virtualenv_operator",
        help="Excludes virtualenv operators tests (marked with virtualenv_test marker).",
    )
    group.addoption(
        "--exclude-external-python-operator",
        action="store_true",
        dest="exclude_external_python_operator",
        help="Excludes external python operator tests (marked with external_python_test marker).",
    )
    allowed_trace_sql_columns_list = ",".join(ALLOWED_TRACE_SQL_COLUMNS)
    group.addoption(
        "--trace-sql",
        action="store",
        dest="trace_sql",
        help=(
            "Trace SQL statements. As an argument, you must specify the columns to be "
            f"displayed as a comma-separated list. Supported values: [{allowed_trace_sql_columns_list}]"
        ),
        metavar="COLUMNS",
    )
    group.addoption(
        "--no-db-cleanup",
        action="store_false",
        dest="db_cleanup",
        help="Disable DB clear before each test module.",
    )
    group.addoption(
        "--disable-forbidden-warnings",
        action="store_true",
        dest="disable_forbidden_warnings",
        help="Disable raising an error if forbidden warnings detected.",
    )
    group.addoption(
        "--disable-capture-warnings",
        action="store_true",
        dest="disable_capture_warnings",
        help="Disable internal capture warnings.",
    )
    group.addoption(
        "--warning-output-path",
        action="store",
        dest="warning_output_path",
        metavar="PATH",
        help=(
            "Path for resulting captured warnings. Absolute or relative to the `tests` directory. "
            "If not provided and the environment variable `CAPTURE_WARNINGS_OUTPUT` is not set, "
            "then 'warnings.txt' will be used."
        ),
    )
    parser.addini(
        name="forbidden_warnings",
        type="linelist",
        help="List of internal Airflow warnings which are prohibited during tests execution.",
    )
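
# These options are typically combined on the pytest command line (usually via breeze), e.g.:
#
#     pytest --backend postgres --integration celery --include-long-running tests/
#
# (an illustrative combination of the flags registered above, not a required one).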

def initial_db_init():
    from flask import Flask

    from airflow.configuration import conf
    from airflow.utils import db
    from airflow.www.extensions.init_appbuilder import init_appbuilder
    from airflow.www.extensions.init_auth_manager import get_auth_manager
    from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS

    db.resetdb()
    db.bootstrap_dagbag()
    # minimal app to add roles
    flask_app = Flask(__name__)
    flask_app.config["SQLALCHEMY_DATABASE_URI"] = conf.get("database", "SQL_ALCHEMY_CONN")
    init_appbuilder(flask_app)
    if AIRFLOW_V_2_8_PLUS:
        get_auth_manager().init()

@pytest.fixture(autouse=True, scope="session")
def initialize_airflow_tests(request):
    """Helper that sets up the Airflow testing environment."""
    print(" AIRFLOW ".center(60, "="))

    # Set up the test environment for breeze
    home = os.path.expanduser("~")
    airflow_home = os.environ.get("AIRFLOW_HOME") or os.path.join(home, "airflow")
    print(f"Home of the user: {home}\nAirflow home {airflow_home}")

    # Initialize the Airflow db if required
    lock_file = os.path.join(airflow_home, ".airflow_db_initialised")
    if not skip_db_tests:
        if request.config.option.db_init:
            print("Initializing the DB - forced with --with-db-init switch.")
            initial_db_init()
        elif not os.path.exists(lock_file):
            print(
                "Initializing the DB - first time after entering the container.\n"
                "You can force re-initialization of the database by adding the --with-db-init switch to run-tests."
            )
            initial_db_init()
            # Create lock file
            with open(lock_file, "w+"):
                pass
        else:
            print(
                "Skipping DB initialization as it was initialized already.\n"
                "You can re-initialize the database by adding the --with-db-init flag when running tests."
            )
    integration_kerberos = os.environ.get("INTEGRATION_KERBEROS")
    if integration_kerberos == "true":
        # Initialize kerberos
        kerberos = os.environ.get("KRB5_KTNAME")
        if kerberos:
            subprocess.check_call(["kinit", "-kt", kerberos, "[email protected]"])
        else:
            print("Kerberos enabled! Please set the KRB5_KTNAME environment variable")
            sys.exit(1)

def pytest_configure(config: pytest.Config) -> None:
    if (backend := config.getoption("backend", default=None)) and backend not in SUPPORTED_DB_BACKENDS:
        msg = (
            f"Provided DB backend {backend!r} not supported, "
            f"expected one of: {', '.join(map(repr, SUPPORTED_DB_BACKENDS))}"
        )
        pytest.exit(msg, returncode=6)

    config.addinivalue_line("markers", "integration(name): mark test to run with named integration")
    config.addinivalue_line("markers", "backend(name): mark test to run with named backend")
    config.addinivalue_line("markers", "system(name): mark test to run with named system")
    config.addinivalue_line("markers", "platform(name): mark test to run with specific platform/environment")
    config.addinivalue_line("markers", "long_running: mark tests that run for a long time (many minutes)")
    config.addinivalue_line(
        "markers", "quarantined: mark tests that are in quarantine (i.e. flaky, need to be isolated and fixed)"
    )
    config.addinivalue_line(
        "markers", "credential_file(name): mark tests that require credential file in CREDENTIALS_DIR"
    )
    config.addinivalue_line(
        "markers", "need_serialized_dag: mark tests that require dags in serialized form to be present"
    )
    config.addinivalue_line(
        "markers",
        "db_test: mark tests that require database to be present",
    )
    config.addinivalue_line(
        "markers",
        "non_db_test_override: you can mark individual tests with this marker to override the db_test marker",
    )
    config.addinivalue_line(
        "markers",
        "virtualenv_operator: virtualenv operator tests are 'long', we should run them separately",
    )
    config.addinivalue_line(
        "markers",
        "external_python_operator: external python operator tests are 'long', we should run them separately",
    )
    config.addinivalue_line("markers", "enable_redact: do not mock redact secret masker")
    config.addinivalue_line("markers", "skip_if_database_isolation_mode: skip if DB isolation is enabled")

    os.environ["_AIRFLOW__SKIP_DATABASE_EXECUTOR_COMPATIBILITY_CHECK"] = "1"

    # Set up internal warnings plugins
    if "ignore" in sys.warnoptions:
        config.option.disable_forbidden_warnings = True
        config.option.disable_capture_warnings = True
    if not config.pluginmanager.get_plugin("warnings"):
        # Internal forbidden warnings plugin depends on the builtin pytest warnings plugin
        config.option.disable_forbidden_warnings = True

    forbidden_warnings: list[str] | None = config.getini("forbidden_warnings")
    if not config.option.disable_forbidden_warnings and forbidden_warnings:
        from tests._internals.forbidden_warnings import ForbiddenWarningsPlugin

        forbidden_warnings_plugin = ForbiddenWarningsPlugin(
            config=config,
            forbidden_warnings=tuple(map(str.strip, forbidden_warnings)),
        )
        config.pluginmanager.register(forbidden_warnings_plugin)
        config.stash[forbidden_warnings_key] = forbidden_warnings_plugin

    if not config.option.disable_capture_warnings:
        from tests._internals.capture_warnings import CaptureWarningsPlugin

        capture_warnings_plugin = CaptureWarningsPlugin(
            config=config, output_path=config.getoption("warning_output_path", default=None)
        )
        config.pluginmanager.register(capture_warnings_plugin)
        config.stash[capture_warnings_key] = capture_warnings_plugin

def pytest_unconfigure(config: pytest.Config) -> None:
    os.environ.pop("_AIRFLOW__SKIP_DATABASE_EXECUTOR_COMPATIBILITY_CHECK", None)
    if forbidden_warnings_plugin := config.stash.get(forbidden_warnings_key, None):
        del config.stash[forbidden_warnings_key]
        config.pluginmanager.unregister(forbidden_warnings_plugin)
    if capture_warnings_plugin := config.stash.get(capture_warnings_key, None):
        del config.stash[capture_warnings_key]
        config.pluginmanager.unregister(capture_warnings_plugin)

def skip_if_not_marked_with_integration(selected_integrations, item):
    for marker in item.iter_markers(name="integration"):
        integration_name = marker.args[0]
        if integration_name in selected_integrations or "all" in selected_integrations:
            return
    pytest.skip(
        f"The test is skipped because it does not have the right integration marker. "
        f"Only tests marked with pytest.mark.integration(INTEGRATION) are run with INTEGRATION "
        f"being one of {selected_integrations}. {item}"
    )


def skip_if_not_marked_with_backend(selected_backend, item):
    for marker in item.iter_markers(name="backend"):
        backend_names = marker.args
        if selected_backend in backend_names:
            return
    pytest.skip(
        f"The test is skipped because it does not have the right backend marker. "
        f"Only tests marked with pytest.mark.backend('{selected_backend}') are run: {item}"
    )


def skip_if_platform_doesnt_match(marker):
    allowed_platforms = ("linux", "breeze")
    if not (args := marker.args):
        pytest.fail(f"No platform specified, expected one of: {', '.join(map(repr, allowed_platforms))}")
    elif not all(a in allowed_platforms for a in args):
        pytest.fail(
            f"Allowed platforms {', '.join(map(repr, allowed_platforms))}; "
            f"but got: {', '.join(map(repr, args))}"
        )
    if "linux" in args:
        if not sys.platform.startswith("linux"):
            pytest.skip("Test expected to run on Linux platform.")
    if "breeze" in args:
        if not os.path.isfile("/.dockerenv") or os.environ.get("BREEZE", "").lower() != "true":
            raise pytest.skip(
                "Test expected to run inside the Airflow Breeze container, "
                "probably because it is too dangerous to run it outside."
            )

def skip_if_not_marked_with_system(selected_systems, item):
    for marker in item.iter_markers(name="system"):
        systems_name = marker.args[0]
        if systems_name in selected_systems or "all" in selected_systems:
            return
    pytest.skip(
        f"The test is skipped because it does not have the right system marker. "
        f"Only tests marked with pytest.mark.system(SYSTEM) are run with SYSTEM "
        f"being one of {selected_systems}. {item}"
    )


def skip_system_test(item):
    for marker in item.iter_markers(name="system"):
        pytest.skip(
            f"The test is skipped because it has the system marker. System tests are only run when "
            f"the --system flag with the right system ({marker.args[0]}) is passed to pytest. {item}"
        )


def skip_long_running_test(item):
    for _ in item.iter_markers(name="long_running"):
        pytest.skip(
            f"The test is skipped because it has the long_running marker "
            f"and the --include-long-running flag is not passed to pytest. {item}"
        )


def skip_quarantined_test(item):
    for _ in item.iter_markers(name="quarantined"):
        pytest.skip(
            f"The test is skipped because it has the quarantined marker "
            f"and the --include-quarantined flag is not passed to pytest. {item}"
        )


def skip_virtualenv_operator_test(item):
    for _ in item.iter_markers(name="virtualenv_operator"):
        pytest.skip(
            f"The test is skipped because it has the virtualenv_operator marker "
            f"and the --exclude-virtualenv-operator flag is not passed to pytest. {item}"
        )


def skip_external_python_operator_test(item):
    for _ in item.iter_markers(name="external_python_operator"):
        pytest.skip(
            f"The test is skipped because it has the external_python_operator marker "
            f"and the --exclude-external-python-operator flag is not passed to pytest. {item}"
        )

def skip_db_test(item):
    if next(item.iter_markers(name="db_test"), None):
        if next(item.iter_markers(name="non_db_test_override"), None):
            # non_db_test can override the db_test set for example on module or class level
            return
        else:
            pytest.skip(
                f"The test is skipped as it is a DB test "
                f"and the --skip-db-tests flag is passed to pytest. {item}"
            )
    if next(item.iter_markers(name="backend"), None):
        # also automatically skip tests marked with `backend` marker as they are implicitly
        # db tests
        pytest.skip(
            f"The test is skipped as it is a DB test "
            f"and the --skip-db-tests flag is passed to pytest. {item}"
        )

def only_run_db_test(item):
    if next(item.iter_markers(name="db_test"), None) and not next(
        item.iter_markers(name="non_db_test_override"), None
    ):
        # non_db_test at individual level can override the db_test set for example on module or class level
        return
    else:
        if next(item.iter_markers(name="backend"), None):
            # Also do not skip the tests marked with `backend` marker - as it is implicitly a db test
            return
        pytest.skip(
            f"The test is skipped as it is not a DB test "
            f"and the --run-db-tests-only flag is passed to pytest. {item}"
        )
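
# For example, a module marked with `pytestmark = pytest.mark.db_test` can still contain a pure
# unit test that runs under --skip-db-tests by overriding the marker individually:
#
#     @pytest.mark.non_db_test_override
#     def test_pure_logic(): ...
#
# (the test name is illustrative). The `backend` marker implies a DB test in both directions above.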

def skip_if_integration_disabled(marker, item):
    integration_name = marker.args[0]
    environment_variable_name = "INTEGRATION_" + integration_name.upper()
    environment_variable_value = os.environ.get(environment_variable_name)
    if not environment_variable_value or environment_variable_value != "true":
        pytest.skip(
            f"The test requires the {integration_name} integration to be started and the "
            f"{environment_variable_name} environment variable to be set to true (it is '{environment_variable_value}')."
            f" It can be set by specifying '--integration {integration_name}' at breeze startup"
            f": {item}"
        )

def skip_if_wrong_backend(marker: pytest.Mark, item: pytest.Item) -> None:
    if not (backend_names := marker.args):
        reason = (
            "`pytest.mark.backend` expects at least one of the following backends: "
            f"{', '.join(map(repr, SUPPORTED_DB_BACKENDS))}."
        )
        pytest.fail(reason)
    elif unsupported_backends := list(filter(lambda b: b not in SUPPORTED_DB_BACKENDS, backend_names)):
        reason = (
            "Airflow Tests supports only the following backends in the `pytest.mark.backend` marker: "
            f"{', '.join(map(repr, SUPPORTED_DB_BACKENDS))}, "
            f"but got {', '.join(map(repr, unsupported_backends))}."
        )
        pytest.fail(reason)

    env_name = "BACKEND"
    if not (backend := os.environ.get(env_name)) or backend not in backend_names:
        reason = (
            f"The test {item.nodeid!r} requires one of {', '.join(map(repr, backend_names))} backends to be "
            f"started and the {env_name!r} environment variable to be set (currently it is set to {backend!r}). "
            f"It can be set by specifying the backend at breeze startup."
        )
        pytest.skip(reason)

def skip_if_credential_file_missing(item):
    for marker in item.iter_markers(name="credential_file"):
        credential_file = marker.args[0]
        credential_path = os.path.join(os.environ.get("CREDENTIALS_DIR"), credential_file)
        if not os.path.exists(credential_path):
            pytest.skip(f"The test requires credential file {credential_path}: {item}")

def pytest_runtest_setup(item):
    selected_integrations_list = item.config.option.integration
    selected_systems_list = item.config.option.system

    include_long_running = item.config.option.include_long_running
    include_quarantined = item.config.option.include_quarantined
    exclude_virtualenv_operator = item.config.option.exclude_virtualenv_operator
    exclude_external_python_operator = item.config.option.exclude_external_python_operator

    for marker in item.iter_markers(name="integration"):
        skip_if_integration_disabled(marker, item)
    if selected_integrations_list:
        skip_if_not_marked_with_integration(selected_integrations_list, item)
    if selected_systems_list:
        skip_if_not_marked_with_system(selected_systems_list, item)
    else:
        skip_system_test(item)
    for marker in item.iter_markers(name="platform"):
        skip_if_platform_doesnt_match(marker)
    for marker in item.iter_markers(name="backend"):
        skip_if_wrong_backend(marker, item)
    skip_if_database_isolation_mode(item)
    selected_backend = item.config.option.backend
    if selected_backend:
        skip_if_not_marked_with_backend(selected_backend, item)
    if not include_long_running:
        skip_long_running_test(item)
    if not include_quarantined:
        skip_quarantined_test(item)
    if exclude_virtualenv_operator:
        skip_virtualenv_operator_test(item)
    if exclude_external_python_operator:
        skip_external_python_operator_test(item)
    if skip_db_tests:
        skip_db_test(item)
    if run_db_tests_only:
        only_run_db_test(item)
    skip_if_credential_file_missing(item)

@pytest.fixture
def frozen_sleep(monkeypatch):
    """Use time-machine to "stub" sleep.

    This means the ``sleep()`` takes no time, but ``datetime.now()`` appears to move forwards.

    If your module under test does ``import time`` and then ``time.sleep``:

    .. code-block:: python

        def test_something(frozen_sleep):
            my_mod.fn_under_test()

    If your module under test does ``from time import sleep`` then you will
    have to mock that sleep function directly:

    .. code-block:: python

        def test_something(frozen_sleep, monkeypatch):
            monkeypatch.setattr("my_mod.sleep", frozen_sleep)
            my_mod.fn_under_test()
    """
    traveller = None

    def fake_sleep(seconds):
        nonlocal traveller
        utcnow = datetime.now(tz=timezone.utc)
        if traveller is not None:
            traveller.stop()
        traveller = time_machine.travel(utcnow + timedelta(seconds=seconds))
        traveller.start()

    monkeypatch.setattr("time.sleep", fake_sleep)
    yield fake_sleep

    if traveller is not None:
        traveller.stop()

@pytest.fixture(scope="session")
def app():
    from tests.test_utils.config import conf_vars

    with conf_vars({("fab", "auth_rate_limited"): "False"}):
        from airflow.www import app

        yield app.create_app(testing=True)

@pytest.fixture
def dag_maker(request):
    """Fixture to help create DAG, DagModel, and SerializedDAG automatically.

    You have to use the dag_maker as a context manager, and it takes
    the same arguments as DAG::

        with dag_maker(dag_id="mydag") as dag:
            task1 = EmptyOperator(task_id="mytask")
            task2 = EmptyOperator(task_id="mytask2")

    If the DagModel you want to use needs different parameters than the one
    automatically created by the dag_maker, you have to update the DagModel as below::

        dag_maker.dag_model.is_active = False
        session.merge(dag_maker.dag_model)
        session.commit()

    For any test where you use the dag_maker, make sure to create a DagRun::

        dag_maker.create_dagrun()

    The dag_maker.create_dagrun takes the same arguments as dag.create_dagrun.

    If you want to operate on serialized DAGs, then either pass
    ``serialized=True`` to the ``dag_maker()`` call, or you can mark your
    test/class/file with ``@pytest.mark.need_serialized_dag(True)``. In both of
    these cases the ``dag`` returned by the context manager will be a
    lazily-evaluated proxy object to the SerializedDAG.
    """
    import lazy_object_proxy

    # IMPORTANT: Delay _all_ imports from `airflow.*` to _inside a method_.
    # This fixture is "called" early on in the pytest collection process, and
    # if we import airflow.* here the wrong (non-test) config will be loaded
    # and "baked" in to various constants
    want_serialized = False

    # Allow changing the default serialized behaviour with `@pytest.mark.need_serialized_dag` or
    # `@pytest.mark.need_serialized_dag(False)`
    serialized_marker = request.node.get_closest_marker("need_serialized_dag")
    if serialized_marker:
        (want_serialized,) = serialized_marker.args or (True,)

    from airflow.utils.log.logging_mixin import LoggingMixin

    class DagFactory(LoggingMixin):
        _own_session = False

        def __init__(self):
            from airflow.models import DagBag

            # Keep all the serialized dags we've created in this test
            self.dagbag = DagBag(os.devnull, include_examples=False, read_dags_from_db=False)

        def __enter__(self):
            self.dag.__enter__()
            if self.want_serialized:
                return lazy_object_proxy.Proxy(self._serialized_dag)
            return self.dag

        def _serialized_dag(self):
            return self.serialized_model.dag

        def get_serialized_data(self):
            try:
                data = self.serialized_model.data
            except AttributeError:
                raise RuntimeError("DAG serialization not requested")
            if isinstance(data, str):
                return json.loads(data)
            return data

        def _bag_dag_compat(self, dag):
            # This is a compatibility shim for the old bag_dag method in Airflow <3.0
            # TODO: Remove this when we drop support for Airflow <3.0 in Providers
            if hasattr(dag, "parent_dag"):
                return self.dagbag.bag_dag(dag, root_dag=dag)
            return self.dagbag.bag_dag(dag)

        def __exit__(self, type, value, traceback):
            from airflow.models import DagModel
            from airflow.models.serialized_dag import SerializedDagModel

            dag = self.dag
            dag.__exit__(type, value, traceback)
            if type is not None:
                return

            dag.clear(session=self.session)
            dag.sync_to_db(processor_subdir=self.processor_subdir, session=self.session)
            self.dag_model = self.session.get(DagModel, dag.dag_id)

            if self.want_serialized:
                self.serialized_model = SerializedDagModel(
                    dag, processor_subdir=self.dag_model.processor_subdir
                )
                self.session.merge(self.serialized_model)
                serialized_dag = self._serialized_dag()
                self._bag_dag_compat(serialized_dag)
                self.session.flush()
            else:
                self._bag_dag_compat(self.dag)

        def create_dagrun(self, **kwargs):
            from airflow.utils import timezone
            from airflow.utils.state import State
            from airflow.utils.types import DagRunType
            from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS

            if AIRFLOW_V_3_0_PLUS:
                from airflow.utils.types import DagRunTriggeredByType

            dag = self.dag
            kwargs = {
                "state": State.RUNNING,
                "start_date": self.start_date,
                "session": self.session,
                **kwargs,
            }
            # Need to provide run_id if the user does not either provide one
            # explicitly, or pass run_type for inference in dag.create_dagrun().
            if "run_id" not in kwargs and "run_type" not in kwargs:
                kwargs["run_id"] = "test"

            if "run_type" not in kwargs:
                kwargs["run_type"] = DagRunType.from_run_id(kwargs["run_id"])
            if kwargs.get("execution_date") is None:
                if kwargs["run_type"] == DagRunType.MANUAL:
                    kwargs["execution_date"] = self.start_date
                else:
                    kwargs["execution_date"] = dag.next_dagrun_info(None).logical_date
            if "data_interval" not in kwargs:
                logical_date = timezone.coerce_datetime(kwargs["execution_date"])
                if kwargs["run_type"] == DagRunType.MANUAL:
                    data_interval = dag.timetable.infer_manual_data_interval(run_after=logical_date)
                else:
                    data_interval = dag.infer_automated_data_interval(logical_date)
                kwargs["data_interval"] = data_interval
            if AIRFLOW_V_3_0_PLUS and "triggered_by" not in kwargs:
                kwargs["triggered_by"] = DagRunTriggeredByType.TEST

            self.dag_run = dag.create_dagrun(**kwargs)
            for ti in self.dag_run.task_instances:
                ti.refresh_from_task(dag.get_task(ti.task_id))
            if self.want_serialized:
                self.session.commit()
            return self.dag_run

        def create_dagrun_after(self, dagrun, **kwargs):
            next_info = self.dag.next_dagrun_info(self.dag.get_run_data_interval(dagrun))
            if next_info is None:
                raise ValueError(f"cannot create run after {dagrun}")
            return self.create_dagrun(
                execution_date=next_info.logical_date,
                data_interval=next_info.data_interval,
                **kwargs,
            )
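
        # A hedged usage sketch, chaining runs so the second follows the first's data interval:
        #
        #     dr1 = dag_maker.create_dagrun()
        #     dr2 = dag_maker.create_dagrun_after(dr1, run_type=DagRunType.SCHEDULED)
        #
        # (variable names are illustrative; DagRunType comes from airflow.utils.types and the
        # extra kwargs are forwarded to create_dagrun above).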

        def __call__(
            self,
            dag_id="test_dag",
            schedule=timedelta(days=1),
            serialized=want_serialized,
            fileloc=None,
            processor_subdir=None,
            session=None,
            **kwargs,
        ):
            from airflow import settings
            from airflow.models.dag import DAG
            from airflow.utils import timezone

            if session is None:
                self._own_session = True
                session = settings.Session()

            self.kwargs = kwargs
            self.session = session
            self.start_date = self.kwargs.get("start_date", None)
            default_args = kwargs.get("default_args", None)
            if default_args and not self.start_date:
                if "start_date" in default_args:
                    self.start_date = default_args.get("start_date")
            if not self.start_date:
                if hasattr(request.module, "DEFAULT_DATE"):
                    self.start_date = getattr(request.module, "DEFAULT_DATE")
                else:
                    DEFAULT_DATE = timezone.datetime(2016, 1, 1)
                    self.start_date = DEFAULT_DATE
            self.kwargs["start_date"] = self.start_date
            # Set schedule argument to explicitly set value, or a default if no
            # other scheduling arguments are set.
            self.dag = DAG(dag_id, schedule=schedule, **self.kwargs)
            self.dag.fileloc = fileloc or request.module.__file__
            self.want_serialized = serialized
            self.processor_subdir = processor_subdir

            return self

        def cleanup(self):
            from airflow.models import DagModel, DagRun, TaskInstance, XCom
            from airflow.models.dataset import DatasetEvent
            from airflow.models.serialized_dag import SerializedDagModel
            from airflow.models.taskmap import TaskMap
            from airflow.utils.retries import run_with_db_retries

            for attempt in run_with_db_retries(logger=self.log):
                with attempt:
                    dag_ids = list(self.dagbag.dag_ids)
                    if not dag_ids:
                        return
                    # To isolate problems here with problems from elsewhere on the session object
                    self.session.rollback()

                    self.session.query(SerializedDagModel).filter(
                        SerializedDagModel.dag_id.in_(dag_ids)
                    ).delete(synchronize_session=False)
                    self.session.query(DagRun).filter(DagRun.dag_id.in_(dag_ids)).delete(
                        synchronize_session=False,
                    )
                    self.session.query(TaskInstance).filter(TaskInstance.dag_id.in_(dag_ids)).delete(
                        synchronize_session=False,
                    )
                    self.session.query(XCom).filter(XCom.dag_id.in_(dag_ids)).delete(
                        synchronize_session=False,
                    )