diff --git a/tests/api/common/test_delete_dag.py b/tests/api/common/test_delete_dag.py
index d9dc0b0a01c7f..e18aceb2d80d4 100644
--- a/tests/api/common/test_delete_dag.py
+++ b/tests/api/common/test_delete_dag.py
@@ -19,11 +19,12 @@
 import pytest
 
+import datetime
+import pendulum
 from airflow import models
 from airflow.api.common.delete_dag import delete_dag
 from airflow.exceptions import AirflowException, DagNotFound
 from airflow.operators.dummy import DummyOperator
-from airflow.utils.dates import days_ago
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
@@ -73,11 +74,11 @@ def setup_dag_models(self, for_sub_dag=False):
 
         task = DummyOperator(
             task_id='dummy',
-            dag=models.DAG(dag_id=self.key, default_args={'start_date': days_ago(2)}),
+            dag=models.DAG(dag_id=self.key, default_args={'start_date': pendulum.datetime(2022, 1, 1)}),
             owner='airflow',
         )
 
-        test_date = days_ago(1)
+        test_date = pendulum.datetime(2022, 1, 1)
         with create_session() as session:
             session.add(DM(dag_id=self.key, fileloc=self.dag_file_path, is_subdag=for_sub_dag))
             dr = DR(dag_id=self.key, run_type=DagRunType.MANUAL, run_id="test", execution_date=test_date)
diff --git a/tests/api/common/test_mark_tasks.py b/tests/api/common/test_mark_tasks.py
index fd14a77c1f06e..c57f93150c956 100644
--- a/tests/api/common/test_mark_tasks.py
+++ b/tests/api/common/test_mark_tasks.py
@@ -16,7 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 
-from datetime import timedelta
+from datetime import datetime, timedelta
 
 import pytest
 from sqlalchemy.orm import eagerload
@@ -32,7 +32,6 @@
 )
 from airflow.models import DagRun
 from airflow.utils import timezone
-from airflow.utils.dates import days_ago
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
@@ -58,7 +57,7 @@ def create_dags(cls, dagbag):
         cls.dag1 = dagbag.get_dag('miscellaneous_test_dag')
         cls.dag2 = dagbag.get_dag('example_subdag_operator')
         cls.dag3 = dagbag.get_dag('example_trigger_target_dag')
-        cls.execution_dates = [days_ago(2), days_ago(1)]
+        cls.execution_dates = [timezone.datetime(2022, 1, 1), timezone.datetime(2022, 1, 2)]
         start_date3 = cls.dag3.start_date
         cls.dag3_execution_dates = [
             start_date3,
@@ -425,7 +424,7 @@ def setup_class(cls):
         cls.dag1.sync_to_db()
         cls.dag2 = dagbag.dags['example_subdag_operator']
         cls.dag2.sync_to_db()
-        cls.execution_dates = [days_ago(2), days_ago(1), days_ago(0)]
+        cls.execution_dates = [timezone.datetime(2022, 1, 1), timezone.datetime(2022, 1, 2), timezone.datetime(2022, 1, 3)]
 
     def setup_method(self):
         clear_db_runs()
diff --git a/tests/api_connexion/endpoints/test_extra_link_endpoint.py b/tests/api_connexion/endpoints/test_extra_link_endpoint.py
index 4d53119ebdb7c..5e48da5cf764e 100644
--- a/tests/api_connexion/endpoints/test_extra_link_endpoint.py
+++ b/tests/api_connexion/endpoints/test_extra_link_endpoint.py
@@ -16,7 +16,7 @@
 # under the License.
 import os
 from urllib.parse import quote_plus
-
+import pendulum
 import pytest
 from parameterized import parameterized
@@ -29,7 +29,6 @@
 from airflow.plugins_manager import AirflowPlugin
 from airflow.providers.google.cloud.operators.bigquery import BigQueryExecuteQueryOperator
 from airflow.security import permissions
-from airflow.utils.dates import days_ago
 from airflow.utils.timezone import datetime
 from airflow.utils.types import DagRunType
 from tests.test_utils.api_connexion_utils import create_user, delete_user
@@ -95,7 +94,7 @@ def _create_dag():
     with DAG(
         dag_id="TEST_DAG_ID",
         default_args=dict(
-            start_date=days_ago(2),
+            start_date=pendulum.datetime(2022, 1, 1),
         ),
     ) as dag:
         BigQueryExecuteQueryOperator(task_id="TEST_SINGLE_QUERY", sql="SELECT 1")
diff --git a/tests/cli/commands/test_task_command.py b/tests/cli/commands/test_task_command.py
index 3cf5bae7787bd..04d89e8aad266 100644
--- a/tests/cli/commands/test_task_command.py
+++ b/tests/cli/commands/test_task_command.py
@@ -30,6 +30,7 @@
 import pytest
 from parameterized import parameterized
 
+import datetime
 from airflow import DAG
 from airflow.cli import cli_parser
 from airflow.cli.commands import task_command
@@ -37,14 +38,13 @@
 from airflow.exceptions import AirflowException, DagRunNotFound
 from airflow.models import DagBag, DagRun, TaskInstance
 from airflow.utils import timezone
-from airflow.utils.dates import days_ago
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
 from tests.test_utils.config import conf_vars
 from tests.test_utils.db import clear_db_runs
 
-DEFAULT_DATE = days_ago(1)
+DEFAULT_DATE = datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc)
 ROOT_FOLDER = os.path.realpath(
     os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir)
 )
diff --git a/tests/dag_processing/test_processor.py b/tests/dag_processing/test_processor.py
index d8801c347400d..038111aa7cd96 100644
--- a/tests/dag_processing/test_processor.py
+++ b/tests/dag_processing/test_processor.py
@@ -22,7 +22,7 @@
 from unittest import mock
 from unittest.mock import MagicMock, patch
 from zipfile import ZipFile
-
+import pendulum
 import pytest
 
 from airflow import settings
@@ -34,7 +34,6 @@
 from airflow.models.taskinstance import SimpleTaskInstance
 from airflow.operators.dummy import DummyOperator
 from airflow.utils import timezone
-from airflow.utils.dates import days_ago
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
@@ -113,7 +112,7 @@ def test_dag_file_processor_sla_miss_callback(self, create_dummy_dag):
 
         # Create dag with a start of 1 day ago, but an sla of 0
         # so we'll already have an sla_miss on the books.
-        test_start_date = days_ago(1)
+        test_start_date = pendulum.datetime(2022, 1, 1)
         dag, task = create_dummy_dag(
             dag_id='test_sla_miss',
             task_id='dummy',
@@ -142,7 +141,7 @@ def test_dag_file_processor_sla_miss_callback_invalid_sla(self, create_dummy_dag
         # Create dag with a start of 1 day ago, but an sla of 0
         # so we'll already have an sla_miss on the books.
         # Pass anything besides a timedelta object to the sla argument.
-        test_start_date = days_ago(1)
+        test_start_date = pendulum.datetime(2022, 1, 1)
         dag, task = create_dummy_dag(
             dag_id='test_sla_miss',
             task_id='dummy',
@@ -170,7 +169,7 @@ def test_dag_file_processor_sla_miss_callback_sent_notification(self, create_dum
 
         # Create dag with a start of 2 days ago, but an sla of 1 day
         # ago so we'll already have an sla_miss on the books
-        test_start_date = days_ago(2)
+        test_start_date = pendulum.datetime(2022, 1, 1)
         dag, task = create_dummy_dag(
             dag_id='test_sla_miss',
             task_id='dummy',
@@ -206,7 +205,7 @@ def test_dag_file_processor_sla_miss_doesnot_raise_integrity_error(self, dag_mak
 
         # Create dag with a start of 2 days ago, but an sla of 1 day
         # ago so we'll already have an sla_miss on the books
-        test_start_date = days_ago(2)
+        test_start_date = pendulum.datetime(2022, 1, 1)
         with dag_maker(
             dag_id='test_sla_miss',
             default_args={'start_date': test_start_date, 'sla': datetime.timedelta(days=1)},
@@ -247,7 +246,7 @@ def test_dag_file_processor_sla_miss_callback_exception(self, mock_stats_incr, c
 
         sla_callback = MagicMock(side_effect=RuntimeError('Could not call function'))
 
-        test_start_date = days_ago(2)
+        test_start_date = pendulum.datetime(2022, 1, 1)
         dag, task = create_dummy_dag(
             dag_id='test_sla_miss',
             task_id='dummy',
@@ -277,7 +276,7 @@ def test_dag_file_processor_only_collect_emails_from_sla_missed_tasks(
     ):
         session = settings.Session()
 
-        test_start_date = days_ago(2)
+        test_start_date = pendulum.datetime(2022, 1, 1)
         email1 = 'test1@test.com'
         dag, task = create_dummy_dag(
             dag_id='test_sla_miss',
@@ -317,7 +316,7 @@ def test_dag_file_processor_sla_miss_email_exception(
         # Mock the callback function so we can verify that it was not called
         mock_send_email.side_effect = RuntimeError('Could not send an email')
 
-        test_start_date = days_ago(2)
+        test_start_date = pendulum.datetime(2022, 1, 1)
         dag, task = create_dummy_dag(
             dag_id='test_sla_miss',
             task_id='dummy',
@@ -347,7 +346,7 @@ def test_dag_file_processor_sla_miss_deleted_task(self, create_dummy_dag):
         """
         session = settings.Session()
 
-        test_start_date = days_ago(2)
+        test_start_date = pendulum.datetime(2022, 1, 1)
         dag, task = create_dummy_dag(
             dag_id='test_sla_miss',
             task_id='dummy',
diff --git a/tests/dags/test_default_views.py b/tests/dags/test_default_views.py
index f30177f356f5c..984c9c1422234 100644
--- a/tests/dags/test_default_views.py
+++ b/tests/dags/test_default_views.py
@@ -16,9 +16,9 @@
 # specific language governing permissions and limitations
 # under the License.
 from airflow.models import DAG
-from airflow.utils.dates import days_ago
+import pendulum
 
-args = {'owner': 'airflow', 'retries': 3, 'start_date': days_ago(2)}
+args = {'owner': 'airflow', 'retries': 3, 'start_date': pendulum.datetime(2022, 1, 1)}
 
 tree_dag = DAG(
     dag_id='test_tree_view',
diff --git a/tests/dags/test_example_bash_operator.py b/tests/dags/test_example_bash_operator.py
index 5daafc2204bb1..02407c448a766 100644
--- a/tests/dags/test_example_bash_operator.py
+++ b/tests/dags/test_example_bash_operator.py
@@ -15,14 +15,13 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from datetime import timedelta
-
+from datetime import datetime, timedelta
+import pendulum
 from airflow.models import DAG
 from airflow.operators.bash import BashOperator
 from airflow.operators.dummy import DummyOperator
-from airflow.utils.dates import days_ago
 
-args = {'owner': 'airflow', 'retries': 3, 'start_date': days_ago(2)}
+args = {'owner': 'airflow', 'retries': 3, 'start_date': pendulum.datetime(2022, 1, 1)}
 
 dag = DAG(
     dag_id='test_example_bash_operator',
diff --git a/tests/jobs/test_backfill_job.py b/tests/jobs/test_backfill_job.py
index 40593d526a328..745214f389411 100644
--- a/tests/jobs/test_backfill_job.py
+++ b/tests/jobs/test_backfill_job.py
@@ -41,7 +41,6 @@
 from airflow.models.taskinstance import TaskInstanceKey
 from airflow.operators.dummy import DummyOperator
 from airflow.utils import timezone
-from airflow.utils.dates import days_ago
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.timeout import timeout
@@ -1532,8 +1531,8 @@ def test_mapped_dag(self, dag_id):
 
         job = BackfillJob(
             dag=dag,
-            start_date=days_ago(1),
-            end_date=days_ago(1),
+            start_date=timezone.datetime(2022, 1, 1),
+            end_date=timezone.datetime(2022, 1, 1),
             donot_pickle=True,
             executor=SequentialExecutor(),
         )
diff --git a/tests/operators/test_python.py b/tests/operators/test_python.py
index 3bf24929bd047..46b5959656f2a 100644
--- a/tests/operators/test_python.py
+++ b/tests/operators/test_python.py
@@ -25,7 +25,7 @@
 from datetime import date, datetime, timedelta
 from subprocess import CalledProcessError
 from typing import List
-
+import pendulum
 import pytest
 
 from airflow.exceptions import AirflowException
@@ -42,7 +42,6 @@
 )
 from airflow.utils import timezone
 from airflow.utils.context import AirflowContextDeprecationWarning, Context
-from airflow.utils.dates import days_ago
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.trigger_rule import TriggerRule
@@ -1134,7 +1133,7 @@ def f():
 DEFAULT_ARGS = {
     "owner": "test",
     "depends_on_past": True,
-    "start_date": days_ago(1),
+    "start_date": pendulum.datetime(2022, 1, 1),
     "end_date": datetime.today(),
     "schedule_interval": "@once",
     "retries": 1,
diff --git a/tests/test_utils/perf/dags/perf_dag_1.py b/tests/test_utils/perf/dags/perf_dag_1.py
index 3757c7d40e092..83c0590b69d32 100644
--- a/tests/test_utils/perf/dags/perf_dag_1.py
+++ b/tests/test_utils/perf/dags/perf_dag_1.py
@@ -22,11 +22,11 @@
 from airflow.models import DAG
 from airflow.operators.bash_operator import BashOperator
-from airflow.utils.dates import days_ago
+import pendulum
 
 args = {
     'owner': 'airflow',
-    'start_date': days_ago(3),
+    'start_date': pendulum.datetime(2022, 1, 1),
 }
 
 dag = DAG(
diff --git a/tests/test_utils/perf/dags/perf_dag_2.py b/tests/test_utils/perf/dags/perf_dag_2.py
index 208ea49b927df..d5889332b09d1 100644
--- a/tests/test_utils/perf/dags/perf_dag_2.py
+++ b/tests/test_utils/perf/dags/perf_dag_2.py
@@ -22,11 +22,11 @@
 from airflow.models import DAG
 from airflow.operators.bash import BashOperator
-from airflow.utils.dates import days_ago
+import pendulum
 
 args = {
     'owner': 'airflow',
-    'start_date': days_ago(3),
+    'start_date': pendulum.datetime(2022, 1, 1),
 }
 
 dag = DAG(
diff --git a/tests/utils/test_task_group.py b/tests/utils/test_task_group.py
index 3238aec0763a7..82df6a873aafc 100644
--- a/tests/utils/test_task_group.py
+++ b/tests/utils/test_task_group.py
@@ -26,7 +26,6 @@
 from airflow.operators.bash import BashOperator
 from airflow.operators.dummy import DummyOperator
 from airflow.operators.python import PythonOperator
-from airflow.utils.dates import days_ago
 from airflow.utils.task_group import MappedTaskGroup, TaskGroup
 from airflow.www.views import dag_edges, task_group_to_dict
 from tests.models import DEFAULT_DATE
@@ -1027,7 +1026,7 @@ def add_one(i):
     def increment(num):
         return num + 1
 
-    @dag(schedule_interval=None, start_date=days_ago(1), default_args={"owner": "airflow"})
+    @dag(schedule_interval=None, start_date=pendulum.datetime(2022, 1, 1), default_args={"owner": "airflow"})
     def wrap():
         total_1 = one()
         assert isinstance(total_1, XComArg)
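For reference, a minimal sketch (not part of the patch itself) of the substitution pattern these changes rely on: `days_ago(n)` produced a timezone-aware UTC timestamp, so a pinned replacement should also be timezone-aware. `pendulum.datetime(...)` and `airflow.utils.timezone.datetime(...)` are aware by default, while a bare `datetime.datetime(...)` is naive unless given an explicit `tzinfo`. The helpers below are standard Airflow/pendulum APIs; the specific dates are arbitrary.

```python
# Illustrative sketch only -- assumes Airflow 2.x with pendulum installed.
import datetime

import pendulum

from airflow.utils import timezone

# Old style: dynamic and timezone-aware (UTC midnight, n days ago).
# from airflow.utils.dates import days_ago
# start_date = days_ago(2)

# Pinned replacements, both timezone-aware in UTC:
start_date = pendulum.datetime(2022, 1, 1)
execution_date = timezone.datetime(2022, 1, 1)

# A bare stdlib datetime is naive; attach a tzinfo if it will be stored as an
# execution date or compared against aware values:
aware = datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc)

assert start_date == execution_date == aware
assert datetime.datetime(2022, 1, 1).tzinfo is None  # naive, not equivalent
```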