diff --git a/docs/apache-airflow-providers-airbyte/index.rst b/docs/apache-airflow-providers-airbyte/index.rst index a807de80b1f3d..e4a2c5803f3fa 100644 --- a/docs/apache-airflow-providers-airbyte/index.rst +++ b/docs/apache-airflow-providers-airbyte/index.rst @@ -38,7 +38,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-airbyte/operators/airbyte.rst b/docs/apache-airflow-providers-airbyte/operators/airbyte.rst index b67462734e486..68fd8c44cb987 100644 --- a/docs/apache-airflow-providers-airbyte/operators/airbyte.rst +++ b/docs/apache-airflow-providers-airbyte/operators/airbyte.rst @@ -45,14 +45,14 @@ return the ``job_id`` that should be pass to the AirbyteSensor. An example using the synchronous way: -.. exampleinclude:: /../../airflow/providers/airbyte/example_dags/example_airbyte_trigger_job.py +.. exampleinclude:: /../../tests/system/providers/airbyte/example_airbyte_trigger_job.py :language: python :start-after: [START howto_operator_airbyte_synchronous] :end-before: [END howto_operator_airbyte_synchronous] An example using the async way: -.. exampleinclude:: /../../airflow/providers/airbyte/example_dags/example_airbyte_trigger_job.py +.. exampleinclude:: /../../tests/system/providers/airbyte/example_airbyte_trigger_job.py :language: python :start-after: [START howto_operator_airbyte_asynchronous] :end-before: [END howto_operator_airbyte_asynchronous] diff --git a/docs/apache-airflow-providers-amazon/operators/s3.rst b/docs/apache-airflow-providers-amazon/operators/s3.rst index bbb13d976366c..4893997dc441a 100644 --- a/docs/apache-airflow-providers-amazon/operators/s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/s3.rst @@ -38,7 +38,7 @@ Create an Amazon S3 bucket To create an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CreateBucketOperator`. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_create_bucket] @@ -52,7 +52,7 @@ Delete an Amazon S3 bucket To delete an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteBucketOperator`. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_bucket] @@ -66,7 +66,7 @@ Set the tags for an Amazon S3 bucket To set the tags for an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3PutBucketTaggingOperator`. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_put_bucket_tagging] @@ -80,7 +80,7 @@ Get the tag of an Amazon S3 bucket To get the tag set associated with an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3GetBucketTaggingOperator`. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. 
exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_get_bucket_tagging] @@ -94,7 +94,7 @@ Delete the tags of an Amazon S3 bucket To delete the tags of an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteBucketTaggingOperator`. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_bucket_tagging] @@ -108,7 +108,7 @@ Create an Amazon S3 object To create a new (or replace) Amazon S3 object you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CreateObjectOperator`. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_create_object] @@ -123,7 +123,7 @@ To copy an Amazon S3 object from one bucket to another you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CopyObjectOperator`. The Amazon S3 connection used here needs to have access to both source and destination bucket/key. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_copy_object] @@ -137,7 +137,7 @@ Delete Amazon S3 objects To delete one or multiple Amazon S3 objects you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteObjectsOperator`. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_objects] @@ -153,7 +153,7 @@ To transform the data from one Amazon S3 object and save it to another object yo You can also apply an optional [Amazon S3 Select expression](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-glacier-select-sql-reference-select.html) to select the data you want to retrieve from ``source_s3_key`` using ``select_expression``. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_file_transform] @@ -169,7 +169,7 @@ To list all Amazon S3 prefixes within an Amazon S3 bucket you can use See `here `__ for more information about Amazon S3 prefixes. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_list_prefixes] @@ -184,7 +184,7 @@ To list all Amazon S3 objects within an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3ListOperator`. You can specify a ``prefix`` to filter the objects whose name begins with such prefix. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. 
exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_list] @@ -208,7 +208,7 @@ Please keep in mind, especially when used to check a large volume of keys, that To check one file: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_single_key] @@ -216,7 +216,7 @@ To check one file: To check multiple files: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_multiple_keys] @@ -236,13 +236,13 @@ multiple files can match one key. The list of matched S3 object attributes conta [{"Size": int}] -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_function_definition] :end-before: [END howto_sensor_s3_key_function_definition] -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_function] @@ -259,7 +259,7 @@ the inactivity period has passed with no increase in the number of objects you c Note, this sensor will not behave correctly in reschedule mode, as the state of the listed objects in the Amazon S3 bucket will be lost between rescheduled invocations. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_keys_unchanged] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/dynamodb_to_s3.rst b/docs/apache-airflow-providers-amazon/operators/transfer/dynamodb_to_s3.rst index 61727140a048c..3c10d1a278283 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/dynamodb_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/dynamodb_to_s3.rst @@ -48,7 +48,7 @@ To get more information visit: Example usage: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_dynamodb_to_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3] @@ -57,7 +57,7 @@ Example usage: To parallelize the replication, users can create multiple ``DynamoDBToS3Operator`` tasks using the ``TotalSegments`` parameter. For instance to replicate with parallelism of 2, create two tasks: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_dynamodb_to_s3.py +.. 
exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3_segmented] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/ftp_to_s3.rst b/docs/apache-airflow-providers-amazon/operators/transfer/ftp_to_s3.rst index eb4b63c6a5aba..f3dd9638a23bd 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/ftp_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/ftp_to_s3.rst @@ -41,7 +41,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_ftp_to_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_ftp_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_ftp_to_s3] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/gcs_to_s3.rst b/docs/apache-airflow-providers-amazon/operators/transfer/gcs_to_s3.rst index 6ad2efe169046..d6220a42ab354 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/gcs_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/gcs_to_s3.rst @@ -39,7 +39,7 @@ To copy data from a Google Cloud Storage bucket to an Amazon S3 bucket you can u Example usage: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_gcs_to_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_gcs_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_gcs_to_s3] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/google_api_to_s3.rst b/docs/apache-airflow-providers-amazon/operators/transfer/google_api_to_s3.rst index 50259b5e19c0b..79c09d1a4f536 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/google_api_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/google_api_to_s3.rst @@ -38,7 +38,7 @@ Google Sheets to Amazon S3 transfer operator This example loads data from Google Sheets and save it to an Amazon S3 file. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_google_api_sheets_to_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_google_api_sheets_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_sheets_to_s3] @@ -57,7 +57,7 @@ It searches for up to 50 videos (due to pagination) in a given time range (``YOUTUBE_VIDEO_PUBLISHED_AFTER``, ``YOUTUBE_VIDEO_PUBLISHED_BEFORE``) on a YouTube channel (``YOUTUBE_CHANNEL_ID``) saves the response in Amazon S3 and also pushes the data to xcom. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_google_api_youtube_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_youtube_search_to_s3] @@ -66,7 +66,7 @@ saves the response in Amazon S3 and also pushes the data to xcom. It passes over the YouTube IDs to the next request which then gets the information (``YOUTUBE_VIDEO_FIELDS``) for the requested videos and saves them in Amazon S3 (``S3_BUCKET_NAME``). -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py +.. 
exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_google_api_youtube_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_youtube_list_to_s3] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/imap_attachment_to_s3.rst b/docs/apache-airflow-providers-amazon/operators/transfer/imap_attachment_to_s3.rst index a104889595ce6..a8857ef622571 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/imap_attachment_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/imap_attachment_to_s3.rst @@ -38,7 +38,7 @@ Imap Attachment To Amazon S3 transfer operator To save an email attachment via IMAP protocol from an email server to an Amazon S3 Bucket you can use :class:`~airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator` -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_imap_attachment_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_imap_attachment_to_s3] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/local_to_s3.rst b/docs/apache-airflow-providers-amazon/operators/transfer/local_to_s3.rst index 549ad8e2b1c93..999ffb2841164 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/local_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/local_to_s3.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_local_to_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_local_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_local_to_s3] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/mongo_to_s3.rst b/docs/apache-airflow-providers-amazon/operators/transfer/mongo_to_s3.rst index aeab7083cbd32..f4d9d77c05ffd 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/mongo_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/mongo_to_s3.rst @@ -43,7 +43,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_mongo_to_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_mongo_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_mongo_to_s3] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_ftp.rst b/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_ftp.rst index 953ded33ba067..b3591c6bce5cf 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_ftp.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_ftp.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3_to_ftp.py +.. 
exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3_to_ftp.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_ftp] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_redshift.rst b/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_redshift.rst index cfc9d404f5949..96b548728b4c7 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_redshift.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_redshift.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3_to_redshift.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_redshift] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_sftp.rst b/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_sftp.rst index a1c9d4c1efe1f..6e793445c7ad1 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_sftp.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_sftp.rst @@ -43,7 +43,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_s3_to_sftp.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_s3_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_sftp] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/salesforce_to_s3.rst b/docs/apache-airflow-providers-amazon/operators/transfer/salesforce_to_s3.rst index 1f9725eb2f5c5..c3b619afa8400 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/salesforce_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/salesforce_to_s3.rst @@ -38,7 +38,7 @@ Extract data from Salesforce to Amazon S3 transfer operator The following example demonstrates a use case of extracting account data from a Salesforce instance and upload to an Amazon S3 bucket. -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_salesforce_to_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_salesforce_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_salesforce_to_s3] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/sftp_to_s3.rst b/docs/apache-airflow-providers-amazon/operators/transfer/sftp_to_s3.rst index 67f91c9edb408..356caa4201f12 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/sftp_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/sftp_to_s3.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_sftp_to_s3.py +.. 
exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_sftp_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_sftp_to_s3] diff --git a/docs/apache-airflow-providers-amazon/operators/transfer/sql_to_s3.rst b/docs/apache-airflow-providers-amazon/operators/transfer/sql_to_s3.rst index 0a75b7cd9c7ef..380cfc3571a43 100644 --- a/docs/apache-airflow-providers-amazon/operators/transfer/sql_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/transfer/sql_to_s3.rst @@ -44,7 +44,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../airflow/providers/amazon/aws/example_dags/example_sql_to_s3.py +.. exampleinclude:: /../../tests/system/providers/amazon/aws/s3/example_sql_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_sql_to_s3] diff --git a/airflow/providers/airbyte/example_dags/__init__.py b/tests/system/providers/airbyte/__init__.py similarity index 100% rename from airflow/providers/airbyte/example_dags/__init__.py rename to tests/system/providers/airbyte/__init__.py diff --git a/airflow/providers/airbyte/example_dags/example_airbyte_trigger_job.py b/tests/system/providers/airbyte/example_airbyte_trigger_job.py similarity index 83% rename from airflow/providers/airbyte/example_dags/example_airbyte_trigger_job.py rename to tests/system/providers/airbyte/example_airbyte_trigger_job.py index 55563ff5e03bd..f7b9bdf7002e8 100644 --- a/airflow/providers/airbyte/example_dags/example_airbyte_trigger_job.py +++ b/tests/system/providers/airbyte/example_airbyte_trigger_job.py @@ -18,14 +18,19 @@ """Example DAG demonstrating the usage of the AirbyteTriggerSyncOperator.""" +import os from datetime import datetime, timedelta from airflow import DAG from airflow.providers.airbyte.operators.airbyte import AirbyteTriggerSyncOperator from airflow.providers.airbyte.sensors.airbyte import AirbyteJobSensor +ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") +DAG_ID = "example_airbyte_operator" +CONN_ID = '15bc3800-82e4-48c3-a32d-620661273f28' + with DAG( - dag_id='example_airbyte_operator', + dag_id=DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), dagrun_timeout=timedelta(minutes=60), @@ -36,14 +41,14 @@ # [START howto_operator_airbyte_synchronous] sync_source_destination = AirbyteTriggerSyncOperator( task_id='airbyte_sync_source_dest_example', - connection_id='15bc3800-82e4-48c3-a32d-620661273f28', + connection_id=CONN_ID, ) # [END howto_operator_airbyte_synchronous] # [START howto_operator_airbyte_asynchronous] async_source_destination = AirbyteTriggerSyncOperator( task_id='airbyte_async_source_dest_example', - connection_id='15bc3800-82e4-48c3-a32d-620661273f28', + connection_id=CONN_ID, asynchronous=True, ) @@ -55,3 +60,8 @@ # Task dependency created via `XComArgs`: # async_source_destination >> airbyte_sensor + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/tests/providers/amazon/aws/operators/test_s3_bucket_tagging_system.py b/tests/system/providers/amazon/aws/s3/__init__.py similarity index 57% rename from tests/providers/amazon/aws/operators/test_s3_bucket_tagging_system.py rename to tests/system/providers/amazon/aws/s3/__init__.py index 4b6cbf66e5e48..217e5db960782 100644 --- a/tests/providers/amazon/aws/operators/test_s3_bucket_tagging_system.py +++ b/tests/system/providers/amazon/aws/s3/__init__.py @@ -15,24 +15,3 @@ # KIND, either 
express or implied. See the License for the # specific language governing permissions and limitations # under the License. -import pytest - -from tests.test_utils.amazon_system_helpers import AWS_DAG_FOLDER, AmazonSystemTest, provide_aws_context - -BUCKET_NAME = "AmazonSystemTestBucket" - - -@pytest.mark.backend("mysql", "postgres") -@pytest.mark.system("amazon.aws") -class ExampleDagsSystemTest(AmazonSystemTest): - @provide_aws_context() - def setUp(self): - super().setUp() - - @provide_aws_context() - def tearDown(self): - super().tearDown() - - @provide_aws_context() - def test_run_example_dag_bucket_tagging(self): - self.run_dag('s3_bucket_tagging_dag', AWS_DAG_FOLDER) diff --git a/airflow/providers/amazon/aws/example_dags/example_dynamodb_to_s3.py b/tests/system/providers/amazon/aws/s3/example_dynamodb_to_s3.py similarity index 89% rename from airflow/providers/amazon/aws/example_dags/example_dynamodb_to_s3.py rename to tests/system/providers/amazon/aws/s3/example_dynamodb_to_s3.py index 66334fc996522..f82dc58dcbe99 100644 --- a/airflow/providers/amazon/aws/example_dags/example_dynamodb_to_s3.py +++ b/tests/system/providers/amazon/aws/s3/example_dynamodb_to_s3.py @@ -20,13 +20,16 @@ from airflow import DAG from airflow.models.baseoperator import chain from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_dynamodb_to_s3' TABLE_NAME = environ.get('DYNAMO_TABLE_NAME', 'ExistingDynamoDbTableName') BUCKET_NAME = environ.get('S3_BUCKET_NAME', 'ExistingS3BucketName') with DAG( - dag_id='example_dynamodb_to_s3', + dag_id=DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), tags=['example'], @@ -70,3 +73,8 @@ # [END howto_transfer_dynamodb_to_s3_segmented] chain(backup_db, [backup_db_segment_1, backup_db_segment_2]) + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_ftp_to_s3.py b/tests/system/providers/amazon/aws/s3/example_ftp_to_s3.py similarity index 82% rename from airflow/providers/amazon/aws/example_dags/example_ftp_to_s3.py rename to tests/system/providers/amazon/aws/s3/example_ftp_to_s3.py index d01ca38810729..b51849d495e29 100644 --- a/airflow/providers/amazon/aws/example_dags/example_ftp_to_s3.py +++ b/tests/system/providers/amazon/aws/s3/example_ftp_to_s3.py @@ -21,12 +21,15 @@ from airflow import models from airflow.providers.amazon.aws.transfers.ftp_to_s3 import FTPToS3Operator +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_ftp_to_s3' S3_BUCKET = os.environ.get("S3_BUCKET", "test-bucket") S3_KEY = os.environ.get("S3_KEY", "key") with models.DAG( - "example_ftp_to_s3", + DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), catchup=False, @@ -40,3 +43,9 @@ replace=True, ) # [END howto_transfer_ftp_to_s3] + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_gcs_to_s3.py b/tests/system/providers/amazon/aws/s3/example_gcs_to_s3.py similarity index 81% rename from airflow/providers/amazon/aws/example_dags/example_gcs_to_s3.py rename to 
tests/system/providers/amazon/aws/s3/example_gcs_to_s3.py index d9d04c73ffa31..9126ad9aa4314 100644 --- a/airflow/providers/amazon/aws/example_dags/example_gcs_to_s3.py +++ b/tests/system/providers/amazon/aws/s3/example_gcs_to_s3.py @@ -20,12 +20,15 @@ from airflow import DAG from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_gcs_to_s3' BUCKET = os.getenv("BUCKET", "bucket") S3_KEY = os.getenv("S3_KEY", "s3:///") with DAG( - dag_id="example_gcs_to_s3", + dag_id=DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), tags=["example"], @@ -39,3 +42,9 @@ replace=True, ) # [END howto_transfer_gcs_to_s3] + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_glacier_to_gcs.py b/tests/system/providers/amazon/aws/s3/example_glacier_to_gcs.py similarity index 80% rename from airflow/providers/amazon/aws/example_dags/example_glacier_to_gcs.py rename to tests/system/providers/amazon/aws/s3/example_glacier_to_gcs.py index 2df40a7d0c37b..711c9b0064f2d 100644 --- a/airflow/providers/amazon/aws/example_dags/example_glacier_to_gcs.py +++ b/tests/system/providers/amazon/aws/s3/example_glacier_to_gcs.py @@ -21,13 +21,17 @@ from airflow.providers.amazon.aws.operators.glacier import GlacierCreateJobOperator from airflow.providers.amazon.aws.sensors.glacier import GlacierJobOperationSensor from airflow.providers.amazon.aws.transfers.glacier_to_gcs import GlacierToGCSOperator +from airflow.utils.trigger_rule import TriggerRule +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_glacier_to_gcs' VAULT_NAME = "airflow" BUCKET_NAME = os.environ.get("GLACIER_GCS_BUCKET_NAME", "gs://INVALID BUCKET NAME") OBJECT_NAME = os.environ.get("GLACIER_OBJECT", "example-text.txt") with DAG( - "example_glacier_to_gcs", + DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), # Override to match your needs catchup=False, @@ -56,7 +60,20 @@ # If chunk size is bigger than actual file size # then whole file will be downloaded chunk_size=1024, + trigger_rule=TriggerRule.ALL_DONE, ) # [END howto_transfer_glacier_to_gcs] create_glacier_job >> wait_for_operation_complete >> transfer_archive_to_gcs + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_sheets_to_s3.py b/tests/system/providers/amazon/aws/s3/example_google_api_sheets_to_s3.py similarity index 85% rename from airflow/providers/amazon/aws/example_dags/example_google_api_sheets_to_s3.py rename to tests/system/providers/amazon/aws/s3/example_google_api_sheets_to_s3.py index 7b6e4b291a66c..ce53f1b538a60 100644 --- a/airflow/providers/amazon/aws/example_dags/example_google_api_sheets_to_s3.py +++ b/tests/system/providers/amazon/aws/s3/example_google_api_sheets_to_s3.py @@ -24,13 +24,16 @@ from airflow import DAG from airflow.providers.amazon.aws.transfers.google_api_to_s3 
import GoogleApiToS3Operator +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_google_api_sheets_to_s3' GOOGLE_SHEET_ID = getenv("GOOGLE_SHEET_ID", "test-google-sheet-id") GOOGLE_SHEET_RANGE = getenv("GOOGLE_SHEET_RANGE", "test-google-sheet-range") S3_DESTINATION_KEY = getenv("S3_DESTINATION_KEY", "s3://test-bucket/key.json") with DAG( - dag_id="example_google_api_sheets_to_s3", + dag_id=DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), catchup=False, @@ -46,3 +49,9 @@ s3_destination_key=S3_DESTINATION_KEY, ) # [END howto_transfer_google_api_sheets_to_s3] + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py b/tests/system/providers/amazon/aws/s3/example_google_api_youtube_to_s3.py similarity index 93% rename from airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py rename to tests/system/providers/amazon/aws/s3/example_google_api_youtube_to_s3.py index 8909adbf41598..1fd3f18efde5b 100644 --- a/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py +++ b/tests/system/providers/amazon/aws/s3/example_google_api_youtube_to_s3.py @@ -41,7 +41,10 @@ from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_google_api_youtube_to_s3' YOUTUBE_CHANNEL_ID = getenv( "YOUTUBE_CHANNEL_ID", "UCSXwxpWZQ7XZ1WL3wqevChA" ) # Youtube channel "Apache Airflow" @@ -65,7 +68,7 @@ def transform_video_ids(**kwargs): with DAG( - dag_id="example_google_api_youtube_to_s3", + dag_id=DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), catchup=False, @@ -112,3 +115,9 @@ def transform_video_ids(**kwargs): # [END howto_transfer_google_api_youtube_list_to_s3] chain(task_video_ids_to_s3, task_transform_video_ids, task_video_data_to_s3) + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py b/tests/system/providers/amazon/aws/s3/example_imap_attachment_to_s3.py similarity index 85% rename from airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py rename to tests/system/providers/amazon/aws/s3/example_imap_attachment_to_s3.py index 357d92a6f5694..d4f644b68f83a 100644 --- a/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py +++ b/tests/system/providers/amazon/aws/s3/example_imap_attachment_to_s3.py @@ -25,7 +25,10 @@ from airflow import DAG from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_imap_attachment_to_s3' IMAP_ATTACHMENT_NAME = getenv("IMAP_ATTACHMENT_NAME", "test.txt") IMAP_MAIL_FOLDER = getenv("IMAP_MAIL_FOLDER", "INBOX") IMAP_MAIL_FILTER = getenv("IMAP_MAIL_FILTER", "All") @@ -33,7 +36,7 @@ S3_KEY = getenv("S3_KEY", "key") with DAG( - dag_id="example_imap_attachment_to_s3", + dag_id=DAG_ID, start_date=datetime(2021, 1, 1), 
schedule_interval=None, catchup=False, @@ -49,3 +52,9 @@ imap_mail_filter=IMAP_MAIL_FILTER, ) # [END howto_transfer_imap_attachment_to_s3] + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_local_to_s3.py b/tests/system/providers/amazon/aws/s3/example_local_to_s3.py similarity index 83% rename from airflow/providers/amazon/aws/example_dags/example_local_to_s3.py rename to tests/system/providers/amazon/aws/s3/example_local_to_s3.py index 05f9c74d7685b..cc3fe1df059a9 100644 --- a/airflow/providers/amazon/aws/example_dags/example_local_to_s3.py +++ b/tests/system/providers/amazon/aws/s3/example_local_to_s3.py @@ -21,12 +21,15 @@ from airflow import models from airflow.providers.amazon.aws.transfers.local_to_s3 import LocalFilesystemToS3Operator from airflow.utils.dates import datetime +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_local_to_s3' S3_BUCKET = os.environ.get("S3_BUCKET", "test-bucket") S3_KEY = os.environ.get("S3_KEY", "key") with models.DAG( - "example_local_to_s3", + DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), # Override to match your needs catchup=False, @@ -40,3 +43,9 @@ replace=True, ) # [END howto_transfer_local_to_s3] + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_mongo_to_s3.py b/tests/system/providers/amazon/aws/s3/example_mongo_to_s3.py similarity index 85% rename from airflow/providers/amazon/aws/example_dags/example_mongo_to_s3.py rename to tests/system/providers/amazon/aws/s3/example_mongo_to_s3.py index e95964b59cee1..12fd67aeb0ba7 100644 --- a/airflow/providers/amazon/aws/example_dags/example_mongo_to_s3.py +++ b/tests/system/providers/amazon/aws/s3/example_mongo_to_s3.py @@ -21,14 +21,17 @@ from airflow import models from airflow.providers.amazon.aws.transfers.mongo_to_s3 import MongoToS3Operator from airflow.utils.dates import datetime +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_mongo_to_s3' S3_BUCKET = os.environ.get("S3_BUCKET", "test-bucket") S3_KEY = os.environ.get("S3_KEY", "key") MONGO_DATABASE = os.environ.get("MONGO_DATABASE", "Test") MONGO_COLLECTION = os.environ.get("MONGO_COLLECTION", "Test") with models.DAG( - "example_mongo_to_s3", + DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), catchup=False, @@ -46,3 +49,9 @@ replace=True, ) # [END howto_transfer_mongo_to_s3] + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_s3.py b/tests/system/providers/amazon/aws/s3/example_s3.py similarity index 89% rename from airflow/providers/amazon/aws/example_dags/example_s3.py rename to tests/system/providers/amazon/aws/s3/example_s3.py index 4be800a9933ea..50c4d47cfdf78 100644 --- a/airflow/providers/amazon/aws/example_dags/example_s3.py +++ b/tests/system/providers/amazon/aws/s3/example_s3.py @@ -35,7 +35,11 @@ S3PutBucketTaggingOperator, ) from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor, S3KeysUnchangedSensor +from 
airflow.utils.trigger_rule import TriggerRule +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_s3' BUCKET_NAME = os.environ.get('BUCKET_NAME', 'test-airflow-12345') BUCKET_NAME_2 = os.environ.get('BUCKET_NAME_2', 'test-airflow-123456') KEY = os.environ.get('KEY', 'key') @@ -56,7 +60,7 @@ ) with DAG( - dag_id='example_s3', + dag_id=DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), catchup=False, @@ -102,6 +106,7 @@ def check_fn(files: List) -> bool: delete_tagging = S3DeleteBucketTaggingOperator( task_id='s3_delete_bucket_tagging', bucket_name=BUCKET_NAME, + trigger_rule=TriggerRule.ALL_DONE, ) # [END howto_operator_s3_delete_bucket_tagging] @@ -195,12 +200,16 @@ def check_fn(files: List) -> bool: task_id="s3_delete_objects", bucket=BUCKET_NAME_2, keys=KEY_2, + trigger_rule=TriggerRule.ALL_DONE, ) # [END howto_operator_s3_delete_objects] # [START howto_operator_s3_delete_bucket] delete_bucket = S3DeleteBucketOperator( - task_id='s3_delete_bucket', bucket_name=BUCKET_NAME, force_delete=True + task_id='s3_delete_bucket', + bucket_name=BUCKET_NAME, + force_delete=True, + trigger_rule=TriggerRule.ALL_DONE, ) # [END howto_operator_s3_delete_bucket] @@ -219,3 +228,14 @@ def check_fn(files: List) -> bool: delete_objects, delete_bucket, ) + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_s3_to_ftp.py b/tests/system/providers/amazon/aws/s3/example_s3_to_ftp.py similarity index 81% rename from airflow/providers/amazon/aws/example_dags/example_s3_to_ftp.py rename to tests/system/providers/amazon/aws/s3/example_s3_to_ftp.py index 6ebe2501c613b..c9438568a91b4 100644 --- a/airflow/providers/amazon/aws/example_dags/example_s3_to_ftp.py +++ b/tests/system/providers/amazon/aws/s3/example_s3_to_ftp.py @@ -21,12 +21,15 @@ from airflow import models from airflow.providers.amazon.aws.transfers.s3_to_ftp import S3ToFTPOperator +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_s3_to_ftp' S3_BUCKET = os.environ.get("S3_BUCKET", "test-bucket") S3_KEY = os.environ.get("S3_KEY", "key") with models.DAG( - "example_s3_to_ftp", + DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), catchup=False, @@ -39,3 +42,9 @@ s3_key=S3_KEY, ) # [END howto_transfer_s3_to_ftp] + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py b/tests/system/providers/amazon/aws/s3/example_s3_to_redshift.py similarity index 79% rename from airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py rename to tests/system/providers/amazon/aws/s3/example_s3_to_redshift.py index 82ae0660053f1..216d74586981c 100644 --- a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py +++ b/tests/system/providers/amazon/aws/s3/example_s3_to_redshift.py @@ -24,7 +24,11 @@ from airflow.providers.amazon.aws.hooks.s3 import S3Hook from airflow.providers.amazon.aws.operators.redshift_sql import 
RedshiftSQLOperator from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator +from airflow.utils.trigger_rule import TriggerRule +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_s3_to_redshift' S3_BUCKET_NAME = getenv("S3_BUCKET_NAME", "s3_bucket_name") S3_KEY = getenv("S3_KEY", "s3_filename") REDSHIFT_TABLE = getenv("REDSHIFT_TABLE", "redshift_table") @@ -36,7 +40,10 @@ def task_add_sample_data_to_s3(): s3_hook.load_string("0,Airflow", f'{S3_KEY}/{REDSHIFT_TABLE}', S3_BUCKET_NAME, replace=True) -@task(task_id='teardown__remove_sample_data_from_s3') +@task( + task_id='teardown__remove_sample_data_from_s3', + trigger_rule=TriggerRule.ALL_DONE, +) def task_remove_sample_data_from_s3(): s3_hook = S3Hook() if s3_hook.check_for_key(f'{S3_KEY}/{REDSHIFT_TABLE}', S3_BUCKET_NAME): @@ -44,7 +51,7 @@ def task_remove_sample_data_from_s3(): with DAG( - dag_id="example_s3_to_redshift", + dag_id=DAG_ID, start_date=datetime(2021, 1, 1), schedule_interval=None, catchup=False, @@ -69,6 +76,7 @@ def task_remove_sample_data_from_s3(): teardown__task_drop_table = RedshiftSQLOperator( sql=f'DROP TABLE IF EXISTS {REDSHIFT_TABLE}', task_id='teardown__drop_table', + trigger_rule=TriggerRule.ALL_DONE, ) remove_sample_data_from_s3 = task_remove_sample_data_from_s3() @@ -78,3 +86,15 @@ def task_remove_sample_data_from_s3(): task_transfer_s3_to_redshift, [teardown__task_drop_table, remove_sample_data_from_s3], ) + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_s3_to_sftp.py b/tests/system/providers/amazon/aws/s3/example_s3_to_sftp.py similarity index 82% rename from airflow/providers/amazon/aws/example_dags/example_s3_to_sftp.py rename to tests/system/providers/amazon/aws/s3/example_s3_to_sftp.py index d7983265e7697..e1d6a34784fdf 100644 --- a/airflow/providers/amazon/aws/example_dags/example_s3_to_sftp.py +++ b/tests/system/providers/amazon/aws/s3/example_s3_to_sftp.py @@ -20,12 +20,15 @@ from airflow import models from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_s3_to_sftp' S3_BUCKET = os.environ.get("S3_BUCKET", "test-bucket") S3_KEY = os.environ.get("S3_KEY", "key") with models.DAG( - "example_s3_to_sftp", + DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), catchup=False, @@ -38,3 +41,9 @@ s3_key=S3_KEY, ) # [END howto_transfer_s3_to_sftp] + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_salesforce_to_s3.py b/tests/system/providers/amazon/aws/s3/example_salesforce_to_s3.py similarity index 84% rename from airflow/providers/amazon/aws/example_dags/example_salesforce_to_s3.py rename to tests/system/providers/amazon/aws/s3/example_salesforce_to_s3.py index 735cda4d3af14..07d4694ec906e 100644 --- a/airflow/providers/amazon/aws/example_dags/example_salesforce_to_s3.py +++ 
b/tests/system/providers/amazon/aws/s3/example_salesforce_to_s3.py @@ -25,13 +25,16 @@ from airflow import DAG from airflow.providers.amazon.aws.transfers.salesforce_to_s3 import SalesforceToS3Operator +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_salesforce_to_s3' S3_BUCKET_NAME = getenv("S3_BUCKET_NAME", "s3_bucket_name") S3_KEY = getenv("S3_KEY", "s3_filename") with DAG( - dag_id="example_salesforce_to_s3", + dag_id=DAG_ID, schedule_interval=None, start_date=datetime(2021, 7, 8), catchup=False, @@ -47,3 +50,9 @@ replace=True, ) # [END howto_transfer_salesforce_to_s3] + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_sftp_to_s3.py b/tests/system/providers/amazon/aws/s3/example_sftp_to_s3.py similarity index 82% rename from airflow/providers/amazon/aws/example_dags/example_sftp_to_s3.py rename to tests/system/providers/amazon/aws/s3/example_sftp_to_s3.py index 0e2407a7d3546..cacb1677c7b50 100644 --- a/airflow/providers/amazon/aws/example_dags/example_sftp_to_s3.py +++ b/tests/system/providers/amazon/aws/s3/example_sftp_to_s3.py @@ -21,12 +21,15 @@ from airflow import models from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_sftp_to_s3' S3_BUCKET = os.environ.get("S3_BUCKET", "test-bucket") S3_KEY = os.environ.get("S3_KEY", "key") with models.DAG( - "example_sftp_to_s3", + DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), catchup=False, @@ -39,3 +42,9 @@ s3_key=S3_KEY, ) # [END howto_transfer_sftp_to_s3] + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/airflow/providers/amazon/aws/example_dags/example_sql_to_s3.py b/tests/system/providers/amazon/aws/s3/example_sql_to_s3.py similarity index 82% rename from airflow/providers/amazon/aws/example_dags/example_sql_to_s3.py rename to tests/system/providers/amazon/aws/s3/example_sql_to_s3.py index df2abee0f3052..ee30be4540d48 100644 --- a/airflow/providers/amazon/aws/example_dags/example_sql_to_s3.py +++ b/tests/system/providers/amazon/aws/s3/example_sql_to_s3.py @@ -21,13 +21,16 @@ from airflow import models from airflow.providers.amazon.aws.transfers.sql_to_s3 import SqlToS3Operator +from tests.system.providers.amazon.aws.utils import set_env_id +ENV_ID = set_env_id() +DAG_ID = 'example_sql_to_s3' S3_BUCKET = os.environ.get("S3_BUCKET", "test-bucket") S3_KEY = os.environ.get("S3_KEY", "key") SQL_QUERY = os.environ.get("SQL_QUERY", "SHOW tables") with models.DAG( - "example_sql_to_s3", + DAG_ID, schedule_interval=None, start_date=datetime(2021, 1, 1), catchup=False, @@ -42,3 +45,9 @@ replace=True, ) # [END howto_transfer_sql_to_s3] + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +test_run = get_test_run(dag) diff --git a/tests/system/providers/google/firebase/__init__.py b/tests/system/providers/google/firebase/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/firebase/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software 
Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/system/providers/google/suite/__init__.py b/tests/system/providers/google/suite/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/system/providers/google/suite/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
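The common thread across the example DAGs moved above is the small block of system-test boilerplate appended to each file: when a teardown-style task runs with ``trigger_rule=TriggerRule.ALL_DONE``, every task in the DAG is wired into a ``watcher()`` task (``list(dag.tasks) >> watcher()``) so a failed upstream task still fails the DAG run, and a module-level ``test_run = get_test_run(dag)`` lets pytest collect the DAG as a test. The real helpers live in ``tests/system/utils``; the sketch below is only an illustration of the watcher idea consistent with how it is used in this diff, not the exact implementation:

    # Hypothetical minimal watcher, assuming it mirrors its usage above: a teardown
    # task with trigger_rule=ALL_DONE always runs and succeeds, which would otherwise
    # let the DAG run be marked successful even when an upstream task failed.
    from airflow.decorators import task
    from airflow.exceptions import AirflowException
    from airflow.utils.trigger_rule import TriggerRule


    @task(trigger_rule=TriggerRule.ONE_FAILED, retries=0)
    def watcher():
        # Runs only if at least one watched task failed, and fails the DAG run itself.
        raise AirflowException("Failing task because one or more upstream tasks failed.")

With ``test_run = get_test_run(dag)`` in place, each example can then be run as an ordinary pytest test, for example ``pytest tests/system/providers/amazon/aws/s3/example_s3.py``, as described in ``tests/system/README.md#run_via_pytest``.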