airflow-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From GitBox <...@apache.org>
Subject [GitHub] hugoprudente closed pull request #4230: [AIRFLOW-3066] Adding support for Parameters on AWS Batch Operator
Date Sat, 24 Nov 2018 01:18:26 GMT
hugoprudente closed pull request #4230: [AIRFLOW-3066] Adding support for Parameters on AWS
Batch Operator
URL: https://github.com/apache/incubator-airflow/pull/4230
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/airflow/contrib/example_dags/example_awsbatch_operator.py b/airflow/contrib/example_dags/example_awsbatch_operator.py
new file mode 100644
index 0000000000..6174873118
--- /dev/null
+++ b/airflow/contrib/example_dags/example_awsbatch_operator.py
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import airflow
+from airflow.utils.log.logging_mixin import LoggingMixin
+from airflow.models import DAG
+from datetime import timedelta
+
+log = LoggingMixin().log
+
+try:
+    # AWS Batch is optional, so not available in vanilla Airflow
+    # pip install apache-airflow[boto3]
+    from airflow.contrib.operators.awsbatch_operator import AWSBatchOperator
+
+    default_args = {
+        'owner': 'airflow',
+        'depends_on_past': False,
+        'start_date': airflow.utils.dates.days_ago(2),
+        'email': ['airflow@airflow.com'],
+        'email_on_failure': False,
+        'email_on_retry': False,
+        'retries': 1,
+        'retry_delay': timedelta(minutes=5),
+    }
+
+    dag = DAG(
+        'example_awsbatch_dag', default_args=default_args, schedule_interval=timedelta(1))
+
+    # vanilla example
+    t0 = AWSBatchOperator(
+        task_id='airflow-vanilla',
+        job_name='airflow-vanilla',
+        job_queue='airflow',
+        job_definition='airflow',
+        overrides={},
+        queue='airflow',
+        dag=dag)
+
+    # overrides example
+    t1 = AWSBatchOperator(
+        job_name='airflow-overrides',
+        task_id='airflow-overrides',
+        job_queue='airflow',
+        job_definition='airflow',
+        overrides={
+            "command": [
+                "echo",
+                "overrides"
+            ]
+        },
+        queue='airflow',
+        dag=dag)
+
+    # parameters example
+    t2 = AWSBatchOperator(
+        job_name='airflow-parameters',
+        task_id='airflow-parameters',
+        job_queue='airflow',
+        job_definition='airflow',
+        overrides={
+            "command": [
+                "echo",
+                "Ref::input"
+            ]
+        },
+        parameters={
+            "input": "Airflow2000"
+        },
+        queue='airflow',
+        dag=dag)
+
+    t0.set_upstream(t1)
+    t1.set_upstream(t2)
+
+except ImportError as e:
+    log.warn("Could not import AWSBatchOperator: " + str(e))
+    log.warn("Install AWS Batch dependencies with: "
+             "    pip install apache-airflow[boto3]")
diff --git a/airflow/contrib/operators/awsbatch_operator.py b/airflow/contrib/operators/awsbatch_operator.py
index 3c778e6e68..8852861a24 100644
--- a/airflow/contrib/operators/awsbatch_operator.py
+++ b/airflow/contrib/operators/awsbatch_operator.py
@@ -46,6 +46,10 @@ class AWSBatchOperator(BaseOperator):
         containerOverrides (templated):
         http://boto3.readthedocs.io/en/latest/reference/services/batch.html#submit_job
     :type overrides: dict
+    :param parameters: the same parameter that boto3 will receive on
+        parameters (templated):
+        http://boto3.readthedocs.io/en/latest/reference/services/batch.html#submit_job
+    :type parameters: dict
     :param max_retries: exponential backoff retries while waiter is not
         merged, 4200 = 48 hours
     :type max_retries: int
@@ -61,10 +65,11 @@ class AWSBatchOperator(BaseOperator):
     ui_color = '#c3dae0'
     client = None
     arn = None
-    template_fields = ('job_name', 'overrides',)
+    template_fields = ('job_name', 'overrides', 'parameters',)
 
     @apply_defaults
-    def __init__(self, job_name, job_definition, job_queue, overrides, max_retries=4200,
+    def __init__(self, job_name, job_definition, job_queue, overrides,
+                 parameters=None, max_retries=4200,
                  aws_conn_id=None, region_name=None, **kwargs):
         super(AWSBatchOperator, self).__init__(**kwargs)
 
@@ -74,6 +79,7 @@ def __init__(self, job_name, job_definition, job_queue, overrides, max_retries=4
         self.job_definition = job_definition
         self.job_queue = job_queue
         self.overrides = overrides
+        self.parameters = parameters
         self.max_retries = max_retries
 
         self.jobId = None
@@ -94,11 +100,21 @@ def execute(self, context):
         )
 
         try:
-            response = self.client.submit_job(
-                jobName=self.job_name,
-                jobQueue=self.job_queue,
-                jobDefinition=self.job_definition,
-                containerOverrides=self.overrides)
+            if self.parameters is None:
+                response = self.client.submit_job(
+                    jobName=self.job_name,
+                    jobQueue=self.job_queue,
+                    jobDefinition=self.job_definition,
+                    containerOverrides=self.overrides,
+                )
+            else:
+                response = self.client.submit_job(
+                    jobName=self.job_name,
+                    jobQueue=self.job_queue,
+                    jobDefinition=self.job_definition,
+                    containerOverrides=self.overrides,
+                    parameters=self.parameters,
+                )
 
             self.log.info('AWS Batch Job started: %s', response)
 
@@ -109,7 +125,8 @@ def execute(self, context):
 
             self._check_success_task()
 
-            self.log.info('AWS Batch Job has been successfully executed: %s', response)
+            self.log.info('AWS Batch Job has been successfully executed: %s',
+                          response)
         except Exception as e:
             self.log.info('AWS Batch Job has failed executed')
             raise AirflowException(e)
diff --git a/tests/contrib/operators/test_awsbatch_operator.py b/tests/contrib/operators/test_awsbatch_operator.py
index 4808574f23..6acbd4e48f 100644
--- a/tests/contrib/operators/test_awsbatch_operator.py
+++ b/tests/contrib/operators/test_awsbatch_operator.py
@@ -62,6 +62,7 @@ def test_init(self):
         self.assertEqual(self.batch.job_definition, 'hello-world')
         self.assertEqual(self.batch.max_retries, 5)
         self.assertEqual(self.batch.overrides, {})
+        self.assertEqual(self.batch.parameters, None)
         self.assertEqual(self.batch.region_name, 'eu-west-1')
         self.assertEqual(self.batch.aws_conn_id, None)
         self.assertEqual(self.batch.hook, self.aws_hook_mock.return_value)
@@ -69,7 +70,8 @@ def test_init(self):
         self.aws_hook_mock.assert_called_once_with(aws_conn_id=None)
 
     def test_template_fields_overrides(self):
-        self.assertEqual(self.batch.template_fields, ('job_name', 'overrides',))
+        self.assertEqual(self.batch.template_fields,
+                         ('job_name', 'overrides', 'parameters'))
 
     @mock.patch.object(AWSBatchOperator, '_wait_for_task_ended')
     @mock.patch.object(AWSBatchOperator, '_check_success_task')
@@ -77,10 +79,11 @@ def test_execute_without_failures(self, check_mock, wait_mock):
         client_mock = self.aws_hook_mock.return_value.get_client_type.return_value
         client_mock.submit_job.return_value = RESPONSE_WITHOUT_FAILURES
 
+        self.batch.parameters = None
         self.batch.execute(None)
 
-        self.aws_hook_mock.return_value.get_client_type.assert_called_once_with('batch',
-                                                                                region_name='eu-west-1')
+        self.aws_hook_mock.return_value.get_client_type.assert_called_once_with(
+            'batch', region_name='eu-west-1')
         client_mock.submit_job.assert_called_once_with(
             jobQueue='queue',
             jobName='51455483-c62c-48ac-9b88-53a6a725baa3',
@@ -99,8 +102,8 @@ def test_execute_with_failures(self):
         with self.assertRaises(AirflowException):
             self.batch.execute(None)
 
-        self.aws_hook_mock.return_value.get_client_type.assert_called_once_with('batch',
-                                                                                region_name='eu-west-1')
+        self.aws_hook_mock.return_value.get_client_type.assert_called_once_with(
+            'batch', region_name='eu-west-1')
         client_mock.submit_job.assert_called_once_with(
             jobQueue='queue',
             jobName='51455483-c62c-48ac-9b88-53a6a725baa3',
@@ -119,7 +122,8 @@ def test_wait_end_tasks(self):
         client_mock.get_waiter.return_value.wait.assert_called_once_with(
             jobs=['8ba9d676-4108-4474-9dca-8bbac1da9b19']
         )
-        self.assertEquals(sys.maxsize, client_mock.get_waiter.return_value.config.max_attempts)
+        self.assertEquals(
+            sys.maxsize, client_mock.get_waiter.return_value.config.max_attempts)
 
     def test_check_success_tasks_raises(self):
         client_mock = mock.Mock()
@@ -211,7 +215,8 @@ def test_check_success_task_not_raises(self):
         self.batch._check_success_task()
 
         # Ordering of str(dict) is not guaranteed.
-        client_mock.describe_jobs.assert_called_once_with(jobs=['8ba9d676-4108-4474-9dca-8bbac1da9b19'])
+        client_mock.describe_jobs.assert_called_once_with(
+            jobs=['8ba9d676-4108-4474-9dca-8bbac1da9b19'])
 
 
 if __name__ == '__main__':


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

Mime
View raw message