From commits-return-27426-archive-asf-public=cust-asf.ponee.io@airflow.incubator.apache.org Wed Nov 7 17:41:32 2018
Return-Path:
X-Original-To: archive-asf-public@cust-asf.ponee.io
Delivered-To: archive-asf-public@cust-asf.ponee.io
Received: from mail.apache.org (hermes.apache.org [140.211.11.3])
    by mx-eu-01.ponee.io (Postfix) with SMTP id C0B6818067A
    for ; Wed, 7 Nov 2018 17:41:31 +0100 (CET)
Received: (qmail 10407 invoked by uid 500); 7 Nov 2018 16:41:30 -0000
Mailing-List: contact commits-help@airflow.incubator.apache.org; run by ezmlm
Precedence: bulk
List-Help:
List-Unsubscribe:
List-Post:
List-Id:
Reply-To: dev@airflow.incubator.apache.org
Delivered-To: mailing list commits@airflow.incubator.apache.org
Received: (qmail 10398 invoked by uid 99); 7 Nov 2018 16:41:30 -0000
Received: from ec2-52-202-80-70.compute-1.amazonaws.com (HELO gitbox.apache.org) (52.202.80.70)
    by apache.org (qpsmtpd/0.29) with ESMTP; Wed, 07 Nov 2018 16:41:30 +0000
From: GitBox
To: commits@airflow.apache.org
Subject: [GitHub] bkvarda commented on a change in pull request #4134: [AIRFLOW-3213] Create ADLS to GCS operator
Message-ID: <154160889028.10717.914950766588023246.gitbox@gitbox.apache.org>
Date: Wed, 07 Nov 2018 16:41:30 -0000
Content-Type: text/plain; charset=utf-8
Content-Transfer-Encoding: 8bit

bkvarda commented on a change in pull request #4134: [AIRFLOW-3213] Create ADLS to GCS operator
URL: https://github.com/apache/incubator-airflow/pull/4134#discussion_r231582800

##########
File path: airflow/contrib/operators/adls_to_gcs.py
##########

@@ -0,0 +1,144 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook
+from airflow.contrib.operators.adls_list_operator import AzureDataLakeStorageListOperator
+from airflow.utils.decorators import apply_defaults
+from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook, _parse_gcs_url
+
+import os
+from tempfile import NamedTemporaryFile
+
+
+class AdlsToGoogleCloudStorageOperator(AzureDataLakeStorageListOperator):
+    """
+    Synchronizes an Azure Data Lake Storage path with a GCS bucket.
+
+    :param path: The Azure Data Lake path to find the objects (templated)
+    :type path: str
+    :param dest_gcs: The Google Cloud Storage bucket and prefix to
+        store the objects. (templated)
+    :type dest_gcs: str
+    :param replace: If true, replaces same-named files in GCS
+    :type replace: bool
+    :param azure_data_lake_conn_id: The connection ID to use when
+        connecting to Azure Data Lake Storage.
+    :type azure_data_lake_conn_id: str
+    :param google_cloud_storage_conn_id: The connection ID to use when
+        connecting to Google Cloud Storage.
+    :type google_cloud_storage_conn_id: str
+    :param delegate_to: The account to impersonate, if any.
+        For this to work, the service account making the request must have
+        domain-wide delegation enabled.
+    :type delegate_to: str
+
+    **Examples**:
+        The following Operator would copy a single file named
+        ``hello/world.avro`` from ADLS to the GCS bucket ``mybucket``. Its full
+        resulting GCS path will be ``gs://mybucket/hello/world.avro``. ::
+
+            copy_single_file = AdlsToGoogleCloudStorageOperator(
+                task_id='copy_single_file',
+                path='hello/world.avro',
+                dest_gcs='gs://mybucket',
+                replace=False,
+                azure_data_lake_conn_id='azure_data_lake_default',
+                google_cloud_storage_conn_id='google_cloud_default'
+            )
+
+        The following Operator would copy all parquet files from ADLS
+        to the GCS bucket ``mybucket``. ::
+
+            copy_all_files = AdlsToGoogleCloudStorageOperator(
+                task_id='copy_all_files',
+                path='*.parquet',
+                dest_gcs='gs://mybucket',
+                replace=False,
+                azure_data_lake_conn_id='azure_data_lake_default',
+                google_cloud_storage_conn_id='google_cloud_default'
+            )
+
+        The following Operator would copy all parquet files from the ADLS
+        path ``/hello/world`` to the GCS bucket ``mybucket``. ::
+
+            copy_world_files = AdlsToGoogleCloudStorageOperator(
+                task_id='copy_world_files',
+                path='hello/world/*.parquet',
+                dest_gcs='gs://mybucket',
+                replace=False,
+                azure_data_lake_conn_id='azure_data_lake_default',
+                google_cloud_storage_conn_id='google_cloud_default'
+            )
+    """
+    template_fields = ('path', 'dest_gcs')
+    ui_color = '#f0eee4'
+
+    @apply_defaults
+    def __init__(self,
+                 path,
+                 dest_gcs=None,
+                 replace=False,
+                 azure_data_lake_conn_id='azure_data_lake_default',
+                 google_cloud_storage_conn_id='google_cloud_default',
+                 delegate_to=None,
+                 *args,
+                 **kwargs):
+
+        super(AdlsToGoogleCloudStorageOperator, self).__init__(
+            path=path,
+            azure_data_lake_conn_id=azure_data_lake_conn_id,
+            *args,
+            **kwargs
+        )
+        self.dest_gcs = dest_gcs
+        self.replace = replace
+        self.google_cloud_storage_conn_id = google_cloud_storage_conn_id
+        self.delegate_to = delegate_to
+
+    def execute(self, context):
+        # use the super to list all files in an Azure Data Lake path
+        files = super(AdlsToGoogleCloudStorageOperator, self).execute(context)
+        g_hook = GoogleCloudStorageHook(
+            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
+            delegate_to=self.delegate_to)
+
+        if not self.replace:
+            # if we are not replacing -> list all files in the ADLS path
+            # and only keep those files which are present in
+            # ADLS and not in Google Cloud Storage
+            bucket_name, prefix = _parse_gcs_url(self.dest_gcs)
+            existing_files = g_hook.list(bucket=bucket_name, prefix=prefix)
+            files = set(files) - set(existing_files)
+
+        if files:
+            hook = AzureDataLakeHook(
+                azure_data_lake_conn_id=self.azure_data_lake_conn_id
+            )
+
+            for file in files:
+                with NamedTemporaryFile(mode='wb', delete=True) as f:
+                    hook.download_file(local_path=f.name, remote_path=file)
+                    f.flush()
+                    dest_gcs_bucket, dest_gcs_prefix = _parse_gcs_url(self.dest_gcs)
+                    dest_path = os.path.join(dest_gcs_prefix, file)
+                    self.log.info("Saving file to %s", dest_path)
+
+                    g_hook.upload(bucket=dest_gcs_bucket, object=dest_path, filename=f.name)
+
+            self.log.info("All done, uploaded %d files to GCS", len(files))
+        else:
+            self.log.info("In sync, no files needed to be uploaded to GCS")
+
+        return files

Review comment:
   Since it's just a collection of strings, it would be pretty unlikely. XCom values are stored as sqlalchemy.LargeBinary, correct?
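For context on that question: because execute() returns the list of copied
file names, Airflow pushes that list to XCom as the task's return_value, so a
downstream task can pull it. A minimal sketch of that usage on Airflow
1.10-era APIs, assuming an existing dag object and the copy_all_files task
from the docstring example (the reporting task and its IDs are illustrative,
not part of this PR):

    from airflow.operators.python_operator import PythonOperator


    def report_copied_files(**context):
        # Pull whatever the copy task's execute() returned; Airflow stores the
        # returned value as that task instance's 'return_value' XCom.
        copied = context['ti'].xcom_pull(task_ids='copy_all_files')
        print("Copied {} file(s) from ADLS to GCS".format(len(copied or [])))


    report = PythonOperator(
        task_id='report_copied_files',
        python_callable=report_copied_files,
        provide_context=True,  # needed on Airflow 1.10.x to receive context kwargs
        dag=dag,
    )

    copy_all_files >> report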
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the URL above to go
to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services