From: GitBox
To: commits@airflow.apache.org
Reply-To: dev@airflow.apache.org
Subject: [GitHub] [airflow] potiuk commented on a change in pull request #6266: [AIRFLOW-2439] Production Docker image support including refactoring of build scripts
Message-ID: <157098867328.31099.9941010934362312679.gitbox@gitbox.apache.org>
Date: Sun, 13 Oct 2019 17:44:33 -0000

potiuk commented on a change in pull request #6266: [AIRFLOW-2439] Production Docker image support including refactoring of build scripts
URL: https://github.com/apache/airflow/pull/6266#discussion_r334288055

##########
File path: scripts/ci/utils/_init.sh
##########

@@ -0,0 +1,322 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Assumes AIRFLOW_SOURCES is set to point to the sources of Airflow
+
+#
+# Sets verbosity of the shell in case VERBOSE is true
+# Input: VERBOSE
+#
+function _set_verbosity() {
+    if [[ ${VERBOSE:=} == "true" ]]; then
+        set -x
+    else
+        set +x
+    fi
+}
+
+# Creates temp directories
+function _create_ignored_directories() {
+    mkdir -p "${AIRFLOW_SOURCES}/.mypy_cache"
+    mkdir -p "${AIRFLOW_SOURCES}/logs"
+    mkdir -p "${AIRFLOW_SOURCES}/tmp"
+}
+
+function _set_python_version_for_default_image() {
+    PYTHON_VERSION_FOR_DEFAULT_IMAGE="${PYTHON_VERSION_FOR_DEFAULT_IMAGE:="3.6"}"
+    export PYTHON_VERSION_FOR_DEFAULT_IMAGE
+}
+#
+# Creates the cache directory where we keep temporary files needed for the build
+#
+# This directory will be automatically deleted when the script is killed or exits (via trap),
+# unless the SKIP_CACHE_DELETION variable is set. You can set this variable and then inspect
+# the output/files generated by the scripts in this directory.
+#
+# Most useful is the out.log file in this directory, storing verbose output of the scripts.
+#
+# Output: CACHE_TMP_FILE_DIR, OUTPUT_LOG
+function _create_temp_cache_directory() {
+    CACHE_TMP_FILE_DIR=$(mktemp -d)
+    export CACHE_TMP_FILE_DIR
+
+    if [[ ${SKIP_CACHE_DELETION:=} != "true" ]]; then
+        trap 'rm -rf -- "${CACHE_TMP_FILE_DIR}"' INT TERM HUP
+    fi
+
+    OUTPUT_LOG="${CACHE_TMP_FILE_DIR}/out.log"
+    export OUTPUT_LOG
+}
+
+#
+# Removes the temporary cache directory
+#
+function _remove_temp_cache_directory() {
+    if [[ -n "${CACHE_TMP_FILE_DIR}" ]]; then
+        rm -rf -- "${CACHE_TMP_FILE_DIR}"
+    fi
+}
+
+#
+# Sets up the cache variable and creates the cache directory if needed
+#
+# Input: AIRFLOW_SOURCES
+# Output: BUILD_CACHE_DIR
+function _setup_cache_directory() {
+    BUILD_CACHE_DIR="${AIRFLOW_SOURCES}/.build"
+    export BUILD_CACHE_DIR
+    mkdir -p "${BUILD_CACHE_DIR}"
+}
+
+#
+# Sets up locally built images as an array
+#
+# Output: LOCALLY_BUILT_IMAGES (array)
+function _setup_locally_build_images() {
+    LOCALLY_BUILT_IMAGES=("CI" "CHECKLICENCE")
+    export LOCALLY_BUILT_IMAGES
+}
+
+#
+# Sets up the files that should be checked to decide about rebuilding
+# Output: FILES_FOR_REBUILD_CHECK (array)
+function _setup_files_for_rebuild() {
+    FILES_FOR_REBUILD_CHECK=(
+        "setup.py"
+        "setup.cfg"
+        "Dockerfile"
+        ".dockerignore"
+        "airflow/version.py"
+        "airflow/www/package.json"
+        "airflow/www/package-lock.json"
+    )
+    export FILES_FOR_REBUILD_CHECK
+}
+
+#
+# Sets up the ports forwarded when entering the container
+# Output: WEBSERVER_HOST_PORT, POSTGRES_HOST_PORT, MYSQL_HOST_PORT
+#
+function _setup_forwarded_ports() {
+    export WEBSERVER_HOST_PORT=${WEBSERVER_HOST_PORT:="28080"}
+    export POSTGRES_HOST_PORT=${POSTGRES_HOST_PORT:="25433"}
+    export MYSQL_HOST_PORT=${MYSQL_HOST_PORT:="23306"}
+}
+
+#
+# Checks whether the Docker cache is going to be used
+function _check_if_cache_used() {
+    # You can set AIRFLOW_CONTAINER_USE_CACHE to false if you do not want to use the standard
+    # Docker cache during the build. This way you can test building everything from scratch.
+    AIRFLOW_CONTAINER_USE_CACHE=${AIRFLOW_CONTAINER_USE_CACHE:="true"}
+
+    # You can set AIRFLOW_CONTAINER_USE_LOCAL_DOCKER_CACHE to true if you do not want to use
+    # pulled images at all and rely on the local cache instead.
+    AIRFLOW_CONTAINER_USE_LOCAL_DOCKER_CACHE=${AIRFLOW_CONTAINER_USE_LOCAL_DOCKER_CACHE:="false"}
+}
+
+#
+# Checks if the PROD build is enabled
+function _check_if_prod_build_enabled() {
+    # You can set AIRFLOW_CONTAINER_BUILD_PROD_IMAGE to true if you want to enable the PROD build.
+    # The PROD build takes a lot of time, so it is disabled by default.
+    AIRFLOW_CONTAINER_BUILD_PROD_IMAGE=${AIRFLOW_CONTAINER_BUILD_PROD_IMAGE:="false"}
+}
+
+#
+# Checks if the core utils required on the host system are installed, explains what needs
+# to be done if they are missing, and exits in that case
+function _check_if_coreutils_installed() {
+    set +e
+    getopt -T >/dev/null
+    local GETOPT_RETVAL=$?
+
+    if [[ $(uname -s) == 'Darwin' ]] ; then
+        command -v gstat >/dev/null
+        local STAT_PRESENT=$?
+    else
+        command -v stat >/dev/null
+        local STAT_PRESENT=$?
+    fi
+
+    command -v md5sum >/dev/null
+    local MD5SUM_PRESENT=$?
+
+    set -e
+
+    local CMDNAME
+    CMDNAME="$(basename -- "$0")"
+
+    #################### Parsing options/arguments
+    if [[ ${GETOPT_RETVAL} != 4 || "${STAT_PRESENT}" != "0" || "${MD5SUM_PRESENT}" != "0" ]]; then
+        echo
+        if [[ $(uname -s) == 'Darwin' ]] ; then
+            echo >&2 "You are running ${CMDNAME} in an OSX environment"
+            echo >&2 "and you need to install the GNU commands"
+            echo >&2
+            echo >&2 "Run 'brew install gnu-getopt coreutils'"
+            echo >&2
+            echo >&2 "Then link gnu-getopt to become the default, as suggested by brew."

Review comment:
   :D yeah .... Though I am just about to switch to a Pixelbook for local development. Keyboard problems and overall stability issues on the Mac have gotten much worse.
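
The hunk above only defines helper functions; the CI entrypoint that sources them is not part of this hunk. A minimal sketch of how such an init file could be consumed follows. The caller below is hypothetical and not part of the diff; it assumes AIRFLOW_SOURCES points at the repository root and that the helpers land at scripts/ci/utils/_init.sh.

    #!/usr/bin/env bash
    # Hypothetical caller (not part of the PR): composes the helpers defined above.
    set -euo pipefail

    AIRFLOW_SOURCES="${AIRFLOW_SOURCES:=$(pwd)}"
    export AIRFLOW_SOURCES

    . "${AIRFLOW_SOURCES}/scripts/ci/utils/_init.sh"

    _set_verbosity                    # honour VERBOSE=true by enabling set -x
    _check_if_coreutils_installed     # fail early on macOS without GNU getopt/coreutils
    _create_ignored_directories       # .mypy_cache, logs, tmp
    _create_temp_cache_directory      # exports CACHE_TMP_FILE_DIR and OUTPUT_LOG
    _setup_cache_directory            # exports BUILD_CACHE_DIR (.build)
    _setup_forwarded_ports            # exports webserver/postgres/mysql host ports
    _check_if_cache_used
    _check_if_prod_build_enabled

    echo "Verbose script output is collected in: ${OUTPUT_LOG}"

Note that the trap registered in _create_temp_cache_directory removes the temporary directory on INT/TERM/HUP, so a caller that wants to inspect out.log afterwards would need to export SKIP_CACHE_DELETION=true first.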