spark-commits mailing list archives

From sro...@apache.org
Subject spark git commit: [BUILD] update release scripts
Date Sat, 09 Dec 2017 15:28:51 GMT
Repository: spark
Updated Branches:
  refs/heads/master 251b2c03b -> ab1b6ee73


[BUILD] update release scripts

## What changes were proposed in this pull request?

Change the release scripts to stage artifacts at dist.apache.org instead of the committer's home directory.
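
Concretely, artifacts are now published by committing them to the dist.apache.org dev tree over Subversion, rather than uploaded to home.apache.org with lftp over SFTP. A condensed sketch of the flow release-build.sh implements below (DEST_DIR_NAME is derived from SPARK_PACKAGE_VERSION):

  # Check out an empty working copy of the staging area, then add and commit the artifacts
  svn co --depth=empty https://dist.apache.org/repos/dist/dev/spark svn-spark
  mkdir -p "svn-spark/${DEST_DIR_NAME}-bin"
  cp spark-* pyspark-* SparkR_* "svn-spark/${DEST_DIR_NAME}-bin/"
  svn add "svn-spark/${DEST_DIR_NAME}-bin"
  cd svn-spark
  svn ci --username $ASF_USERNAME --password "$ASF_PASSWORD" -m"Apache Spark $SPARK_PACKAGE_VERSION"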
The SHA-512 checksum file should have a .sha512 extension. From the ASF release signing doc: "The checksum SHOULD be
generated using SHA-512. A .sha file SHOULD contain a SHA-1 checksum, for historical reasons."
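
For example, the packaging step now writes the SHA-512 digest to a file carrying the matching extension. A condensed sketch of the pattern used in release-build.sh below, where $GPG expands to gpg -u $GPG_KEY --no-tty --batch:

  # Write the SHA-512 checksum next to the tarball, with a .sha512 (not .sha) extension
  echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
    SHA512 spark-$SPARK_VERSION.tgz > spark-$SPARK_VERSION.tgz.sha512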

NOTE: I *think* this will require some changes to work with Jenkins' release build

## How was this patch tested?

Manually.

Author: Felix Cheung <felixcheung_m@hotmail.com>

Closes #19754 from felixcheung/releasescript.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ab1b6ee7
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ab1b6ee7
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ab1b6ee7

Branch: refs/heads/master
Commit: ab1b6ee73157fa5401b189924263795bfad919c9
Parents: 251b2c0
Author: Felix Cheung <felixcheung_m@hotmail.com>
Authored: Sat Dec 9 09:28:46 2017 -0600
Committer: Sean Owen <sowen@cloudera.com>
Committed: Sat Dec 9 09:28:46 2017 -0600

----------------------------------------------------------------------
 dev/create-release/release-build.sh | 122 ++++++++++++++-----------------
 dev/create-release/release-tag.sh   |   8 +-
 2 files changed, 60 insertions(+), 70 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/ab1b6ee7/dev/create-release/release-build.sh
----------------------------------------------------------------------
diff --git a/dev/create-release/release-build.sh b/dev/create-release/release-build.sh
index 5b43f9b..c711374 100755
--- a/dev/create-release/release-build.sh
+++ b/dev/create-release/release-build.sh
@@ -23,23 +23,19 @@ usage: release-build.sh <package|docs|publish-snapshot|publish-release>
 Creates build deliverables from a Spark commit.
 
 Top level targets are
-  package: Create binary packages and copy them to home.apache
-  docs: Build docs and copy them to home.apache
+  package: Create binary packages and commit them to dist.apache.org/repos/dist/dev/spark/
+  docs: Build docs and commit them to dist.apache.org/repos/dist/dev/spark/
   publish-snapshot: Publish snapshot release to Apache snapshots
   publish-release: Publish a release to Apache release repo
 
 All other inputs are environment variables
 
 GIT_REF - Release tag or commit to build from
-SPARK_VERSION - Version of Spark being built (e.g. 2.1.2)
 SPARK_PACKAGE_VERSION - Release identifier in top level package directory (e.g. 2.1.2-rc1)
-REMOTE_PARENT_DIR - Parent in which to create doc or release builds.
-REMOTE_PARENT_MAX_LENGTH - If set, parent directory will be cleaned to only
- have this number of subdirectories (by deleting old ones). WARNING: This deletes data.
+SPARK_VERSION - (optional) Version of Spark being built (e.g. 2.1.2)
 
 ASF_USERNAME - Username of ASF committer account
 ASF_PASSWORD - Password of ASF committer account
-ASF_RSA_KEY - RSA private key file for ASF committer account
 
 GPG_KEY - GPG key used to sign release artifacts
 GPG_PASSPHRASE - Passphrase for GPG key
@@ -57,7 +53,20 @@ if [[ $@ == *"help"* ]]; then
   exit_with_usage
 fi
 
-for env in ASF_USERNAME ASF_RSA_KEY GPG_PASSPHRASE GPG_KEY; do
+if [[ -z "$ASF_PASSWORD" ]]; then
+  echo 'The environment variable ASF_PASSWORD is not set. Enter the password.'
+  echo
+  stty -echo && printf "ASF password: " && read ASF_PASSWORD && printf
'\n' && stty echo
+fi
+
+if [[ -z "$GPG_PASSPHRASE" ]]; then
+  echo 'The environment variable GPG_PASSPHRASE is not set. Enter the passphrase to'
+  echo 'unlock the GPG signing key that will be used to sign the release!'
+  echo
+  stty -echo && printf "GPG passphrase: " && read GPG_PASSPHRASE &&
printf '\n' && stty echo
+fi
+
+for env in ASF_USERNAME GPG_PASSPHRASE GPG_KEY; do
   if [ -z "${!env}" ]; then
     echo "ERROR: $env must be set to run this script"
     exit_with_usage
@@ -71,8 +80,7 @@ export LC_ALL=C
 # Commit ref to checkout when building
 GIT_REF=${GIT_REF:-master}
 
-# Destination directory parent on remote server
-REMOTE_PARENT_DIR=${REMOTE_PARENT_DIR:-/home/$ASF_USERNAME/public_html}
+RELEASE_STAGING_LOCATION="https://dist.apache.org/repos/dist/dev/spark"
 
 GPG="gpg -u $GPG_KEY --no-tty --batch"
 NEXUS_ROOT=https://repository.apache.org/service/local/staging
@@ -142,42 +150,16 @@ if [ -z "$SPARK_PACKAGE_VERSION" ]; then
   SPARK_PACKAGE_VERSION="${SPARK_VERSION}-$(date +%Y_%m_%d_%H_%M)-${git_hash}"
 fi
 
-DEST_DIR_NAME="spark-$SPARK_PACKAGE_VERSION"
-
-function LFTP {
-  SSH="ssh -o ConnectTimeout=300 -o StrictHostKeyChecking=no -i $ASF_RSA_KEY"
-  COMMANDS=$(cat <<EOF
-     set net:max-retries 2 &&
-     set sftp:connect-program $SSH &&
-     connect -u $ASF_USERNAME,p sftp://home.apache.org &&
-     $@
-EOF
-)
-  lftp --norc -c "$COMMANDS"
-}
-export -f LFTP
-
+DEST_DIR_NAME="$SPARK_PACKAGE_VERSION"
 
 git clean -d -f -x
 rm .gitignore
 rm -rf .git
 cd ..
 
-if [ -n "$REMOTE_PARENT_MAX_LENGTH" ]; then
-  old_dirs=$(
-    LFTP nlist $REMOTE_PARENT_DIR \
-        | grep -v "^\." \
-        | sort -r \
-        | tail -n +$REMOTE_PARENT_MAX_LENGTH)
-  for old_dir in $old_dirs; do
-    echo "Removing directory: $old_dir"
-    LFTP "rm -rf $REMOTE_PARENT_DIR/$old_dir && exit 0"
-  done
-fi
-
 if [[ "$1" == "package" ]]; then
   # Source and binary tarballs
-  echo "Packaging release tarballs"
+  echo "Packaging release source tarballs"
   cp -r spark spark-$SPARK_VERSION
   tar cvzf spark-$SPARK_VERSION.tgz spark-$SPARK_VERSION
  echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --armour --output spark-$SPARK_VERSION.tgz.asc \
@@ -185,7 +167,7 @@ if [[ "$1" == "package" ]]; then
  echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md MD5 spark-$SPARK_VERSION.tgz > \
     spark-$SPARK_VERSION.tgz.md5
   echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
-    SHA512 spark-$SPARK_VERSION.tgz > spark-$SPARK_VERSION.tgz.sha
+    SHA512 spark-$SPARK_VERSION.tgz > spark-$SPARK_VERSION.tgz.sha512
   rm -rf spark-$SPARK_VERSION
 
   # Updated for each binary build
@@ -238,7 +220,7 @@ if [[ "$1" == "package" ]]; then
         $R_DIST_NAME.md5
       echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
         SHA512 $R_DIST_NAME > \
-        $R_DIST_NAME.sha
+        $R_DIST_NAME.sha512
     else
       echo "Creating distribution with PIP package"
       ./dev/make-distribution.sh --name $NAME --mvn $MVN_HOME/bin/mvn --tgz --pip $FLAGS \
@@ -257,7 +239,7 @@ if [[ "$1" == "package" ]]; then
         $PYTHON_DIST_NAME.md5
       echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
         SHA512 $PYTHON_DIST_NAME > \
-        $PYTHON_DIST_NAME.sha
+        $PYTHON_DIST_NAME.sha512
     fi
 
     echo "Copying and signing regular binary distribution"
@@ -270,7 +252,7 @@ if [[ "$1" == "package" ]]; then
       spark-$SPARK_VERSION-bin-$NAME.tgz.md5
     echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
       SHA512 spark-$SPARK_VERSION-bin-$NAME.tgz > \
-      spark-$SPARK_VERSION-bin-$NAME.tgz.sha
+      spark-$SPARK_VERSION-bin-$NAME.tgz.sha512
   }
 
   # TODO: Check exit codes of children here:
@@ -284,22 +266,20 @@ if [[ "$1" == "package" ]]; then
   wait
   rm -rf spark-$SPARK_VERSION-bin-*/
 
-  # Copy data
-  dest_dir="$REMOTE_PARENT_DIR/${DEST_DIR_NAME}-bin"
-  echo "Copying release tarballs to $dest_dir"
-  # Put to new directory:
-  LFTP mkdir -p $dest_dir || true
-  LFTP mput -O $dest_dir 'spark-*'
-  LFTP mput -O $dest_dir 'pyspark-*'
-  LFTP mput -O $dest_dir 'SparkR_*'
-  # Delete /latest directory and rename new upload to /latest
-  LFTP "rm -r -f $REMOTE_PARENT_DIR/latest || exit 0"
-  LFTP mv $dest_dir "$REMOTE_PARENT_DIR/latest"
-  # Re-upload a second time and leave the files in the timestamped upload directory:
-  LFTP mkdir -p $dest_dir || true
-  LFTP mput -O $dest_dir 'spark-*'
-  LFTP mput -O $dest_dir 'pyspark-*'
-  LFTP mput -O $dest_dir 'SparkR_*'
+  svn co --depth=empty $RELEASE_STAGING_LOCATION svn-spark
+  rm -rf "svn-spark/${DEST_DIR_NAME}-bin"
+  mkdir -p "svn-spark/${DEST_DIR_NAME}-bin"
+
+  echo "Copying release tarballs"
+  cp spark-* "svn-spark/${DEST_DIR_NAME}-bin/"
+  cp pyspark-* "svn-spark/${DEST_DIR_NAME}-bin/"
+  cp SparkR_* "svn-spark/${DEST_DIR_NAME}-bin/"
+  svn add "svn-spark/${DEST_DIR_NAME}-bin"
+
+  cd svn-spark
+  svn ci --username $ASF_USERNAME --password "$ASF_PASSWORD" -m"Apache Spark $SPARK_PACKAGE_VERSION"
+  cd ..
+  rm -rf svn-spark
   exit 0
 fi
 
@@ -307,21 +287,24 @@ if [[ "$1" == "docs" ]]; then
   # Documentation
   cd spark
   echo "Building Spark docs"
-  dest_dir="$REMOTE_PARENT_DIR/${DEST_DIR_NAME}-docs"
   cd docs
   # TODO: Make configurable to add this: PRODUCTION=1
   PRODUCTION=1 RELEASE_VERSION="$SPARK_VERSION" jekyll build
-  echo "Copying release documentation to $dest_dir"
-  # Put to new directory:
-  LFTP mkdir -p $dest_dir || true
-  LFTP mirror -R _site $dest_dir
-  # Delete /latest directory and rename new upload to /latest
-  LFTP "rm -r -f $REMOTE_PARENT_DIR/latest || exit 0"
-  LFTP mv $dest_dir "$REMOTE_PARENT_DIR/latest"
-  # Re-upload a second time and leave the files in the timestamped upload directory:
-  LFTP mkdir -p $dest_dir || true
-  LFTP mirror -R _site $dest_dir
   cd ..
+  cd ..
+
+  svn co --depth=empty $RELEASE_STAGING_LOCATION svn-spark
+  rm -rf "svn-spark/${DEST_DIR_NAME}-docs"
+  mkdir -p "svn-spark/${DEST_DIR_NAME}-docs"
+
+  echo "Copying release documentation"
+  cp -R "spark/docs/_site" "svn-spark/${DEST_DIR_NAME}-docs/"
+  svn add "svn-spark/${DEST_DIR_NAME}-docs"
+
+  cd svn-spark
+  svn ci --username $ASF_USERNAME --password "$ASF_PASSWORD" -m"Apache Spark $SPARK_PACKAGE_VERSION docs"
+  cd ..
+  rm -rf svn-spark
   exit 0
 fi
 
@@ -399,6 +382,7 @@ if [[ "$1" == "publish-release" ]]; then
   find . -type f |grep -v \.jar |grep -v \.pom | xargs rm
 
   echo "Creating hash and signature files"
+  # this must have .asc, .md5 and .sha1 - it really doesn't like anything else there
   for file in $(find . -type f)
   do
     echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --output $file.asc \

http://git-wip-us.apache.org/repos/asf/spark/blob/ab1b6ee7/dev/create-release/release-tag.sh
----------------------------------------------------------------------
diff --git a/dev/create-release/release-tag.sh b/dev/create-release/release-tag.sh
index 370a62c..a05716a 100755
--- a/dev/create-release/release-tag.sh
+++ b/dev/create-release/release-tag.sh
@@ -41,6 +41,12 @@ if [[ $@ == *"help"* ]]; then
   exit_with_usage
 fi
 
+if [[ -z "$ASF_PASSWORD" ]]; then
+  echo 'The environment variable ASF_PASSWORD is not set. Enter the password.'
+  echo
+  stty -echo && printf "ASF password: " && read ASF_PASSWORD && printf
'\n' && stty echo
+fi
+
 for env in ASF_USERNAME ASF_PASSWORD RELEASE_VERSION RELEASE_TAG NEXT_VERSION GIT_EMAIL GIT_NAME GIT_BRANCH; do
   if [ -z "${!env}" ]; then
     echo "$env must be set to run this script"
@@ -52,7 +58,7 @@ ASF_SPARK_REPO="git-wip-us.apache.org/repos/asf/spark.git"
 MVN="build/mvn --force"
 
 rm -rf spark
-git clone https://$ASF_USERNAME:$ASF_PASSWORD@$ASF_SPARK_REPO -b $GIT_BRANCH
+git clone "https://$ASF_USERNAME:$ASF_PASSWORD@$ASF_SPARK_REPO" -b $GIT_BRANCH
 cd spark
 
 git config user.name "$GIT_NAME"


