spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From pwend...@apache.org
Subject [03/21] moved user scripts to bin folder
Date Fri, 03 Jan 2014 19:24:54 GMT
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/1d1a6258/pagerank_data.txt
----------------------------------------------------------------------
diff --git a/pagerank_data.txt b/pagerank_data.txt
deleted file mode 100644
index 95755ab..0000000
--- a/pagerank_data.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-1 2
-1 3
-1 4
-2 1
-3 1
-4 1

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/1d1a6258/pyspark
----------------------------------------------------------------------
diff --git a/pyspark b/pyspark
deleted file mode 100755
index 4941a36..0000000
--- a/pyspark
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Figure out where the Scala framework is installed
-FWDIR="$(cd `dirname $0`; pwd)"
-
-# Export this as SPARK_HOME
-export SPARK_HOME="$FWDIR"
-
-SCALA_VERSION=2.9.3
-
-# Exit if the user hasn't compiled Spark
-if [ ! -f "$FWDIR/RELEASE" ]; then
-  # Exit if the user hasn't compiled Spark
-  ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar >& /dev/null
-  if [[ $? != 0 ]]; then
-    echo "Failed to find Spark assembly in $FWDIR/assembly/target" >&2
-    echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
-    exit 1
-  fi
-fi
-
-# Load environment variables from conf/spark-env.sh, if it exists
-if [ -e $FWDIR/conf/spark-env.sh ] ; then
-  . $FWDIR/conf/spark-env.sh
-fi
-
-# Figure out which Python executable to use
-if [ -z "$PYSPARK_PYTHON" ] ; then
-  PYSPARK_PYTHON="python"
-fi
-export PYSPARK_PYTHON
-
-# Add the PySpark classes to the Python path:
-export PYTHONPATH=$SPARK_HOME/python/:$PYTHONPATH
-
-# Load the PySpark shell.py script when ./pyspark is used interactively:
-export OLD_PYTHONSTARTUP=$PYTHONSTARTUP
-export PYTHONSTARTUP=$FWDIR/python/pyspark/shell.py
-
-if [ -n "$IPYTHON_OPTS" ]; then
-  IPYTHON=1
-fi
-
-if [[ "$IPYTHON" = "1" ]] ; then
-  IPYTHON_OPTS=${IPYTHON_OPTS:--i}
-  exec ipython "$IPYTHON_OPTS" -c "%run $PYTHONSTARTUP"
-else
-  exec "$PYSPARK_PYTHON" "$@"
-fi

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/1d1a6258/pyspark.cmd
----------------------------------------------------------------------
diff --git a/pyspark.cmd b/pyspark.cmd
deleted file mode 100644
index 7c26fbb..0000000
--- a/pyspark.cmd
+++ /dev/null
@@ -1,23 +0,0 @@
-@echo off
-
-rem
-rem Licensed to the Apache Software Foundation (ASF) under one or more
-rem contributor license agreements.  See the NOTICE file distributed with
-rem this work for additional information regarding copyright ownership.
-rem The ASF licenses this file to You under the Apache License, Version 2.0
-rem (the "License"); you may not use this file except in compliance with
-rem the License.  You may obtain a copy of the License at
-rem
-rem    http://www.apache.org/licenses/LICENSE-2.0
-rem
-rem Unless required by applicable law or agreed to in writing, software
-rem distributed under the License is distributed on an "AS IS" BASIS,
-rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-rem See the License for the specific language governing permissions and
-rem limitations under the License.
-rem
-
-rem This is the entry point for running PySpark. To avoid polluting the
-rem environment, it just launches a new cmd to do the real work.
-
-cmd /V /E /C %~dp0pyspark2.cmd %*

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/1d1a6258/pyspark2.cmd
----------------------------------------------------------------------
diff --git a/pyspark2.cmd b/pyspark2.cmd
deleted file mode 100644
index f58e349..0000000
--- a/pyspark2.cmd
+++ /dev/null
@@ -1,55 +0,0 @@
-@echo off
-
-rem
-rem Licensed to the Apache Software Foundation (ASF) under one or more
-rem contributor license agreements.  See the NOTICE file distributed with
-rem this work for additional information regarding copyright ownership.
-rem The ASF licenses this file to You under the Apache License, Version 2.0
-rem (the "License"); you may not use this file except in compliance with
-rem the License.  You may obtain a copy of the License at
-rem
-rem    http://www.apache.org/licenses/LICENSE-2.0
-rem
-rem Unless required by applicable law or agreed to in writing, software
-rem distributed under the License is distributed on an "AS IS" BASIS,
-rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-rem See the License for the specific language governing permissions and
-rem limitations under the License.
-rem
-
-set SCALA_VERSION=2.9.3
-
-rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0
-
-rem Export this as SPARK_HOME
-set SPARK_HOME=%FWDIR%
-
-rem Test whether the user has built Spark
-if exist "%FWDIR%RELEASE" goto skip_build_test
-set FOUND_JAR=0
-for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
-  set FOUND_JAR=1
-)
-if "%FOUND_JAR%"=="0" (
-  echo Failed to find Spark assembly JAR.
-  echo You need to build Spark with sbt\sbt assembly before running this program.
-  goto exit
-)
-:skip_build_test
-
-rem Load environment variables from conf\spark-env.cmd, if it exists
-if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
-
-rem Figure out which Python to use.
-if "x%PYSPARK_PYTHON%"=="x" set PYSPARK_PYTHON=python
-
-set PYTHONPATH=%FWDIR%python;%PYTHONPATH%
-
-set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
-set PYTHONSTARTUP=%FWDIR%python\pyspark\shell.py
-
-echo Running %PYSPARK_PYTHON% with PYTHONPATH=%PYTHONPATH%
-
-"%PYSPARK_PYTHON%" %*
-:exit

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/1d1a6258/run-example
----------------------------------------------------------------------
diff --git a/run-example b/run-example
deleted file mode 100755
index 08ec717..0000000
--- a/run-example
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-SCALA_VERSION=2.9.3
-
-# Figure out where the Scala framework is installed
-FWDIR="$(cd `dirname $0`; pwd)"
-
-# Export this as SPARK_HOME
-export SPARK_HOME="$FWDIR"
-
-# Load environment variables from conf/spark-env.sh, if it exists
-if [ -e $FWDIR/conf/spark-env.sh ] ; then
-  . $FWDIR/conf/spark-env.sh
-fi
-
-if [ -z "$1" ]; then
-  echo "Usage: run-example <example-class> [<args>]" >&2
-  exit 1
-fi
-
-# Figure out the JAR file that our examples were packaged into. This includes a bit of a hack
-# to avoid the -sources and -doc packages that are built by publish-local.
-EXAMPLES_DIR="$FWDIR"/examples
-SPARK_EXAMPLES_JAR=""
-if [ -e "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
-  # Use the JAR from the SBT build
-  export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
-fi
-if [ -e "$EXAMPLES_DIR"/target/spark-examples*[0-9Tg].jar ]; then
-  # Use the JAR from the Maven build
-  # TODO: this also needs to become an assembly!
-  export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR"/target/spark-examples*[0-9Tg].jar`
-fi
-if [[ -z $SPARK_EXAMPLES_JAR ]]; then
-  echo "Failed to find Spark examples assembly in $FWDIR/examples/target" >&2
-  echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
-  exit 1
-fi
-
-# Since the examples JAR ideally shouldn't include spark-core (that dependency should be
-# "provided"), also add our standard Spark classpath, built using compute-classpath.sh.
-CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
-CLASSPATH="$SPARK_EXAMPLES_JAR:$CLASSPATH"
-
-# Find java binary
-if [ -n "${JAVA_HOME}" ]; then
-  RUNNER="${JAVA_HOME}/bin/java"
-else
-  if [ `command -v java` ]; then
-    RUNNER="java"
-  else
-    echo "JAVA_HOME is not set" >&2
-    exit 1
-  fi
-fi
-
-if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
-  echo -n "Spark Command: "
-  echo "$RUNNER" -cp "$CLASSPATH" "$@"
-  echo "========================================"
-  echo
-fi
-
-exec "$RUNNER" -cp "$CLASSPATH" "$@"

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/1d1a6258/run-example.cmd
----------------------------------------------------------------------
diff --git a/run-example.cmd b/run-example.cmd
deleted file mode 100644
index 5b2d048..0000000
--- a/run-example.cmd
+++ /dev/null
@@ -1,23 +0,0 @@
-@echo off
-
-rem
-rem Licensed to the Apache Software Foundation (ASF) under one or more
-rem contributor license agreements.  See the NOTICE file distributed with
-rem this work for additional information regarding copyright ownership.
-rem The ASF licenses this file to You under the Apache License, Version 2.0
-rem (the "License"); you may not use this file except in compliance with
-rem the License.  You may obtain a copy of the License at
-rem
-rem    http://www.apache.org/licenses/LICENSE-2.0
-rem
-rem Unless required by applicable law or agreed to in writing, software
-rem distributed under the License is distributed on an "AS IS" BASIS,
-rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-rem See the License for the specific language governing permissions and
-rem limitations under the License.
-rem
-
-rem This is the entry point for running a Spark example. To avoid polluting
-rem the environment, it just launches a new cmd to do the real work.
-
-cmd /V /E /C %~dp0run-example2.cmd %*

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/1d1a6258/run-example2.cmd
----------------------------------------------------------------------
diff --git a/run-example2.cmd b/run-example2.cmd
deleted file mode 100644
index dbb371e..0000000
--- a/run-example2.cmd
+++ /dev/null
@@ -1,61 +0,0 @@
-@echo off
-
-rem
-rem Licensed to the Apache Software Foundation (ASF) under one or more
-rem contributor license agreements.  See the NOTICE file distributed with
-rem this work for additional information regarding copyright ownership.
-rem The ASF licenses this file to You under the Apache License, Version 2.0
-rem (the "License"); you may not use this file except in compliance with
-rem the License.  You may obtain a copy of the License at
-rem
-rem    http://www.apache.org/licenses/LICENSE-2.0
-rem
-rem Unless required by applicable law or agreed to in writing, software
-rem distributed under the License is distributed on an "AS IS" BASIS,
-rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-rem See the License for the specific language governing permissions and
-rem limitations under the License.
-rem
-
-set SCALA_VERSION=2.9.3
-
-rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0
-
-rem Export this as SPARK_HOME
-set SPARK_HOME=%FWDIR%
-
-rem Load environment variables from conf\spark-env.cmd, if it exists
-if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
-
-rem Test that an argument was given
-if not "x%1"=="x" goto arg_given
-  echo Usage: run-example ^<example-class^> [^<args^>]
-  goto exit
-:arg_given
-
-set EXAMPLES_DIR=%FWDIR%examples
-
-rem Figure out the JAR file that our examples were packaged into.
-set SPARK_EXAMPLES_JAR=
-for %%d in ("%EXAMPLES_DIR%\target\scala-%SCALA_VERSION%\spark-examples*assembly*.jar") do (
-  set SPARK_EXAMPLES_JAR=%%d
-)
-if "x%SPARK_EXAMPLES_JAR%"=="x" (
-  echo Failed to find Spark examples assembly JAR.
-  echo You need to build Spark with sbt\sbt assembly before running this program.
-  goto exit
-)
-
-rem Compute Spark classpath using external script
-set DONT_PRINT_CLASSPATH=1
-call "%FWDIR%bin\compute-classpath.cmd"
-set DONT_PRINT_CLASSPATH=0
-set CLASSPATH=%SPARK_EXAMPLES_JAR%;%CLASSPATH%
-
-rem Figure out where java is.
-set RUNNER=java
-if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
-
-"%RUNNER%" -cp "%CLASSPATH%" %JAVA_OPTS% %*
-:exit

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/1d1a6258/spark-shell
----------------------------------------------------------------------
diff --git a/spark-shell b/spark-shell
deleted file mode 100755
index 9608bd3..0000000
--- a/spark-shell
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# Shell script for starting the Spark Shell REPL
-# Note that it will set MASTER to spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}
-# if those two env vars are set in spark-env.sh but MASTER is not.
-# Options:
-#    -c <cores>    Set the number of cores for REPL to use
-#
-
-# Enter posix mode for bash
-set -o posix
-
-FWDIR="`dirname $0`"
-
-for o in "$@"; do
-  if [ "$1" = "-c" -o "$1" = "--cores" ]; then
-    shift
-    if [ -n "$1" ]; then
-      OPTIONS="-Dspark.cores.max=$1"
-      shift
-    fi
-  fi
-done
-
-# Set MASTER from spark-env if possible
-if [ -z "$MASTER" ]; then
-  if [ -e "$FWDIR/conf/spark-env.sh" ]; then
-    . "$FWDIR/conf/spark-env.sh"
-  fi
-  if [[ "x" != "x$SPARK_MASTER_IP" && "y" != "y$SPARK_MASTER_PORT" ]]; then
-    MASTER="spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}"
-    export MASTER
-  fi
-fi
-
-# Copy restore-TTY-on-exit functions from Scala script so spark-shell exits properly even in
-# binary distribution of Spark where Scala is not installed
-exit_status=127
-saved_stty=""
-
-# restore stty settings (echo in particular)
-function restoreSttySettings() {
-  stty $saved_stty
-  saved_stty=""
-}
-
-function onExit() {
-  if [[ "$saved_stty" != "" ]]; then
-    restoreSttySettings
-  fi
-  exit $exit_status
-}
-
-# to reenable echo if we are interrupted before completing.
-trap onExit INT
-
-# save terminal settings
-saved_stty=$(stty -g 2>/dev/null)
-# clear on error so we don't later try to restore them
-if [[ ! $? ]]; then
-  saved_stty=""
-fi
-
-$FWDIR/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
-
-# record the exit status lest it be overwritten:
-# then reenable echo and propagate the code.
-exit_status=$?
-onExit

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/1d1a6258/spark-shell.cmd
----------------------------------------------------------------------
diff --git a/spark-shell.cmd b/spark-shell.cmd
deleted file mode 100644
index 3e52bf8..0000000
--- a/spark-shell.cmd
+++ /dev/null
@@ -1,22 +0,0 @@
-@echo off
-
-rem
-rem Licensed to the Apache Software Foundation (ASF) under one or more
-rem contributor license agreements.  See the NOTICE file distributed with
-rem this work for additional information regarding copyright ownership.
-rem The ASF licenses this file to You under the Apache License, Version 2.0
-rem (the "License"); you may not use this file except in compliance with
-rem the License.  You may obtain a copy of the License at
-rem
-rem    http://www.apache.org/licenses/LICENSE-2.0
-rem
-rem Unless required by applicable law or agreed to in writing, software
-rem distributed under the License is distributed on an "AS IS" BASIS,
-rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-rem See the License for the specific language governing permissions and
-rem limitations under the License.
-rem
-
-set FWDIR=%~dp0
-
-cmd /V /E /C %FWDIR%spark-class2.cmd org.apache.spark.repl.Main %*


Mime
View raw message