
use Spark 3.0.0 and 2.4.6 (#366)
mengxr authored Jun 18, 2020
1 parent 8bde5d1 commit 7209013
Showing 4 changed files with 5 additions and 15 deletions.
4 changes: 2 additions & 2 deletions .travis.yml
@@ -13,8 +13,8 @@ cache:
 
 matrix:
   include:
-  - env: PYSPARK_PYTHON=python3 SCALA_VERSION=2.11.12 SPARK_VERSION=2.4.5 SPARK_BUILD="spark-${SPARK_VERSION}-bin-hadoop2.7"
-  - env: PYSPARK_PYTHON=python3 SCALA_VERSION=2.12.11 SPARK_VERSION=3.0.0-preview2 SPARK_BUILD="spark-${SPARK_VERSION}-bin-hadoop2.7"
+  - env: PYSPARK_PYTHON=python3 SCALA_VERSION=2.11.12 SPARK_VERSION=2.4.6 SPARK_BUILD="spark-${SPARK_VERSION}-bin-hadoop2.7"
+  - env: PYSPARK_PYTHON=python3 SCALA_VERSION=2.12.11 SPARK_VERSION=3.0.0 SPARK_BUILD="spark-${SPARK_VERSION}-bin-hadoop2.7"
 
 before_install:
   - ./bin/download_travis_dependencies.sh
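For reference, here is how the SPARK_BUILD value in each matrix row expands once Travis exports the env line (a minimal sketch; the echo lines are illustrative and not part of the repository):

# Row 1: Spark 2.4.6 on Scala 2.11
SPARK_VERSION=2.4.6
SPARK_BUILD="spark-${SPARK_VERSION}-bin-hadoop2.7"
echo "$SPARK_BUILD"   # spark-2.4.6-bin-hadoop2.7

# Row 2: Spark 3.0.0 on Scala 2.12
SPARK_VERSION=3.0.0
SPARK_BUILD="spark-${SPARK_VERSION}-bin-hadoop2.7"
echo "$SPARK_BUILD"   # spark-3.0.0-bin-hadoop2.7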
2 changes: 1 addition & 1 deletion Dockerfile
@@ -8,7 +8,7 @@ RUN apt-get update && \
 
 # Install Spark and update env variables.
 ENV SCALA_VERSION 2.11.12
-ENV SPARK_VERSION "3.0.0-preview2"
+ENV SPARK_VERSION "3.0.0"
 ENV SPARK_BUILD "spark-${SPARK_VERSION}-bin-hadoop2.7"
 ENV SPARK_BUILD_URL "https://dist.apache.org/repos/dist/release/spark/spark-${SPARK_VERSION}/${SPARK_BUILD}.tgz"
 RUN wget --quiet "$SPARK_BUILD_URL" -O /tmp/spark.tgz && \
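With the new value, the ENV interpolation resolves to the 3.0.0 release tarball. A quick sketch of the expansion (the shell snippet is illustrative; the Dockerfile itself performs the same substitution):

SPARK_VERSION="3.0.0"
SPARK_BUILD="spark-${SPARK_VERSION}-bin-hadoop2.7"
SPARK_BUILD_URL="https://dist.apache.org/repos/dist/release/spark/spark-${SPARK_VERSION}/${SPARK_BUILD}.tgz"
echo "$SPARK_BUILD_URL"
# https://dist.apache.org/repos/dist/release/spark/spark-3.0.0/spark-3.0.0-bin-hadoop2.7.tgz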
12 changes: 1 addition & 11 deletions bin/download_travis_dependencies.sh
@@ -26,12 +26,6 @@ __PY_SCRIPT_EOF__
 ) | xargs curl --retry 3 --retry-delay 7 -O
 }
 
-function try_download_latest_snapshot {
-  local spark_url="https://ml-team-public-read.s3-us-west-2.amazonaws.com/spark-3.0.0-SNAPSHOT-bin-hadoop2.7.tgz"
-  echo "Spark build URL = $spark_url"
-  wget --tries=3 ${spark_url}
-}
-
 echo "Downloading Spark if necessary"
 echo "Spark version = $SPARK_VERSION"
 echo "Spark build = $SPARK_BUILD"
@@ -43,11 +37,7 @@ rm -f "${spark_tarball}"
 # Remove existing Spark extracted directory
 rm -rf "${SPARK_BUILD}"
 
-if [ ${SPARK_VERSION} = '3.0.0-SNAPSHOT' ]; then
-  try_download_latest_snapshot
-else
-  try_download_from_apache || try_download_from_apache || try_download_from_apache
-fi;
+try_download_from_apache || try_download_from_apache || try_download_from_apache
 
 echo "Content of directory:"
 ls -la
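With the SNAPSHOT branch gone, the script relies solely on the ||-chained retry idiom. A minimal standalone sketch of that pattern, assuming a hypothetical download_once helper (the real try_download_from_apache body sits above the visible hunk):

# Hypothetical stand-in that mirrors the shape of a single download attempt.
function download_once {
  wget --tries=3 "https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/${SPARK_BUILD}.tgz"
}

# Each alternative runs only if the previous one exits non-zero,
# so the download is attempted up to three times before the script gives up.
download_once || download_once || download_once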
2 changes: 1 addition & 1 deletion build.sbt
@@ -5,7 +5,7 @@ import ReleaseTransformations._
 
 resolvers += "Spark snapshot repository" at "https://repository.apache.org/snapshots/"
 
-val sparkVer = sys.props.getOrElse("spark.version", "3.0.0-preview2")
+val sparkVer = sys.props.getOrElse("spark.version", "3.0.0")
 val sparkBranch = sparkVer.substring(0, 3)
 val defaultScalaVer = sparkBranch match {
   case "3.0" => "2.12.11"
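Because the version is read with sys.props.getOrElse, the new 3.0.0 default is only a fallback; a build can still target the other supported Spark line by passing a JVM system property to sbt (commands shown for illustration):

# Test against Spark 2.4.6 instead of the 3.0.0 default
sbt -Dspark.version=2.4.6 test

# Package against the default
sbt -Dspark.version=3.0.0 package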
