commit Spark cluster changes before shutting down
tuzhucheng committed Dec 15, 2017
1 parent 09937f9 commit 6e083cc
Showing 2 changed files with 41 additions and 1 deletion.
40 changes: 40 additions & 0 deletions scripts/run.sh
@@ -0,0 +1,40 @@
#!/usr/bin/env bash

set -x

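# Sweep --num-executors (2-4) and --executor-cores (1-4) for each RandomProjection class,
# running each configuration over several (k, m) settings.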
declare -a classes=("RandomProjection" "RandomProjectionWithDistance")

for class in "${classes[@]}"
do
for i in $(seq 2 4)
do
for j in $(seq 1 4)
do
/usr/bin/time spark-submit --num-executors $i --executor-cores $j --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --k 3 --m 10
/usr/bin/time spark-submit --num-executors $i --executor-cores $j --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --k 5 --m 20
/usr/bin/time spark-submit --num-executors $i --executor-cores $j --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --k 7 --m 25
/usr/bin/time spark-submit --num-executors $i --executor-cores $j --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --k 5 --m 10
done
done
done

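# Evaluate SparkLSH over a hand-picked grid of executor counts and cores per executor.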
declare -a classes=("SparkLSH")

for class in "${classes[@]}"
do
/usr/bin/time spark-submit --num-executors 2 --executor-cores 1 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 2 --executor-cores 2 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 2 --executor-cores 3 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 2 --executor-cores 4 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 2 --executor-cores 5 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 3 --executor-cores 1 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 3 --executor-cores 2 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 3 --executor-cores 3 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 3 --executor-cores 4 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 3 --executor-cores 5 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 4 --executor-cores 1 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 4 --executor-cores 2 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 4 --executor-cores 3 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 4 --executor-cores 4 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
/usr/bin/time spark-submit --num-executors 5 --executor-cores 3 --executor-memory 4g --conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=log4j-spark.properties" --class "largelsh.$class" target/scala-2.11/LargeLSH-assembly-0.1.0-SNAPSHOT.jar --dataset svhn --mode eval
done
2 changes: 1 addition & 1 deletion src/main/scala/largelsh/SparkLSHSift.scala
@@ -105,7 +105,7 @@ object SparkLSHSift {

val pre_gt = prediction.join(groundtruth_id, "testID")
val res = pre_gt.map{
- case Row(testID: Int, pred: Array[Int], gts: Array[Int]) =>
+ case Row(testID: Int, pred: collection.mutable.WrappedArray[Int], gts: collection.mutable.WrappedArray[Int]) =>
(gts intersect pred).size
}.reduce(_+_)
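
The one-line change above is needed because Spark materializes array-typed DataFrame columns as scala.collection.mutable.WrappedArray rather than Array when rows are pattern matched, so the old case clause fails at runtime. Below is a minimal, self-contained sketch of that behaviour; the DataFrame, object name, and values are hypothetical and not part of this commit.

import org.apache.spark.sql.{Row, SparkSession}
import scala.collection.mutable.WrappedArray

object WrappedArrayDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("wrapped-array-demo").master("local[*]").getOrCreate()
    import spark.implicits._

    // Array-typed columns, analogous to the pred/gts columns in SparkLSHSift.
    val pre_gt = Seq((1, Seq(1, 2, 3), Seq(2, 3, 4))).toDF("testID", "pred", "gts")

    // Matching on Array[Int] here would throw a MatchError at runtime;
    // WrappedArray[Int] matches what Spark actually hands back for array columns.
    val res = pre_gt.map {
      case Row(testID: Int, pred: WrappedArray[Int], gts: WrappedArray[Int]) =>
        (gts intersect pred).size
    }.reduce(_ + _)

    println(s"total overlap = $res")  // 2 for this toy row
    spark.stop()
  }
}

Matching on Seq[Int] would also work, since WrappedArray is a Seq, and avoids naming the mutable collection directly.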

