Commit 648d10d

robert3005 (Robert Kruszewski) authored and committed
1 parent f62ddc5 commit 648d10d

File tree

8 files changed: +103 -20 lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -67,6 +67,7 @@ streaming-tests.log
 target/
 unit-tests.log
 work/
+pr-deps/
 
 # For Hive
 TempStatsStore/

build/mvn

Lines changed: 7 additions & 11 deletions
@@ -36,15 +36,8 @@ install_app() {
   local binary="${_DIR}/$3"
 
   # setup `curl` and `wget` silent options if we're running on Jenkins
-  local curl_opts="-L"
-  local wget_opts=""
-  if [ -n "$AMPLAB_JENKINS" ]; then
-    curl_opts="-s ${curl_opts}"
-    wget_opts="--quiet ${wget_opts}"
-  else
-    curl_opts="--progress-bar ${curl_opts}"
-    wget_opts="--progress=bar:force ${wget_opts}"
-  fi
+  local curl_opts="-s -L"
+  local wget_opts="--quiet"
 
   if [ -z "$3" -o ! -f "$binary" ]; then
     # check if we already have the tarball
@@ -85,7 +78,10 @@ install_mvn() {
       "apache-maven-${MVN_VERSION}-bin.tar.gz" \
       "apache-maven-${MVN_VERSION}/bin/mvn"
 
-    MVN_BIN="${_DIR}/apache-maven-${MVN_VERSION}/bin/mvn"
+    export M2_HOME="${_DIR}/apache-maven-${MVN_VERSION}"
+    export M2="$M2_HOME/bin"
+    export PATH=$M2:$PATH
+    MVN_BIN="$M2/mvn"
   fi
 }
 
@@ -158,4 +154,4 @@ export MAVEN_OPTS=${MAVEN_OPTS:-"$_COMPILE_JVM_OPTS"}
 echo "Using \`mvn\` from path: $MVN_BIN" 1>&2
 
 # Last, call the `mvn` command as usual
-${MVN_BIN} -DzincPort=${ZINC_PORT} "$@"
+${MVN_BIN} -DzincPort=${ZINC_PORT} -e "$@"
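Two effects of this change are worth noting: the unconditional -s/--quiet options keep download output terse on every CI provider (not just Jenkins), and exporting M2_HOME/M2 while prepending $M2 to PATH means every process launched from this script resolves the freshly downloaded Maven instead of a system copy (the added -e flag additionally makes Maven print full error stack traces). A minimal sketch of how to confirm the resolution from an interactive shell, assuming a prior build/mvn run has already unpacked the tarball under build/ (the glob below is my own shorthand, not part of the script):

# hypothetical check, run from the repository root after build/mvn has fetched Maven
export M2_HOME="$(ls -d build/apache-maven-*/ | head -1)"
export M2="$M2_HOME/bin"
export PATH="$M2:$PATH"
command -v mvn   # should print the copy under build/apache-maven-*/bin
mvn -version     # and report that same installation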

circle.yml

Lines changed: 58 additions & 0 deletions
@@ -0,0 +1,58 @@
+general:
+  artifacts:
+    - target
+
+machine:
+  pre:
+    - sudo pkill -u postgres
+  java:
+    version: oraclejdk8
+  environment:
+    TERM: dumb
+    HADOOP_PROFILE: "hadoop2.7"
+  post:
+    - sudo pkill mongod || true
+    - sudo pkill mysqld || true
+    - sudo pkill postgres || true
+
+checkout:
+  post:
+    - "[[ ! -s \"$(git rev-parse --git-dir)/shallow\" ]] || git fetch --unshallow"
+    - echo "user=$BINTRAY_USER" > .credentials
+    - echo "password=$BINTRAY_PASSWORD" >> .credentials
+    - echo "realm=Bintray API Realm" >> .credentials
+    - echo "host=api.bintray.com" >> .credentials
+
+dependencies:
+  override:
+    - ./build/sbt -Phadoop-2.7 -Pmesos -Pkinesis-asl -Pyarn -Phive-thriftserver -Phive update
+
+test:
+  override:
+    - /bin/true
+    #- ./dev/run-tests -p 1
+
+deployment:
+  release:
+    tag: /[0-9]+(?:\.[0-9]+)+-palantir[0-9]+/
+    commands:
+      - ? |
+          set -euo pipefail
+          FLAGS="-Psparkr -Phive -Phive-thriftserver -Pyarn -Pmesos"
+          case $CIRCLE_NODE_INDEX in
+            0)
+              ./build/sbt -Phadoop-2.7 -Pmesos -Pkinesis-asl -Pyarn -Phive-thriftserver -Phive publish
+              ;;
+            1)
+              ./dev/make-distribution.sh --name without-hadoop --tgz "-Psparkr -Phadoop-provided -Pyarn -Pmesos" \
+                2>&1 > binary-release-without-hadoop.log
+              ;;
+            2)
+              ./dev/make-distribution.sh --name hadoop2.7 --tgz "-Phadoop2.7 $FLAGS" \
+                2>&1 > binary-release-hadoop2.7.log
+              ;;
+          esac
+        :
+          parallel: true
+          timeout: 1200
+
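The deployment.release.tag filter only fires for tags shaped like 2.1.0-palantir1. A quick way to sanity-check a candidate tag before pushing it is to test it against an equivalent POSIX regex; the sketch below is my own approximation (grep -E has no (?:...) non-capturing groups, so a plain group is used) and the tag name is hypothetical:

# hypothetical pre-push check for a release tag
TAG="2.1.0-palantir1"
if echo "$TAG" | grep -Eq '^[0-9]+(\.[0-9]+)+-palantir[0-9]+$'; then
  echo "tag matches the CircleCI release filter"
else
  echo "tag would not trigger the release deployment"
fi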

dev/.rat-excludes

Lines changed: 2 additions & 0 deletions
@@ -102,3 +102,5 @@ org.apache.spark.scheduler.ExternalClusterManager
 .Rbuildignore
 org.apache.spark.deploy.yarn.security.ServiceCredentialProvider
 spark-warehouse
+circle.yml
+.credentials

dev/check-license

Lines changed: 4 additions & 4 deletions
@@ -40,7 +40,7 @@ acquire_rat_jar () {
     fi
 
     unzip -tq "$JAR" &> /dev/null
-    if [ $? -ne 0 ]; then
+    if [ $? -ne 0 ]; then
       # We failed to download
       rm "$JAR"
       printf "Our attempt to download rat locally to ${JAR} failed. Please install rat manually.\n"
@@ -67,7 +67,7 @@ mkdir -p "$FWDIR"/lib
     exit 1
 }
 
-mkdir target
+mkdir -p target
 $java_cmd -jar "$rat_jar" -E "$FWDIR"/dev/.rat-excludes -d "$FWDIR" > target/rat-results.txt
 
 if [ $? -ne 0 ]; then
@@ -77,10 +77,10 @@ fi
 
 ERRORS="$(cat target/rat-results.txt | grep -e "??")"
 
-if test ! -z "$ERRORS"; then
+if test ! -z "$ERRORS"; then
    echo "Could not find Apache license headers in the following files:"
    echo "$ERRORS"
    exit 1
-else
+else
    echo -e "RAT checks passed."
 fi

dev/test-dependencies.sh

Lines changed: 0 additions & 4 deletions
@@ -32,10 +32,6 @@ export LC_ALL=C
 HADOOP2_MODULE_PROFILES="-Phive-thriftserver -Pmesos -Pyarn -Phive"
 MVN="build/mvn"
 HADOOP_PROFILES=(
-    hadoop-2.2
-    hadoop-2.3
-    hadoop-2.4
-    hadoop-2.6
     hadoop-2.7
 )
 

project/SparkBuild.scala

Lines changed: 27 additions & 1 deletion
@@ -23,12 +23,15 @@ import scala.util.Properties
 import scala.collection.JavaConverters._
 import scala.collection.mutable.Stack
 
+import bintray.BintrayKeys
+import bintray.BintrayPlugin._
 import sbt._
 import sbt.Classpaths.publishTask
 import sbt.Keys._
 import sbtunidoc.Plugin.UnidocKeys.unidocGenjavadocVersion
 import com.simplytyped.Antlr4Plugin._
 import com.typesafe.sbt.pom.{PomBuild, SbtPomKeys}
+import com.typesafe.sbt.SbtGit._
 import com.typesafe.tools.mima.plugin.MimaKeys
 import org.scalastyle.sbt.ScalastylePlugin._
 import org.scalastyle.sbt.Tasks
@@ -233,7 +236,7 @@ object SparkBuild extends PomBuild {
     }
   )
 
-  lazy val sharedSettings = sparkGenjavadocSettings ++
+  lazy val sharedSettings = bintraySettings ++ versionWithGit ++ sparkGenjavadocSettings ++
     (if (sys.env.contains("NOLINT_ON_COMPILE")) Nil else enableScalaStyle) ++ Seq(
     exportJars in Compile := true,
     exportJars in Test := false,
@@ -243,6 +246,29 @@ object SparkBuild extends PomBuild {
     incOptions := incOptions.value.withNameHashing(true),
     publishMavenStyle := true,
     unidocGenjavadocVersion := "0.10",
+    git.useGitDescribe := true,
+    useJGit,
+    version := {
+      val uncommittedSuffix =
+        git.makeUncommittedSignifierSuffix(git.gitUncommittedChanges.value, git.uncommittedSignifier.value)
+      val releaseVersion =
+        git.releaseVersion(git.gitCurrentTags.value, git.gitTagToVersionNumber.value, uncommittedSuffix)
+      val describedVersion =
+        git.flaggedOptional(git.useGitDescribe.value, git.describeVersion(git.gitDescribedVersion.value, uncommittedSuffix))
+      val commitVersion = git.formattedShaVersion.value
+      //Now we fall through the potential version numbers...
+      git.makeVersion(Seq(
+        releaseVersion,
+        describedVersion,
+        commitVersion
+      )) get
+    },
+    BintrayKeys.bintrayCredentialsFile := new File(".credentials"),
+    licenses += ("Apache 2.0", url("https://www.apache.org/licenses/LICENSE-2.0")),
+    BintrayKeys.bintrayOrganization := Some("palantir"),
+    BintrayKeys.bintrayRepository := "releases",
+    BintrayKeys.bintrayVcsUrl := Some("https://github.com/palantir/parquet-mr"),
+    concurrentRestrictions in Global += Tags.limit(Tags.Test, 1),
 
     // Override SBT's default resolvers:
     resolvers := Seq(
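The new version setting asks sbt-git for a release tag first, then a git-describe style version, then a bare commit SHA, appending an uncommitted-changes suffix where applicable. Roughly, the three fallbacks correspond to the git queries below; this is an illustrative sketch of the precedence only, not the exact strings sbt-git produces, and the -SNAPSHOT suffix is an assumed default:

# approximate equivalents of the three version fallbacks, in order of precedence
git tag --points-at HEAD                 # releaseVersion: a version tag on HEAD, if any
git describe --tags --dirty=-SNAPSHOT    # describedVersion: nearest tag plus commit distance and short sha
git rev-parse --short HEAD               # commitVersion: fall back to the bare commit sha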

project/plugins.sbt

Lines changed: 4 additions & 0 deletions
@@ -1,3 +1,7 @@
+addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "0.8.5")
+
+addSbtPlugin("me.lessis" % "bintray-sbt" % "0.3.0")
+
 addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")
 
 addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "4.0.0")
