# circle.yml
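
# CircleCI 1.x configuration for this fork of palantir/spark. The machine section below
# provisions the build environment: Oracle JDK 8, R (used by the SparkR build and tests),
# and a cached Miniconda install whose Python 2.7 and 3.4 environments (with numpy) are
# exposed through pyenv.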
machine:
  java:
    version: oraclejdk8
  post:
    - sudo apt-get --assume-yes install r-base r-base-dev
    - |
      if [[ ! -d ${CONDA_ROOT} ]]; then
        echo "Installing Miniconda...";
        wget --quiet https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh &&
          bash Miniconda-latest-Linux-x86_64.sh -b -p ${CONDA_ROOT};
      else
        echo "Using cached Miniconda install";
      fi
    - ln -s ~/miniconda $(pyenv root)/versions/miniconda3-latest
    - 'pyenv versions | grep -q miniconda3-latest/envs/python2 || $CONDA_BIN create -y -n python2 python==2.7.11 numpy | tee -a "$CIRCLE_ARTIFACTS/run-conda.log"'
    - 'pyenv versions | grep -q miniconda3-latest/envs/python3 || $CONDA_BIN create -y -n python3 python==3.4.4 numpy | tee -a "$CIRCLE_ARTIFACTS/run-conda.log"'
    - pyenv global miniconda3-latest/envs/python2 miniconda3-latest/envs/python3 #pypy-4.0.1
    - pyenv rehash
  environment:
    TERM: dumb
    R_HOME: /usr/lib/R
    CONDA_BIN: $HOME/miniconda/bin/conda
    CONDA_ROOT: $HOME/miniconda
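
# After checkout: convert a shallow clone into a full one (likely so tags are available for
# git describe during deployment) and write Bintray credentials to .credentials, which is
# presumably read by dev/publish.sh in the deployment steps below.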
checkout:
  post:
    - "[[ ! -s \"$(git rev-parse --git-dir)/shallow\" ]] || git fetch --unshallow"
    - echo "user=$BINTRAY_USERNAME" > .credentials
    - echo "password=$BINTRAY_PASSWORD" >> .credentials
    - echo "realm=Bintray API Realm" >> .credentials
    - echo "host=api.bintray.com" >> .credentials
dependencies:
  override:
    - |
      if [[ -d build_classes ]]; then
        # Copy contents into current build directory
        rsync --info=stats2,misc1,flist0 -a build_classes/ .
      fi
    - ./build/mvn -DskipTests -Phadoop-cloud -Phadoop-palantir -Pkinesis-asl -Pkubernetes -Pyarn -Phive -Psparkr install
    # Copy all of */target/scala_2.11/classes to build_classes/
    - >
      rsync --info=stats2,misc1,flist0 -a --delete-excluded --prune-empty-dirs
      --exclude build_classes/ --exclude 'target/streams' --exclude 'assembly/target'
      --exclude 'common/network-yarn/target' --exclude 'examples/target' --exclude '***/*.jar'
      --include 'target/***' --include '**/' --exclude '*' . build_classes/
    - |
      # Make sbt fetch all the external deps to ~/.ivy2 so it gets cached
      ./build/sbt -Phadoop-cloud -Phadoop-palantir -Pkinesis-asl -Pkubernetes -Pyarn -Phive -Psparkr externalDependencyClasspath
  cache_directories:
    - "build_classes"
    - "build"
    - "~/miniconda"
general:
  artifacts:
    - "python/unit-tests.log"
    - "R/unit-tests.out"
test:
  override:
    - set -o pipefail && HADOOP_PROFILE=hadooppalantir ./dev/run-tests | tee -a "$CIRCLE_ARTIFACTS/run-tests.log" | grep -v -i info:
        parallel: true
        timeout: 1800
        max-runtime: 14400
  post:
    - find . -name unit-tests.log -exec rsync -R {} $CIRCLE_ARTIFACTS \;:
        parallel: true
    - ? |
        shopt -s nullglob
        files=(resource-managers/yarn/target/./org.apache.spark.deploy.yarn.*/*-logDir-*)
        if [[ ${#files[@]} != 0 ]]; then
          rsync -Rrm "${files[@]}" $CIRCLE_ARTIFACTS
        fi
      :
        parallel: true
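
# Deployment: dev/publish.sh runs for release tags matching the N.N.N-palantir.N pattern and
# for every build of master (snapshots); in both cases the corresponding Bintray version is
# then marked published via the Bintray REST API.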
deployment:
  release:
    tag: /[0-9]+(?:\.[0-9]+){2,}-palantir\.[0-9]+(?:\.[0-9]+)*/
    commands:
      - dev/publish.sh
      - curl -u $BINTRAY_USERNAME:$BINTRAY_PASSWORD -X POST https://api.bintray.com/content/palantir/releases/spark/$(git describe --tags)/publish
  snapshot:
    branch: master
    commands:
      - dev/publish.sh
      - curl -u $BINTRAY_USERNAME:$BINTRAY_PASSWORD -X POST https://api.bintray.com/content/palantir/releases/spark/$(git describe --tags)/publish