.travis.yml
language: python
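# Cache the downloaded Spark distributions across builds (see before_install).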
cache:
  directories:
    - $HOME/.cache/spark-versions
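# One job per tested Python/Spark/pandas/PyArrow combination.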
matrix:
  include:
    - python: "3.5"
      jdk:
        - oraclejdk8
      env:
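        # Strip the image's preinstalled OpenJDK 11 from PATH; Spark 2.x only
        # supports Java 8 (selected via JAVA_HOME below).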
        - PATH=$(echo "$PATH" | sed -e 's/:\/usr\/local\/lib\/jvm\/openjdk11\/bin//')
        - JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64
        - SPARK_VERSION=2.3.4
        - PANDAS_VERSION=0.23.4
        - PYARROW_VERSION=0.10.0
    - python: "3.6"
      jdk:
        - oraclejdk8
      env:
        - PATH=$(echo "$PATH" | sed -e 's/:\/usr\/local\/lib\/jvm\/openjdk11\/bin//')
        - JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64
        - SPARK_VERSION=2.4.4
        - PANDAS_VERSION=0.24.2
        - PYARROW_VERSION=0.13.0
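        # Exercise the usage-logging hook in this job only.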
        - KOALAS_USAGE_LOGGER='databricks.koalas.usage_logging.usage_logger'
    # When we upgrade to Spark 3.0 here, we could try using JDK 11 on 'xenial'.
    # Python 3.7 and higher are only available on this image.
    - python: 3.7
      dist: xenial
      sudo: true
      jdk:
        - oraclejdk8
      env:
        - PATH=$(echo "$PATH" | sed -e 's/:\/usr\/local\/lib\/jvm\/openjdk11\/bin//')
        - JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64
        - SPARK_VERSION=2.4.4
        - PANDAS_VERSION=0.25.3
        - PYARROW_VERSION=0.14.1
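# Download the Spark distributions into the cached directory declared above;
# before_script points SPARK_HOME there.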
before_install:
  - ./dev/download_travis_dependencies.sh
# See this page: http://conda.pydata.org/docs/travis.html
install:
  - sudo apt-get update
  # We do this conditionally because it saves us some downloading if the
  # version is the same.
  - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
      curl -s https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh > miniconda.sh;
    else
      curl -s https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh > miniconda.sh;
    fi
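  # Note: the matrix above only runs Python 3.x, so the Miniconda2 branch is
  # never taken in practice.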
  - bash miniconda.sh -b -p $HOME/miniconda
  - . $HOME/miniconda/etc/profile.d/conda.sh
  - hash -r
  - conda config --set always_yes yes --set changeps1 no
  - conda update -q conda
  # Useful for debugging any issues with conda
  - conda info -a
  # Create a bare test environment matching the job's Python version.
  - conda create -c conda-forge -q -n test-environment python=$TRAVIS_PYTHON_VERSION
  - conda activate test-environment
  - conda install -c conda-forge --yes codecov
  # Test the PyPI installation path on Python 3.5. This is also because one of
  # the dependencies specifically requires Python 3.6 on Conda.
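  # On the conda path below, python/pandas/pyarrow are pinned first so that
  # installing requirements-dev.txt cannot upgrade them past the versions
  # under test; --freeze-installed enforces the same at install time.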
  - |
    if [[ $TRAVIS_PYTHON_VERSION == "3.5" ]]; then
      pip install -r requirements-dev.txt && \
      pip install pandas==$PANDAS_VERSION pyarrow==$PYARROW_VERSION && \
      pip list;
    else
      conda config --env --add pinned_packages python=$TRAVIS_PYTHON_VERSION && \
      conda config --env --add pinned_packages pandas==$PANDAS_VERSION && \
      conda config --env --add pinned_packages pyarrow==$PYARROW_VERSION && \
      conda install -c conda-forge --yes pandas==$PANDAS_VERSION pyarrow==$PYARROW_VERSION && \
      conda install -c conda-forge --yes --freeze-installed --file requirements-dev.txt && \
      conda list;
    fi
  # Useful for the to_clipboard test; xclip needs an X display to talk to.
  - export DISPLAY=:0.0
  - sudo apt-get install xclip
before_script:
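  # Spark was downloaded into this cached path by before_install.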
  - export SPARK_HOME="$HOME/.cache/spark-versions/spark-$SPARK_VERSION-bin-hadoop2.7"
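  # Render Qt off-screen so plotting-related tests don't need a real display.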
  - export QT_QPA_PLATFORM="offscreen"
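  # Style and lint checks run before the test suite.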
  - ./dev/lint-python
script:
  - ./dev/pytest
# Push the results back to codecov
after_success:
  - bash <(curl -s https://codecov.io/bash)