|
1 | | -dist: xenial |
| 1 | +dist: trusty |
2 | 2 |
|
3 | 3 | language: java |
4 | 4 |
|
5 | 5 | jdk: oraclejdk8 |
6 | 6 |
|
7 | 7 | sudo: required |
8 | 8 |
|
| 9 | +services: |
| 10 | + - docker |
| 11 | + |
9 | 12 | cache: |
10 | 13 | directories: |
11 | 14 | - $HOME/.ivy2/ |
|
28 | 31 | - SCALA_BINARY_VERSION=2.10.6 SPARK_VERSION=2.2.0 SPARK_BUILD="spark-2.2.0-bin-hadoop2.7" |
29 | 32 | SPARK_BUILD_URL="http://d3kbcqa49mib13.cloudfront.net/spark-2.2.0-bin-hadoop2.7.tgz" |
30 | 33 | TF_PY2_BUILD=tensorflow-1.3.0-cp27-none-linux_x86_64.whl PYTHON_VERSION=3.6.2 |
31 | | - - SCALA_BINARY_VERSION=2.11.8 SPARK_VERSION=2.0.2 SPARK_BUILD="spark-2.0.2-bin-hadoop2.7" |
32 | | - SPARK_BUILD_URL="http://d3kbcqa49mib13.cloudfront.net/spark-2.0.2-bin-hadoop2.7.tgz" |
33 | | - TF_PY2_BUILD=tensorflow-1.3.0-cp27-none-linux_x86_64.whl PYTHON_VERSION=2.7.13 |
34 | 34 | - SCALA_BINARY_VERSION=2.11.8 SPARK_VERSION=2.1.1 SPARK_BUILD="spark-2.1.1-bin-hadoop2.7" |
35 | 35 | SPARK_BUILD_URL="http://d3kbcqa49mib13.cloudfront.net/spark-2.1.1-bin-hadoop2.7.tgz" |
36 | 36 | TF_PY2_BUILD=tensorflow-1.3.0-cp27-none-linux_x86_64.whl PYTHON_VERSION=2.7.13 |
|
52 | 52 | PYTHON_VERSION=3.6.2 |
53 | 53 |
|
54 | 54 | before_install: |
55 | | - - ./bin/download_travis_dependencies.sh |
| 55 | + - ./bin/download_travis_dependencies.sh |
| 56 | + - if [[ "$PYTHON_VERSION" == 2.* ]]; then |
| 57 | + export CONDA_URL="repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh"; |
| 58 | + export PYSPARK_PYTHON=python2; |
| 59 | + else |
| 60 | + export CONDA_URL="repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh"; |
| 61 | + export PYSPARK_PYTHON=python3; |
| 62 | + fi |
| 63 | + - docker run -e "JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64" |
| 64 | + -e SPARK_VERSION |
| 65 | + -e SPARK_BUILD |
| 66 | + -e SCALA_BINARY_VERSION |
| 67 | + -e PYTHON_VERSION |
| 68 | + -e PYSPARK_PYTHON |
| 69 | + -e CONDA_URL |
| 70 | + -d --name ubuntu-test -v $HOME ubuntu:16.04 tail -f /dev/null |
| 71 | + - docker cp `pwd` ubuntu-test:$HOME/ |
| 72 | + - docker cp $HOME/.cache ubuntu-test:$HOME/ |
| 73 | + - docker ps |
56 | 74 |
|
57 | 75 | # See this page: http://conda.pydata.org/docs/travis.html |
58 | 76 | install: |
59 | | - - sudo apt-get update |
60 | | - # We do this conditionally because it saves us some downloading if the |
61 | | - # version is the same. |
62 | | - - if [[ "$PYTHON_VERSION" == 2.* ]]; then |
63 | | - wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; |
64 | | - export PYSPARK_PYTHON=python2; |
65 | | - else |
66 | | - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; |
67 | | - export PYSPARK_PYTHON=python3; |
68 | | - fi |
69 | | - - bash miniconda.sh -b -p $HOME/miniconda |
70 | | - - export PATH="$HOME/miniconda/bin:$PATH" |
71 | | - - hash -r |
72 | | - - conda config --set always_yes yes --set changeps1 no |
73 | | - - conda update -q conda |
74 | | - # Useful for debugging any issues with conda |
75 | | - - conda info -a |
| 77 | + # install needed ubuntu packages |
| 78 | + - docker exec -t ubuntu-test bash -c "apt-get update && apt-get upgrade -y" |
| 79 | + - docker exec -t ubuntu-test bash -c "apt-get install -y curl bzip2 openjdk-8-jdk" |
| 80 | + # download and set up miniconda |
| 81 | + - docker exec -t ubuntu-test bash -c " |
| 82 | + curl https://$CONDA_URL >> $HOME/miniconda.sh; |
| 83 | + bash $HOME/miniconda.sh -b -p $HOME/miniconda; |
| 85 | + $HOME/miniconda/bin/conda config --set always_yes yes --set changeps1 no; |
| 86 | + $HOME/miniconda/bin/conda update -q conda; |
| 87 | + $HOME/miniconda/bin/conda info -a; |
| 88 | + $HOME/miniconda/bin/conda create -q -n test-environment python=$PYTHON_VERSION" |
76 | 89 |
|
77 | | - # Create and activate conda environment |
78 | | - - conda create -q -n test-environment python=$PYTHON_VERSION |
79 | | - - source activate test-environment |
80 | | - # Log python & pip versions |
81 | | - - python --version |
82 | | - - pip --version |
83 | | - # Install dependencies from requirements.txt |
84 | | - - pip install --user -r ./python/requirements.txt |
85 | | - # Install tensorflow |
86 | | - - pip install --user tensorflow==1.6 |
| 90 | + # Activate conda environment and install required packages |
| 91 | + - docker exec -t ubuntu-test bash -c " |
| 92 | + source $HOME/miniconda/bin/activate test-environment; |
| 93 | + python --version; |
| 94 | + pip --version; |
| 95 | + pip install --user -r $HOME/tensorframes/python/requirements.txt;" |
87 | 96 |
|
88 | 97 | script: |
89 | | - - rm -rf /home/travis/.javacpp |
90 | | - # Manually remove previous versions, otherwise the snaphsot will not get updated. |
91 | | - - rm -rf /home/travis/.ivy2/cache/org.bytedeco.javacpp-presets/* |
92 | 98 | # Run the scala unit tests first |
93 | | - - sbt -Dspark.version=$SPARK_VERSION -Dpython.version=$PYSPARK_PYTHON -Dscala.version=$SCALA_BINARY_VERSION tfs_testing/test |
94 | | - # Run the python unit tests. |
95 | | - - sbt -Dspark.version=$SPARK_VERSION -Dscala.version=$SCALA_BINARY_VERSION tfs_testing/assembly |
96 | | - - SPARK_HOME=$HOME/.cache/spark-versions/$SPARK_BUILD ./python/run-tests.sh |
| 99 | + - docker exec -t ubuntu-test bash -c " |
| 100 | + source $HOME/miniconda/bin/activate test-environment; |
| 101 | + cd $HOME/tensorframes; |
| 102 | + ./build/sbt -Dspark.version=$SPARK_VERSION |
| 103 | + -Dpython.version=$PYSPARK_PYTHON |
| 104 | + -Dscala.version=$SCALA_BINARY_VERSION |
| 105 | + tfs_testing/test" |
| 106 | + |
| 107 | + # Build the assembly |
| 108 | + - docker exec -t ubuntu-test bash -c " |
| 109 | + source $HOME/miniconda/bin/activate test-environment; |
| 110 | + cd $HOME/tensorframes; |
| 111 | + ./build/sbt -Dspark.version=$SPARK_VERSION |
| 112 | + -Dscala.version=$SCALA_BINARY_VERSION |
| 113 | + tfs_testing/assembly" |
| 114 | + |
| 115 | + # Run python tests |
| 116 | + - docker exec -t ubuntu-test bash -c " |
| 117 | + source $HOME/miniconda/bin/activate test-environment; |
| 118 | + cd $HOME/tensorframes; |
| 119 | + SPARK_HOME=$HOME/.cache/spark-versions/$SPARK_BUILD ./python/run-tests.sh" |
0 commit comments