@@ -35,32 +35,37 @@ function delete_virtualenv() {
3535}
3636trap delete_virtualenv EXIT
3737
38+ PYTHON_EXECS=()
3839# Some systems don't have pip or virtualenv - in those cases our tests won't work.
39- if ! hash virtualenv 2> /dev/null; then
40- echo " Missing virtualenv skipping pip installability tests."
40+ if hash virtualenv 2> /dev/null && [ ! -n " $USE_CONDA " ]; then
41+ echo " virtualenv installed - using. Note if this is a conda virtual env you may wish to set USE_CONDA"
42+ # Figure out which Python execs we should test pip installation with
43+ if hash python2 2> /dev/null; then
44+ # We do this since we are testing with virtualenv and the default virtual env python
45+ # is in /usr/bin/python
46+ PYTHON_EXECS+=(' python2' )
47+ elif hash python 2> /dev/null; then
48+ # If python2 isn't installed fallback to python if available
49+ PYTHON_EXECS+=(' python' )
50+ fi
51+ if hash python3 2> /dev/null; then
52+ PYTHON_EXECS+=(' python3' )
53+ fi
54+ elif hash conda 2> /dev/null; then
55+ echo " Using conda virtual environments"
56+ PYTHON_EXECS=(' 3.5' )
57+ USE_CONDA=1
58+ else
59+ echo " Missing virtualenv & conda, skipping pip installability tests"
4160 exit 0
4261fi
4362if ! hash pip 2> /dev/null; then
4463 echo " Missing pip, skipping pip installability tests."
4564 exit 0
4665fi
4766
48- # Figure out which Python execs we should test pip installation with
49- PYTHON_EXECS=()
50- if hash python2 2> /dev/null; then
51- # We do this since we are testing with virtualenv and the default virtual env python
52- # is in /usr/bin/python
53- PYTHON_EXECS+=(' python2' )
54- elif hash python 2> /dev/null; then
55- # If python2 isn't installed fallback to python if available
56- PYTHON_EXECS+=(' python' )
57- fi
58- if hash python3 2> /dev/null; then
59- PYTHON_EXECS+=(' python3' )
60- fi
61-
6267# Determine which version of PySpark we are building for archive name
63- PYSPARK_VERSION=$( python -c " exec(open('python/pyspark/version.py').read());print __version__" )
68+ PYSPARK_VERSION=$( python3 -c " exec(open('python/pyspark/version.py').read());print( __version__) " )
6469PYSPARK_DIST=" $FWDIR /python/dist/pyspark-$PYSPARK_VERSION .tar.gz"
6570# The pip install options we use for all the pip commands
6671PIP_OPTIONS=" --upgrade --no-cache-dir --force-reinstall "
@@ -75,18 +80,24 @@ for python in "${PYTHON_EXECS[@]}"; do
7580 echo " Using $VIRTUALENV_BASE for virtualenv"
7681 VIRTUALENV_PATH=" $VIRTUALENV_BASE " /$python
7782 rm -rf " $VIRTUALENV_PATH "
78- mkdir -p " $VIRTUALENV_PATH "
79- virtualenv --python=$python " $VIRTUALENV_PATH "
80- source " $VIRTUALENV_PATH " /bin/activate
81- # Upgrade pip & friends
82- pip install --upgrade pip pypandoc wheel
83- pip install numpy # Needed so we can verify mllib imports
83+ if [ -n " $USE_CONDA " ]; then
84+ conda create -y -p " $VIRTUALENV_PATH " python=$python numpy pandas pip setuptools
85+ source activate " $VIRTUALENV_PATH "
86+ else
87+ mkdir -p " $VIRTUALENV_PATH "
88+ virtualenv --python=$python " $VIRTUALENV_PATH "
89+ source " $VIRTUALENV_PATH " /bin/activate
90+ fi
91+ # Upgrade pip & friends if using virtual env
92+ if [ ! -n " $USE_CONDA " ]; then
93+ pip install --upgrade pip pypandoc wheel numpy
94+ fi
8495
8596 echo " Creating pip installable source dist"
8697 cd " $FWDIR " /python
8798 # Delete the egg info file if it exists, this can cache the setup file.
8899 rm -rf pyspark.egg-info || echo " No existing egg info file, skipping deletion"
89- $ python setup.py sdist
100+ python setup.py sdist
90101
91102
92103 echo " Installing dist into virtual env"
@@ -112,6 +123,13 @@ for python in "${PYTHON_EXECS[@]}"; do
112123
113124 cd " $FWDIR "
114125
126+ # conda / virtualenv environments need to be deactivated differently
127+ if [ -n " $USE_CONDA " ]; then
128+ source deactivate
129+ else
130+ deactivate
131+ fi
132+
115133 done
116134done
117135
0 commit comments