From cbc396ff90e8f6ad2b533ba1d309f70f55aa42bb Mon Sep 17 00:00:00 2001 From: Dan Becker Date: Thu, 10 May 2018 13:50:03 -0600 Subject: [PATCH 001/251] Add gym and tensorforce --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index fedb0cd0..192ee289 100644 --- a/Dockerfile +++ b/Dockerfile @@ -490,7 +490,8 @@ RUN pip install flashtext && \ pip install kmapper && \ pip install shap && \ pip install ray && \ - + pip install gym && \ + pip install tensorforce && \ ##### ^^^^ Add new contributions above here ^^^^ ##### From 80fe4ad542feea446815f50041cc4d31e436fd04 Mon Sep 17 00:00:00 2001 From: Chris Crawford Date: Tue, 31 Jul 2018 22:35:51 +0000 Subject: [PATCH 002/251] fix geopandas, pin vowpalwabbit, python3.6.6 --- Dockerfile | 76 +++++++++++++++++++++++------------------------------- 1 file changed, 32 insertions(+), 44 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9f241b5d..cbbec9f9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -10,27 +10,21 @@ RUN sed -i "s/httpredir.debian.org/debian.uchicago.edu/" /etc/apt/sources.list & # https://stackoverflow.com/a/46498173 conda update -y conda && conda update -y python && \ pip install --upgrade pip && \ - apt-get -y install cmake && \ - # Vowpal Rabbit - #apt-get install -y libboost-program-options-dev zlib1g-dev libboost-python-dev && \ - #cd /usr/lib/x86_64-linux-gnu/ && rm -f libboost_python.a && rm -f libboost_python.so && \ - #ln -sf libboost_python-py34.so libboost_python.so && ln -sf libboost_python-py34.a libboost_python.a && \ - #pip install vowpalwabbit && \ - # Anaconda's scipy is currently behind the main release (1.0) - #pip install scipy --upgrade && \ - pip install seaborn python-dateutil dask pytagcloud pyyaml joblib \ + apt-get -y install cmake + +RUN pip install seaborn python-dateutil dask pytagcloud pyyaml joblib \ husl geopy ml_metrics mne pyshp gensim && \ conda install -y -c conda-forge spacy && python -m spacy download en && \ python -m spacy download en_core_web_lg && \ # The apt-get version of imagemagick is out of date and has compatibility issues, so we build from source apt-get -y install dbus fontconfig fontconfig-config fonts-dejavu-core fonts-droid ghostscript gsfonts hicolor-icon-theme \ -libavahi-client3 libavahi-common-data libavahi-common3 libcairo2 libcap-ng0 libcroco3 \ -libcups2 libcupsfilters1 libcupsimage2 libdatrie1 libdbus-1-3 libdjvulibre-text libdjvulibre21 libfftw3-double3 libfontconfig1 \ -libfreetype6 libgdk-pixbuf2.0-0 libgdk-pixbuf2.0-common libgomp1 libgraphite2-3 libgs9 libgs9-common libharfbuzz0b libijs-0.35 \ -libilmbase6 libjasper1 libjbig0 libjbig2dec0 libjpeg62-turbo liblcms2-2 liblqr-1-0 libltdl7 libmagickcore-6.q16-2 \ -libmagickcore-6.q16-2-extra libmagickwand-6.q16-2 libnetpbm10 libopenexr6 libpango-1.0-0 libpangocairo-1.0-0 libpangoft2-1.0-0 \ -libpaper-utils libpaper1 libpixman-1-0 libpng12-0 librsvg2-2 librsvg2-common libthai-data libthai0 libtiff5 libwmf0.2-7 \ -libxcb-render0 libxcb-shm0 netpbm poppler-data p7zip-full && \ + libavahi-client3 libavahi-common-data libavahi-common3 libcairo2 libcap-ng0 libcroco3 \ + libcups2 libcupsfilters1 libcupsimage2 libdatrie1 libdbus-1-3 libdjvulibre-text libdjvulibre21 libfftw3-double3 libfontconfig1 \ + libfreetype6 libgdk-pixbuf2.0-0 libgdk-pixbuf2.0-common libgomp1 libgraphite2-3 libgs9 libgs9-common libharfbuzz0b libijs-0.35 \ + libilmbase6 libjasper1 libjbig0 libjbig2dec0 libjpeg62-turbo liblcms2-2 liblqr-1-0 libltdl7 libmagickcore-6.q16-2 \ + libmagickcore-6.q16-2-extra 
libmagickwand-6.q16-2 libnetpbm10 libopenexr6 libpango-1.0-0 libpangocairo-1.0-0 libpangoft2-1.0-0 \ + libpaper-utils libpaper1 libpixman-1-0 libpng12-0 librsvg2-2 librsvg2-common libthai-data libthai0 libtiff5 libwmf0.2-7 \ + libxcb-render0 libxcb-shm0 netpbm poppler-data p7zip-full && \ cd /usr/local/src && \ wget http://transloadit.imagemagick.org/download/ImageMagick.tar.gz && \ tar xzf ImageMagick.tar.gz && cd `ls -d ImageMagick-*` && pwd && ls -al && ./configure && \ @@ -40,19 +34,6 @@ libxcb-render0 libxcb-shm0 netpbm poppler-data p7zip-full && \ # OpenCV install (from pip or source) RUN pip install opencv-python - #apt-get -y install libgtk2.0-dev pkg-config libavcodec-dev libavformat-dev libswscale-dev && \ - #apt-get -y install libtbb2 libtbb-dev libjpeg-dev libtiff-dev libjasper-dev && \ - #cd /usr/local/src && git clone --depth 1 https://github.com/Itseez/opencv.git && \ - #cd opencv && \ - #mkdir build && cd build && \ - #cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D WITH_TBB=ON -D WITH_FFMPEG=OFF -D WITH_V4L=ON -D WITH_QT=OFF -D WITH_OPENGL=ON -D PYTHON3_LIBRARY=/opt/conda/lib/libpython3.6m.so -D PYTHON3_INCLUDE_DIR=/opt/conda/include/python3.6m/ -D PYTHON_LIBRARY=/opt/conda/lib/libpython3.6m.so -D PYTHON_INCLUDE_DIR=/opt/conda/include/python3.6m/ -D BUILD_PNG=TRUE .. && \ - #make -j $(nproc) && make install && \ - #echo "/usr/local/lib/python3.6/site-packages" > /etc/ld.so.conf.d/opencv.conf && ldconfig && \ - #cp /usr/local/lib/python3.6/site-packages/cv2.cpython-36m-x86_64-linux-gnu.so /opt/conda/lib/python3.6/site-packages/ && \ - # Clean up install cruft - #rm -rf /usr/local/src/opencv && \ - #rm -rf /root/.cache/pip/* && \ - #apt-get autoremove -y && apt-get clean RUN apt-get update && apt-get install -y python-software-properties zip && \ echo "deb http://ppa.launchpad.net/webupd8team/java/ubuntu precise main" | tee -a /etc/apt/sources.list && \ @@ -67,8 +48,11 @@ RUN apt-get update && apt-get install -y python-software-properties zip && \ apt-get update && apt-get install -y bazel && \ apt-get upgrade -y bazel -# Tensorflow source build -RUN cd /usr/local/src && \ +# Tensorflow +# Temp fixes: Downgrade python 3.7->3.6.6 and downgrade Pandas 0.23.3->0.23.2 +RUN conda install -y python=3.6.6 && \ + pip install pandas==0.23.2 && \ + cd /usr/local/src && \ git clone https://github.com/tensorflow/tensorflow && \ cd tensorflow && \ cat /dev/null | ./configure && \ @@ -132,18 +116,18 @@ RUN apt-get install -y libfreetype6-dev && \ # the corpuses that work python -m nltk.downloader -d /usr/share/nltk_data abc alpino averaged_perceptron_tagger \ basque_grammars biocreative_ppi bllip_wsj_no_aux \ -book_grammars brown brown_tei cess_cat cess_esp chat80 city_database cmudict \ -comtrans conll2000 conll2002 conll2007 crubadan dependency_treebank \ -europarl_raw floresta gazetteers genesis gutenberg \ -ieer inaugural indian jeita kimmo knbc large_grammars lin_thesaurus mac_morpho machado \ -masc_tagged maxent_ne_chunker maxent_treebank_pos_tagger moses_sample movie_reviews \ -mte_teip5 names nps_chat omw opinion_lexicon paradigms \ -pil pl196x porter_test ppattach problem_reports product_reviews_1 product_reviews_2 propbank \ -pros_cons ptb punkt qc reuters rslp rte sample_grammars semcor senseval sentence_polarity \ -sentiwordnet shakespeare sinica_treebank smultron snowball_data spanish_grammars \ -state_union stopwords subjectivity swadesh switchboard tagsets timit toolbox treebank \ -twitter_samples udhr2 udhr unicode_samples universal_tagset 
universal_treebanks_v20 \ -vader_lexicon verbnet webtext word2vec_sample wordnet wordnet_ic words ycoe && \ + book_grammars brown brown_tei cess_cat cess_esp chat80 city_database cmudict \ + comtrans conll2000 conll2002 conll2007 crubadan dependency_treebank \ + europarl_raw floresta gazetteers genesis gutenberg \ + ieer inaugural indian jeita kimmo knbc large_grammars lin_thesaurus mac_morpho machado \ + masc_tagged maxent_ne_chunker maxent_treebank_pos_tagger moses_sample movie_reviews \ + mte_teip5 names nps_chat omw opinion_lexicon paradigms \ + pil pl196x porter_test ppattach problem_reports product_reviews_1 product_reviews_2 propbank \ + pros_cons ptb punkt qc reuters rslp rte sample_grammars semcor senseval sentence_polarity \ + sentiwordnet shakespeare sinica_treebank smultron snowball_data spanish_grammars \ + state_union stopwords subjectivity swadesh switchboard tagsets timit toolbox treebank \ + twitter_samples udhr2 udhr unicode_samples universal_tagset universal_treebanks_v20 \ + vader_lexicon verbnet webtext word2vec_sample wordnet wordnet_ic words ycoe && \ # Stop-words pip install stop-words && \ # clean up @@ -339,6 +323,8 @@ RUN cd /usr/local/src && git clone https://github.com/iskandr/fancyimpute && \ pip install stemming && \ conda install -y -c conda-forge fbprophet && \ conda install -y -c conda-forge -c ioam holoviews geoviews && \ +# Temp fix : Fiona is already installed by pip and conda installs another version as a dependency for holoviews + conda uninstall -y fiona && \ pip install hypertools && \ # Nxviz has been causing an installation issue by trying unsuccessfully to remove setuptools. #pip install nxviz && \ @@ -488,6 +474,7 @@ RUN pip install bcolz && \ # of all non-final lines. Thanks! # ########### + RUN pip install flashtext && \ pip install marisa-trie && \ pip install pyemd && \ @@ -506,11 +493,12 @@ RUN pip install flashtext && \ pip install conx && \ pip install pandasql && \ pip install trackml && \ - cd /usr/local/src && git clone https://github.com/JohnLangford/vowpal_wabbit.git && ./vowpal_wabbit/python/conda_install.sh && \ ##### ^^^^ Add new contributions above here ^^^^ ##### # clean up pip cache rm -rf /root/.cache/pip/* +# Pin Vowpal Wabbit v8.6.0 because 8.6.1 does not build or install successfully +RUN cd /usr/local/src && git clone -b 8.6.0 https://github.com/JohnLangford/vowpal_wabbit.git && ./vowpal_wabbit/python/conda_install.sh # For Facets ENV PYTHONPATH=$PYTHONPATH:/opt/facets/facets_overview/python/ From 460916383daa1b8c2bd94a48c0169bfc901febf8 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 1 Aug 2018 10:17:31 -0700 Subject: [PATCH 003/251] Workaround to build a new image with the latest learn tools --- README.md | 2 +- kaggle_tools_update.Dockerfile | 14 ++++++++++++++ test_build.py | 2 ++ 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 kaggle_tools_update.Dockerfile diff --git a/README.md b/README.md index 402eda9c..90faac2e 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ Next run the build: Finally run the tests: ``` -./tests +./test ``` Then submit your pull request, and you're all set! diff --git a/kaggle_tools_update.Dockerfile b/kaggle_tools_update.Dockerfile new file mode 100644 index 00000000..375f5884 --- /dev/null +++ b/kaggle_tools_update.Dockerfile @@ -0,0 +1,14 @@ +# This Dockerfile is a temporary solution to unblock the +# community team by updating their library latest using +# the latest python image. +# This is needed until we resolved the issues with the +# main build. 
+ +# Usage: +# docker build --rm -t kaggle/python-build -f kaggle_tools_update.Dockerfile . +# ./test +# ./push (if tests are passing) + +FROM gcr.io/kaggle-images/python:latest + +RUN pip install git+https://github.com/Kaggle/learntools diff --git a/test_build.py b/test_build.py index 416e3c25..44bb5861 100644 --- a/test_build.py +++ b/test_build.py @@ -15,6 +15,8 @@ from keras.optimizers import SGD print("Keras ok") +# Test learntools + # PyTorch smoke test based on http://pytorch.org/tutorials/beginner/nlp/deep_learning_tutorial.html import torch import torch.nn as tnn From 29bff4ff33181b447808bce1deecd31cade63d94 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 1 Aug 2018 10:30:36 -0700 Subject: [PATCH 004/251] add learntools test --- test_build.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test_build.py b/test_build.py index 44bb5861..5c57afde 100644 --- a/test_build.py +++ b/test_build.py @@ -15,7 +15,11 @@ from keras.optimizers import SGD print("Keras ok") -# Test learntools +# Test Kaggle learntools +from learntools.core import binder; binder.bind(globals()) +from learntools.python.ex1 import * +color="blue" +q0.check() # PyTorch smoke test based on http://pytorch.org/tutorials/beginner/nlp/deep_learning_tutorial.html import torch From 7ac2658b9d5abd7ea98002eb78b6f5080446ed8f Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 1 Aug 2018 10:34:43 -0700 Subject: [PATCH 005/251] Add print statement to test --- test_build.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test_build.py b/test_build.py index 5c57afde..52d88f33 100644 --- a/test_build.py +++ b/test_build.py @@ -20,6 +20,7 @@ from learntools.python.ex1 import * color="blue" q0.check() +print("learntools ok") # PyTorch smoke test based on http://pytorch.org/tutorials/beginner/nlp/deep_learning_tutorial.html import torch From c65e1de5c1c17e25798f6495d5ffdce4905eba08 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 1 Aug 2018 11:26:21 -0700 Subject: [PATCH 006/251] Addressed comments --- kaggle_tools_update.Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/kaggle_tools_update.Dockerfile b/kaggle_tools_update.Dockerfile index 375f5884..970fdf9d 100644 --- a/kaggle_tools_update.Dockerfile +++ b/kaggle_tools_update.Dockerfile @@ -1,14 +1,14 @@ # This Dockerfile is a temporary solution to unblock the -# community team by updating their library latest using +# Kaggle team by updating their library latest using # the latest python image. # This is needed until we resolved the issues with the # main build. # Usage: -# docker build --rm -t kaggle/python-build -f kaggle_tools_update.Dockerfile . +# docker build --pull --rm -t kaggle/python-build -f kaggle_tools_update.Dockerfile . 
# ./test # ./push (if tests are passing) FROM gcr.io/kaggle-images/python:latest -RUN pip install git+https://github.com/Kaggle/learntools +RUN pip install --upgrade git+https://github.com/Kaggle/learntools From 2c38297fff17e8331c831798690ca0b6c3efb835 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 1 Aug 2018 15:21:24 -0700 Subject: [PATCH 007/251] addressed feedback --- kaggle_tools_update.Dockerfile | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/kaggle_tools_update.Dockerfile b/kaggle_tools_update.Dockerfile index 970fdf9d..6dadda9a 100644 --- a/kaggle_tools_update.Dockerfile +++ b/kaggle_tools_update.Dockerfile @@ -1,11 +1,14 @@ -# This Dockerfile is a temporary solution to unblock the -# Kaggle team by updating their library latest using -# the latest python image. -# This is needed until we resolved the issues with the -# main build. +# This Dockerfile is a temporary solution until we +# resolved the broken main build. + +# This Dockerfile creates a new image based on our +# current published python image with the latest +# version of the LearnTools library to allow us +# to release new Learn content. # Usage: -# docker build --pull --rm -t kaggle/python-build -f kaggle_tools_update.Dockerfile . +# docker rmi gcr.io/kaggle-images/python:latest +# docker build --rm -t kaggle/python-build -f kaggle_tools_update.Dockerfile . # ./test # ./push (if tests are passing) From 4d4487e6b69a58b915ea0aa3876e5cf393b52328 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 1 Aug 2018 15:56:08 -0700 Subject: [PATCH 008/251] setup pip for internet access --- kaggle_tools_update.Dockerfile | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/kaggle_tools_update.Dockerfile b/kaggle_tools_update.Dockerfile index 6dadda9a..b1bd79db 100644 --- a/kaggle_tools_update.Dockerfile +++ b/kaggle_tools_update.Dockerfile @@ -4,7 +4,9 @@ # This Dockerfile creates a new image based on our # current published python image with the latest # version of the LearnTools library to allow us -# to release new Learn content. +# to release new Learn content. It also configures +# pip to work out-of-the-box when internet access +# is enabled. # Usage: # docker rmi gcr.io/kaggle-images/python:latest @@ -15,3 +17,9 @@ FROM gcr.io/kaggle-images/python:latest RUN pip install --upgrade git+https://github.com/Kaggle/learntools + +# Set up pip to enable pip install. +ADD patches/kaggle_bashrc /root +# Patch the system-wide bashrc file for non-root users. +RUN cat /root/kaggle_bashrc >> /etc/bash.bashrc +RUN rm /root/kaggle_bashrc From cddee841c94fe5bb1d953422d91988046ee21590 Mon Sep 17 00:00:00 2001 From: d1jang Date: Thu, 2 Aug 2018 13:06:45 -0700 Subject: [PATCH 009/251] Add ENTRYPOINT script to initialize the image. --- Dockerfile | 10 +++++----- kaggle_tools_update.Dockerfile | 10 +++++----- patches/{kaggle_bashrc => entrypoint.sh} | 13 +++++++++---- 3 files changed, 19 insertions(+), 14 deletions(-) rename patches/{kaggle_bashrc => entrypoint.sh} (79%) diff --git a/Dockerfile b/Dockerfile index cbbec9f9..27a7fa89 100644 --- a/Dockerfile +++ b/Dockerfile @@ -523,11 +523,11 @@ ADD patches/sitecustomize.py /root/.local/lib/python3.6/site-packages/sitecustom # Set backend for matplotlib ENV MPLBACKEND "agg" -# Set up pip to enable pip install. -ADD patches/kaggle_bashrc /root -# Patch the system-wide bashrc file for non-root users. 
-RUN cat /root/kaggle_bashrc >> /etc/bash.bashrc -RUN rm /root/kaggle_bashrc +# Set up an initialization script to be executed before any other commands initiated by the user. +ADD patches/entrypoint.sh /root/entrypoint.sh +RUN chmod +x /root/entrypoint.sh +# This script gets executed by "docker run " and it runs at the end of its execution. +ENTRYPOINT ["/root/entrypoint.sh"] # Finally, apply any locally defined patches. RUN /bin/bash -c \ diff --git a/kaggle_tools_update.Dockerfile b/kaggle_tools_update.Dockerfile index b1bd79db..79e18812 100644 --- a/kaggle_tools_update.Dockerfile +++ b/kaggle_tools_update.Dockerfile @@ -18,8 +18,8 @@ FROM gcr.io/kaggle-images/python:latest RUN pip install --upgrade git+https://github.com/Kaggle/learntools -# Set up pip to enable pip install. -ADD patches/kaggle_bashrc /root -# Patch the system-wide bashrc file for non-root users. -RUN cat /root/kaggle_bashrc >> /etc/bash.bashrc -RUN rm /root/kaggle_bashrc +# Set up an initialization script to be executed before any other commands initiated by the user. +ADD patches/entrypoint.sh /root/entrypoint.sh +RUN chmod +x /root/entrypoint.sh +# This script gets executed by "docker run " and it runs at the end of its execution. +ENTRYPOINT ["/root/entrypoint.sh"] \ No newline at end of file diff --git a/patches/kaggle_bashrc b/patches/entrypoint.sh similarity index 79% rename from patches/kaggle_bashrc rename to patches/entrypoint.sh index b19e2748..98d4d968 100644 --- a/patches/kaggle_bashrc +++ b/patches/entrypoint.sh @@ -1,9 +1,11 @@ +#!/usr/bin/env bash + +# This shell script is executed as an entrypoint file for the Kernels docker image. +# It sets up the execution environment before starting the Jypyter Notebook Server. +# Refer to https://docs.docker.com/engine/reference/builder/#entrypoint for details. -# Kaggle-specific .bashrc script to be appended to /etc/bash.bashrc to apply it -# when any user session starts. # This script sets up pip to enable a user to install and use python modules via # pip intall. $KAGGLE_WORKING_DIR should be available for it to work. - if [[ ! -z "${KAGGLE_WORKING_DIR}" ]]; then PIP_INSTALL_PREFIX_DIR="${KAGGLE_WORKING_DIR}/pip" PIP_CONFIG_FILE_PATH="${KAGGLE_WORKING_DIR}/config/pip/pip.conf" @@ -28,4 +30,7 @@ if [[ ! -z "${KAGGLE_WORKING_DIR}" ]]; then # Include pip-installed binaries in PATH. export PATH=${PATH}:${PIP_INSTALL_PREFIX_DIR}/bin -fi \ No newline at end of file +fi + +# Execute the command provided from "docker run" in the current process. +exec "$@" \ No newline at end of file From 810893186d9adafb0367ecbdb78e8d00042a02a6 Mon Sep 17 00:00:00 2001 From: d1jang Date: Thu, 2 Aug 2018 16:16:13 -0700 Subject: [PATCH 010/251] use echo -e to print newlines correctly. --- patches/entrypoint.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/patches/entrypoint.sh b/patches/entrypoint.sh index 98d4d968..12c0305c 100644 --- a/patches/entrypoint.sh +++ b/patches/entrypoint.sh @@ -17,7 +17,7 @@ if [[ ! -z "${KAGGLE_WORKING_DIR}" ]]; then # installation. Also, ignore-installed is set to true to prevent pip # from trying to remove existing modules from the read-only filesystem. mkdir -p `dirname ${PIP_CONFIG_FILE_PATH}` - echo "[install]\nprefix=${PIP_INSTALL_PREFIX_DIR}\nignore-installed=true" > ${PIP_CONFIG_FILE_PATH} + echo -e "[install]\nprefix=${PIP_INSTALL_PREFIX_DIR}\nignore-installed=true" > ${PIP_CONFIG_FILE_PATH} # Instruct pip to use this config file. 
  export PIP_CONFIG_FILE=${PIP_CONFIG_FILE_PATH}

From b404a22087732795f486e8025ecfc589cc66ecf4 Mon Sep 17 00:00:00 2001
From: d1jang
Date: Thu, 2 Aug 2018 16:20:36 -0700
Subject: [PATCH 011/251] add comment.

---
 kaggle_tools_update.Dockerfile | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/kaggle_tools_update.Dockerfile b/kaggle_tools_update.Dockerfile
index 79e18812..752c5567 100644
--- a/kaggle_tools_update.Dockerfile
+++ b/kaggle_tools_update.Dockerfile
@@ -18,6 +18,8 @@ FROM gcr.io/kaggle-images/python:latest

 RUN pip install --upgrade git+https://github.com/Kaggle/learntools

+# TODO(dsjang): Remove these lines once the docker image build turns green since they are copied from Dockerfile
+# to apply on top of the last green.
 # Set up an initialization script to be executed before any other commands initiated by the user.
 ADD patches/entrypoint.sh /root/entrypoint.sh
 RUN chmod +x /root/entrypoint.sh

From dfbe82ee4f83f76742bf0897ec932ea2246720a0 Mon Sep 17 00:00:00 2001
From: d1jang
Date: Thu, 2 Aug 2018 17:41:33 -0700
Subject: [PATCH 012/251] Add a custom ENTRYPOINT on top of the existing ENTRYPOINT.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

When a container is run with this change, it runs under tini, as shown below:

root@byod-test-x1qm:/home/kaggle_admin/tmp# docker ps
CONTAINER ID        IMAGE                 COMMAND                  CREATED             STATUS              PORTS               NAMES
955c97d95ebd        kaggle/python-build   "/usr/bin/tini -- /r…"   5 minutes ago       Up 5 minutes                            hungry_ritchie
root@byod-test-x1qm:/home/kaggle_admin/tmp#
---
 Dockerfile                     | 6 ++++--
 kaggle_tools_update.Dockerfile | 6 ++++--
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 27a7fa89..afb36544 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -526,8 +526,10 @@ ENV MPLBACKEND "agg"
 # Set up an initialization script to be executed before any other commands initiated by the user.
 ADD patches/entrypoint.sh /root/entrypoint.sh
 RUN chmod +x /root/entrypoint.sh
-# This script gets executed by "docker run " and it runs at the end of its execution.
-ENTRYPOINT ["/root/entrypoint.sh"]
+# This script gets executed by "docker run " and it runs at the end of its execution.
+# NOTE: ENTRYPOINT set by "FROM " should preceed the our own custom entrypoint.
+# Specifically, tini can be combined with another entrypoint (https://github.com/krallin/tini).
+ENTRYPOINT ["/usr/bin/tini", "--", "/root/entrypoint.sh"] \ No newline at end of file From 6f8d681dbbd2709af002de4d2e72d7f532709957 Mon Sep 17 00:00:00 2001 From: d1jang Date: Thu, 2 Aug 2018 17:50:29 -0700 Subject: [PATCH 013/251] Add a safety check for tini. --- Dockerfile | 6 ++++-- kaggle_tools_update.Dockerfile | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index afb36544..fdcb1d9c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -526,9 +526,11 @@ ENV MPLBACKEND "agg" # Set up an initialization script to be executed before any other commands initiated by the user. ADD patches/entrypoint.sh /root/entrypoint.sh RUN chmod +x /root/entrypoint.sh -# This script gets executed by "docker run " and it runs at the end of its execution. # NOTE: ENTRYPOINT set by "FROM " should preceed the our own custom entrypoint. -# Specifically, tini can be combined with another entrypoint (https://github.com/krallin/tini). +# Currently tini is set as ENTRYPOINT for the base image, and it can be combined with our entrypoint (https://github.com/krallin/tini). +# ENTRYPOINT gets executed by "docker run " and it runs at the end of its execution. +# Make sure tini exists. +RUN /usr/bin/tini -h ENTRYPOINT ["/usr/bin/tini", "--", "/root/entrypoint.sh"] # Finally, apply any locally defined patches. diff --git a/kaggle_tools_update.Dockerfile b/kaggle_tools_update.Dockerfile index e6f6dabe..46f143f4 100644 --- a/kaggle_tools_update.Dockerfile +++ b/kaggle_tools_update.Dockerfile @@ -23,7 +23,9 @@ RUN pip install --upgrade git+https://github.com/Kaggle/learntools # Set up an initialization script to be executed before any other commands initiated by the user. ADD patches/entrypoint.sh /root/entrypoint.sh RUN chmod +x /root/entrypoint.sh -# This script gets executed by "docker run " and it runs at the end of its execution. # NOTE: ENTRYPOINT set by "FROM " should preceed the our own custom entrypoint. -# Specifically, tini can be combined with another entrypoint (https://github.com/krallin/tini). +# Currently tini is set as ENTRYPOINT for the base image, and it can be combined with our entrypoint (https://github.com/krallin/tini). +# ENTRYPOINT gets executed by "docker run " and it runs at the end of its execution. +# Make sure tini exists. +RUN /usr/bin/tini -h ENTRYPOINT ["/usr/bin/tini", "--", "/root/entrypoint.sh"] \ No newline at end of file From 1426423ed4fff17e5dbf31d9d98ee2230232f999 Mon Sep 17 00:00:00 2001 From: d1jang Date: Thu, 2 Aug 2018 17:54:38 -0700 Subject: [PATCH 014/251] suppress output from the check. --- Dockerfile | 2 +- kaggle_tools_update.Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index fdcb1d9c..ac7339b9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -530,7 +530,7 @@ RUN chmod +x /root/entrypoint.sh # Currently tini is set as ENTRYPOINT for the base image, and it can be combined with our entrypoint (https://github.com/krallin/tini). # ENTRYPOINT gets executed by "docker run " and it runs at the end of its execution. # Make sure tini exists. -RUN /usr/bin/tini -h +RUN /usr/bin/tini -h > /dev/null ENTRYPOINT ["/usr/bin/tini", "--", "/root/entrypoint.sh"] # Finally, apply any locally defined patches. 
diff --git a/kaggle_tools_update.Dockerfile b/kaggle_tools_update.Dockerfile index 46f143f4..4d9d2b86 100644 --- a/kaggle_tools_update.Dockerfile +++ b/kaggle_tools_update.Dockerfile @@ -27,5 +27,5 @@ RUN chmod +x /root/entrypoint.sh # Currently tini is set as ENTRYPOINT for the base image, and it can be combined with our entrypoint (https://github.com/krallin/tini). # ENTRYPOINT gets executed by "docker run " and it runs at the end of its execution. # Make sure tini exists. -RUN /usr/bin/tini -h +RUN /usr/bin/tini -h > /dev/null ENTRYPOINT ["/usr/bin/tini", "--", "/root/entrypoint.sh"] \ No newline at end of file From a66bcee4c0ddf3415cfcb947d40c1b1485eae393 Mon Sep 17 00:00:00 2001 From: d1jang Date: Fri, 3 Aug 2018 12:19:01 -0700 Subject: [PATCH 015/251] Create a symbolic link to pip directory --- patches/entrypoint.sh | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/patches/entrypoint.sh b/patches/entrypoint.sh index 12c0305c..f6f79765 100644 --- a/patches/entrypoint.sh +++ b/patches/entrypoint.sh @@ -8,6 +8,7 @@ # pip intall. $KAGGLE_WORKING_DIR should be available for it to work. if [[ ! -z "${KAGGLE_WORKING_DIR}" ]]; then PIP_INSTALL_PREFIX_DIR="${KAGGLE_WORKING_DIR}/pip" + PIP_INSTALLED_MODULE_DIR="${PIP_INSTALL_PREFIX_DIR}/pip_installed" PIP_CONFIG_FILE_PATH="${KAGGLE_WORKING_DIR}/config/pip/pip.conf" # Create a directory for pip to install modules. @@ -25,8 +26,15 @@ if [[ ! -z "${KAGGLE_WORKING_DIR}" ]]; then # Note that the pip prefix directory overrides the system default to enable # a user to use his/her installed one. # TODO(dsjang): Currently "lib/python3.6/site-packages" is hard-coded - # throughout Dockerfile. Parameterize it to avoid a version mismatch. - export PYTHONPATH=${PIP_INSTALL_PREFIX_DIR}/lib/python3.6/site-packages:${PYTHONPATH} + # throughout Dockerfile. Parameterize it to avoid a version mismatch. + # TODO(dsjang): This is a hack to sidestep a problem of Python ignoring modules + # in .../site-packages other than the system-wide and user-specific site packages. + mkdir -p "${PIP_INSTALL_PREFIX_DIR}/lib/python3.6/site-packages" + ln -s "${PIP_INSTALL_PREFIX_DIR}/lib/python3.6/site-packages" ${PIP_INSTALLED_MODULE_DIR} + export PYTHONPATH=${PIP_INSTALLED_MODULE_DIR}:${PYTHONPATH} + + # Create a symbolic link to site-packages. + # can't override conda installed package. # Include pip-installed binaries in PATH. export PATH=${PATH}:${PIP_INSTALL_PREFIX_DIR}/bin From ab7f1a3e5382720cbdf8d00c45cef685f94ff00e Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 3 Aug 2018 20:13:48 +0000 Subject: [PATCH 016/251] Remove tornado patch The patch was released in tornado v4.5.3. We are now pulling v5.0.2 which includes the patch already. 
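
A quick way to confirm this inside the built image (a sanity check, not
part of this patch; tornado exposes its version string as tornado.version):

    python -c "import tornado; print(tornado.version)"   # 5.0.2 in the current image; >= 4.5.3 carries the fix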
---
 patches/tornado-fix-issue-2097.patch | 27 ---------------------------
 1 file changed, 27 deletions(-)
 delete mode 100644 patches/tornado-fix-issue-2097.patch

diff --git a/patches/tornado-fix-issue-2097.patch b/patches/tornado-fix-issue-2097.patch
deleted file mode 100644
index f3c7a82f..00000000
--- a/patches/tornado-fix-issue-2097.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-diff -Naur /base/opt/conda/pkgs/tornado-4.5.1-py36_0/lib/python3.6/site-packages/tornado/http1connection.py /patched/opt.new/conda/lib/python3.6/site-packages/tornado/http1connection.py
---- /base/opt/conda/lib/python3.6/site-packages/tornado/http1connection.py 2017-04-16 23:47:57.000000000 +0000
-+++ /patched/opt.new/conda/lib/python3.6/site-packages/tornado/http1connection.py 2017-06-28 21:18:59.902402220 +0000
-@@ -349,6 +349,11 @@
- # self._request_start_line.version or
- # start_line.version?
- self._request_start_line.version == 'HTTP/1.1' and
-+ # 1xx, 204 and 304 responses have no body (not even a zero-length
-+ # body), and so should not have either Content-Length or
-+ # Transfer-Encoding headers.
-+ start_line.code not in (204, 304) and
-+ (start_line.code < 100 or start_line.code >= 200) and
- # 304 responses have no body (not even a zero-length body), and so
- # should not have either Content-Length or Transfer-Encoding.
- # headers.
-diff -Naur /base/opt/conda/lib/python3.6/site-packages/tornado/web.py /patched/opt.new/conda/lib/python3.6/site-packages/tornado/web.py
---- /base/opt/conda/lib/python3.6/site-packages/tornado/web.py 2017-06-28 21:22:56.505186743 +0000
-+++ /patched/opt.new/conda/lib/python3.6/site-packages/tornado/web.py 2017-06-28 21:22:48.421228255 +0000
-@@ -977,7 +977,8 @@
- if self._status_code in (204, 304):
- assert not self._write_buffer, "Cannot send body with %s" % self._status_code
- self._clear_headers_for_304()
-- elif "Content-Length" not in self._headers:
-+ elif ("Content-Length" not in self._headers and
-+ (self._status_code < 100 or self._status_code >= 200)):
- content_length = sum(len(part) for part in self._write_buffer)
- self.set_header("Content-Length", content_length)

From b82e854111bca5316b8027eecbae502b4c078629 Mon Sep 17 00:00:00 2001
From: Vincent Roseberry
Date: Tue, 7 Aug 2018 13:00:44 -0700
Subject: [PATCH 017/251] remove patching mechanism

---
 Dockerfile | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index ac7339b9..95d88360 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -532,7 +532,3 @@ RUN chmod +x /root/entrypoint.sh
 # Make sure tini exists.
 RUN /usr/bin/tini -h > /dev/null
 ENTRYPOINT ["/usr/bin/tini", "--", "/root/entrypoint.sh"]
-
-# Finally, apply any locally defined patches.
-RUN /bin/bash -c \
-    "cd / && for p in $(ls /tmp/patches/*.patch); do echo '= Applying patch '\${p}; patch -p2 < \${p}; done"

From fc690383de8b501135df21c043f3d4c184f61e75 Mon Sep 17 00:00:00 2001
From: Vincent Roseberry
Date: Wed, 8 Aug 2018 10:18:58 -0700
Subject: [PATCH 018/251] Simplify build script by using the --pull flag.

With the --pull flag, it always attempts to pull a newer version of the image
---
 build | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/build b/build
index 642cc268..d3f34959 100755
--- a/build
+++ b/build
@@ -6,6 +6,5 @@ set -e
 if [[ "$1" == "--use-cache" ]]; then
    docker build --rm -t kaggle/python-build .
 else
-    docker pull continuumio/anaconda3:latest
-    docker build --rm --no-cache -t kaggle/python-build .
+    docker build --pull --rm --no-cache -t kaggle/python-build .
fi From 3b64add9f7d5b29a53d36130b51f7a7502f6f987 Mon Sep 17 00:00:00 2001 From: d1jang Date: Fri, 10 Aug 2018 11:05:27 -0700 Subject: [PATCH 019/251] Delete pip install support script & use a manual tag for update dockerfile. --- Dockerfile | 12 +--------- kaggle_tools_update.Dockerfile | 19 ++++----------- patches/entrypoint.sh | 44 ---------------------------------- 3 files changed, 5 insertions(+), 70 deletions(-) delete mode 100644 patches/entrypoint.sh diff --git a/Dockerfile b/Dockerfile index 95d88360..2c8a70b6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -521,14 +521,4 @@ ENV PYTHONUSERBASE "/root/.local" ADD patches/sitecustomize.py /root/.local/lib/python3.6/site-packages/sitecustomize.py # Set backend for matplotlib -ENV MPLBACKEND "agg" - -# Set up an initialization script to be executed before any other commands initiated by the user. -ADD patches/entrypoint.sh /root/entrypoint.sh -RUN chmod +x /root/entrypoint.sh -# NOTE: ENTRYPOINT set by "FROM " should preceed the our own custom entrypoint. -# Currently tini is set as ENTRYPOINT for the base image, and it can be combined with our entrypoint (https://github.com/krallin/tini). -# ENTRYPOINT gets executed by "docker run " and it runs at the end of its execution. -# Make sure tini exists. -RUN /usr/bin/tini -h > /dev/null -ENTRYPOINT ["/usr/bin/tini", "--", "/root/entrypoint.sh"] +ENV MPLBACKEND "agg" \ No newline at end of file diff --git a/kaggle_tools_update.Dockerfile b/kaggle_tools_update.Dockerfile index 4d9d2b86..8f7fac20 100644 --- a/kaggle_tools_update.Dockerfile +++ b/kaggle_tools_update.Dockerfile @@ -9,23 +9,12 @@ # is enabled. # Usage: -# docker rmi gcr.io/kaggle-images/python:latest +# docker rmi gcr.io/kaggle-images/python:pinned # docker build --rm -t kaggle/python-build -f kaggle_tools_update.Dockerfile . # ./test # ./push (if tests are passing) -FROM gcr.io/kaggle-images/python:latest +# Pull the last build manually tagged as "pinned". +FROM gcr.io/kaggle-images/python:pinned -RUN pip install --upgrade git+https://github.com/Kaggle/learntools - -# TODO(dsjang): Remove these lines once the docker image build turns green since they are copied from Dockerfile -# to apply on top of the last green. -# Set up an initialization script to be executed before any other commands initiated by the user. -ADD patches/entrypoint.sh /root/entrypoint.sh -RUN chmod +x /root/entrypoint.sh -# NOTE: ENTRYPOINT set by "FROM " should preceed the our own custom entrypoint. -# Currently tini is set as ENTRYPOINT for the base image, and it can be combined with our entrypoint (https://github.com/krallin/tini). -# ENTRYPOINT gets executed by "docker run " and it runs at the end of its execution. -# Make sure tini exists. -RUN /usr/bin/tini -h > /dev/null -ENTRYPOINT ["/usr/bin/tini", "--", "/root/entrypoint.sh"] \ No newline at end of file +RUN pip install --upgrade git+https://github.com/Kaggle/learntools \ No newline at end of file diff --git a/patches/entrypoint.sh b/patches/entrypoint.sh deleted file mode 100644 index f6f79765..00000000 --- a/patches/entrypoint.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash - -# This shell script is executed as an entrypoint file for the Kernels docker image. -# It sets up the execution environment before starting the Jypyter Notebook Server. -# Refer to https://docs.docker.com/engine/reference/builder/#entrypoint for details. - -# This script sets up pip to enable a user to install and use python modules via -# pip intall. $KAGGLE_WORKING_DIR should be available for it to work. -if [[ ! 
-z "${KAGGLE_WORKING_DIR}" ]]; then - PIP_INSTALL_PREFIX_DIR="${KAGGLE_WORKING_DIR}/pip" - PIP_INSTALLED_MODULE_DIR="${PIP_INSTALL_PREFIX_DIR}/pip_installed" - PIP_CONFIG_FILE_PATH="${KAGGLE_WORKING_DIR}/config/pip/pip.conf" - - # Create a directory for pip to install modules. - mkdir -p ${PIP_INSTALL_PREFIX_DIR} - - # Create pip config file to use the prefix directory created above for - # installation. Also, ignore-installed is set to true to prevent pip - # from trying to remove existing modules from the read-only filesystem. - mkdir -p `dirname ${PIP_CONFIG_FILE_PATH}` - echo -e "[install]\nprefix=${PIP_INSTALL_PREFIX_DIR}\nignore-installed=true" > ${PIP_CONFIG_FILE_PATH} - # Instruct pip to use this config file. - export PIP_CONFIG_FILE=${PIP_CONFIG_FILE_PATH} - - # Set up PYTHONPATH correctly to include the user-installed library. - # Note that the pip prefix directory overrides the system default to enable - # a user to use his/her installed one. - # TODO(dsjang): Currently "lib/python3.6/site-packages" is hard-coded - # throughout Dockerfile. Parameterize it to avoid a version mismatch. - # TODO(dsjang): This is a hack to sidestep a problem of Python ignoring modules - # in .../site-packages other than the system-wide and user-specific site packages. - mkdir -p "${PIP_INSTALL_PREFIX_DIR}/lib/python3.6/site-packages" - ln -s "${PIP_INSTALL_PREFIX_DIR}/lib/python3.6/site-packages" ${PIP_INSTALLED_MODULE_DIR} - export PYTHONPATH=${PIP_INSTALLED_MODULE_DIR}:${PYTHONPATH} - - # Create a symbolic link to site-packages. - # can't override conda installed package. - - # Include pip-installed binaries in PATH. - export PATH=${PATH}:${PIP_INSTALL_PREFIX_DIR}/bin -fi - -# Execute the command provided from "docker run" in the current process. -exec "$@" \ No newline at end of file From dcb594f2fe32e101b010e540ea6299c89f753ee2 Mon Sep 17 00:00:00 2001 From: Chris Crawford Date: Fri, 10 Aug 2018 21:22:25 +0000 Subject: [PATCH 020/251] Install keras_applications before tensorflow --- Dockerfile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 95d88360..452e5d0c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -49,9 +49,12 @@ RUN apt-get update && apt-get install -y python-software-properties zip && \ apt-get upgrade -y bazel # Tensorflow -# Temp fixes: Downgrade python 3.7->3.6.6 and downgrade Pandas 0.23.3->0.23.2 +# Fix to install tf 1.10:: Downgrade python 3.7->3.6.6 and downgrade Pandas 0.23.3->0.23.2 RUN conda install -y python=3.6.6 && \ pip install pandas==0.23.2 && \ +# Another fix for TF 1.10 https://github.com/tensorflow/tensorflow/issues/21518 + pip install keras_applications==1.0.4 --no-deps && \ + pip install keras_preprocessing==1.0.2 --no-deps && \ cd /usr/local/src && \ git clone https://github.com/tensorflow/tensorflow && \ cd tensorflow && \ From 58b7c82d809d37b9f8c66fec3142314f200ecf08 Mon Sep 17 00:00:00 2001 From: Chris Crawford Date: Fri, 10 Aug 2018 21:59:09 +0000 Subject: [PATCH 021/251] change install method for fancyimpute --- Dockerfile | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 452e5d0c..1e272503 100644 --- a/Dockerfile +++ b/Dockerfile @@ -298,9 +298,7 @@ RUN pip install --upgrade mpld3 && \ conda install -y ecos && \ conda install -y CVXcanon -RUN cd /usr/local/src && git clone https://github.com/iskandr/fancyimpute && \ - cd fancyimpute && \ - python setup.py install && \ +RUN pip install fancyimpute && \ pip install git+https://github.com/pymc-devs/pymc3 && \ pip 
install tifffile && \ pip install spectral && \ From 520b1c27fd706a7a04874c4e9e1b304317383c76 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Sat, 11 Aug 2018 00:43:55 +0000 Subject: [PATCH 022/251] Refactor test and improve coverage --- test | 13 ++- test_build.py | 167 ------------------------------------- tests/data/dot.png | Bin 0 -> 150 bytes tests/data/text.txt | 1 + tests/data/train.csv | 20 +++++ tests/test_bigquery.py | 38 +++++++++ tests/test_bokeh.py | 7 ++ tests/test_essentia.py | 7 ++ tests/test_fastai.py | 9 ++ tests/test_fasttext.py | 9 ++ tests/test_keras.py | 25 ++++++ tests/test_kmapper.py | 7 ++ tests/test_learntools.py | 9 ++ tests/test_matplotlib.py | 18 ++++ tests/test_nltk.py | 9 ++ tests/test_numpy.py | 16 ++++ tests/test_opencv.py | 9 ++ tests/test_pandas.py | 9 ++ tests/test_plotly.py | 9 ++ tests/test_pyfasttext.py | 9 ++ tests/test_pytorch.py | 13 +++ tests/test_seaborn.py | 7 ++ tests/test_shap.py | 7 ++ tests/test_skimage.py | 8 ++ tests/test_sklearn.py | 18 ++++ tests/test_tensorflow.py | 12 +++ tests/test_theano.py | 20 +++++ tests/test_vowpalwabbit.py | 10 +++ tests/test_wordbatch.py | 15 ++++ tests/test_xgboost.py | 12 +++ tests/text_mxnet.py | 9 ++ 31 files changed, 351 insertions(+), 171 deletions(-) delete mode 100644 test_build.py create mode 100644 tests/data/dot.png create mode 100644 tests/data/text.txt create mode 100644 tests/data/train.csv create mode 100644 tests/test_bigquery.py create mode 100644 tests/test_bokeh.py create mode 100644 tests/test_essentia.py create mode 100644 tests/test_fastai.py create mode 100644 tests/test_fasttext.py create mode 100644 tests/test_keras.py create mode 100644 tests/test_kmapper.py create mode 100644 tests/test_learntools.py create mode 100644 tests/test_matplotlib.py create mode 100644 tests/test_nltk.py create mode 100644 tests/test_numpy.py create mode 100644 tests/test_opencv.py create mode 100644 tests/test_pandas.py create mode 100644 tests/test_plotly.py create mode 100644 tests/test_pyfasttext.py create mode 100644 tests/test_pytorch.py create mode 100644 tests/test_seaborn.py create mode 100644 tests/test_shap.py create mode 100644 tests/test_skimage.py create mode 100644 tests/test_sklearn.py create mode 100644 tests/test_tensorflow.py create mode 100644 tests/test_theano.py create mode 100644 tests/test_vowpalwabbit.py create mode 100644 tests/test_wordbatch.py create mode 100644 tests/test_xgboost.py create mode 100644 tests/text_mxnet.py diff --git a/test b/test index 5de44db7..2528b6af 100755 --- a/test +++ b/test @@ -11,7 +11,12 @@ mkdir -p /tmp/python-build/working docker run -d --name=jupyter_test --read-only --net=none -e HOME=/tmp -v $PWD:/input:ro -v /tmp/python-build/working:/working -w=/working -v /tmp/python-build/tmp:/tmp -v /tmp/python-build/devshm:/dev/shm kaggle/python-build jupyter notebook --allow-root --ip="*" sleep 3 docker kill jupyter_test && docker rm jupyter_test -docker run --rm -t --read-only --net=none -e HOME=/tmp -e KAGGLE_DATA_PROXY_TOKEN=test-key -e KAGGLE_DATA_PROXY_URL=http://127.0.0.1:8000 -e KAGGLE_DATA_PROXY_PROJECT=test -v $PWD:/input:ro -v /tmp/python-build/working:/working -w=/working -v /tmp/python-build/tmp:/tmp -v /tmp/python-build/devshm:/dev/shm kaggle/python-build /bin/bash -c 'python /input/test_build.py' - -# The test_build.py script creates a plot called plot1.png; check that it exists -[ -s /tmp/python-build/working/plot1.png ] || exit 1 +docker run --rm -t --read-only --net=none \ + -e HOME=/tmp -e KAGGLE_DATA_PROXY_TOKEN=test-key \ + -e 
KAGGLE_DATA_PROXY_URL=http://127.0.0.1:8000 \ + -e KAGGLE_DATA_PROXY_PROJECT=test \ + -v $PWD:/input:ro -v /tmp/python-build/working:/working \ + -v /tmp/python-build/tmp:/tmp -v /tmp/python-build/devshm:/dev/shm \ + -w=/working \ + kaggle/python-build \ + /bin/bash -c 'python -m unittest discover -s /input/tests' diff --git a/test_build.py b/test_build.py deleted file mode 100644 index 52d88f33..00000000 --- a/test_build.py +++ /dev/null @@ -1,167 +0,0 @@ -# This script should run without errors whenever we update the -# kaggle/python container. It checks that all our most popular packages can -# be loaded and used without errors. - -import tensorflow as tf -print(tf.__version__) -hello = tf.constant('TensorFlow ok') -sess = tf.Session() -print(sess.run(hello)) -print("Tensorflow ok") - -from keras.models import Sequential -from keras.layers.core import Dense, Dropout, Activation, Flatten -from keras.layers.convolutional import Convolution2D, MaxPooling2D -from keras.optimizers import SGD -print("Keras ok") - -# Test Kaggle learntools -from learntools.core import binder; binder.bind(globals()) -from learntools.python.ex1 import * -color="blue" -q0.check() -print("learntools ok") - -# PyTorch smoke test based on http://pytorch.org/tutorials/beginner/nlp/deep_learning_tutorial.html -import torch -import torch.nn as tnn -import torch.autograd as autograd -torch.manual_seed(31337) -linear_torch = tnn.Linear(5,3) -data_torch = autograd.Variable(torch.randn(2, 5)) -print(linear_torch(data_torch)) -print("PyTorch ok") - -import fastai -from fastai.io import get_data -print("fast.ai ok") - -import numpy as np -print("Numpy imported ok") -print("Your lucky number is: " + str(np.random.randint(100))) - -# Numpy must be linked to the MKL. (Occasionally, a third-party package will muck up the installation -# and numpy will be reinstalled with an OpenBLAS backing.) -from numpy.distutils.system_info import get_info -# This will throw an exception if the MKL is not linked correctly. 
-get_info("blas_mkl") - -import pandas as pd -print("Pandas imported ok") - -from sklearn import datasets -print("sklearn imported ok") -iris = datasets.load_iris() -X, y = iris.data, iris.target - -from sklearn.ensemble import RandomForestClassifier -rf1 = RandomForestClassifier() -rf1.fit(X,y) -print("sklearn RandomForestClassifier: ok") - -from sklearn.linear_model import LinearRegression -boston = datasets.load_boston() -X, y = boston.data, boston.target -lr1 = LinearRegression() -lr1.fit(X,y) -print("sklearn LinearRegression: ok") - -from xgboost import XGBClassifier -xgb1 = XGBClassifier(n_estimators=3) -xgb1.fit(X[0:70],y[0:70]) -print("xgboost XGBClassifier: ok") - -import matplotlib.pyplot as plt -plt.plot(np.linspace(0,1,50), np.random.rand(50)) -plt.savefig("plot1.png") -print("matplotlib.pyplot ok") - -from mpl_toolkits.basemap import Basemap -print("Basemap ok") - -import plotly.plotly as py -import plotly.graph_objs as go -print("plotly ok") - -import theano -print("Theano ok") - -import nltk -from nltk.stem import WordNetLemmatizer -print("nltk ok") - -import cv2 -img = cv2.imread('plot1.png',0) -print("OpenCV ok") - -from skimage.io import imread -print("skimage ok") - -from wordbatch.extractors import WordBag -print("wordbatch ok") - -import pyfasttext -print("pyfasttext ok") - -import fastText -print("fastText ok") - -import mxnet -import mxnet.gluon -print("mxnet ok") - -import bokeh -print("bokeh ok") - -import seaborn -print("seaborn ok") - -# Test BigQuery -import os -import threading -from http.server import BaseHTTPRequestHandler, HTTPServer -from google.cloud import bigquery -HOSTNAME = "127.0.0.1" -PORT = 8000 -URL = "http://%s:%s" % (HOSTNAME, PORT) -fake_bq_called = False -fake_bq_header_found = False -class HTTPHandler(BaseHTTPRequestHandler): - def do_HEAD(s): - s.send_response(200) - - def do_GET(s): - global fake_bq_called - global fake_bq_header_found - fake_bq_called = True - fake_bq_header_found = any(k for k in s.headers if k == "X-KAGGLE-PROXY-DATA" and s.headers[k] == "test-key") - s.send_response(200) - -httpd = HTTPServer((HOSTNAME, PORT), HTTPHandler) -threading.Thread(target=httpd.serve_forever).start() -client = bigquery.Client() -try: - for ds in client.list_datasets(): pass -except: - pass -httpd.shutdown() -assert fake_bq_called, "Fake server did not recieve a request from the BQ client." -assert fake_bq_header_found, "X-KAGGLE-PROXY-DATA header was missing from the BQ request." 
-print("bigquery proxy ok") - -import shap -print("shap ok") - -import kmapper -print("kmapper ok") - -from vowpalwabbit import pyvw -vw = pyvw.vw(quiet=True) -ex = vw.example('1 | a b c') -vw.learn(ex) -print(vw.predict(ex)) -print('vowpalwabbit ok') - -import essentia -print(essentia.__version__) -print("Essentia ok") diff --git a/tests/data/dot.png b/tests/data/dot.png new file mode 100644 index 0000000000000000000000000000000000000000..482df8ba39602ac729cb25528b0cc090001836e6 GIT binary patch literal 150 zcmeAS@N?(olHy`uVBq!ia0vp^j3CUx1SBVv2j2ryoCO|{#S9GG!XV7ZFl&wkP>{XE z)7O>#5eJu;Is3iXz?(oJ+02lL66gHf+|;}hAeVu`xhOTUBsE2$JhLQ2!QIn0AVn{g l9VpJ@>Eal|aXmR9Apyu^VPI4@Q3ttz!PC{xWt~$(699(VAFlua literal 0 HcmV?d00001 diff --git a/tests/data/text.txt b/tests/data/text.txt new file mode 100644 index 00000000..80aa862f --- /dev/null +++ b/tests/data/text.txt @@ -0,0 +1 @@ +Kaggle is a platform for predictive modelling and analytics competitions in which statisticians and data miners compete to produce the best models for predicting and describing the datasets uploaded by companies and users. diff --git a/tests/data/train.csv b/tests/data/train.csv new file mode 100644 index 00000000..229dbc7c --- /dev/null +++ b/tests/data/train.csv @@ -0,0 +1,20 @@ +label,pixel0,pixel1,pixel2,pixel3,pixel4,pixel5,pixel6,pixel7,pixel8,pixel9,pixel10,pixel11,pixel12,pixel13,pixel14,pixel15,pixel16,pixel17,pixel18,pixel19,pixel20,pixel21,pixel22,pixel23,pixel24,pixel25,pixel26,pixel27,pixel28,pixel29,pixel30,pixel31,pixel32,pixel33,pixel34,pixel35,pixel36,pixel37,pixel38,pixel39,pixel40,pixel41,pixel42,pixel43,pixel44,pixel45,pixel46,pixel47,pixel48,pixel49,pixel50,pixel51,pixel52,pixel53,pixel54,pixel55,pixel56,pixel57,pixel58,pixel59,pixel60,pixel61,pixel62,pixel63,pixel64,pixel65,pixel66,pixel67,pixel68,pixel69,pixel70,pixel71,pixel72,pixel73,pixel74,pixel75,pixel76,pixel77,pixel78,pixel79,pixel80,pixel81,pixel82,pixel83,pixel84,pixel85,pixel86,pixel87,pixel88,pixel89,pixel90,pixel91,pixel92,pixel93,pixel94,pixel95,pixel96,pixel97,pixel98,pixel99,pixel100,pixel101,pixel102,pixel103,pixel104,pixel105,pixel106,pixel107,pixel108,pixel109,pixel110,pixel111,pixel112,pixel113,pixel114,pixel115,pixel116,pixel117,pixel118,pixel119,pixel120,pixel121,pixel122,pixel123,pixel124,pixel125,pixel126,pixel127,pixel128,pixel129,pixel130,pixel131,pixel132,pixel133,pixel134,pixel135,pixel136,pixel137,pixel138,pixel139,pixel140,pixel141,pixel142,pixel143,pixel144,pixel145,pixel146,pixel147,pixel148,pixel149,pixel150,pixel151,pixel152,pixel153,pixel154,pixel155,pixel156,pixel157,pixel158,pixel159,pixel160,pixel161,pixel162,pixel163,pixel164,pixel165,pixel166,pixel167,pixel168,pixel169,pixel170,pixel171,pixel172,pixel173,pixel174,pixel175,pixel176,pixel177,pixel178,pixel179,pixel180,pixel181,pixel182,pixel183,pixel184,pixel185,pixel186,pixel187,pixel188,pixel189,pixel190,pixel191,pixel192,pixel193,pixel194,pixel195,pixel196,pixel197,pixel198,pixel199,pixel200,pixel201,pixel202,pixel203,pixel204,pixel205,pixel206,pixel207,pixel208,pixel209,pixel210,pixel211,pixel212,pixel213,pixel214,pixel215,pixel216,pixel217,pixel218,pixel219,pixel220,pixel221,pixel222,pixel223,pixel224,pixel225,pixel226,pixel227,pixel228,pixel229,pixel230,pixel231,pixel232,pixel233,pixel234,pixel235,pixel236,pixel237,pixel238,pixel239,pixel240,pixel241,pixel242,pixel243,pixel244,pixel245,pixel246,pixel247,pixel248,pixel249,pixel250,pixel251,pixel252,pixel253,pixel254,pixel255,pixel256,pixel257,pixel258,pixel259,pixel260,pixel261,pixel262,pixel263,pixel264,pixel265,pi
xel266,pixel267,pixel268,pixel269,pixel270,pixel271,pixel272,pixel273,pixel274,pixel275,pixel276,pixel277,pixel278,pixel279,pixel280,pixel281,pixel282,pixel283,pixel284,pixel285,pixel286,pixel287,pixel288,pixel289,pixel290,pixel291,pixel292,pixel293,pixel294,pixel295,pixel296,pixel297,pixel298,pixel299,pixel300,pixel301,pixel302,pixel303,pixel304,pixel305,pixel306,pixel307,pixel308,pixel309,pixel310,pixel311,pixel312,pixel313,pixel314,pixel315,pixel316,pixel317,pixel318,pixel319,pixel320,pixel321,pixel322,pixel323,pixel324,pixel325,pixel326,pixel327,pixel328,pixel329,pixel330,pixel331,pixel332,pixel333,pixel334,pixel335,pixel336,pixel337,pixel338,pixel339,pixel340,pixel341,pixel342,pixel343,pixel344,pixel345,pixel346,pixel347,pixel348,pixel349,pixel350,pixel351,pixel352,pixel353,pixel354,pixel355,pixel356,pixel357,pixel358,pixel359,pixel360,pixel361,pixel362,pixel363,pixel364,pixel365,pixel366,pixel367,pixel368,pixel369,pixel370,pixel371,pixel372,pixel373,pixel374,pixel375,pixel376,pixel377,pixel378,pixel379,pixel380,pixel381,pixel382,pixel383,pixel384,pixel385,pixel386,pixel387,pixel388,pixel389,pixel390,pixel391,pixel392,pixel393,pixel394,pixel395,pixel396,pixel397,pixel398,pixel399,pixel400,pixel401,pixel402,pixel403,pixel404,pixel405,pixel406,pixel407,pixel408,pixel409,pixel410,pixel411,pixel412,pixel413,pixel414,pixel415,pixel416,pixel417,pixel418,pixel419,pixel420,pixel421,pixel422,pixel423,pixel424,pixel425,pixel426,pixel427,pixel428,pixel429,pixel430,pixel431,pixel432,pixel433,pixel434,pixel435,pixel436,pixel437,pixel438,pixel439,pixel440,pixel441,pixel442,pixel443,pixel444,pixel445,pixel446,pixel447,pixel448,pixel449,pixel450,pixel451,pixel452,pixel453,pixel454,pixel455,pixel456,pixel457,pixel458,pixel459,pixel460,pixel461,pixel462,pixel463,pixel464,pixel465,pixel466,pixel467,pixel468,pixel469,pixel470,pixel471,pixel472,pixel473,pixel474,pixel475,pixel476,pixel477,pixel478,pixel479,pixel480,pixel481,pixel482,pixel483,pixel484,pixel485,pixel486,pixel487,pixel488,pixel489,pixel490,pixel491,pixel492,pixel493,pixel494,pixel495,pixel496,pixel497,pixel498,pixel499,pixel500,pixel501,pixel502,pixel503,pixel504,pixel505,pixel506,pixel507,pixel508,pixel509,pixel510,pixel511,pixel512,pixel513,pixel514,pixel515,pixel516,pixel517,pixel518,pixel519,pixel520,pixel521,pixel522,pixel523,pixel524,pixel525,pixel526,pixel527,pixel528,pixel529,pixel530,pixel531,pixel532,pixel533,pixel534,pixel535,pixel536,pixel537,pixel538,pixel539,pixel540,pixel541,pixel542,pixel543,pixel544,pixel545,pixel546,pixel547,pixel548,pixel549,pixel550,pixel551,pixel552,pixel553,pixel554,pixel555,pixel556,pixel557,pixel558,pixel559,pixel560,pixel561,pixel562,pixel563,pixel564,pixel565,pixel566,pixel567,pixel568,pixel569,pixel570,pixel571,pixel572,pixel573,pixel574,pixel575,pixel576,pixel577,pixel578,pixel579,pixel580,pixel581,pixel582,pixel583,pixel584,pixel585,pixel586,pixel587,pixel588,pixel589,pixel590,pixel591,pixel592,pixel593,pixel594,pixel595,pixel596,pixel597,pixel598,pixel599,pixel600,pixel601,pixel602,pixel603,pixel604,pixel605,pixel606,pixel607,pixel608,pixel609,pixel610,pixel611,pixel612,pixel613,pixel614,pixel615,pixel616,pixel617,pixel618,pixel619,pixel620,pixel621,pixel622,pixel623,pixel624,pixel625,pixel626,pixel627,pixel628,pixel629,pixel630,pixel631,pixel632,pixel633,pixel634,pixel635,pixel636,pixel637,pixel638,pixel639,pixel640,pixel641,pixel642,pixel643,pixel644,pixel645,pixel646,pixel647,pixel648,pixel649,pixel650,pixel651,pixel652,pixel653,pixel654,pixel655,pixel656,pixel657,pixel658,pixel659,pixel660,pi
xel661,pixel662,pixel663,pixel664,pixel665,pixel666,pixel667,pixel668,pixel669,pixel670,pixel671,pixel672,pixel673,pixel674,pixel675,pixel676,pixel677,pixel678,pixel679,pixel680,pixel681,pixel682,pixel683,pixel684,pixel685,pixel686,pixel687,pixel688,pixel689,pixel690,pixel691,pixel692,pixel693,pixel694,pixel695,pixel696,pixel697,pixel698,pixel699,pixel700,pixel701,pixel702,pixel703,pixel704,pixel705,pixel706,pixel707,pixel708,pixel709,pixel710,pixel711,pixel712,pixel713,pixel714,pixel715,pixel716,pixel717,pixel718,pixel719,pixel720,pixel721,pixel722,pixel723,pixel724,pixel725,pixel726,pixel727,pixel728,pixel729,pixel730,pixel731,pixel732,pixel733,pixel734,pixel735,pixel736,pixel737,pixel738,pixel739,pixel740,pixel741,pixel742,pixel743,pixel744,pixel745,pixel746,pixel747,pixel748,pixel749,pixel750,pixel751,pixel752,pixel753,pixel754,pixel755,pixel756,pixel757,pixel758,pixel759,pixel760,pixel761,pixel762,pixel763,pixel764,pixel765,pixel766,pixel767,pixel768,pixel769,pixel770,pixel771,pixel772,pixel773,pixel774,pixel775,pixel776,pixel777,pixel778,pixel779,pixel780,pixel781,pixel782,pixel783 +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,188,255,94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,191,250,253,93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,123,248,253,167,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,80,247,253,208,13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,29,207,253,235,77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,54,209,253,253,88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,93,254,253,238,170,17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23,210,254,253,159,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,209,253,254,240,81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27,253,253,254,13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,206,254,254,198,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,168,253,253,196,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,203,253,248,76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,22,188,253,245,93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,103,253,253,191,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,89,240,253,195,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15,220,253,253,80,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,94,253,253,253,94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,89,251,253,250,131,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,214,218,95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,18,30,137,137,192,86,72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,86,250,254,254,254,254,217,246,151,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,179,254,254,254,254,254,254,254,254,254,231,54,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,72,254,254,254,254,254,254,254,254,254,254,254,254,104,0,0,0,0,0,0,0,0,0,0,0,0,0,61,191,254,254,254,254,254,109,83,199,254,254,254,254,243,85,0,0,0,0,0,0,0,0,0,0,0,0,172,254,254,254,202,147,147,45,0,11,29,200,254,254,254,171,0,0,0,0,0,0,0,0,0,0,0,1,174,254,254,89,67,0,0,0,0,0,0,128,252,254,254,212,76,0,0,0,0,0,0,0,0,0,0,47,254,254,254,29,0,0,0,0,0,0,0,0,83,254,254,254,153,0,0,0,0,0,0,0,0,0,0,80,254,254,240,24,0,0,0,0,0,0,0,0,25,240,254,254,153,0,0,0,0,0,0,0,0,0,0,64,254,254,186,7,0,0,0,0,0,0,0,0,0,166,254,254,224,12,0,0,0,0,0,0,0,0,14,232,254,254,254,29,0,0,0,0,0,0,0,0,0,75,254,254,254,17,0,0,0,0,0,0,0,0,18,254,254,254,254,29,0,0,0,0,0,0,0,0,0,48,254,254,254,17,0,0,0,0,0,0,0,0,2,163,254,254,254,29,0,0,0,0,0,0,0,0,0,48,254,254,254,17,0,0,0,0,0,0,0,0,0,94,254,254,254,200,12,0,0,0,0,0,0,0,16,209,254,254,150,1,0,0,0,0,0,0,0,0,0,15,206,254,254,254,202,66,0,0,0,0,0,21,161,254,254,245,31,0,0,0,0,0,0,0,0,0,0,0,60,212,254,254,254,194,48,48,34,41,48,209,254,254,254,171,0,0,0,0,0,0,0,0,0,0,0,0,0,86,243,254,254,254,254,254,233,243,254,254,254,254,254,86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,114,254,254,254,254,254,254,254,254,254,254,239,86,11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,182,254,254,254,254,254,254,254,254,243,70,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,76,146,254,255,254,255,146,19,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,141,139,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,254,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,254,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,254,106,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,185,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,89,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,146,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,156,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,185,255,255,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,185,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,185,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,63,254,254,62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 +4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,220,179,6,0,0,0,0,0,0,0,0,9,77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28,247,17,0,0,0,0,0,0,0,0,27,202,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,242,155,0,0,0,0,0,0,0,0,27,254,63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,160,207,6,0,0,0,0,0,0,0,27,254,65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,127,254,21,0,0,0,0,0,0,0,20,239,65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,77,254,21,0,0,0,0,0,0,0,0,195,65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,70,254,21,0,0,0,0,0,0,0,0,195,142,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,56,251,21,0,0,0,0,0,0,0,0,195,227,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,222,153,5,0,0,0,0,0,0,0,120,240,13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,67,251,40,0,0,0,0,0,0,0,94,255,69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,234,184,0,0,0,0,0,0,0,19,245,69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,234,169,0,0,0,0,0,0,0,3,199,182,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,154,205,4,0,0,26,72,128,203,208,254,254,131,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,61,254,129,113,186,245,251,189,75,56,136,254,73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15,216,233,233,159,104,52,0,0,0,38,254,73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,18,254,73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,18,254,73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,206,106,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,186,159,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,209,101,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,25,130,155,254,254,254,157,30,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,103,253,253,253,253,253,253,253,253,114,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,208,253,253,253,253,253,253,253,253,253,253,107,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,31,253,253,253,253,253,253,253,253,253,253,253,215,101,3,0,0,0,0,0,0,0,0,0,0,0,0,23,210,253,253,253,248,161,222,222,246,253,253,253,253,253,39,0,0,0,0,0,0,0,0,0,0,0,0,136,253,253,253,229,77,0,0,0,70,218,253,253,253,253,215,91,0,0,0,0,0,0,0,0,0,0,5,214,253,253,253,195,0,0,0,0,0,104,224,253,253,253,253,215,29,0,0,0,0,0,0,0,0,0,116,253,253,253,247,75,0,0,0,0,0,0,26,200,253,253,253,253,216,4,0,0,0,0,0,0,0,0,254,253,253,253,195,0,0,0,0,0,0,0,0,26,200,253,253,253,253,5,0,0,0,0,0,0,0,0,254,253,253,253,99,0,0,0,0,0,0,0,0,0,25,231,253,253,253,36,0,0,0,0,0,0,0,0,254,253,253,253,99,0,0,0,0,0,0,0,0,0,0,223,253,253,253,129,0,0,0,0,0,0,0,0,254,253,253,253,99,0,0,0,0,0,0,0,0,0,0,127,253,253,253,129,0,0,0,0,0,0,0,0,254,253,253,253,99,0,0,0,0,0,0,0,0,0,0,139,253,253,253,90,0,0,0,0,0,0,0,0,254,253,253,253,99,0,0,0,0,0,0,0,0,0,78,248,253,253,253,5,0,0,0,0,0,0,0,0,254,253,253,253,216,34,0,0,0,0,0,0,0,33,152,253,253,253,107,1,0,0,0,0,0,0,0,0,206,253,253,253,253,140,0,0,0,0,0,30,139,234,253,253,253,154,2,0,0,0,0,0,0,0,0,0,16,205,253,253,253,250,208,106,106,106,200,237,253,253,253,253,209,22,0,0,0,0,0,0,0,0,0,0,0,82,253,253,253,253,253,253,253,253,253,253,253,253,253,209,22,0,0,0,0,0,0,0,0,0,0,0,0,1,91,253,253,253,253,253,253,253,253,253,253,213,90,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,18,129,208,253,253,253,253,159,129,90,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,141,202,254,193,44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,165,254,179,163,249,244,72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,135,254,150,0,0,189,254,243,31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,82,248,209,5,0,0,164,236,254,115,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,211,254,58,0,0,0,0,33,230,212,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,119,254,156,3,0,0,0,0,18,230,254,33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,212,254,35,0,0,0,0,0,33,254,254,33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,116,254,154,3,0,0,0,0,0,33,254,254,33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,124,254,115,0,0,0,0,0,0,160,254,239,23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,203,254,35,0,0,0,0,0,0,197,254,178,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23,239,221,11,0,0,0,0,0,0,198,255,123,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23,238,178,0,0,0,0,0,0,10,219,254,96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,30,249,204,0,0,0,0,0,0,25,235,254,62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,26,243,204,0,0,0,0,0,0,91,254,248,36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,33,254,204,0,0,0,0,0,67,241,254,133,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,33,254,214,7,0,0,0,50,242,254,194,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,193,254,78,0,0,19,128,254,195,36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,103,254,222,74,143,235,254,228,83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,30,242,254,254,254,254,252,84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23,64,158,200,174,61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,82,152,71,51,51,21,41,51,51,51,51,113,193,152,30,0,0,0,0,0,0,0,0,0,0,0,0,0,122,253,252,253,252,223,243,253,252,253,252,253,252,233,30,0,0,0,0,0,0,0,0,0,0,0,0,0,123,102,41,102,102,102,102,102,102,102,162,254,253,142,0,0,0,0,0,0,0,0,0,0,0,0,0,0,203,102,0,0,0,0,0,0,0,0,183,253,212,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,203,142,0,0,0,0,0,0,0,11,213,254,91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41,243,102,0,0,0,0,0,0,0,51,252,172,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41,223,102,0,0,0,0,0,0,0,214,253,102,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,20,0,0,0,0,0,0,0,253,252,102,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,62,254,253,41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,102,253,171,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,163,254,91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,203,253,50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,51,253,254,50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,51,252,253,50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,51,253,254,50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,51,252,213,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,152,253,82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,233,252,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21,255,253,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,253,212,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21,130,190,254,254,250,175,135,96,96,16,4,0,0,0,0,0,0,0,0,0,0,0,0,0,26,102,186,254,254,248,222,222,225,254,254,254,254,254,206,112,4,0,0,0,0,0,0,0,0,0,0,0,207,254,254,177,117,39,0,0,56,248,102,48,48,103,192,254,135,0,0,0,0,0,0,0,0,0,0,0,91,111,36,0,0,0,0,0,72,92,0,0,0,0,12,224,210,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,50,139,240,254,66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,121,220,254,244,194,15,0,0,0,0,0,0,0,0,0,0,0,0,0,8,107,112,112,112,87,112,141,218,248,177,68,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,77,221,254,254,254,254,254,225,104,39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,32,32,32,32,130,215,195,47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,111,231,174,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47,18,0,0,0,0,0,0,0,0,0,40,228,205,35,0,0,0,0,0,0,0,0,0,0,0,0,22,234,42,0,0,0,0,0,0,0,0,0,0,56,212,226,38,0,0,0,0,0,0,0,0,0,0,0,96,157,0,0,0,0,0,0,0,0,0,0,0,0,30,215,188,9,0,0,0,0,0,0,0,0,0,0,96,142,0,0,0,0,0,0,0,0,0,0,0,0,0,86,254,68,0,0,0,0,0,0,0,0,0,0,71,202,15,0,0,0,0,0,0,0,0,0,0,0,0,6,214,151,0,0,0,0,0,0,0,0,0,0,10,231,86,2,0,0,0,0,0,0,0,0,0,0,0,0,191,207,0,0,0,0,0,0,0,0,0,0,0,93,248,129,7,0,0,0,0,0,0,0,0,0,0,117,238,112,0,0,0,0,0,0,0,0,0,0,0,0,94,248,209,73,12,0,0,0,0,0,0,42,147,252,136,9,0,0,0,0,0,0,0,0,0,0,0,0,0,48,160,215,230,158,74,64,94,153,223,250,214,105,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,129,189,234,224,255,194,134,75,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 +5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41,149,156,179,254,254,201,119,46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,147,241,253,253,254,253,253,253,253,245,160,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,31,224,253,253,180,174,175,174,174,174,174,223,247,145,6,0,0,0,0,0,0,0,0,0,0,0,0,7,197,254,253,165,2,0,0,0,0,0,0,12,102,184,16,0,0,0,0,0,0,0,0,0,0,0,0,152,253,254,162,18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,235,254,158,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,74,250,253,15,0,0,0,16,20,19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,199,253,253,0,0,25,130,235,254,247,145,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,253,253,177,100,219,240,253,253,254,253,253,125,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,193,253,253,254,253,253,200,155,155,238,253,229,23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,61,249,254,241,150,30,0,0,0,215,254,254,58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,36,39,30,0,0,0,0,0,214,253,234,31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41,241,253,183,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,201,253,253,102,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,114,254,253,154,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,62,254,255,241,30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,118,235,253,249,103,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,55,81,0,102,211,253,253,253,135,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79,243,234,254,253,253,216,117,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,48,245,253,254,207,126,27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,60,136,136,147,254,255,199,111,18,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,152,253,253,253,253,253,253,253,253,253,124,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,135,225,244,253,202,200,181,164,216,253,253,211,151,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,30,149,78,3,0,0,0,20,134,253,253,224,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28,206,253,253,224,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,78,253,253,253,224,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,99,234,253,253,224,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,142,220,219,236,253,253,240,121,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,24,253,253,253,253,235,233,253,253,185,53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,150,194,194,194,53,40,97,253,253,170,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,122,253,253,170,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,55,237,253,253,170,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130,253,253,253,170,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,12,120,193,253,253,214,28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,153,253,253,253,253,212,30,0,0,0,0,0,0,0,0,0,0,0,0,0,33,136,70,6,0,27,67,186,253,253,253,253,234,31,0,0,0,0,0,0,0,0,0,0,0,0,0,26,231,253,253,191,183,223,253,253,253,253,172,216,112,0,0,0,0,0,0,0,0,0,0,0,0,0,0,36,215,253,253,253,253,253,253,253,253,253,47,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,87,223,253,253,253,244,152,223,223,109,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,67,50,176,148,78,16,0,12,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,24,24,97,253,253,253,253,255,180,48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,30,186,252,252,253,252,252,252,252,253,252,227,29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,38,155,252,252,252,253,252,252,227,79,222,252,252,129,0,0,0,0,0,0,0,0,0,0,0,0,0,85,233,252,252,252,252,253,252,252,202,11,180,252,252,119,0,0,0,0,0,0,0,0,0,0,0,0,43,240,253,252,252,252,252,253,252,252,244,126,201,252,252,150,0,0,0,0,0,0,0,0,0,0,0,7,212,253,255,253,253,253,232,221,42,0,104,253,255,253,205,21,0,0,0,0,0,0,0,0,0,0,0,25,223,252,253,252,252,214,18,0,0,34,215,252,253,223,56,0,0,0,0,0,0,0,0,0,0,0,0,0,99,246,253,252,252,77,0,7,70,203,252,252,173,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,42,253,252,252,236,103,160,252,252,218,108,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,148,252,252,252,252,253,231,106,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,24,253,253,253,253,255,159,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,43,118,252,240,244,252,253,231,37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,19,164,246,253,187,50,99,246,253,252,69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,80,232,252,203,58,0,0,135,253,252,121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,43,246,252,200,11,0,0,0,116,253,252,69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,162,253,192,11,0,0,0,0,179,255,253,69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,178,252,119,0,5,47,47,140,244,253,252,69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,186,252,227,184,191,252,252,252,252,253,240,50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,144,227,252,252,253,252,252,252,252,98,37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,48,137,242,253,231,137,137,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15,48,143,186,244,143,31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,83,209,253,252,252,252,252,192,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,166,241,252,253,252,170,162,252,252,113,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,61,234,252,252,243,121,44,2,21,245,252,122,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,80,252,252,243,163,50,0,0,0,5,101,88,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,105,234,252,210,88,0,0,0,0,74,199,240,43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,185,252,210,21,0,4,12,41,231,249,252,252,55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,242,252,218,154,154,184,252,253,252,252,248,184,22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,209,252,252,252,252,252,252,253,252,252,196,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17,57,142,95,142,61,81,253,252,209,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,177,255,230,86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12,124,252,245,57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,135,252,252,86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79,248,252,233,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,231,252,202,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,175,248,252,136,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,109,252,252,159,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,33,218,252,252,192,141,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,132,252,252,252,205,74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,132,252,252,146,13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,169,207,33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,168,254,105,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,84,249,254,105,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,89,254,254,105,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,89,254,193,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,89,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,204,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,89,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,209,254,178,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,37,209,254,254,69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,107,254,254,254,184,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,187,254,254,134,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,185,254,155,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,185,254,238,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,185,254,254,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,185,254,231,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,185,255,87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,173,254,87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,254,87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,128,255,191,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,191,255,255,255,191,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,191,255,255,255,255,255,255,255,191,191,191,128,128,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,128,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,128,0,0,0,0,0,0,0,0,0,0,0,0,0,128,128,191,128,128,191,255,255,255,255,255,255,255,255,191,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,191,255,255,255,255,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,128,255,255,255,255,255,191,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,128,255,255,255,255,255,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,255,255,255,255,255,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,128,255,255,255,255,255,191,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,128,191,255,255,255,255,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,191,255,255,191,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,128,128,0,0,0,0,0,0,64,255,255,255,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,191,255,128,0,0,0,0,0,0,64,255,255,255,64,0,0,0,0,0,0,0,0,0,0,0,0,0,64,255,255,64,0,0,0,0,0,0,191,255,255,255,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,255,255,191,64,0,0,0,64,191,255,255,255,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,255,255,255,255,255,255,255,255,255,255,191,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,255,255,255,255,255,255,255,255,191,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,255,255,191,128,128,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,101,222,253,253,192,113,88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,85,226,249,252,252,252,253,252,246,209,38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,156,252,253,233,195,195,195,196,214,252,252,221,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,57,252,252,162,56,0,0,0,0,28,121,252,252,216,18,0,0,0,0,0,0,0,0,0,0,0,0,0,57,252,173,0,0,0,0,0,0,0,25,205,252,253,27,0,0,0,0,0,0,0,0,0,0,0,0,0,57,253,253,0,0,0,0,0,0,0,0,92,253,255,27,0,0,0,0,0,0,0,0,0,0,0,0,0,38,224,252,126,0,0,0,0,0,0,51,243,252,253,27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,53,195,110,0,0,0,0,0,51,101,252,252,190,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,29,29,92,243,252,252,252,253,177,53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,126,165,252,252,253,252,252,252,252,253,252,195,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,226,253,253,253,255,215,140,140,140,192,253,253,146,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,178,252,242,167,106,18,0,0,0,12,228,252,223,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,19,55,49,0,0,0,0,0,0,0,225,252,223,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,92,243,252,129,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,203,253,252,220,37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,26,207,253,255,215,31,0,0,0,0,0,0,0,0,0,0,101,225,175,0,0,0,0,0,10,85,147,225,231,252,252,168,33,0,0,0,0,0,0,0,0,0,0,0,113,252,208,57,57,57,57,166,203,252,253,252,239,195,118,0,0,0,0,0,0,0,0,0,0,0,0,0,38,234,252,252,252,253,252,252,252,252,225,176,65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,100,221,252,252,253,127,112,112,112,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,38,236,73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,96,253,92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,207,214,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,92,253,97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,144,253,44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,231,253,44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,54,254,192,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130,249,107,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,221,221,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,221,221,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,78,255,208,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,107,253,111,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,188,253,68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,188,253,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,202,181,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,45,253,143,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,146,253,90,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,215,187,13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,45,253,66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,165,124,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,39,91,91,91,91,91,191,255,142,91,23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23,211,253,253,253,253,253,253,253,253,253,181,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,180,253,253,253,253,253,253,253,253,253,253,228,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,180,253,253,253,253,253,253,253,253,253,253,228,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,101,253,253,253,253,253,253,253,253,253,253,228,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,224,253,253,211,114,96,253,253,253,253,228,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,48,204,114,18,0,156,253,253,253,253,228,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15,0,0,0,230,253,253,253,253,185,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,31,156,244,253,253,253,192,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,209,253,253,253,253,245,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,215,253,253,253,245,130,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,124,253,253,253,253,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41,220,253,253,253,238,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,49,222,253,253,253,253,98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,107,253,253,253,253,210,35,0,0,0,5,42,42,42,42,18,0,0,0,0,0,0,0,0,0,0,0,0,107,253,253,253,253,253,178,131,131,131,143,253,253,253,253,182,8,0,0,0,0,0,0,0,0,0,0,0,83,240,253,253,253,253,253,253,253,253,253,253,253,253,240,195,12,0,0,0,0,0,0,0,0,0,0,0,0,197,253,253,253,253,253,253,253,253,253,253,229,106,82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,33,179,241,253,253,253,253,253,253,246,179,44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,75,89,195,89,89,89,89,81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,144,254,130,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,46,157,251,253,222,91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,95,232,253,253,253,253,249,133,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,43,254,253,253,228,137,85,253,235,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47,229,254,253,228,57,0,54,253,253,107,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,226,253,254,202,55,0,0,54,253,253,107,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28,133,253,253,214,11,0,0,0,54,253,253,107,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,182,253,253,168,0,0,0,0,0,54,253,253,107,0,0,0,0,0,0,0,0,0,0,0,0,0,4,65,253,252,233,50,0,0,0,0,0,131,253,252,101,0,0,0,0,0,0,0,0,0,0,0,0,0,68,253,253,226,0,0,0,0,0,0,0,187,253,226,0,0,0,0,0,0,0,0,0,0,0,0,0,0,202,254,235,49,0,0,0,0,0,0,131,254,255,94,0,0,0,0,0,0,0,0,0,0,0,0,0,204,251,230,53,0,0,0,0,0,0,0,201,253,253,93,0,0,0,0,0,0,0,0,0,0,0,0,85,249,228,126,0,0,0,0,0,0,0,123,248,253,253,93,0,0,0,0,0,0,0,0,0,0,0,35,215,253,186,0,0,0,0,0,0,0,123,247,253,236,151,15,0,0,0,0,0,0,0,0,0,0,0,108,253,253,81,0,0,0,0,0,0,191,248,253,253,164,0,0,0,0,0,0,0,0,0,0,0,0,0,108,253,194,14,0,0,0,3,54,188,250,253,253,175,18,0,0,0,0,0,0,0,0,0,0,0,0,0,108,253,228,68,41,132,174,178,253,255,253,253,176,17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,40,237,253,253,253,253,253,253,253,255,242,177,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,221,253,253,253,253,253,252,240,241,68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,45,233,253,253,232,120,114,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
+7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12,69,128,199,199,220,255,177,242,22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,59,180,252,254,254,254,254,254,254,254,254,120,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,179,254,230,186,136,73,56,56,56,63,254,140,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,110,50,16,0,0,0,0,0,0,76,237,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,205,214,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47,252,105,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,208,219,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,96,254,78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,209,187,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,155,254,58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,222,240,13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,87,254,71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,213,238,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,31,254,203,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,172,254,152,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,63,254,151,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,191,226,27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23,254,102,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,158,249,47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,165,160,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tests/test_bigquery.py b/tests/test_bigquery.py new file mode 100644 index 00000000..cbee03ae --- /dev/null +++ b/tests/test_bigquery.py @@ -0,0 +1,38 @@ +import unittest +import os +import threading + +from http.server import BaseHTTPRequestHandler, HTTPServer + +from google.cloud import bigquery + +HOSTNAME = "127.0.0.1" +PORT = 8000 +URL = "http://%s:%s" % (HOSTNAME, PORT) + +class TestBigQuery(unittest.TestCase): + def test_proxy(self): + httpd = HTTPServer((HOSTNAME, PORT), HTTPHandler) + threading.Thread(target=httpd.serve_forever).start() + client = bigquery.Client() + + try: + for ds in client.list_datasets(): pass + except: + pass + + httpd.shutdown() + self.assertTrue(HTTPHandler.called, msg="Fake server did not receive a request from the BQ client.") + self.assertTrue(HTTPHandler.header_found, msg="X-KAGGLE-PROXY-DATA header was missing from the BQ request.") + +class HTTPHandler(BaseHTTPRequestHandler): + called = False + header_found = False + + def do_HEAD(s): + s.send_response(200) + + def do_GET(s): + HTTPHandler.called = True + HTTPHandler.header_found = any(k for k in s.headers if k == "X-KAGGLE-PROXY-DATA" and s.headers[k] == "test-key") + s.send_response(200) diff --git a/tests/test_bokeh.py b/tests/test_bokeh.py new file mode 100644 index 00000000..1b186814 --- /dev/null +++ b/tests/test_bokeh.py @@ -0,0 +1,7 @@ +import unittest + +from bokeh.plotting import figure + +class TestBokeh(unittest.TestCase): + def test_figure(self): + figure(title="Hello World") diff --git a/tests/test_essentia.py b/tests/test_essentia.py new file mode 100644 index 00000000..749b9466 --- /dev/null +++ b/tests/test_essentia.py @@ -0,0 +1,7 @@ +import unittest + +from essentia.standard import Windowing + +class TestEssentia(unittest.TestCase): + def test_windowing(self): + Windowing(type = 'hann')
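A note on the pattern in test_bigquery.py above: it checks proxy behavior by standing up a throwaway HTTP server on a background thread, letting the client fire a request at it, and then asserting on what the handler recorded. A minimal, self-contained sketch of that same fixture pattern follows, using only the standard library; the header name, its value, and the OS-assigned port are illustrative assumptions here, not the values the Kaggle proxy actually uses.

import threading
import urllib.request
from http.server import BaseHTTPRequestHandler, HTTPServer

class RecordingHandler(BaseHTTPRequestHandler):
    # Class-level flags let the main thread inspect what the server thread saw.
    called = False
    header_found = False

    def do_GET(self):
        RecordingHandler.called = True
        # Record whether the request carried the expected header (names are illustrative).
        RecordingHandler.header_found = self.headers.get("X-Example-Header") == "test-key"
        self.send_response(200)
        self.end_headers()

# Port 0 asks the OS for any free port, avoiding collisions with other suites.
httpd = HTTPServer(("127.0.0.1", 0), RecordingHandler)
threading.Thread(target=httpd.serve_forever, daemon=True).start()

request = urllib.request.Request(
    "http://%s:%d" % httpd.server_address,
    headers={"X-Example-Header": "test-key"})
urllib.request.urlopen(request).close()
httpd.shutdown()

assert RecordingHandler.called and RecordingHandler.header_found

test_bigquery.py pins a fixed port rather than using port 0, presumably so the BigQuery client, which is configured elsewhere in the image, can be pointed at a known proxy URL ahead of time.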
diff --git a/tests/test_fastai.py b/tests/test_fastai.py new file mode 100644 index 00000000..275d2776 --- /dev/null +++ b/tests/test_fastai.py @@ -0,0 +1,9 @@ +import unittest + +from fastai.core import partition + +class TestFastAI(unittest.TestCase): + def test_partition(self): + result = partition([1,2,3,4,5], 2) + + self.assertEqual(3, len(result)) diff --git a/tests/test_fasttext.py b/tests/test_fasttext.py new file mode 100644 index 00000000..e7112ae7 --- /dev/null +++ b/tests/test_fasttext.py @@ -0,0 +1,9 @@ +import unittest + +import fastText + +class TestFastText(unittest.TestCase): + def test_tokenize(self): + tokens = fastText.FastText.tokenize("Hello World") + + self.assertEqual(["Hello", "World"], tokens) diff --git a/tests/test_keras.py b/tests/test_keras.py new file mode 100644 index 00000000..ca107563 --- /dev/null +++ b/tests/test_keras.py @@ -0,0 +1,25 @@ +import unittest + +import pandas as pd + +from keras.models import Sequential +from keras.layers import Dense +from keras.optimizers import RMSprop +from keras.utils.np_utils import to_categorical + +class TestKeras(unittest.TestCase): + def test_train(self): + train = pd.read_csv("/input/tests/data/train.csv") + + x_train = train.iloc[:,1:].values.astype('float32') + y_train = to_categorical(train.iloc[:,0].astype('int32')) + + model = Sequential() + model.add(Dense(units=10, input_dim=784, activation='softmax')) + + model.compile( + loss='categorical_crossentropy', + optimizer=RMSprop(lr=0.001), + metrics=['accuracy']) + + model.fit(x_train, y_train, epochs=1, batch_size=32) diff --git a/tests/test_kmapper.py b/tests/test_kmapper.py new file mode 100644 index 00000000..c75deea3 --- /dev/null +++ b/tests/test_kmapper.py @@ -0,0 +1,7 @@ +import unittest + +import kmapper as km + +class TestKMapper(unittest.TestCase): + def test_init(self): + km.KeplerMapper() diff --git a/tests/test_learntools.py b/tests/test_learntools.py new file mode 100644 index 00000000..c4c4efb0 --- /dev/null +++ b/tests/test_learntools.py @@ -0,0 +1,9 @@ +import unittest + +from learntools.core import binder; binder.bind(globals()) +from learntools.python.ex1 import * + +class TestLearnTools(unittest.TestCase): + def test_check(self): + color="blue" + q0.check() diff --git a/tests/test_matplotlib.py b/tests/test_matplotlib.py new file mode 100644 index 00000000..4b2605ec --- /dev/null +++ b/tests/test_matplotlib.py @@ -0,0 +1,18 @@ +import unittest +import os.path + +import matplotlib.pyplot as plt +import numpy as np + +from mpl_toolkits.basemap import Basemap + +class TestMatplotlib(unittest.TestCase): + def test_plot(self): + plt.plot(np.linspace(0,1,50), np.random.rand(50)) + plt.savefig("plot1.png") + + self.assertTrue(os.path.isfile("plot1.png")) + + def test_basemap(self): + Basemap(width=100,height=100,projection='aeqd', + lat_0=40,lon_0=-105) diff --git a/tests/test_nltk.py b/tests/test_nltk.py new file mode 100644 index 00000000..45bee936 --- /dev/null +++ b/tests/test_nltk.py @@ -0,0 +1,9 @@ +import unittest + +import nltk + +class TestNLTK(unittest.TestCase): + def test_tokenize(self): + tokens = nltk.word_tokenize("At eight o'clock") + + self.assertEqual(["At", "eight", "o'clock"], tokens) diff --git a/tests/test_numpy.py b/tests/test_numpy.py new file mode 100644 index 00000000..2d809f7b --- /dev/null +++ b/tests/test_numpy.py @@ -0,0 +1,16 @@ +import unittest + +import numpy as np +from numpy.distutils.system_info import get_info + +class TestNumpy(unittest.TestCase): + def test_array(self): + array = np.array([1, 3]) + + 
self.assertEqual((2,), array.shape) + + # Numpy must be linked to the MKL. (Occasionally, a third-party package will muck up the installation + # and numpy will be reinstalled with an OpenBLAS backing.) + def test_mkl(self): + # This will throw an exception if the MKL is not linked correctly. + get_info("blas_mkl") diff --git a/tests/test_opencv.py b/tests/test_opencv.py new file mode 100644 index 00000000..be0e92f8 --- /dev/null +++ b/tests/test_opencv.py @@ -0,0 +1,9 @@ +import unittest + +import cv2 + +class TestOpenCV(unittest.TestCase): + def test_imread(self): + img = cv2.imread('/input/tests/data/dot.png') + + self.assertEqual(1, img.shape[0]) diff --git a/tests/test_pandas.py b/tests/test_pandas.py new file mode 100644 index 00000000..f9163c57 --- /dev/null +++ b/tests/test_pandas.py @@ -0,0 +1,9 @@ +import unittest + +import pandas as pd + +class TestPandas(unittest.TestCase): + def test_read_csv(self): + data = pd.read_csv("/input/tests/data/train.csv") + + self.assertEqual(2, len(data.shape)) diff --git a/tests/test_plotly.py b/tests/test_plotly.py new file mode 100644 index 00000000..0c24a05e --- /dev/null +++ b/tests/test_plotly.py @@ -0,0 +1,9 @@ +import unittest + +import plotly.graph_objs as go + +class TestPlotly(unittest.TestCase): + def test_figure(self): + trace = {'x': [1, 2], 'y': [1, 3]} + data = [ trace ] + go.Figure(data=data) diff --git a/tests/test_pyfasttext.py b/tests/test_pyfasttext.py new file mode 100644 index 00000000..1ebf5211 --- /dev/null +++ b/tests/test_pyfasttext.py @@ -0,0 +1,9 @@ +import unittest + +from pyfasttext import FastText + +class TestPyFasttext(unittest.TestCase): + def test_vector(self): + model = FastText() + + model.supervised(input='/input/tests/data/text.txt', output='model', epoch=1, lr=0.7) diff --git a/tests/test_pytorch.py b/tests/test_pytorch.py new file mode 100644 index 00000000..4f250572 --- /dev/null +++ b/tests/test_pytorch.py @@ -0,0 +1,13 @@ +import unittest + +import torch +import torch.nn as tnn +import torch.autograd as autograd + +class TestPyTorch(unittest.TestCase): + # PyTorch smoke test based on http://pytorch.org/tutorials/beginner/nlp/deep_learning_tutorial.html + def test_nn(self): + torch.manual_seed(31337) + linear_torch = tnn.Linear(5,3) + data_torch = autograd.Variable(torch.randn(2, 5)) + linear_torch(data_torch) diff --git a/tests/test_seaborn.py b/tests/test_seaborn.py new file mode 100644 index 00000000..3986bc8c --- /dev/null +++ b/tests/test_seaborn.py @@ -0,0 +1,7 @@ +import unittest + +import seaborn as sns + +class TestSeaborn(unittest.TestCase): + def test_option(self): + sns.set(style="darkgrid") diff --git a/tests/test_shap.py b/tests/test_shap.py new file mode 100644 index 00000000..992d5c5e --- /dev/null +++ b/tests/test_shap.py @@ -0,0 +1,7 @@ +import unittest + +import shap + +class TestShap(unittest.TestCase): + def test_init(self): + shap.initjs() diff --git a/tests/test_skimage.py b/tests/test_skimage.py new file mode 100644 index 00000000..4a53b12e --- /dev/null +++ b/tests/test_skimage.py @@ -0,0 +1,8 @@ +import unittest + +from skimage import data, filters + +class TestSkImage(unittest.TestCase): + def test_filter(self): + image = data.coins() + filters.sobel(image) diff --git a/tests/test_sklearn.py b/tests/test_sklearn.py new file mode 100644 index 00000000..d2d4b027 --- /dev/null +++ b/tests/test_sklearn.py @@ -0,0 +1,18 @@ +import unittest + +from sklearn import datasets +from sklearn.ensemble import RandomForestClassifier +from sklearn.linear_model import LinearRegression + +class 
TestSklearn(unittest.TestCase): + def test_random_forest_classifier(self): + iris = datasets.load_iris() + X, y = iris.data, iris.target + rf1 = RandomForestClassifier() + rf1.fit(X,y) + + def test_linear_regression(self): + boston = datasets.load_boston() + X, y = boston.data, boston.target + lr1 = LinearRegression() + lr1.fit(X,y) diff --git a/tests/test_tensorflow.py b/tests/test_tensorflow.py new file mode 100644 index 00000000..120b773d --- /dev/null +++ b/tests/test_tensorflow.py @@ -0,0 +1,12 @@ +import unittest + +import tensorflow as tf + +class TestTensorflow(unittest.TestCase): + def test_addition(self): + op = tf.add(2, 3) + sess = tf.Session() + + result = sess.run(op) + + self.assertEqual(5, result) diff --git a/tests/test_theano.py b/tests/test_theano.py new file mode 100644 index 00000000..e4714157 --- /dev/null +++ b/tests/test_theano.py @@ -0,0 +1,20 @@ +import unittest + +import theano +from theano import tensor + +class TestTheano(unittest.TestCase): + def test_addition(self): + # declare two symbolic floating-point scalars + a = tensor.dscalar() + b = tensor.dscalar() + + # create a simple expression + c = a + b + + # convert the expression into a callable object that takes (a,b) + # values as input and computes a value for c + f = theano.function([a,b], c) + + # bind 1.5 to 'a', 2.5 to 'b', and evaluate 'c' + self.assertEqual(4.0, f(1.5, 2.5)) diff --git a/tests/test_vowpalwabbit.py b/tests/test_vowpalwabbit.py new file mode 100644 index 00000000..f6f62161 --- /dev/null +++ b/tests/test_vowpalwabbit.py @@ -0,0 +1,10 @@ +import unittest + +from vowpalwabbit import pyvw + +class TestVowpalwabbit(unittest.TestCase): + def test_basic(self): + vw = pyvw.vw(quiet=True) + ex = vw.example('1 | a b c') + vw.learn(ex) + self.assertEqual(0.632030725479126, vw.predict(ex)) diff --git a/tests/test_wordbatch.py b/tests/test_wordbatch.py new file mode 100644 index 00000000..4d64c8f7 --- /dev/null +++ b/tests/test_wordbatch.py @@ -0,0 +1,15 @@ +import unittest + +import wordbatch + +from wordbatch.extractors import WordBag + +class TestWordBatch(unittest.TestCase): + def test_wordbatch(self): + wordbatch.WordBatch(extractor=(WordBag, { + "hash_ngrams":2, + "hash_ngrams_weights":[0.5, -1.0], + "hash_size":2**23, + "norm":'l2', + "tf":'log', + "idf":50.0})) diff --git a/tests/test_xgboost.py b/tests/test_xgboost.py new file mode 100644 index 00000000..e1bc12f8 --- /dev/null +++ b/tests/test_xgboost.py @@ -0,0 +1,12 @@ +import unittest + +from sklearn import datasets +from xgboost import XGBClassifier + +class TestXGBoost(unittest.TestCase): + def test_classifier(self): + boston = datasets.load_boston() + X, y = boston.data, boston.target + + xgb1 = XGBClassifier(n_estimators=3) + xgb1.fit(X[0:70],y[0:70]) diff --git a/tests/text_mxnet.py b/tests/text_mxnet.py new file mode 100644 index 00000000..8c9d0712 --- /dev/null +++ b/tests/text_mxnet.py @@ -0,0 +1,9 @@ +import unittest + +import mxnet as mx + +class TestMxNet(unittest.TestCase): + def test_array(self): + x = mx.nd.array([[1, 2, 3], [4, 5, 6]]) + + self.assertEqual((2, 3), x.shape) From 8fbdaff9eb07479b959d745be5f949db920488d5 Mon Sep 17 00:00:00 2001 From: Ben Hamner Date: Fri, 10 Aug 2018 20:58:06 -0700 Subject: [PATCH 023/251] Added tensorflow_hub --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 1e272503..85dc779e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -494,6 +494,7 @@ RUN pip install flashtext && \ pip install conx && \ pip install pandasql && \ pip install trackml && \ +
pip install tensorflow_hub && \ ##### ^^^^ Add new contributions above here ^^^^ ##### # clean up pip cache rm -rf /root/.cache/pip/* From 3646d64d48ee240e233e02331651c7b6837f6f7c Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 13 Aug 2018 16:46:24 +0000 Subject: [PATCH 024/251] Capitalize and add punctuation to comment --- tests/test_theano.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/test_theano.py b/tests/test_theano.py index e4714157..156beb7b 100644 --- a/tests/test_theano.py +++ b/tests/test_theano.py @@ -5,16 +5,16 @@ class TestTheano(unittest.TestCase): def test_addition(self): - # declare two symbolic floating-point scalars + # Declare two symbolic floating-point scalars. a = tensor.dscalar() b = tensor.dscalar() - # create a simple expression + # Create a simple expression. c = a + b - # convert the expression into a callable object that takes (a,b) - # values as input and computes a value for c + # Convert the expression into a callable object that takes (a,b) + # values as input and computes a value for 'c'. f = theano.function([a,b], c) - # bind 1.5 to 'a', 2.5 to 'b', and evaluate 'c' + # Bind 1.5 to 'a', 2.5 to 'b', and evaluate 'c'. self.assertEqual(4.0, f(1.5, 2.5)) From cab99a4285ebf42503232756ffe5485a590d7648 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 13 Aug 2018 17:07:49 +0000 Subject: [PATCH 025/251] add tracking issue for tensorflow python 3.7 compatibility --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 85dc779e..f0495a89 100644 --- a/Dockerfile +++ b/Dockerfile @@ -48,11 +48,11 @@ RUN apt-get update && apt-get install -y python-software-properties zip && \ apt-get update && apt-get install -y bazel && \ apt-get upgrade -y bazel -# Tensorflow +# Tensorflow doesn't support python 3.7 yet.
See https://github.com/tensorflow/tensorflow/issues/20517 # Fix to install tf 1.10: Downgrade python 3.7->3.6.6 and downgrade Pandas 0.23.3->0.23.2 RUN conda install -y python=3.6.6 && \ pip install pandas==0.23.2 && \ -# Another fix for TF 1.10 https://github.com/tensorflow/tensorflow/issues/21518 + # Another fix for TF 1.10 https://github.com/tensorflow/tensorflow/issues/21518 pip install keras_applications==1.0.4 --no-deps && \ pip install keras_preprocessing==1.0.2 --no-deps && \ cd /usr/local/src && \ From 8dda3a7683c0206d44f2cdd53e731da963343e8d Mon Sep 17 00:00:00 2001 From: Chris Crawford Date: Tue, 14 Aug 2018 00:10:41 +0000 Subject: [PATCH 026/251] Ugly and inelegant fix for geopandas --- Dockerfile | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index f0495a89..5d97c5af 100644 --- a/Dockerfile +++ b/Dockerfile @@ -227,7 +227,6 @@ RUN pip install scipy && \ cd /usr/local/src && git clone git://github.com/nicolashennetier/pyeconometrics.git && \ cd pyeconometrics && python setup.py install && \ apt-get install -y graphviz && pip install graphviz && \ - apt-get install -y libgdal1-dev && GDAL_CONFIG=/usr/bin/gdal-config pip install fiona && pip install geopandas && \ # Pandoc is a dependency of deap apt-get install -y pandoc && \ cd /usr/local/src && git clone git://github.com/scikit-learn-contrib/py-earth.git && \ @@ -324,8 +323,10 @@ RUN pip install fancyimpute && \ pip install stemming && \ conda install -y -c conda-forge fbprophet && \ conda install -y -c conda-forge -c ioam holoviews geoviews && \ -# Temp fix : Fiona is already installed by pip and conda installs another version as a dependency for holoviews - conda uninstall -y fiona && \ + #Temp fix: After installing holoviews and geoviews, deps for fiona and geopandas get really messed up. This is a very inelegant fix. + conda uninstall -y fiona geopandas && \ + pip uninstall -y fiona geopandas && \ + apt-get install -y libgdal1-dev && GDAL_CONFIG=/usr/bin/gdal-config pip install fiona && pip install geopandas && \ pip install hypertools && \ # Nxviz has been causing an installation issue by trying unsuccessfully to remove setuptools.
#pip install nxviz && \ From cc404542a0fdb365bd0dca19c9fc421a066b9cb7 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 22 Aug 2018 09:40:53 -0700 Subject: [PATCH 027/251] pin notebook package to 5.5.0 to fix styling issue --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index e4f353c5..d5cc3ce6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -433,7 +433,7 @@ RUN pip install bcolz && \ pip install mistune && \ pip install nbconvert && \ pip install nbformat && \ - pip install notebook && \ + pip install notebook==0.5.5 && \ pip install numpy && \ pip install olefile && \ pip install opencv-python && \ From 94c03a970609f4f33ae3e1f4a93bb9929e69071f Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 22 Aug 2018 23:00:58 +0000 Subject: [PATCH 028/251] install tensorpack using pip --- Dockerfile | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index d5cc3ce6..6f689c0f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -360,8 +360,7 @@ RUN pip install kmeans-smote --no-dependencies && \ cd /opt/ && git clone https://github.com/PAIR-code/facets && cd facets/ && jupyter nbextension install facets-dist/ --user && \ export PYTHONPATH=$PYTHONPATH:/opt/facets/facets_overview/python/ && \ pip install --no-dependencies ethnicolr && \ - # Update setuptools and add tensorpack - pip install --upgrade --ignore-installed setuptools && pip install --no-cache-dir git+git://github.com/ppwwyyxx/tensorpack && \ + pip install tensorpack && \ pip install pycountry && pip install iso3166 && \ pip install pydash && \ pip install kmodes --no-dependencies && \ @@ -524,4 +523,4 @@ ENV PYTHONUSERBASE "/root/.local" ADD patches/sitecustomize.py /root/.local/lib/python3.6/site-packages/sitecustomize.py # Set backend for matplotlib -ENV MPLBACKEND "agg" \ No newline at end of file +ENV MPLBACKEND "agg" From 16307753cd6e84fd4e55618abca2e474a7a7bfa8 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 23 Aug 2018 15:06:11 -0700 Subject: [PATCH 029/251] Typo in notebook version.
--- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 6f689c0f..d7a63932 100644 --- a/Dockerfile +++ b/Dockerfile @@ -432,7 +432,7 @@ RUN pip install bcolz && \ pip install mistune && \ pip install nbconvert && \ pip install nbformat && \ - pip install notebook==0.5.5 && \ + pip install notebook==5.5.0 && \ pip install numpy && \ pip install olefile && \ pip install opencv-python && \ From c2b8335832c3a01f0c61276639471ed80c7f9f88 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 24 Aug 2018 16:40:01 +0000 Subject: [PATCH 030/251] Install pytorch for cpu on master --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index d7a63932..cb447b43 100644 --- a/Dockerfile +++ b/Dockerfile @@ -235,7 +235,7 @@ RUN pip install scipy && \ # PyTorch export CXXFLAGS="-std=c++11" && \ export CFLAGS="-std=c99" && \ - conda install -y pytorch torchvision -c pytorch && \ + conda install -y pytorch-cpu torchvision-cpu -c pytorch && \ # PyTorch Audio apt-get install -y sox libsox-dev libsox-fmt-all && \ pip install cffi && \ From 4432a8f3c3b0e5d1f3476062dc93e4e6c2a41b22 Mon Sep 17 00:00:00 2001 From: Gyczero <2997347185@qq.com> Date: Sat, 1 Sep 2018 18:50:30 +0800 Subject: [PATCH 031/251] ADD support for gensim and jieba --- Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index cb447b43..61a11394 100644 --- a/Dockerfile +++ b/Dockerfile @@ -156,7 +156,7 @@ RUN apt-get update && \ conda install basemap && \ # Pillow (PIL) apt-get -y install zlib1g-dev liblcms2-dev libwebp-dev && \ - pip install Pillow + pip install Pillow RUN cd /usr/local/src && git clone https://github.com/vitruvianscience/opendeep.git && \ cd opendeep && python setup.py develop && \ @@ -495,6 +495,8 @@ RUN pip install flashtext && \ pip install pandasql && \ pip install trackml && \ pip install tensorflow_hub && \ + pip install gensim && \ + pip install jieba && \ ##### ^^^^ Add new contributions above here ^^^^ ##### # clean up pip cache rm -rf /root/.cache/pip/* From 3fe25c22f6ca2cd1ee6da68913bfded9fc6e2912 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 11 Sep 2018 18:41:19 +0000 Subject: [PATCH 032/251] Install basemap from source --- Dockerfile | 32 +++++++++++++++++--------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/Dockerfile b/Dockerfile index cb447b43..49c709c1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -142,21 +142,23 @@ RUN apt-get install -y libfreetype6-dev && \ ENV LD_LIBRARY_PATH=/opt/conda/lib # Install Basemap via conda temporarily -RUN apt-get update && \ - #apt-get -y install libgeos-dev && \ - #pip install matplotlib && \ - #pip install pyshp && \ - #pip install pyproj && \ - #cd /usr/local/src && git clone https://github.com/matplotlib/basemap.git && \ - #cd basemap/geos-3.3.3 && \ - #export GEOS_DIR=/usr/local && \ - #./configure --prefix=$GEOS_DIR && \ - #make && make install && \ - #cd .. 
&& python setup.py install && \ - conda install basemap && \ - # Pillow (PIL) - apt-get -y install zlib1g-dev liblcms2-dev libwebp-dev && \ - pip install Pillow +RUN apt-get -y install zlib1g-dev liblcms2-dev libwebp-dev libgeos-dev && \ + # Pillow is an optional dependency of basemap + pip install Pillow && \ + pip install matplotlib && \ + pip install pyshp && \ + pip install pyproj && \ + cd /usr/local/src && git clone https://github.com/matplotlib/basemap.git && \ + cd basemap && \ + git checkout v1.1.0 && \ + # Install geos + cd geos-3.3.3 && \ + export GEOS_DIR=/usr/local && \ + ./configure --prefix=$GEOS_DIR && \ + make && make install && \ + # Install basemap + cd .. && python setup.py install && \ + pip install basemap --no-binary basemap RUN cd /usr/local/src && git clone https://github.com/vitruvianscience/opendeep.git && \ cd opendeep && python setup.py develop && \ From 5095d04c40c4cd8df2860bb484637d2de1b2485c Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 13 Sep 2018 10:55:18 -0700 Subject: [PATCH 033/251] Move gensim after downgrading to python 3.6 Otherwise, the package gets silently removed. --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 49c709c1..100464f5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ RUN sed -i "s/httpredir.debian.org/debian.uchicago.edu/" /etc/apt/sources.list & apt-get -y install cmake RUN pip install seaborn python-dateutil dask pytagcloud pyyaml joblib \ - husl geopy ml_metrics mne pyshp gensim && \ + husl geopy ml_metrics mne pyshp && \ conda install -y -c conda-forge spacy && python -m spacy download en && \ python -m spacy download en_core_web_lg && \ # The apt-get version of imagemagick is out of date and has compatibility issues, so we build from source @@ -65,6 +65,7 @@ RUN conda install -y python=3.6.6 && \ RUN apt-get install -y libfreetype6-dev && \ apt-get install -y libglib2.0-0 libxext6 libsm6 libxrender1 libfontconfig1 --fix-missing && \ + pip install gensim && \ # textblob pip install textblob && \ #word cloud From 3af3acbb1968266e646970336abcb4d4cd355aef Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 13 Sep 2018 15:08:56 -0700 Subject: [PATCH 034/251] Add PDPbox --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 100464f5..6317d1bc 100644 --- a/Dockerfile +++ b/Dockerfile @@ -498,6 +498,7 @@ RUN pip install flashtext && \ pip install pandasql && \ pip install trackml && \ pip install tensorflow_hub && \ + pip install PDPbox && \ ##### ^^^^ Add new contributions above here ^^^^ ##### # clean up pip cache rm -rf /root/.cache/pip/* From adffdb69ba1e03dff47b7e89d955c927e1fc85a3 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 14 Sep 2018 10:01:37 -0700 Subject: [PATCH 035/251] add tests for gensim --- tests/test_gensim.py | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 tests/test_gensim.py diff --git a/tests/test_gensim.py b/tests/test_gensim.py new file mode 100644 index 00000000..bf239017 --- /dev/null +++ b/tests/test_gensim.py @@ -0,0 +1,9 @@ +import unittest + +from gensim import corpora + +class TestGensim(unittest.TestCase): + def test_dictionary(self): + dic = corpora.Dictionary([['lorem', 'ipsum']]) + + self.assertEqual(2, len(dic.token2id)) From d4d81c2ebc4cad9d4be76c8704a2da65df7fcc65 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 14 Sep 2018 10:59:47 -0700 Subject: [PATCH 036/251] Remove duplicate install for opencv --- Dockerfile | 3 --- 1 
file changed, 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 6317d1bc..c3f2f09c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -32,9 +32,6 @@ RUN pip install seaborn python-dateutil dask pytagcloud pyyaml joblib \ # clean up ImageMagick source files cd ../ && rm -rf ImageMagick* -# OpenCV install (from pip or source) -RUN pip install opencv-python - RUN apt-get update && apt-get install -y python-software-properties zip && \ echo "deb http://ppa.launchpad.net/webupd8team/java/ubuntu precise main" | tee -a /etc/apt/sources.list && \ echo "deb-src http://ppa.launchpad.net/webupd8team/java/ubuntu precise main" | tee -a /etc/apt/sources.list && \ From f087d24fe550f869f9dadb26ef31df760459852f Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 14 Sep 2018 11:03:34 -0700 Subject: [PATCH 037/251] Remove duplicate install for Pillow --- Dockerfile | 2 -- 1 file changed, 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index c3f2f09c..fe62e9eb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -141,8 +141,6 @@ ENV LD_LIBRARY_PATH=/opt/conda/lib # Install Basemap via conda temporarily RUN apt-get -y install zlib1g-dev liblcms2-dev libwebp-dev libgeos-dev && \ - # Pillow is an optional dependency of basemap - pip install Pillow && \ pip install matplotlib && \ pip install pyshp && \ pip install pyproj && \ From a175e913e08fca81245c94e86ad8f9e183c9b9a3 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 14 Sep 2018 11:14:23 -0700 Subject: [PATCH 038/251] Add ggplot --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index fe62e9eb..9f41bc9e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -494,6 +494,7 @@ RUN pip install flashtext && \ pip install trackml && \ pip install tensorflow_hub && \ pip install PDPbox && \ + pip install ggplot && \ ##### ^^^^ Add new contributions above here ^^^^ ##### # clean up pip cache rm -rf /root/.cache/pip/* From e8339b3f119e03429e85d6626871a38613b44aeb Mon Sep 17 00:00:00 2001 From: Michael Apers <32524673+perfectclear@users.noreply.github.com> Date: Fri, 14 Sep 2018 11:50:54 -0700 Subject: [PATCH 039/251] Update Dockerfile --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 9f41bc9e..036d005b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -495,6 +495,7 @@ RUN pip install flashtext && \ pip install tensorflow_hub && \ pip install PDPbox && \ pip install ggplot && \ + pip install cesium && \ ##### ^^^^ Add new contributions above here ^^^^ ##### # clean up pip cache rm -rf /root/.cache/pip/* From eb6742f9b5760d8a5357e5241541b3f02095a9bc Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 17 Sep 2018 16:03:21 +0000 Subject: [PATCH 040/251] Merge and improve gpu tests. 
Add annotations --- tests/__init__.py | 0 tests/common.py | 6 ++++++ tests/test_nvidia.py | 26 ++++++++++++++++++++++++++ tests/test_pytorch.py | 12 ++++++++++++ tests/test_tensorflow.py | 16 ++++++++++++++++ 5 files changed, 60 insertions(+) create mode 100644 tests/__init__.py create mode 100644 tests/common.py create mode 100644 tests/test_nvidia.py diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/common.py b/tests/common.py new file mode 100644 index 00000000..823212a2 --- /dev/null +++ b/tests/common.py @@ -0,0 +1,6 @@ +"""Common testing setup""" + +import os +import unittest + +gpu_test = unittest.skipIf(len(os.environ.get('CUDA_VERSION', '')) == 0, 'Not running GPU tests') diff --git a/tests/test_nvidia.py b/tests/test_nvidia.py new file mode 100644 index 00000000..a6f3c428 --- /dev/null +++ b/tests/test_nvidia.py @@ -0,0 +1,26 @@ +"""Tests for general GPU support""" + +import os +import subprocess +import sys +import unittest + +import common +import pycuda.driver + +from common import gpu_test + + +class TestNvidia(unittest.TestCase): + @gpu_test + def test_system_management_interface(self): + smi = subprocess.Popen(['nvidia-smi'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + smi.communicate() + self.assertEqual(0, smi.returncode) + + @gpu_test + def test_pycuda(self): + pycuda.driver.init() + gpu_name = pycuda.driver.Device(0).name() + self.assertNotEqual(0, len(gpu_name)) + diff --git a/tests/test_pytorch.py b/tests/test_pytorch.py index 4f250572..16b17364 100644 --- a/tests/test_pytorch.py +++ b/tests/test_pytorch.py @@ -4,6 +4,9 @@ import torch.nn as tnn import torch.autograd as autograd +from common import gpu_test + + class TestPyTorch(unittest.TestCase): # PyTorch smoke test based on http://pytorch.org/tutorials/beginner/nlp/deep_learning_tutorial.html def test_nn(self): @@ -11,3 +14,12 @@ def test_nn(self): linear_torch = tnn.Linear(5,3) data_torch = autograd.Variable(torch.randn(2, 5)) linear_torch(data_torch) + + @gpu_test + def test_gpu(self): + cuda = torch.device('cuda') + a = torch.tensor([1., 2.], device=cuda) + + result = a.sum() + + self.assertEqual(torch.tensor([3.], device=cuda), result) diff --git a/tests/test_tensorflow.py b/tests/test_tensorflow.py index 120b773d..10a66837 100644 --- a/tests/test_tensorflow.py +++ b/tests/test_tensorflow.py @@ -1,7 +1,11 @@ import unittest +import numpy as np import tensorflow as tf +from common import gpu_test + + class TestTensorflow(unittest.TestCase): def test_addition(self): op = tf.add(2, 3) @@ -10,3 +14,15 @@ def test_addition(self): result = sess.run(op) self.assertEqual(5, result) + + @gpu_test + def test_gpu(self): + with tf.device('/gpu:0'): + m1 = tf.constant([2.0, 3.0], shape=[1, 2], name='a') + m2 = tf.constant([3.0, 4.0], shape=[2, 1], name='b') + op = tf.matmul(m1, m2) + + sess = tf.Session() + result = sess.run(op) + + self.assertEqual(np.array(18, dtype=np.float32, ndmin=2), result) From f9f5ee8002c66e20ff04d35434156fedeb90dbfa Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 17 Sep 2018 20:52:42 +0000 Subject: [PATCH 041/251] Fix nvidia tests for CPU --- tests/test_nvidia.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/test_nvidia.py b/tests/test_nvidia.py index a6f3c428..b5ad61ce 100644 --- a/tests/test_nvidia.py +++ b/tests/test_nvidia.py @@ -5,9 +5,6 @@ import sys import unittest -import common -import pycuda.driver - from common import gpu_test @@ -19,7 +16,8 @@ def 
test_system_management_interface(self): self.assertEqual(0, smi.returncode) @gpu_test - def test_pycuda(self): + def test_pycuda(self): + import pycuda.driver pycuda.driver.init() gpu_name = pycuda.driver.Device(0).name() self.assertNotEqual(0, len(gpu_name)) From 228c99e27736236658c01a54d4e9128a2d4f1173 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 17 Sep 2018 20:53:24 +0000 Subject: [PATCH 042/251] Update build, test, push script to support gpu --- build | 29 ++++++++++++++++++++++++----- push | 25 ++++++++++++++++++++++--- test | 24 +++++++++++++++++++++--- 3 files changed, 67 insertions(+), 11 deletions(-) diff --git a/build b/build index d3f34959..8d3fc81f 100755 --- a/build +++ b/build @@ -1,10 +1,29 @@ #!/bin/bash +# +# Build a new Python Docker image. +# +# Usage: +# ./build [--gpu] [--use-cache] +# +# Options: +# --gpu: Build an image with GPU support. +# --use-cache: Use layer cache when building a new image. +# set -e +set -x + +CACHE_FLAG="--no-cache" +IMAGE_TAG="kaggle/python-build" +DOCKERFILE="Dockerfile" + +if [[ "$1" == "--gpu" ]]; then + IMAGE_TAG="kaggle/python-gpu-build" + DOCKERFILE="gpu.Dockerfile" + shift +fi -# Default behavior is to do everything from scratch. -# The --use-cache option is useful if you're iterating on a broken build. if [[ "$1" == "--use-cache" ]]; then - docker build --rm -t kaggle/python-build . -else - docker build --pull --rm --no-cache -t kaggle/python-build . + CACHE_FLAG="" fi + +docker build --rm -t $CACHE_FLAG $IMAGE_TAG -f $DOCKERFILE . diff --git a/push b/push index ac3d2136..85f11611 100755 --- a/push +++ b/push @@ -1,13 +1,32 @@ #!/bin/bash +# +# Push a newly-built image with the given label to gcr.io and DockerHub. +# +# Usage: +# ./push [--gpu] [label] +# +# Description: +# label: Docker image label. Defaults to "testing". +# +# Options: +# --gpu: Psuh the image with GPU support. +# set -e -set +x +set -x + +IMAGE_TAG="kaggle/python-build:latest" + +if [[ "$1" == "--gpu" ]]; then + IMAGE_TAG="kaggle/python-gpu-build:latest" + shift +fi label=${1:-testing} -docker tag kaggle/python-build:latest gcr.io/kaggle-images/python:${label} +docker tag $IMAGE_TAG gcr.io/kaggle-images/python:${label} gcloud docker -- push gcr.io/kaggle-images/python:${label} if [[ "$label" == "latest" ]]; then - docker tag kaggle/python-build:latest kaggle/python:${label} + docker tag $IMAGE_TAG kaggle/python:${label} docker push kaggle/python:${label} fi diff --git a/test b/test index 2528b6af..080fc5ff 100755 --- a/test +++ b/test @@ -1,6 +1,24 @@ #!/bin/bash +# +# Run tests for a newly-built Python Docker image. +# By default, it runs the tests for the CPU image. 
+# +# Usage: +# ./test [--gpu] +# +# Options: +# --gpu: Run tests for the GPU image +# + set -e -set +x +set -x + +IMAGE_TAG="kaggle/python-build" + +if [[ "$1" == "--gpu" ]]; then + IMAGE_TAG="kaggle/python-gpu-build" + shift +fi rm -rf /tmp/python-build docker rm jupyter_test || true @@ -8,7 +26,7 @@ mkdir -p /tmp/python-build/tmp mkdir -p /tmp/python-build/devshm mkdir -p /tmp/python-build/working # Check that Jupyter server can run; if it dies on startup, the `docker kill` command will throw an error -docker run -d --name=jupyter_test --read-only --net=none -e HOME=/tmp -v $PWD:/input:ro -v /tmp/python-build/working:/working -w=/working -v /tmp/python-build/tmp:/tmp -v /tmp/python-build/devshm:/dev/shm kaggle/python-build jupyter notebook --allow-root --ip="*" +docker run -d --name=jupyter_test --read-only --net=none -e HOME=/tmp -v $PWD:/input:ro -v /tmp/python-build/working:/working -w=/working -v /tmp/python-build/tmp:/tmp -v /tmp/python-build/devshm:/dev/shm $IMAGE_TAG jupyter notebook --allow-root --ip="*" sleep 3 docker kill jupyter_test && docker rm jupyter_test docker run --rm -t --read-only --net=none \ @@ -18,5 +36,5 @@ docker run --rm -t --read-only --net=none \ -v $PWD:/input:ro -v /tmp/python-build/working:/working \ -v /tmp/python-build/tmp:/tmp -v /tmp/python-build/devshm:/dev/shm \ -w=/working \ - kaggle/python-build \ + $IMAGE_TAG \ /bin/bash -c 'python -m unittest discover -s /input/tests' From 18b07e269a53272ba189819bcf5a8809f8dbb062 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 17 Sep 2018 23:20:29 +0000 Subject: [PATCH 043/251] fix push and test script --- push | 23 +++++++++++++---------- test | 3 +++ 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/push b/push index 85f11611..a938abb5 100755 --- a/push +++ b/push @@ -3,10 +3,10 @@ # Push a newly-built image with the given label to gcr.io and DockerHub. # # Usage: -# ./push [--gpu] [label] +# ./push [--gpu] [LABEL] # # Description: -# label: Docker image label. Defaults to "testing". +# LABEL: Docker image label. Defaults to "testing". # # Options: # --gpu: Psuh the image with GPU support. @@ -14,19 +14,22 @@ set -e set -x -IMAGE_TAG="kaggle/python-build:latest" +SOURCE_IMAGE="kaggle/python-build" +TARGET_IMAGE="gcr.io/kaggle-images/python" if [[ "$1" == "--gpu" ]]; then - IMAGE_TAG="kaggle/python-gpu-build:latest" + SOURCE_IMAGE="kaggle/python-gpu-build" + TARGET_IMAGE="gcr.io/kaggle-private-byod/python" shift fi -label=${1:-testing} +LABEL=${1:-testing} -docker tag $IMAGE_TAG gcr.io/kaggle-images/python:${label} -gcloud docker -- push gcr.io/kaggle-images/python:${label} +docker tag $SOURCE_IMAGE:latest $TARGET_IMAGE:$LABEL +gcloud docker -- push $TARGET_IMAGE:$LABEL -if [[ "$label" == "latest" ]]; then - docker tag $IMAGE_TAG kaggle/python:${label} - docker push kaggle/python:${label} +# Only CPU images are made public at this time. 
+if [[ "$LABEL" == "latest" && SOURCE_IMAGE = "kaggle/python-build" ]]; then + docker tag $SOURCE_IMAGE:latest kaggle/python:$LABEL + docker push kaggle/python:$LABEL fi diff --git a/test b/test index 080fc5ff..b2086cc8 100755 --- a/test +++ b/test @@ -14,9 +14,11 @@ set -e set -x IMAGE_TAG="kaggle/python-build" +ADDITONAL_OPTS="" if [[ "$1" == "--gpu" ]]; then IMAGE_TAG="kaggle/python-gpu-build" + ADDITONAL_OPTS="-v /tmp/empty_dir:/usr/local/cuda/lib64/stubs:ro" shift fi @@ -36,5 +38,6 @@ docker run --rm -t --read-only --net=none \ -v $PWD:/input:ro -v /tmp/python-build/working:/working \ -v /tmp/python-build/tmp:/tmp -v /tmp/python-build/devshm:/dev/shm \ -w=/working \ + $ADDITONAL_OPTS \ $IMAGE_TAG \ /bin/bash -c 'python -m unittest discover -s /input/tests' From c57c0896a88f45fce43ed9f8aee0bb8cacb66a7e Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 18 Sep 2018 21:41:50 +0000 Subject: [PATCH 044/251] improve pytorch gpu tests --- tests/test_pytorch.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/tests/test_pytorch.py b/tests/test_pytorch.py index 16b17364..61dd71ff 100644 --- a/tests/test_pytorch.py +++ b/tests/test_pytorch.py @@ -16,10 +16,20 @@ def test_nn(self): linear_torch(data_torch) @gpu_test - def test_gpu(self): + def test_gpu_computation(self): cuda = torch.device('cuda') a = torch.tensor([1., 2.], device=cuda) result = a.sum() self.assertEqual(torch.tensor([3.], device=cuda), result) + + @gpu_test + def test_cuda_nn(self): + # These throw if cuda is misconfigured + tnn.GRUCell(10,10).cuda() + tnn.RNNCell(10,10).cuda() + tnn.LSTMCell(10,10).cuda() + tnn.GRU(10,10).cuda() + tnn.LSTM(10,10).cuda() + tnn.RNN(10,10).cuda() From fb787224199d5ecd09f8724f13ec5aaaede4cfdd Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 18 Sep 2018 21:46:55 +0000 Subject: [PATCH 045/251] fix build command --- build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build b/build index 8d3fc81f..c2b44598 100755 --- a/build +++ b/build @@ -26,4 +26,4 @@ if [[ "$1" == "--use-cache" ]]; then CACHE_FLAG="" fi -docker build --rm -t $CACHE_FLAG $IMAGE_TAG -f $DOCKERFILE . +docker build --rm $CACHE_FLAG -t $IMAGE_TAG -f $DOCKERFILE . 
From ba4a5237f2009c7c5a785f0dde864a9e1954920b Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 18 Sep 2018 21:48:46 +0000 Subject: [PATCH 046/251] Install tensorflow from pre-built wheel --- Dockerfile | 34 ++++++++++++---------------------- 1 file changed, 12 insertions(+), 22 deletions(-) diff --git a/Dockerfile b/Dockerfile index 45290385..1e2dce6e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,17 @@ +FROM gcr.io/kaggle-images/python-tensorflow-whl:1.11.0-py36 as tensorflow_whl FROM continuumio/anaconda3:5.0.1 +# This is necessary to for apt to access HTTPS sources +RUN apt-get update && \ + apt-get install apt-transport-https + ADD patches/ /tmp/patches/ ADD patches/nbconvert-extensions.tpl /opt/kaggle/nbconvert-extensions.tpl # Use a fixed apt-get repo to stop intermittent failures due to flaky httpredir connections, # as described by Lionel Chan at http://stackoverflow.com/a/37426929/5881346 RUN sed -i "s/httpredir.debian.org/debian.uchicago.edu/" /etc/apt/sources.list && \ - apt-get update && apt-get install -y build-essential && \ + apt-get update && apt-get install -y build-essential unzip && \ # https://stackoverflow.com/a/46498173 conda update -y conda && conda update -y python && \ pip install --upgrade pip && \ @@ -32,33 +37,18 @@ RUN pip install seaborn python-dateutil dask pytagcloud pyyaml joblib \ # clean up ImageMagick source files cd ../ && rm -rf ImageMagick* -RUN apt-get update && apt-get install -y python-software-properties zip && \ - echo "deb http://ppa.launchpad.net/webupd8team/java/ubuntu precise main" | tee -a /etc/apt/sources.list && \ - echo "deb-src http://ppa.launchpad.net/webupd8team/java/ubuntu precise main" | tee -a /etc/apt/sources.list && \ - apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys EEA14886 C857C906 2B90D010 && \ - apt-get update && \ - echo debconf shared/accepted-oracle-license-v1-1 select true | debconf-set-selections && \ - echo debconf shared/accepted-oracle-license-v1-1 seen true | debconf-set-selections && \ - apt-get install -y oracle-java8-installer && \ - echo "deb [arch=amd64] http://storage.googleapis.com/bazel-apt stable jdk1.8" | tee /etc/apt/sources.list.d/bazel.list && \ - curl https://bazel.build/bazel-release.pub.gpg | apt-key add - && \ - apt-get update && apt-get install -y bazel && \ - apt-get upgrade -y bazel - # Tensorflow doesn't support python 3.7 yet. 
See https://github.com/tensorflow/tensorflow/issues/20517 # Fix to install tf 1.10:: Downgrade python 3.7->3.6.6 and downgrade Pandas 0.23.3->0.23.2 RUN conda install -y python=3.6.6 && \ pip install pandas==0.23.2 && \ # Another fix for TF 1.10 https://github.com/tensorflow/tensorflow/issues/21518 pip install keras_applications==1.0.4 --no-deps && \ - pip install keras_preprocessing==1.0.2 --no-deps && \ - cd /usr/local/src && \ - git clone https://github.com/tensorflow/tensorflow && \ - cd tensorflow && \ - cat /dev/null | ./configure && \ - bazel build --config=opt //tensorflow/tools/pip_package:build_pip_package && \ - bazel-bin/tensorflow/tools/pip_package/build_pip_package /tmp/tensorflow_pkg && \ - pip install /tmp/tensorflow_pkg/tensorflow*.whl + pip install keras_preprocessing==1.0.2 --no-deps + +# Install tensorflow from a pre-built wheel +COPY --from=tensorflow_whl /tmp/tensorflow_cpu/*.whl /tmp/tensorflow_cpu/ +RUN pip install /tmp/tensorflow_cpu/tensorflow*.whl && \ + rm -rf /tmp/tensorflow_cpu RUN apt-get install -y libfreetype6-dev && \ apt-get install -y libglib2.0-0 libxext6 libsm6 libxrender1 libfontconfig1 --fix-missing && \ From 5ee32f807974de2d452e8d251a54d071eccdff72 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 18 Sep 2018 21:49:10 +0000 Subject: [PATCH 047/251] GPU Dockerfile --- gpu.Dockerfile | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 gpu.Dockerfile diff --git a/gpu.Dockerfile b/gpu.Dockerfile new file mode 100644 index 00000000..89840ce0 --- /dev/null +++ b/gpu.Dockerfile @@ -0,0 +1,48 @@ +FROM nvidia/cuda:9.1-cudnn7-devel-ubuntu16.04 AS nvidia +FROM gcr.io/kaggle-images/python-tensorflow-whl:1.11.0-py36 as tensorflow_whl +FROM kaggle/python-build + +# Cuda support +COPY --from=nvidia /etc/apt/sources.list.d/cuda.list /etc/apt/sources.list.d/ +COPY --from=nvidia /etc/apt/sources.list.d/nvidia-ml.list /etc/apt/sources.list.d/ +COPY --from=nvidia /etc/apt/trusted.gpg /etc/apt/trusted.gpg.d/cuda.gpg + +ENV CUDA_VERSION=9.1.85 +ENV CUDA_PKG_VERSION=9-1=$CUDA_VERSION-1 +LABEL com.nvidia.volumes.needed="nvidia_driver" +LABEL com.nvidia.cuda.version="${CUDA_VERSION}" +ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH} +# The stub is useful to us both for built-time linking and run-time linking, on CPU-only systems. +# When intended to be used with actual GPUs, make sure to (besides providing access to the host +# CUDA user libraries, either manually or through the use of nvidia-docker) exclude them. One +# convenient way to do so is to obscure its contents by a bind mount: +# docker run .... -v /non-existing-directory:/usr/local/cuda/lib64/stubs:ro ... 
+ENV LD_LIBRARY_PATH="/usr/local/nvidia/lib64:/usr/local/cuda/lib64:/usr/local/cuda/lib64/stubs" +ENV NVIDIA_VISIBLE_DEVICES=all +ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility +ENV NVIDIA_REQUIRE_CUDA="cuda>=9.0" +RUN apt-get update && apt-get install -y --no-install-recommends \ + cuda-cudart-$CUDA_PKG_VERSION \ + cuda-libraries-$CUDA_PKG_VERSION \ + cuda-libraries-dev-$CUDA_PKG_VERSION \ + cuda-nvml-dev-$CUDA_PKG_VERSION \ + cuda-minimal-build-$CUDA_PKG_VERSION \ + cuda-command-line-tools-$CUDA_PKG_VERSION \ + libcudnn7=7.0.5.15-1+cuda9.1 \ + libcudnn7-dev=7.0.5.15-1+cuda9.1 \ + libnccl2=2.2.12-1+cuda9.1 \ + libnccl-dev=2.2.12-1+cuda9.1 && \ + ln -s /usr/local/cuda-9.1 /usr/local/cuda && \ + ln -s /usr/local/cuda/lib64/stubs/libcuda.so /usr/local/cuda/lib64/stubs/libcuda.so.1 + +# Reinstall packages with a separate version for GPU support +# Tensorflow +COPY --from=tensorflow_whl /tmp/tensorflow_gpu/*.whl /tmp/tensorflow_gpu/ +RUN pip uninstall -y tensorflow && \ + pip install /tmp/tensorflow_gpu/tensorflow*.whl && \ + rm -rf /tmp/tensorflow_gpu && \ + conda uninstall -y pytorch-cpu torchvision-cpu && \ + conda install -y pytorch torchvision -c pytorch + +# Install GPU-only packages +RUN pip install pycuda From c8dd5650433de9db922457d537fa34d29b706e23 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 18 Sep 2018 21:57:56 +0000 Subject: [PATCH 048/251] Update Jenkins config to support GPU build --- Jenkinsfile | 31 ++++++++++++++++++++++++++++--- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index cef5a0b4..00996d1a 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -17,7 +17,7 @@ pipeline { } stages { - stage('Docker Build') { + stage('Docker CPU Build') { steps { slackSend color: 'none', message: "*<${env.BUILD_URL}console|${JOB_NAME} docker build>* ${GIT_COMMIT_SUMMARY}", channel: env.SLACK_CHANNEL sh '''#!/bin/bash @@ -28,7 +28,7 @@ pipeline { } } - stage('Test Image') { + stage('Test CPU Image') { steps { slackSend color: 'none', message: "*<${env.BUILD_URL}console|${JOB_NAME} test image>* ${GIT_COMMIT_SUMMARY}", channel: env.SLACK_CHANNEL sh '''#!/bin/bash @@ -39,8 +39,32 @@ pipeline { ''' } } + + stage('Docker GPU Build') { + steps { + slackSend color: 'none', message: "*<${env.BUILD_URL}console|${JOB_NAME} docker build>* ${GIT_COMMIT_SUMMARY}", channel: env.SLACK_CHANNEL + sh '''#!/bin/bash + set -exo pipefail + + ./build --gpu | ts + ''' + } + } + + stage('Test GPU Image') { + agent { label 'ephemeral-linux-gpu' } + steps { + slackSend color: 'none', message: "*<${env.BUILD_URL}console|${JOB_NAME} test image>* ${GIT_COMMIT_SUMMARY}", channel: env.SLACK_CHANNEL + sh '''#!/bin/bash + set -exo pipefail + + date + ./test --gpu + ''' + } + } - stage('Push Image') { + stage('Push Images') { steps { slackSend color: 'none', message: "*<${env.BUILD_URL}console|${JOB_NAME} pushing image>* ${GIT_COMMIT_SUMMARY}", channel: env.SLACK_CHANNEL sh '''#!/bin/bash @@ -48,6 +72,7 @@ pipeline { date ./push staging + ./push --gpu staging ''' } } From 6cfb569a70eecb4264f9a398a76d63a58c51e79c Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 19 Sep 2018 17:00:59 +0000 Subject: [PATCH 049/251] Fix typo --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 1e2dce6e..695aa187 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ FROM gcr.io/kaggle-images/python-tensorflow-whl:1.11.0-py36 as tensorflow_whl FROM continuumio/anaconda3:5.0.1 -# This is necessary to for apt to access HTTPS 
sources +# This is necessary for apt to access HTTPS sources RUN apt-get update && \ apt-get install apt-transport-https From 408cc24ef2ed6e705b75ad472958304398f415d1 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 19 Sep 2018 17:01:21 +0000 Subject: [PATCH 050/251] Make our helper scripts more robust --- build | 60 +++++++++++++++++++++++++++++++++++++++-------------------- push | 60 ++++++++++++++++++++++++++++++++++++++--------------------- test | 47 +++++++++++++++++++++++++++++++++++++--------- 3 files changed, 117 insertions(+), 50 deletions(-) diff --git a/build b/build index c2b44598..8bcd1a0e 100755 --- a/build +++ b/build @@ -1,29 +1,49 @@ #!/bin/bash -# -# Build a new Python Docker image. -# -# Usage: -# ./build [--gpu] [--use-cache] -# -# Options: -# --gpu: Build an image with GPU support. -# --use-cache: Use layer cache when building a new image. -# set -e -set -x + +usage() { +cat << EOF +Usage: $0 [OPTIONS] +Build a new Python Docker image. + +Options: + -g, --gpu Build an image with GPU support. + -c, --use-cache Use layer cache when building a new image. +EOF +} CACHE_FLAG="--no-cache" -IMAGE_TAG="kaggle/python-build" DOCKERFILE="Dockerfile" +IMAGE_TAG="kaggle/python-build" + +while :; do + case "$1" in + -h|--help) + usage + exit + ;; + -g|--gpu) + IMAGE_TAG="kaggle/python-gpu-build" + DOCKERFILE="gpu.Dockerfile" + ;; + -c|--use-cache) + CACHE_FLAG="" + ;; + -?*) + usage + printf 'ERROR: Unknown option: %s\n' "$1" >&2 + exit + ;; + *) + break + esac -if [[ "$1" == "--gpu" ]]; then - IMAGE_TAG="kaggle/python-gpu-build" - DOCKERFILE="gpu.Dockerfile" shift -fi +done -if [[ "$1" == "--use-cache" ]]; then - CACHE_FLAG="" -fi +readonly CACHE_FLAG +readonly DOCKERFILE +readonly IMAGE_TAG -docker build --rm $CACHE_FLAG -t $IMAGE_TAG -f $DOCKERFILE . +set -x +docker build --rm $CACHE_FLAG -t "$IMAGE_TAG" -f "$DOCKERFILE" . diff --git a/push b/push index a938abb5..79ad8f13 100755 --- a/push +++ b/push @@ -1,35 +1,53 @@ #!/bin/bash -# -# Push a newly-built image with the given label to gcr.io and DockerHub. -# -# Usage: -# ./push [--gpu] [LABEL] -# -# Description: -# LABEL: Docker image label. Defaults to "testing". -# -# Options: -# --gpu: Psuh the image with GPU support. -# set -e -set -x +set -e + +usage() { +cat << EOF +Usage: $0 [OPTIONS] [LABEL] +Push a newly-built image with the given LABEL to gcr.io and DockerHub. + +Options: + -g, --gpu Push the image with GPU support. +EOF +} SOURCE_IMAGE="kaggle/python-build" TARGET_IMAGE="gcr.io/kaggle-images/python" -if [[ "$1" == "--gpu" ]]; then - SOURCE_IMAGE="kaggle/python-gpu-build" - TARGET_IMAGE="gcr.io/kaggle-private-byod/python" +while :; do + case "$1" in + -h|--help) + usage + exit + ;; + -g|--gpu) + SOURCE_IMAGE="kaggle/python-gpu-build" + TARGET_IMAGE="gcr.io/kaggle-private-byod/python" + ;; + -?*) + usage + printf 'ERROR: Unknown option: %s\n' "$1" >&2 + exit + ;; + *) + break + esac + shift -fi +done LABEL=${1:-testing} -docker tag $SOURCE_IMAGE:latest $TARGET_IMAGE:$LABEL -gcloud docker -- push $TARGET_IMAGE:$LABEL +readonly SOURCE_IMAGE +readonly TARGET_IMAGE +readonly LABEL + +set -x +docker tag "${SOURCE_IMAGE}:latest" "${TARGET_IMAGE}:${LABEL}" +gcloud docker -- push "${TARGET_IMAGE}:${LABEL}" # Only CPU images are made public at this time. 
if [[ "$LABEL" == "latest" && SOURCE_IMAGE = "kaggle/python-build" ]]; then - docker tag $SOURCE_IMAGE:latest kaggle/python:$LABEL - docker push kaggle/python:$LABEL + docker tag "${SOURCE_IMAGE}:latest" "kaggle/python:${LABEL}" + docker push "kaggle/python:${LABEL}" fi diff --git a/test b/test index b2086cc8..c336bb5e 100755 --- a/test +++ b/test @@ -1,7 +1,7 @@ #!/bin/bash # -# Run tests for a newly-built Python Docker image. -# By default, it runs the tests for the CPU image. +# +# # # Usage: # ./test [--gpu] @@ -9,19 +9,48 @@ # Options: # --gpu: Run tests for the GPU image # - set -e -set -x + +usage() { +cat << EOF +Usage: $0 [OPTIONS] +Run tests for a newly-built Python Docker image. +By default, it runs the tests for the CPU image. + +Options: + -g, --gpu Run tests for the GPU image. +EOF +} IMAGE_TAG="kaggle/python-build" ADDITONAL_OPTS="" -if [[ "$1" == "--gpu" ]]; then - IMAGE_TAG="kaggle/python-gpu-build" - ADDITONAL_OPTS="-v /tmp/empty_dir:/usr/local/cuda/lib64/stubs:ro" +while :; do + case "$1" in + -h|--help) + usage + exit + ;; + -g|--gpu) + IMAGE_TAG="kaggle/python-gpu-build" + ADDITONAL_OPTS="-v /tmp/empty_dir:/usr/local/cuda/lib64/stubs:ro" + ;; + -?*) + usage + printf 'ERROR: Unknown option: %s\n' "$1" >&2 + exit + ;; + *) + break + esac + shift -fi +done +readonly IMAGE_TAG +readonly ADDITONAL_OPTS + +set -x rm -rf /tmp/python-build docker rm jupyter_test || true mkdir -p /tmp/python-build/tmp @@ -39,5 +68,5 @@ docker run --rm -t --read-only --net=none \ -v /tmp/python-build/tmp:/tmp -v /tmp/python-build/devshm:/dev/shm \ -w=/working \ $ADDITONAL_OPTS \ - $IMAGE_TAG \ + "$IMAGE_TAG" \ /bin/bash -c 'python -m unittest discover -s /input/tests' From 1c2c907ce9f9d7e3f7a6ec1e3be01002b0caf068 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 19 Sep 2018 17:06:42 +0000 Subject: [PATCH 051/251] Remove extra comments --- test | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/test b/test index c336bb5e..2545ce39 100755 --- a/test +++ b/test @@ -1,14 +1,4 @@ #!/bin/bash -# -# -# -# -# Usage: -# ./test [--gpu] -# -# Options: -# --gpu: Run tests for the GPU image -# set -e usage() { From 77739d38954caa0fdc1cb34907b3e563878f7d81 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 19 Sep 2018 17:09:06 +0000 Subject: [PATCH 052/251] Minor fixes --- test | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test b/test index 2545ce39..42bc2c11 100755 --- a/test +++ b/test @@ -47,7 +47,7 @@ mkdir -p /tmp/python-build/tmp mkdir -p /tmp/python-build/devshm mkdir -p /tmp/python-build/working # Check that Jupyter server can run; if it dies on startup, the `docker kill` command will throw an error -docker run -d --name=jupyter_test --read-only --net=none -e HOME=/tmp -v $PWD:/input:ro -v /tmp/python-build/working:/working -w=/working -v /tmp/python-build/tmp:/tmp -v /tmp/python-build/devshm:/dev/shm $IMAGE_TAG jupyter notebook --allow-root --ip="*" +docker run -d --name=jupyter_test --read-only --net=none -e HOME=/tmp -v $PWD:/input:ro -v /tmp/python-build/working:/working -w=/working -v /tmp/python-build/tmp:/tmp -v /tmp/python-build/devshm:/dev/shm "$IMAGE_TAG" jupyter notebook --allow-root --ip="*" sleep 3 docker kill jupyter_test && docker rm jupyter_test docker run --rm -t --read-only --net=none \ From 9cb9f600687c14e2a3921c0505fc7cc0afddbafb Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 19 Sep 2018 18:36:35 +0000 Subject: [PATCH 053/251] Fix Jenkins config --- Jenkinsfile | 24 ++++++++++++++++++++++-- gpu.Dockerfile | 2 +- 2 files 
changed, 23 insertions(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 00996d1a..07a455da 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -3,6 +3,7 @@ String cron_string = BRANCH_NAME == "master" ? "H 12 * * 1-5" : "" pipeline { agent { label 'ephemeral-linux' } options { + // The Build GPU stage depends on the image from the Push CPU stage disableConcurrentBuilds() } triggers { @@ -39,8 +40,27 @@ pipeline { ''' } } + + stage('Push CPU Image') { + steps { + slackSend color: 'none', message: "*<${env.BUILD_URL}console|${JOB_NAME} pushing image>* ${GIT_COMMIT_SUMMARY}", channel: env.SLACK_CHANNEL + sh '''#!/bin/bash + set -exo pipefail + + date + ./push staging + ''' + } + } stage('Docker GPU Build') { + // A GPU is not required to build this image. However, in our current setup, + // the default runtime is set to nvidia (as opposed to runc) and there + // is no option to specify a runtime for the `docker build` command. + // + // TODO(rosbo) don't set `nvidia` as the default runtime and use the + // `--runtime=nvidia` flag for the `docker run` command when GPU support is needed. + agent { label 'ephemeral-linux-gpu' } steps { slackSend color: 'none', message: "*<${env.BUILD_URL}console|${JOB_NAME} docker build>* ${GIT_COMMIT_SUMMARY}", channel: env.SLACK_CHANNEL sh '''#!/bin/bash @@ -64,14 +84,14 @@ pipeline { } } - stage('Push Images') { + stage('Push GPU Image') { + agent { label 'ephemeral-linux-gpu' } steps { slackSend color: 'none', message: "*<${env.BUILD_URL}console|${JOB_NAME} pushing image>* ${GIT_COMMIT_SUMMARY}", channel: env.SLACK_CHANNEL sh '''#!/bin/bash set -exo pipefail date - ./push staging ./push --gpu staging ''' } diff --git a/gpu.Dockerfile b/gpu.Dockerfile index 89840ce0..6124280f 100644 --- a/gpu.Dockerfile +++ b/gpu.Dockerfile @@ -1,6 +1,6 @@ FROM nvidia/cuda:9.1-cudnn7-devel-ubuntu16.04 AS nvidia FROM gcr.io/kaggle-images/python-tensorflow-whl:1.11.0-py36 as tensorflow_whl -FROM kaggle/python-build +FROM gcr.io/kaggle-images/python:staging # Cuda support COPY --from=nvidia /etc/apt/sources.list.d/cuda.list /etc/apt/sources.list.d/ From c17404600129b767bc04b8bece3441fdb3c5ddaf Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 20 Sep 2018 10:59:51 -0700 Subject: [PATCH 054/251] Remove duplicate install of seaborn already being install at line 20. --- Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 695aa187..481b656a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -439,7 +439,6 @@ RUN pip install bcolz && \ pip install PyYAML && \ pip install pyzmq && \ pip install qtconsole && \ - pip install seaborn && \ pip install simplegeneric && \ pip install six && \ pip install terminado && \ From 8ff8232d975e2b0a4b8cc11c4083624d2cb9b774 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 21 Sep 2018 11:22:44 -0700 Subject: [PATCH 055/251] Update Dockerfile --- Dockerfile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 481b656a..4560b620 100644 --- a/Dockerfile +++ b/Dockerfile @@ -63,7 +63,10 @@ RUN apt-get install -y libfreetype6-dev && \ cd /usr/local/src && mkdir xgboost && cd xgboost && \ git clone --depth 1 --recursive https://github.com/dmlc/xgboost.git && cd xgboost && \ make && cd python-package && python setup.py install && \ - pip install lightgbm && \ + # Pinning to an older version. The latest release (2.2.0) depends on an old GLIBC version. 
+ # This cause the package to fail at import time with "version `GLIBC_2.23' not found" + # TODO: Unpin once the latest release is fixed. + pip install lightgbm==2.1.2 && \ #lasagne cd /usr/local/src && mkdir Lasagne && cd Lasagne && \ git clone --depth 1 https://github.com/Lasagne/Lasagne.git && cd Lasagne && \ From 8085c063ffcdfa60e24f17a8cd961d25f116a5d5 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 21 Sep 2018 18:53:17 +0000 Subject: [PATCH 056/251] Add test for the lightgbm package --- tests/test_lightgbm.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 tests/test_lightgbm.py diff --git a/tests/test_lightgbm.py b/tests/test_lightgbm.py new file mode 100644 index 00000000..5727cacd --- /dev/null +++ b/tests/test_lightgbm.py @@ -0,0 +1,33 @@ +import unittest + +import lightgbm as lgb + +from sklearn.datasets import load_iris + +class TestLightgbm(unittest.TestCase): + # Based on the "simple_example" from their documentation: + # https://github.com/Microsoft/LightGBM/blob/master/examples/python-guide/simple_example.py + def test_simple(self): + # Load a dataset aleady on disk + iris = load_iris() + + lgb_train = lgb.Dataset(iris.data[:100], iris.target[:100]) + lgb_eval = lgb.Dataset(iris.data[100:], iris.target[100:], reference=lgb_train) + + params = { + 'task': 'train', + 'boosting_type': 'gbdt', + 'objective': 'regression', + 'metric': {'l2', 'auc'}, + 'num_leaves': 31, + 'learning_rate': 0.05, + 'feature_fraction': 0.9, + 'bagging_fraction': 0.8, + 'bagging_freq': 5, + 'verbose': 0 + } + + # Run only one round for faster test + gbm = lgb.train(params, lgb_train, num_boost_round=1, valid_sets=lgb_eval, early_stopping_rounds=1) + + self.assertEqual(1, gbm.best_iteration) From 4fb5b680af7954806ab6d372033834c42d505918 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 21 Sep 2018 18:56:34 +0000 Subject: [PATCH 057/251] formatting --- tests/test_lightgbm.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/test_lightgbm.py b/tests/test_lightgbm.py index 5727cacd..213eb0ca 100644 --- a/tests/test_lightgbm.py +++ b/tests/test_lightgbm.py @@ -28,6 +28,10 @@ def test_simple(self): } # Run only one round for faster test - gbm = lgb.train(params, lgb_train, num_boost_round=1, valid_sets=lgb_eval, early_stopping_rounds=1) + gbm = lgb.train(params, + lgb_train, + num_boost_round=1, + valid_sets=lgb_eval, + early_stopping_rounds=1) self.assertEqual(1, gbm.best_iteration) From a15031bac4beba79ec44bf52e261e9b0616be0fd Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 21 Sep 2018 14:09:29 -0700 Subject: [PATCH 058/251] Always pull the latest parent images at build --- build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build b/build index 8bcd1a0e..0b8c662b 100755 --- a/build +++ b/build @@ -46,4 +46,4 @@ readonly DOCKERFILE readonly IMAGE_TAG set -x -docker build --rm $CACHE_FLAG -t "$IMAGE_TAG" -f "$DOCKERFILE" . +docker build --rm --pull $CACHE_FLAG -t "$IMAGE_TAG" -f "$DOCKERFILE" . 
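For orientation after the refactor: once patches 050 and 058 are in, the three helper scripts are driven as sketched below. This is an editorial bash illustration — the comments restate behavior defined in the scripts above, and "staging" is simply the label the Jenkinsfile passes.

#!/bin/bash
# Build: after patch 058 the base images are always re-pulled; the layer cache
# is skipped unless -c/--use-cache is given (patch 050):
./build                # docker build --rm --pull --no-cache -t kaggle/python-build -f Dockerfile .
./build --use-cache    # same, but without --no-cache
./build --gpu          # builds gpu.Dockerfile and tags kaggle/python-gpu-build

# Test: the GPU run bind-mounts an empty directory over the CUDA stubs so the
# container resolves the real driver libraries instead (patch 050):
./test
./test --gpu

# Push: CPU images go to gcr.io/kaggle-images/python, GPU images to
# gcr.io/kaggle-private-byod/python; LABEL defaults to "testing" (patch 050):
./push staging
./push --gpu staging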
From 561f3abad225f95869eefcd85e6528a48d4e7f5a Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 20 Sep 2018 21:22:42 +0000 Subject: [PATCH 059/251] Expand keras tests to cover conv net --- tests/test_keras.py | 36 ++++++++++++++++++++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/tests/test_keras.py b/tests/test_keras.py index ca107563..cdaa7cb9 100644 --- a/tests/test_keras.py +++ b/tests/test_keras.py @@ -1,10 +1,11 @@ import unittest +import numpy as np import pandas as pd from keras.models import Sequential -from keras.layers import Dense -from keras.optimizers import RMSprop +from keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPooling2D +from keras.optimizers import RMSprop, SGD from keras.utils.np_utils import to_categorical class TestKeras(unittest.TestCase): @@ -23,3 +24,34 @@ def test_train(self): metrics=['accuracy']) model.fit(x_train, y_train, epochs=1, batch_size=32) + + def test_conv2d(self): + # Generate dummy data + x_train = np.random.random((100, 100, 100, 3)) + y_train = keras.utils.to_categorical(np.random.randint(10, size=(100, 1)), num_classes=10) + x_test = np.random.random((20, 100, 100, 3)) + y_test = keras.utils.to_categorical(np.random.randint(10, size=(20, 1)), num_classes=10) + + model = Sequential() + # input: 100x100 images with 3 channels -> (100, 100, 3) tensors. + # this applies 32 convolution filters of size 3x3 each. + model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(100, 100, 3))) + model.add(Conv2D(32, (3, 3), activation='relu')) + model.add(MaxPooling2D(pool_size=(2, 2))) + model.add(Dropout(0.25)) + + model.add(Conv2D(64, (3, 3), activation='relu')) + model.add(Conv2D(64, (3, 3), activation='relu')) + model.add(MaxPooling2D(pool_size=(2, 2))) + model.add(Dropout(0.25)) + + model.add(Flatten()) + model.add(Dense(256, activation='relu')) + model.add(Dropout(0.5)) + model.add(Dense(10, activation='softmax')) + + sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) + model.compile(loss='categorical_crossentropy', optimizer=sgd) + + model.fit(x_train, y_train, batch_size=32, epochs=1) + score = model.evaluate(x_test, y_test, batch_size=32) From f9386a24daf660bf6c583016cf63a8d7ea668385 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 20 Sep 2018 22:40:20 +0000 Subject: [PATCH 060/251] use libcudnn7.2 and cuda 9.2 --- gpu.Dockerfile | 30 ++++++++++++++++-------------- tests/test_keras.py | 1 + 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/gpu.Dockerfile b/gpu.Dockerfile index 6124280f..45515596 100644 --- a/gpu.Dockerfile +++ b/gpu.Dockerfile @@ -1,4 +1,4 @@ -FROM nvidia/cuda:9.1-cudnn7-devel-ubuntu16.04 AS nvidia +FROM nvidia/cuda:9.2-cudnn7-devel-ubuntu16.04 AS nvidia FROM gcr.io/kaggle-images/python-tensorflow-whl:1.11.0-py36 as tensorflow_whl FROM gcr.io/kaggle-images/python:staging @@ -7,8 +7,8 @@ COPY --from=nvidia /etc/apt/sources.list.d/cuda.list /etc/apt/sources.list.d/ COPY --from=nvidia /etc/apt/sources.list.d/nvidia-ml.list /etc/apt/sources.list.d/ COPY --from=nvidia /etc/apt/trusted.gpg /etc/apt/trusted.gpg.d/cuda.gpg -ENV CUDA_VERSION=9.1.85 -ENV CUDA_PKG_VERSION=9-1=$CUDA_VERSION-1 +ENV CUDA_VERSION=9.2.88 +ENV CUDA_PKG_VERSION=9-2=$CUDA_VERSION-1 LABEL com.nvidia.volumes.needed="nvidia_driver" LABEL com.nvidia.cuda.version="${CUDA_VERSION}" ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH} @@ -22,17 +22,19 @@ ENV NVIDIA_VISIBLE_DEVICES=all ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility ENV NVIDIA_REQUIRE_CUDA="cuda>=9.0" RUN apt-get 
update && apt-get install -y --no-install-recommends \ - cuda-cudart-$CUDA_PKG_VERSION \ - cuda-libraries-$CUDA_PKG_VERSION \ - cuda-libraries-dev-$CUDA_PKG_VERSION \ - cuda-nvml-dev-$CUDA_PKG_VERSION \ - cuda-minimal-build-$CUDA_PKG_VERSION \ - cuda-command-line-tools-$CUDA_PKG_VERSION \ - libcudnn7=7.0.5.15-1+cuda9.1 \ - libcudnn7-dev=7.0.5.15-1+cuda9.1 \ - libnccl2=2.2.12-1+cuda9.1 \ - libnccl-dev=2.2.12-1+cuda9.1 && \ - ln -s /usr/local/cuda-9.1 /usr/local/cuda && \ + cuda-cupti-$CUDA_PKG_VERSION \ + cuda-cudart-$CUDA_PKG_VERSION \ + cuda-cudart-dev-$CUDA_PKG_VERSION \ + cuda-libraries-$CUDA_PKG_VERSION \ + cuda-libraries-dev-$CUDA_PKG_VERSION \ + cuda-nvml-dev-$CUDA_PKG_VERSION \ + cuda-minimal-build-$CUDA_PKG_VERSION \ + cuda-command-line-tools-$CUDA_PKG_VERSION \ + libcudnn7=7.2.1.38-1+cuda9.2 \ + libcudnn7-dev=7.2.1.38-1+cuda9.2 \ + libnccl2=2.2.13-1+cuda9.2 \ + libnccl-dev=2.2.13-1+cuda9.2 && \ + ln -s /usr/local/cuda-9.2 /usr/local/cuda && \ ln -s /usr/local/cuda/lib64/stubs/libcuda.so /usr/local/cuda/lib64/stubs/libcuda.so.1 # Reinstall packages with a separate version for GPU support diff --git a/tests/test_keras.py b/tests/test_keras.py index cdaa7cb9..fc6448e3 100644 --- a/tests/test_keras.py +++ b/tests/test_keras.py @@ -1,5 +1,6 @@ import unittest +import keras import numpy as np import pandas as pd From 626ba4cbd0bf3043446ab26b8fb8c778cafa0d32 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 21 Sep 2018 17:12:20 +0000 Subject: [PATCH 061/251] Use libcudnn 7.2 --- gpu.Dockerfile | 31 +++++++++++++++---------------- tests/test_keras.py | 8 +++++--- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/gpu.Dockerfile b/gpu.Dockerfile index 45515596..247c7dd4 100644 --- a/gpu.Dockerfile +++ b/gpu.Dockerfile @@ -1,4 +1,4 @@ -FROM nvidia/cuda:9.2-cudnn7-devel-ubuntu16.04 AS nvidia +FROM nvidia/cuda:9.1-cudnn7-devel-ubuntu16.04 AS nvidia FROM gcr.io/kaggle-images/python-tensorflow-whl:1.11.0-py36 as tensorflow_whl FROM gcr.io/kaggle-images/python:staging @@ -7,8 +7,8 @@ COPY --from=nvidia /etc/apt/sources.list.d/cuda.list /etc/apt/sources.list.d/ COPY --from=nvidia /etc/apt/sources.list.d/nvidia-ml.list /etc/apt/sources.list.d/ COPY --from=nvidia /etc/apt/trusted.gpg /etc/apt/trusted.gpg.d/cuda.gpg -ENV CUDA_VERSION=9.2.88 -ENV CUDA_PKG_VERSION=9-2=$CUDA_VERSION-1 +ENV CUDA_VERSION=9.1.85 +ENV CUDA_PKG_VERSION=9-1=$CUDA_VERSION-1 LABEL com.nvidia.volumes.needed="nvidia_driver" LABEL com.nvidia.cuda.version="${CUDA_VERSION}" ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH} @@ -22,19 +22,18 @@ ENV NVIDIA_VISIBLE_DEVICES=all ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility ENV NVIDIA_REQUIRE_CUDA="cuda>=9.0" RUN apt-get update && apt-get install -y --no-install-recommends \ - cuda-cupti-$CUDA_PKG_VERSION \ - cuda-cudart-$CUDA_PKG_VERSION \ - cuda-cudart-dev-$CUDA_PKG_VERSION \ - cuda-libraries-$CUDA_PKG_VERSION \ - cuda-libraries-dev-$CUDA_PKG_VERSION \ - cuda-nvml-dev-$CUDA_PKG_VERSION \ - cuda-minimal-build-$CUDA_PKG_VERSION \ - cuda-command-line-tools-$CUDA_PKG_VERSION \ - libcudnn7=7.2.1.38-1+cuda9.2 \ - libcudnn7-dev=7.2.1.38-1+cuda9.2 \ - libnccl2=2.2.13-1+cuda9.2 \ - libnccl-dev=2.2.13-1+cuda9.2 && \ - ln -s /usr/local/cuda-9.2 /usr/local/cuda && \ + cuda-cupti-$CUDA_PKG_VERSION \ + cuda-cudart-$CUDA_PKG_VERSION \ + cuda-libraries-$CUDA_PKG_VERSION \ + cuda-libraries-dev-$CUDA_PKG_VERSION \ + cuda-nvml-dev-$CUDA_PKG_VERSION \ + cuda-minimal-build-$CUDA_PKG_VERSION \ + cuda-command-line-tools-$CUDA_PKG_VERSION \ + libcudnn7=7.2.1.38-1+cuda9.0 \ + 
libcudnn7-dev=7.2.1.38-1+cuda9.0 \ + libnccl2=2.2.12-1+cuda9.1 \ + libnccl-dev=2.2.12-1+cuda9.1 && \ + ln -s /usr/local/cuda-9.1 /usr/local/cuda && \ ln -s /usr/local/cuda/lib64/stubs/libcuda.so /usr/local/cuda/lib64/stubs/libcuda.so.1 # Reinstall packages with a separate version for GPU support diff --git a/tests/test_keras.py b/tests/test_keras.py index fc6448e3..9f8a1e4c 100644 --- a/tests/test_keras.py +++ b/tests/test_keras.py @@ -25,7 +25,8 @@ def test_train(self): metrics=['accuracy']) model.fit(x_train, y_train, epochs=1, batch_size=32) - + + # Uses convnet which depends on libcudnn when running on GPU def test_conv2d(self): # Generate dummy data x_train = np.random.random((100, 100, 100, 3)) @@ -52,7 +53,8 @@ def test_conv2d(self): model.add(Dense(10, activation='softmax')) sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) - model.compile(loss='categorical_crossentropy', optimizer=sgd) + # This throws if libcudnn is not properly installed when running on a GPU + model.compile(loss='categorical_crossentropy', optimizer=sgd) model.fit(x_train, y_train, batch_size=32, epochs=1) - score = model.evaluate(x_test, y_test, batch_size=32) + model.evaluate(x_test, y_test, batch_size=32) From ec25fd3099e5c921c9818cbafe48a520e991d87a Mon Sep 17 00:00:00 2001 From: Gyczero <2997347185@qq.com> Date: Sat, 22 Sep 2018 18:53:54 +0800 Subject: [PATCH 062/251] ADD unit test for jieba --- tests/test_jieba.py | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 tests/test_jieba.py diff --git a/tests/test_jieba.py b/tests/test_jieba.py new file mode 100644 index 00000000..745545f0 --- /dev/null +++ b/tests/test_jieba.py @@ -0,0 +1,10 @@ +# encoding=utf-8 +import unittest + +import jieba + + +class TestJieba(unittest.TestCase): + def test_text_split(self): + sentence = "我爱北京天安门" + seg_list = jieba.cut(sentence) \ No newline at end of file From 3d3aeb69ed8ae29e3ecc87d9bcbf240619f0e698 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 1 Oct 2018 11:00:40 -0700 Subject: [PATCH 063/251] Prevent disk from filling up on GPU build The GPU build is still using a permanent worker (not ephemeral like the CPU build). Docker images are building up and end up taking all the space, and the builds start failing. --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 07a455da..5e32b76c 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -65,7 +65,7 @@ pipeline { slackSend color: 'none', message: "*<${env.BUILD_URL}console|${JOB_NAME} docker build>* ${GIT_COMMIT_SUMMARY}", channel: env.SLACK_CHANNEL sh '''#!/bin/bash set -exo pipefail - + docker image prune # remove previously built image to prevent disk from filling up ./build --gpu | ts ''' } From 321eeb52d623138d3191d01b146bd17c0a2327cb Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 1 Oct 2018 16:23:07 -0700 Subject: [PATCH 064/251] Remove all untagged images, not just dangling Context: gpu build still uses a permanent worker. 
--- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 5e32b76c..702898fb 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -65,7 +65,7 @@ pipeline { slackSend color: 'none', message: "*<${env.BUILD_URL}console|${JOB_NAME} docker build>* ${GIT_COMMIT_SUMMARY}", channel: env.SLACK_CHANNEL sh '''#!/bin/bash set -exo pipefail - docker image prune # remove previously built image to prevent disk from filling up + docker image prune -a # remove previously built image to prevent disk from filling up ./build --gpu | ts ''' } From 560af468065cfd51c3b96d6526a19fe5d2817715 Mon Sep 17 00:00:00 2001 From: Gyczero <2997347185@qq.com> Date: Tue, 2 Oct 2018 12:18:13 +0800 Subject: [PATCH 065/251] Add an assertion for jieba --- .../inspectionProfiles/Project_Default.xml | 14 + .../inspectionProfiles/profiles_settings.xml | 7 + tests/.idea/misc.xml | 14 + tests/.idea/modules.xml | 8 + tests/.idea/tests.iml | 11 + tests/.idea/workspace.xml | 698 ++++++++++++++++++ tests/test_jieba.py | 5 +- 7 files changed, 756 insertions(+), 1 deletion(-) create mode 100644 tests/.idea/inspectionProfiles/Project_Default.xml create mode 100644 tests/.idea/inspectionProfiles/profiles_settings.xml create mode 100644 tests/.idea/misc.xml create mode 100644 tests/.idea/modules.xml create mode 100644 tests/.idea/tests.iml create mode 100644 tests/.idea/workspace.xml diff --git a/tests/.idea/inspectionProfiles/Project_Default.xml b/tests/.idea/inspectionProfiles/Project_Default.xml new file mode 100644 index 00000000..7700bea2 --- /dev/null +++ b/tests/.idea/inspectionProfiles/Project_Default.xml @@ -0,0 +1,14 @@ [14 added lines of IntelliJ IDEA inspection-profile XML; the markup was stripped during extraction and is not recoverable] \ No newline at end of file diff --git a/tests/.idea/inspectionProfiles/profiles_settings.xml b/tests/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 00000000..c60c33bb --- /dev/null +++ b/tests/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,7 @@ [7 added lines of XML; markup stripped during extraction] \ No newline at end of file diff --git a/tests/.idea/misc.xml b/tests/.idea/misc.xml new file mode 100644 index 00000000..559ad2b6 --- /dev/null +++ b/tests/.idea/misc.xml @@ -0,0 +1,14 @@ [14 added lines of XML; markup stripped during extraction] \ No newline at end of file diff --git a/tests/.idea/modules.xml b/tests/.idea/modules.xml new file mode 100644 index 00000000..dac5cbb1 --- /dev/null +++ b/tests/.idea/modules.xml @@ -0,0 +1,8 @@ [8 added lines of XML; markup stripped during extraction] \ No newline at end of file diff --git a/tests/.idea/tests.iml b/tests/.idea/tests.iml new file mode 100644 index 00000000..08bd4824 --- /dev/null +++ b/tests/.idea/tests.iml @@ -0,0 +1,11 @@ [11 added lines of XML; markup stripped during extraction] \ No newline at end of file diff --git a/tests/.idea/workspace.xml b/tests/.idea/workspace.xml new file mode 100644 index 00000000..f9541e62 --- /dev/null +++ b/tests/.idea/workspace.xml @@ -0,0 +1,698 @@ [698 added lines of IntelliJ IDEA workspace XML; markup stripped during extraction — only stray fragments such as "Python", "PyPep8Inspection", and the timestamp 1537613612117 survive] \ No newline at end of file diff --git a/tests/test_jieba.py b/tests/test_jieba.py index 745545f0..44e49b0e 100644 --- a/tests/test_jieba.py +++ b/tests/test_jieba.py @@ -7,4 +7,7 @@ class TestJieba(unittest.TestCase): def test_text_split(self): sentence = "我爱北京天安门" - seg_list = jieba.cut(sentence) \ No newline at end of file + seg_list = jieba.cut(sentence) + seg_list = list(seg_list) + + self.assertEqual(4, len(seg_list)) \ No newline at end of file From 2a22bbe2993eab9c0b5ff0f070503b3468dd9e6e Mon Sep 17 00:00:00 2001 From: Gyczerinvis <2997347185@qq.com> Date: Tue, 2 Oct 2018 12:21:08 +0800 Subject: [PATCH 066/251] Delete Project_Default.xml --- tests/.idea/inspectionProfiles/Project_Default.xml | 14 -------------- 1 file changed, 14 deletions(-) delete mode 100644 tests/.idea/inspectionProfiles/Project_Default.xml diff --git a/tests/.idea/inspectionProfiles/Project_Default.xml b/tests/.idea/inspectionProfiles/Project_Default.xml deleted file mode 100644 index 7700bea2..00000000 --- a/tests/.idea/inspectionProfiles/Project_Default.xml +++ /dev/null @@ -1,14 +0,0 @@ [14 deleted lines of XML; markup stripped during extraction] \ No newline at end of file From e36a92c0a8c31313b881bfcaa7c95051087acd69 Mon Sep 17 00:00:00 2001 From: Gyczerinvis <2997347185@qq.com> Date: Tue, 2 Oct 2018 12:21:17 +0800 Subject: [PATCH 067/251] Delete profiles_settings.xml --- tests/.idea/inspectionProfiles/profiles_settings.xml | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 tests/.idea/inspectionProfiles/profiles_settings.xml diff --git a/tests/.idea/inspectionProfiles/profiles_settings.xml b/tests/.idea/inspectionProfiles/profiles_settings.xml deleted file mode 100644 index c60c33bb..00000000 --- a/tests/.idea/inspectionProfiles/profiles_settings.xml +++ /dev/null @@ -1,7 +0,0 @@ [7 deleted lines of XML; markup stripped during extraction] \ No newline at end of file From a14b211232c8ad5bb4a7cb921ea8ce520f626653 Mon Sep 17 00:00:00 2001 From: Gyczerinvis <2997347185@qq.com> Date: Tue, 2 Oct 2018 12:21:26 +0800 Subject: [PATCH 068/251] Delete misc.xml --- tests/.idea/misc.xml | 14 -------------- 1 file changed, 14 deletions(-) delete mode 100644 tests/.idea/misc.xml diff --git a/tests/.idea/misc.xml b/tests/.idea/misc.xml deleted file mode 100644 index 559ad2b6..00000000 --- a/tests/.idea/misc.xml +++ /dev/null @@ -1,14 +0,0 @@ [14 deleted lines of XML; markup stripped during extraction] \ No newline at end of file From 639e2232e1d1dea7daf7716bd3407ad1626a1b7d Mon Sep 17 00:00:00 2001 From: Gyczerinvis <2997347185@qq.com> Date: Tue, 2 Oct 2018 12:21:46 +0800 Subject: [PATCH 069/251] Delete workspace.xml --- tests/.idea/workspace.xml | 698 -------------------------------------- 1 file changed, 698 deletions(-) delete mode 100644 tests/.idea/workspace.xml diff --git a/tests/.idea/workspace.xml b/tests/.idea/workspace.xml deleted file mode 100644 index f9541e62..00000000 --- a/tests/.idea/workspace.xml +++ /dev/null @@ -1,698 +0,0 @@ [698 deleted lines of IntelliJ IDEA workspace XML; markup stripped during extraction] \ No newline at end of file From 45d9be2b9d70fb2101c6dd90184ac128fcc41770 Mon Sep 17 00:00:00 2001 From: Gyczerinvis <2997347185@qq.com> Date: Tue, 2 Oct 2018 12:21:56 +0800 Subject: [PATCH 070/251] Delete modules.xml --- tests/.idea/modules.xml | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 tests/.idea/modules.xml diff --git a/tests/.idea/modules.xml b/tests/.idea/modules.xml deleted file mode 100644 index dac5cbb1..00000000 --- a/tests/.idea/modules.xml +++ /dev/null @@ -1,8 +0,0 @@ [8 deleted lines of XML; markup stripped during extraction] \ No newline at end of file From b54644ad067fc1d964477f46afa4a2f51a36138d Mon Sep 17 00:00:00 2001 From: Gyczerinvis <2997347185@qq.com> Date: Tue, 2 Oct 2018 12:22:04 +0800 Subject: [PATCH 071/251] Delete tests.iml --- tests/.idea/tests.iml | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 tests/.idea/tests.iml diff --git a/tests/.idea/tests.iml b/tests/.idea/tests.iml deleted file mode 100644 index 08bd4824..00000000 --- a/tests/.idea/tests.iml +++ /dev/null @@ -1,11 +0,0 @@ [11 deleted lines of XML; markup stripped during extraction] \ No newline at end of file From 94872226ebff5dad9285b4b507615267a54291e0 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 2 Oct 2018 18:00:25 +0000 Subject: [PATCH 072/251] Install hep_ml, xgboost and lasagne using pip --- Dockerfile | 17 +++-------------- tests/test_hep_ml.py | 13 +++++++++++++ tests/test_lasagne.py | 15 +++++++++++++++ 3 files changed, 31 insertions(+), 14 deletions(-) create mode 100644 tests/test_hep_ml.py create mode 100644 tests/test_lasagne.py diff --git a/Dockerfile b/Dockerfile index 4a1a74f5..f29ad383 100644 --- a/Dockerfile +++ b/Dockerfile @@ -53,24 +53,15 @@ RUN pip install /tmp/tensorflow_cpu/tensorflow*.whl && \ RUN apt-get install -y libfreetype6-dev && \ apt-get install -y libglib2.0-0 libxext6 libsm6 libxrender1 libfontconfig1 --fix-missing && \ pip install gensim && \ - # textblob pip install textblob && \ - #word cloud pip install wordcloud && \ - #igraph conda install -y -c conda-forge python-igraph && \ - #xgboost - cd /usr/local/src && mkdir xgboost && cd xgboost && \ - git clone --depth 1 --recursive https://github.com/dmlc/xgboost.git && cd xgboost && \ - make && cd python-package && python setup.py install && \ + pip install xgboost && \ # Pinning to an older version. The latest release (2.2.0) depends on an old GLIBC version. # This cause the package to fail at import time with "version `GLIBC_2.23' not found" # TODO: Unpin once the latest release is fixed. 
pip install lightgbm==2.1.2 && \ - #lasagne - cd /usr/local/src && mkdir Lasagne && cd Lasagne && \ - git clone --depth 1 https://github.com/Lasagne/Lasagne.git && cd Lasagne && \ - pip install -r requirements.txt && python setup.py install && \ + pip install git+git://github.com/Lasagne/Lasagne.git && \ #keras cd /usr/local/src && mkdir keras && cd keras && \ git clone --depth 1 https://github.com/fchollet/keras.git && \ @@ -100,8 +91,7 @@ RUN apt-get install -y libfreetype6-dev && \ apt-get install -y libatlas-base-dev && \ cd /usr/local/src && git clone --depth 1 https://github.com/ztane/python-Levenshtein && \ cd python-Levenshtein && python setup.py install && \ - cd /usr/local/src && git clone --depth 1 https://github.com/arogozhnikov/hep_ml.git && \ - cd hep_ml && pip install . && \ + pip install hep_ml && \ # chainer pip install chainer && \ # NLTK Project datasets @@ -132,7 +122,6 @@ RUN apt-get install -y libfreetype6-dev && \ # Make sure the dynamic linker finds the right libstdc++ ENV LD_LIBRARY_PATH=/opt/conda/lib -# Install Basemap via conda temporarily RUN apt-get -y install zlib1g-dev liblcms2-dev libwebp-dev libgeos-dev && \ pip install matplotlib && \ pip install pyshp && \ diff --git a/tests/test_hep_ml.py b/tests/test_hep_ml.py new file mode 100644 index 00000000..37afc476 --- /dev/null +++ b/tests/test_hep_ml.py @@ -0,0 +1,13 @@ +import unittest + +import numpy as np + +from hep_ml.preprocessing import BinTransformer + +class TestHepML(unittest.TestCase): + def test_preprocessing(self): + X = np.array([[1.1, 1.2, 1.3],[5.1, 6.4, 10.5]]) + transformer = BinTransformer().fit(X) + new_X = transformer.transform(X) + + self.assertEqual((2, 3), new_X.shape) diff --git a/tests/test_lasagne.py b/tests/test_lasagne.py new file mode 100644 index 00000000..08e9dbf8 --- /dev/null +++ b/tests/test_lasagne.py @@ -0,0 +1,15 @@ +import unittest + +import lasagne +import theano.tensor as T + +class TestLasagne(unittest.TestCase): + def test_network_definition(self): + input_var = T.tensor4('X') + + network = lasagne.layers.InputLayer((None, 3, 32, 32), input_var) + network = lasagne.layers.Conv2DLayer(network, 64, (3, 3)) + + params = lasagne.layers.get_all_params(network, trainable=True) + + self.assertEqual(2, len(params)) From 566014a1b0e143f11591a47447945bc8195edcc0 Mon Sep 17 00:00:00 2001 From: Nikita Titov Date: Tue, 9 Oct 2018 02:45:09 +0300 Subject: [PATCH 073/251] update LightGBM --- Dockerfile | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index f29ad383..30f04615 100644 --- a/Dockerfile +++ b/Dockerfile @@ -57,10 +57,7 @@ RUN apt-get install -y libfreetype6-dev && \ pip install wordcloud && \ conda install -y -c conda-forge python-igraph && \ pip install xgboost && \ - # Pinning to an older version. The latest release (2.2.0) depends on an old GLIBC version. - # This cause the package to fail at import time with "version `GLIBC_2.23' not found" - # TODO: Unpin once the latest release is fixed. - pip install lightgbm==2.1.2 && \ + pip install lightgbm && \ pip install git+git://github.com/Lasagne/Lasagne.git && \ #keras cd /usr/local/src && mkdir keras && cd keras && \ From 687df0e926ff70cbe2058c9dd7c7c54c58c5a205 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 9 Oct 2018 09:15:03 -0700 Subject: [PATCH 074/251] Let prompt-toolkit used older version jupyter-console installs an older version of that package. 
--- Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 30f04615..9c7391a2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -419,7 +419,6 @@ RUN pip install bcolz && \ pip install pexpect && \ pip install pickleshare && \ pip install Pillow && \ - pip install prompt-toolkit && \ pip install ptyprocess && \ pip install Pygments && \ pip install pyparsing && \ From d9aacd14c702a378ad13d26ce4490349f0b4905c Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 10 Oct 2018 00:04:18 +0000 Subject: [PATCH 075/251] Fix compatibility issue with matplotlib and basemap --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 9c7391a2..d996cfa0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -125,7 +125,7 @@ RUN apt-get -y install zlib1g-dev liblcms2-dev libwebp-dev libgeos-dev && \ pip install pyproj && \ cd /usr/local/src && git clone https://github.com/matplotlib/basemap.git && \ cd basemap && \ - git checkout v1.1.0 && \ + git checkout v1.2.0rel && \ # Install geos cd geos-3.3.3 && \ export GEOS_DIR=/usr/local && \ From d0a8bc4b7b6ecc769149f05c3f9e5b62d6a98cb2 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 9 Oct 2018 19:56:29 -0700 Subject: [PATCH 076/251] Pin matplotlib to prevent compatibility issue with basemap Remove the duplicate pip install for matplotlib --- Dockerfile | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index d996cfa0..744c80c9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -120,12 +120,12 @@ RUN apt-get install -y libfreetype6-dev && \ ENV LD_LIBRARY_PATH=/opt/conda/lib RUN apt-get -y install zlib1g-dev liblcms2-dev libwebp-dev libgeos-dev && \ - pip install matplotlib && \ + pip install matplotlib==2.2.3 && \ pip install pyshp && \ pip install pyproj && \ cd /usr/local/src && git clone https://github.com/matplotlib/basemap.git && \ cd basemap && \ - git checkout v1.2.0rel && \ + git checkout v1.1.0 && \ # Install geos cd geos-3.3.3 && \ export GEOS_DIR=/usr/local && \ @@ -405,7 +405,6 @@ RUN pip install bcolz && \ pip install jupyter-console && \ pip install jupyter-core && \ pip install MarkupSafe && \ - pip install matplotlib && \ pip install mistune && \ pip install nbconvert && \ pip install nbformat && \ From 655a5084791a571d6aa267fa3f4db3cee5f4247a Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 9 Oct 2018 20:18:08 -0700 Subject: [PATCH 077/251] Removing unmaintained and unused opendeep package The package was broken for quite a while and no user noticed/complained. The last update on github for this package was 3 years ago and the package is still marked as an alpha and in active development, which is clearly not the case. --- Dockerfile | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index 744c80c9..f7e7c1bc 100644 --- a/Dockerfile +++ b/Dockerfile @@ -134,11 +134,8 @@ RUN apt-get -y install zlib1g-dev liblcms2-dev libwebp-dev libgeos-dev && \ # Install basemap cd .. 
&& python setup.py install && \ pip install basemap --no-binary basemap - -RUN cd /usr/local/src && git clone https://github.com/vitruvianscience/opendeep.git && \ - cd opendeep && python setup.py develop && \ - # sasl is apparently an ibis dependency - apt-get -y install libsasl2-dev && \ +# sasl is apparently an ibis dependency +RUN apt-get -y install libsasl2-dev && \ # ...as is psycopg2 apt-get install -y libpq-dev && \ pip install ibis-framework && \ From 68a3b18f87f8481657f3cd379cbf7a79d1664bd2 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 10 Oct 2018 03:26:27 +0000 Subject: [PATCH 078/251] Install shapely from pip and add tests --- Dockerfile | 3 +-- tests/test_shapely.py | 9 +++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 tests/test_shapely.py diff --git a/Dockerfile b/Dockerfile index f7e7c1bc..7af93feb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -142,8 +142,7 @@ RUN apt-get -y install libsasl2-dev && \ # Cartopy plus dependencies yes | conda install proj4 && \ pip install packaging && \ - cd /usr/local/src && git clone https://github.com/Toblerity/Shapely.git && \ - cd Shapely && python setup.py install && \ + pip install shapely && \ cd /usr/local/src && git clone https://github.com/SciTools/cartopy.git && \ cd cartopy && python setup.py install && \ # MXNet diff --git a/tests/test_shapely.py b/tests/test_shapely.py new file mode 100644 index 00000000..14c4843c --- /dev/null +++ b/tests/test_shapely.py @@ -0,0 +1,9 @@ +import unittest + +from shapely.geometry import Point + +class TestShapely(unittest.TestCase): + def test_geometry(self): + p = Point(0.0, 0.0) + + self.assertEqual("Point", p.geom_type) From d7138c2024c4b45d0261f7d2a24a765288102883 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 10 Oct 2018 16:57:21 +0000 Subject: [PATCH 079/251] Install cartopy from pip and add tests --- Dockerfile | 4 ++-- tests/test_cartopy.py | 8 ++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 tests/test_cartopy.py diff --git a/Dockerfile b/Dockerfile index 7af93feb..147571e1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -134,6 +134,7 @@ RUN apt-get -y install zlib1g-dev liblcms2-dev libwebp-dev libgeos-dev && \ # Install basemap cd .. 
&& python setup.py install && \ pip install basemap --no-binary basemap + # sasl is apparently an ibis dependency RUN apt-get -y install libsasl2-dev && \ # ...as is psycopg2 @@ -143,8 +144,7 @@ RUN apt-get -y install libsasl2-dev && \ yes | conda install proj4 && \ pip install packaging && \ pip install shapely && \ - cd /usr/local/src && git clone https://github.com/SciTools/cartopy.git && \ - cd cartopy && python setup.py install && \ + pip install cartopy && \ # MXNet pip install mxnet && \ # h2o diff --git a/tests/test_cartopy.py b/tests/test_cartopy.py new file mode 100644 index 00000000..e372b280 --- /dev/null +++ b/tests/test_cartopy.py @@ -0,0 +1,8 @@ +import unittest + +import cartopy.crs as ccrs + +class TestCartopy(unittest.TestCase): + def test_projection(self): + ccrs.PlateCarree() + ccrs.Mollweide() From 599680f3a710b9b1eb9a284b1d36621a7ccdc284 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 10 Oct 2018 17:43:48 +0000 Subject: [PATCH 080/251] install scikit-learn from pip --- Dockerfile | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 147571e1..bc1ba901 100644 --- a/Dockerfile +++ b/Dockerfile @@ -172,12 +172,9 @@ RUN apt-get -y install libsasl2-dev && \ # scikit-learn dependencies RUN pip install scipy && \ - # Scikit-Learn pinned to 0.19.X until 0.20.0 (many packages break with scikitlearn 0.20.0dev) - cd /usr/local/src && git clone https://github.com/scikit-learn/scikit-learn.git && \ - cd scikit-learn && python setup.py build && python setup.py install && \ + pip install scikit-learn && \ # HDF5 support conda install h5py && \ - # https://github.com/biopython/biopython pip install biopython && \ # PUDB, for local debugging convenience pip install pudb && \ From 7d554a4f832031e964cdeb47b64cc8ed3632e9e7 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 11 Oct 2018 01:28:14 +0000 Subject: [PATCH 081/251] install datashader from pip and add tests --- Dockerfile | 2 +- tests/test_datashader.py | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 tests/test_datashader.py diff --git a/Dockerfile b/Dockerfile index bc1ba901..1fa64546 100644 --- a/Dockerfile +++ b/Dockerfile @@ -190,7 +190,7 @@ RUN pip install scipy && \ pip install orderedmultidict && \ pip install smhasher && \ conda install -y -c bokeh bokeh && \ - conda install -y -c bokeh datashader && \ + pip install datashader && \ # Boruta (python implementation) cd /usr/local/src && git clone https://github.com/danielhomola/boruta_py.git && \ cd boruta_py && python setup.py install && \ diff --git a/tests/test_datashader.py b/tests/test_datashader.py new file mode 100644 index 00000000..c8ff9295 --- /dev/null +++ b/tests/test_datashader.py @@ -0,0 +1,38 @@ +import unittest + +import numpy as np +import pandas as pd +import datashader as ds +import datashader.transfer_functions as tf + +class TestDatashader(unittest.TestCase): + # based on https://github.com/pyviz/datashader/blob/master/datashader/tests/test_pipeline.py + def test_pipeline(self): + df = pd.DataFrame({ + 'x': np.array(([0.] * 10 + [1] * 10)), + 'y': np.array(([0.] 
* 5 + [1] * 5 + [0] * 5 + [1] * 5)), + 'f64': np.arange(20, dtype='f8') + }) + df.f64.iloc[2] = np.nan + + cvs = ds.Canvas(plot_width=2, plot_height=2, x_range=(0, 1), y_range=(0, 1)) + + pipeline = ds.Pipeline(df, ds.Point('x', 'y')) + img = pipeline((0, 1), (0, 1), 2, 2) + agg = cvs.points(df, 'x', 'y', ds.count()) + self.assertTrue(img.equals(tf.shade(agg))) + + color_fn = lambda agg: tf.shade(agg, 'pink', 'red') + pipeline.color_fn = color_fn + img = pipeline((0, 1), (0, 1), 2, 2) + self.assertTrue(img.equals(color_fn(agg))) + + transform_fn = lambda agg: agg + 1 + pipeline.transform_fn = transform_fn + img = pipeline((0, 1), (0, 1), 2, 2) + self.assertTrue(img.equals(color_fn(transform_fn(agg)))) + + pipeline = ds.Pipeline(df, ds.Point('x', 'y'), ds.sum('f64')) + img = pipeline((0, 1), (0, 1), 2, 2) + agg = cvs.points(df, 'x', 'y', ds.sum('f64')) + self.assertTrue(img.equals(tf.shade(agg))) From 50ef11973c55eea558e1eba9b67d792a63d18c30 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 11 Oct 2018 01:22:53 +0000 Subject: [PATCH 082/251] only install packages after downgrading to python 3.6 --- Dockerfile | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/Dockerfile b/Dockerfile index 1fa64546..d1367e4c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -17,6 +17,14 @@ RUN sed -i "s/httpredir.debian.org/debian.uchicago.edu/" /etc/apt/sources.list & pip install --upgrade pip && \ apt-get -y install cmake +# Tensorflow doesn't support python 3.7 yet. See https://github.com/tensorflow/tensorflow/issues/20517 +# Fix to install tf 1.10:: Downgrade python 3.7->3.6.6 and downgrade Pandas 0.23.3->0.23.2 +RUN conda install -y python=3.6.6 && \ + pip install pandas==0.23.2 && \ + # Another fix for TF 1.10 https://github.com/tensorflow/tensorflow/issues/21518 + pip install keras_applications==1.0.4 --no-deps && \ + pip install keras_preprocessing==1.0.2 --no-deps + RUN pip install seaborn python-dateutil dask pytagcloud pyyaml joblib \ husl geopy ml_metrics mne pyshp && \ conda install -y -c conda-forge spacy && python -m spacy download en && \ @@ -37,14 +45,6 @@ RUN pip install seaborn python-dateutil dask pytagcloud pyyaml joblib \ # clean up ImageMagick source files cd ../ && rm -rf ImageMagick* -# Tensorflow doesn't support python 3.7 yet. 
See https://github.com/tensorflow/tensorflow/issues/20517 -# Fix to install tf 1.10:: Downgrade python 3.7->3.6.6 and downgrade Pandas 0.23.3->0.23.2 -RUN conda install -y python=3.6.6 && \ - pip install pandas==0.23.2 && \ - # Another fix for TF 1.10 https://github.com/tensorflow/tensorflow/issues/21518 - pip install keras_applications==1.0.4 --no-deps && \ - pip install keras_preprocessing==1.0.2 --no-deps - # Install tensorflow from a pre-built wheel COPY --from=tensorflow_whl /tmp/tensorflow_cpu/*.whl /tmp/tensorflow_cpu/ RUN pip install /tmp/tensorflow_cpu/tensorflow*.whl && \ From 428217dbd390acf2b0c61907be1126fea35737c2 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 11 Oct 2018 02:53:05 +0000 Subject: [PATCH 083/251] install fbprophet from pip and add tests --- Dockerfile | 2 +- tests/test_fbprophet.py | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 tests/test_fbprophet.py diff --git a/Dockerfile b/Dockerfile index d1367e4c..2d010414 100644 --- a/Dockerfile +++ b/Dockerfile @@ -291,7 +291,7 @@ RUN pip install fancyimpute && \ pip install pyexcel-ods && \ pip install sklearn-pandas && \ pip install stemming && \ - conda install -y -c conda-forge fbprophet && \ + pip install fbprophet && \ conda install -y -c conda-forge -c ioam holoviews geoviews && \ #Temp fix: After installing holoviews and geoviews, deps for fiona and geopandas get really messed up. This is a very unelegant fix. conda uninstall -y fiona geopandas && \ diff --git a/tests/test_fbprophet.py b/tests/test_fbprophet.py new file mode 100644 index 00000000..f8badbd6 --- /dev/null +++ b/tests/test_fbprophet.py @@ -0,0 +1,16 @@ +import unittest + +import numpy as np +import pandas as pd + +from fbprophet import Prophet + +class TestFbProphet(unittest.TestCase): + def test_fit(self): + train = pd.DataFrame({ + 'ds': np.array(['2012-05-18', '2012-05-20']), + 'y': np.array([38.23, 21.25]) + }) + + forecaster = Prophet() + forecaster.fit(train) From 03e180b04f2bba446f1550067491f0328c338b2e Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 11 Oct 2018 04:56:36 +0000 Subject: [PATCH 084/251] add tests for fiona --- tests/data/coutwildrnp.shp | Bin 0 -> 109548 bytes tests/data/coutwildrnp.shx | Bin 0 -> 636 bytes tests/test_fiona.py | 10 ++++++++++ 3 files changed, 10 insertions(+) create mode 100644 tests/data/coutwildrnp.shp create mode 100644 tests/data/coutwildrnp.shx create mode 100644 tests/test_fiona.py diff --git a/tests/data/coutwildrnp.shp b/tests/data/coutwildrnp.shp new file mode 100644 index 0000000000000000000000000000000000000000..f1f62b5d80e9ff767be06eeca2c001b3fe7d293f GIT binary patch literal 109548 zcmagn3D}Rt|Ns3VLK{kp7ENg1q(w?KiWHR++N2>#L@J?uN=ju*ds0IwMM_8|Oxhzw zrIIp26p=~`;=Y{E*XRHJ=f02Qb{yYdzsK>OnRCwB&-40RpUap7w=@5r|GxZl#Wu!N zBo_DX)Gn3%zy6B>r@FHLrrIZ8oXY-d?%v$QZTav2&%biUlpmwAOt-a{q_Xt>tVKoM zbn%M9MOkGDO{@PEmUdcG#N1_E$JPJ&e`&Xragb~jKNR~u>x;7UYV6+sgKO=+lJb`S zj@KytY1rM5Q|sS~xPSI1(pUYpu)9vaX$|??%RN>Xj)%$L{8HF_$i0Q0@<8maUqyc1 zdwAz(^7m8vr9YAXckIu_K2W|t8r$a7*LJ{h9X1{1({;b=S8%<`tBU){3zinPop4M` z3j5{qLpbgg!25)x?UZw?}lLeCHCd8KZE)qe_gqH(=W@Tc{*Tmu4fX?; z7xrf={V%j%en#b8LHp*)TrZW}y>xxzej|1tQNFpF_K2OhzMClTL;cbvv`4JO{h0IA zK98&Yx5GxZwQzox{QeW#E0#P9=jPNWU8wRJVcQXhS2X_CP~TjS@#&YT{x@TDoci;9 z96FHSo~pR!cKV$0o)1)g7Gv8%?K4dCp1xFk z6He!-f95G(NB8pt`OV+Dp9NgcAEoit0EdQZue(?e@}u0ZlkFJY|B?-ikD**IT_s;Y zed4>UKVeUtf5l#Q%}JlV(!Y(vZfcJkHWuZ_4b(Rr|8L=~*TeE<>*^j_SMB>1iZwdj z`z~!WawBv7+wYsWE$gcL-@0l}F-2N4EA8~UT&Z>`+MY350fj)f%NWtjgKM!b!hr!k>8KoRE#~}S{{Y-MB3l|^)F-d 
zS@{?2E6n5i1ODsO3x|{D6!G*qW82GNERNNP2aSFAKI6&!M%=Z>ZmOFd{o?zJp`xf|8;E4e6&9) zUSIwayGHL7`JG&v`{r2s=l^)C`CfLit299CMtW4 z2|sg75f2^46;4;*T-YrfTR6UUV`2X>_U-x>w!ghdd?F6Du!-2Oe7=b1cl(h44Qw0t zru?sP=z9a+cTADqecg-l7h-c6_8;Kf1e>REm~(xRJ_a0bz~LO64#jC_><`1{Wv(Bq z%2(m^#dSq_X2x?xeQaHvYhic&wOnt`XyU7T7WTbxY=qD}qroQm@a@`f&yw=>Rb zt|a|lI2?gfW2OJ|iXxty<9G~qdtlqPQxQ+w;Pm$8l)sjr}$yc;5 zuICph{_LfN-F-M!!@i#4FSH}RGY*&GSP9#=E}^_?I1jtHaIA^lri;kmK>26lxJc~_1hhw|&Jjy#j?J*wb5jcH}{jOS1j#B=MaXtmR7qP1+uhIH4pZuxW3#?}~RNf;v zUvn<^E2rJCeV6>^opXwK9*u2R9Ntj*C&@cgp8f4?(jSS_V>s-O<0&})r1F-ZRiqE? zb-kBx?9Bc9={PS_`+bMabew14^a3_lV)qmdzc;5oQ*oMtQw_?GS7Xx%+g-4~2!{{P zB>#P?&u|>eQQz1d+h#c3hs`zedpN0o(pshemALx>`~7Gi_nhLL)qeZpto||W&!9dF zh}&bD6?R8!zq{`A!l9FV9FFDLFWtB7{ce!_l>DRiSDg*gq1t~35znt-zYLqU?ANh{ zuJ;ZOugX=}-@`$wUwv#UQU7p__WyR=pBaq(ue#sK@}cxcsLA;u^iuy_Ctt7rtNse( z`#AMqS8SSK`>s3|hoziXY@-&m=OvtPY;Wv8$8jNcH76G3n*$hMu{q9v6HnJ-`?Sg% zg?&?b0}cn^(0Ec&o<9xy;n=>R^e0R%;<1sg_Z0Te;Jg5bnwn3`uo;idM>u)rr)w@R zZCTu}uPC3*e0PcKyPGxtuf=H!j+d&wzbkzg)wiqGlksxK_6_B~g7wHeiOqL%Jsies zJzYurqz+^kTzhyX|i%lc;i~NtytKVtAS%~v->_=v-`nQettMhPZhQn&c zgYS&pG>wm>{H615Yxcu*xbm0Re%cnNd$gZ=IV;`}$3xkV?R;JDBJI!B)uhw|dC*k+s##c>?xt8^I7-8qltii#h31>>eEN#>3g0h;ve!}dS0om`84LbBE5e|^JyJ+w`2bl`O`f4;v0(e@oeUAya%V& z*jD9v%1pwpyPmK5YkfK&r}{jL<^j0$Y?|~u=T9bXcH{ZaF4B5c|CZwV=_;IiVE+@> z^It2y)~E22(r@)VTfvxZO3%gdnb(VF`gl8gRqS^s?=Jrs__zIkpJQDj0?k?i)WO*14H)FE|$GSN69>D!y$Gptf+*{aO(6Q*3 z@HqCLuuhr%a6XK6Fx^9bcO&bl9fR}hth4F2`-}4YF|6yM$DqRb*0z*iWpLs2sP>5! z*dM|^;wL>oede>zxN{yV90%h(YL-r|m$0LR9;o8^AW8a5;(j2dNjdP0Y`@gY! zhke*hd6fJ*2bjl)7f!QCZ@+r7aIVeueHc;Lujl$S@7rAoj3Nb2)};s;~<2DaPaP#>G`vHL~ml&_vC z@|&M={BbnCU2gt-;k+7$=3@%S$-4gDFA%SU^0V^J z%pX5P`Cnyzg@Y*HOk@7}@ou&E6|5KGWc5dT`KVWD-@CP*41JmFAIo~@FUFw}`*1iG$M&qZ=4$%e zcJ5L1PiVn-2lhqoFdp-2rGNdoB7K@jdHF7l|HkaA zzSq-~cRS8ancv|J?Zf#o^505+f99j4|Aq3yPOLwsGW&QOG^~ibBgr36VEu}%aF{)W z_#>oGyRzPe_pqDDdTAz*-tBl_k=}3FyUMn#_!sM|>5k3Pdy2U2L;9`vv$9vk6-xVJ z+H^P1Jnx-KTaW2oI6Pl=?JfJ_VD`i<|JUx`r-XT{rMHwuS|%(Ju> zn<+d$+BSVD@5!dcwbM`>9%w>cUdLwjX?QlacVqXn@}G$9ChV&I*GBhdZojd(o|RLT zn+p5Vy&s6pZ?uJ%V=c-{>&TxL$t$tzM}4E5D|0_y>BBf|_EGuga{sX&cGYR0cmNJs zm;4Dh9;)`e7pDz4FU58g{o!idT-;Cm8{1kqpUU%aI!f`XT4$SL|1ZwHu^o1H(cbQ) zTj-x-&ncWcVKW5#M{ua$vWVLk3wT2oG+IZ_m#A6%kDOvqwY}pAF&&NZ3R6~Rtd%R z%o1!r!})lEG6O z7T2=}v5&-;aC!@eS=f2@6~9R7Phnqi|6=nVHVyj~<+-ZbM|#PNaC{Eiwp|!+<}T8| zjPq{&sn7DOin#BIjciBZw2rv>Sm}?_^Wj?T?!&RV*2*X3!*PBd+Xrxb4X2-RsMW2w zerc_>EwEXq^q*W!{TE}m9{V@2*+cuv@c-KGiPJ*T$I|^Mf7pil=WEGt@1%Y53hd9t zVORFCSOceRv3XDJy+G-^{nu`0sXs}dkHh&7?B8cR7JJE-4WupOH$KL`=J#X%1UaqJ z`f;!Jy}z)nL3_G-az*Urv_1W87Hd8Gf$?LHkOwl}!ueW1J2D>4d92^D3HGvU!T8SQ zw7xgecwepkW|W?LK9CR9`x=9N4bA5Z+3);ktQTP&d#kC(`eMG;Ub`#nk6DF%Yt|>z zNc-nWtaqsoc6C@EO<$aD*K^0ky51+Oujw1gbA7ZPzoqNH#(EvJ*XL#D6yqi1w*7dGt}2lfR%C*A)|D5{Tj+E!m!M@}l8JBJ| z>El2g&XS+dxUH$rEjq>3c77fEaNIy!`a#;a-ywZEm~rl#P@cJmah_|bK3#P#n5O$b zQeKA58+!gfMeSFUc@S>KLG~Nv(}~A6^hatVmp+%c7dB1Qe~Xw$ZUlB2$3JmcgVW{o zznh268u>^Zj$yo5jjc2YyKNMo!q{^aaA_?18`XaIlipvYbIn%cuTt6PK`zudcD-+4 zPu#yjF~?k|OAE)Z%f`l*alArcE+_gn4e18gQXXDh}6;3~4U%fH$O4k+X^E){2 ziOoKz7V&rlcD-@BOzx!f`K{O=)1*iro=|)jotI}~GX|&Ss-NNfoIl6ra2(dEetob# z_4=ZIX(Z?CaI5?Zwg>bgeix1x$PZ#O0{c(pKjk)@zx{!3@);;wLh%?Tt8GDKAz3Prz}r`f~$z z!*DvEaqbrNEc(mTXI)8`;!sWN)VbFbaW{^2FV@1vST7sj*~lLe&%L@A>2pK!yR+p( zaQydb$~yyx_Si1e^=5P{;m^X~G&IDSHVxTCS}r*-&m$_uM$KU-dLeLf;yfD`&?p9eL5M&W#@*7+~6o6Pt#vlQ=& zV}ERq)IM-6PD{0pUx(xU^4-`s#_ks7U%~i~?QtHg`OpHV12kXSDE>0@$=-_NzdAn+ zmmA^ufczfwH7mWnPxIFkPx|~suBP<+DgJwpV!W7jtOt1(cIWAQH6Ev3ak>TPzqH;o z$F{EWU!(q+NBMpS9M7P>`F-_oFRdp}tAAfnJjg-o%ZWHG$N3YDAE)+v4oCg`*`K9& 
zBenk#IDe-8sVg6^^=4b8Z;SmYj92rs;s-(QO7x7e|xVZ}FF-pJo3gYj{J>}2kZE$=P=Q}$U>ElWA!PtDs^}-HvOKktS zyh!iIYCL|6v&SytbSh52bS%>68aOJsIL^g!Gd3^LA9l0y*U@@)80Gt`)StC*{2AxRNbgS2 z`c)hIdN{4q^{OlXEYADp53UzV=YKhM)bl`PUH?FwZ&3bUwg2~4d=mB@6~9q_lk)Rx zoIm1|I6bfPNC(~Td)Uoqyrs=Jw7~XPoNFq+2B+85p0XWFd%Nwqe!2qZ4NCt3<0+hh zV_$g(>@@!3!Ak!P{Tr@V{3-RfY+K^ol(?Co@$?GkGus5aw%C8wp%{;5Po3Y+!gd4a zJ^zF+;^`lpyU5=v-U8dw`{3$0&*MB9HeE*k-*p~cjB|HvpTn+(+zs2ooM%Hl9Hw#J zjVs$1<=NA9{;h{o+mnm)sd<;PvwQob!g&}r&E;}6`t-v78u?YlC#rr`7~g)G-mg2HYHB>R zlP6+3R_pbFjIaEY=1&EArQ-LKK5BlO-zh(|VE)Di#9cY9ACD2wgXIaT&m0^t(DfUz zAGw1xzWd@(jrkGh;B-6AU$Z}kFK{|c`)zCN-o<`j#m`{B4$ZLdfuqXzwK*S{a*8+P zJe>Pbzp#k7-BtCsiqBX755Q?1{gsz1K2qbMvd-h%W7}MQm;ScrU^fW+-in{8@o^2d z+sXqJ|B?B%_48)svd?RDDg7S9Pa4~$&$89Ikhy-Vo?E;A@43}Xy@YwIb9<^E|*j)&>`hw7YOYRl1_liaCF z{{rWd;FZ2HcDG@Bw9YM~<@2zA2j_t}R;Ir8O>8dI{mJ=u-QPIH@53g@PhsCt<$bFB za;`;x`8$-pulld_bEIYRbmFe3#>Xg}CS&uE;-8hqi@X*G#p53B7+;Hs+veEpMf-)1 zae771DK%O5^D69H;P|Q1U#5NGA)K#Q`puLdZo{D+&LuD7Tv*z7vOf3i=8)d~NZkC1 z(^oo2)?!~VJ+W(q?d~|pX|~Rp=W1ViK%$cUzPu>_SI(Gzn6Vf z`>fafyo}S15+dsgL5PU~{hGmnz;8 zyXO^u82c|2e+wts|Bb^G;<*|1b1z`uPxteI^1rC~Hd-I1E8Y&Zg#<;eN zc&}}?lvP$nXs-9*^E59%)_e2E$!}|8H;Cu|a5C@RZRz(j=2Ew;&%>AM>iY4#Wh#)~ zWuCXf3Y<^U^PRq5!1UDfRX^U7m+n10!TrVK6(6ei;0KW3m)dF%mG?4jm&)@zXlAP| z8*o2cw^!MBVU&w%_tIxW(pt{qrL#xzS(YvBev}%$>_1z7&L!NbpM%}0_r3??a4?P+ z={>HV<4k=Vp5t5=^`6;S`5oSax}}^OeJ`9I$7ZYd-dn#Hw_Ck8-}=4!*6-c7elPE~ zdT;;#dau9Ld;d~fm3}T@mvD~Db2!JPUv;h|2}<;$$4I!s%UH8hfSBA(dSEoYK~xV+vb+&M9p5IjF5a zC-wjO9Mx8zv)cM|SaGY*X{D_`$F=qN-@a@f^(_57=@;f)=zUzV#+W|*EHC{~Hnz9S zqeK5Uk6aVY*Gi20r0)+&EpgbL^}yH0b`tAZ(EAb7Oh1>~Qg&IfVGiq67>(oEI8DTE zXZd05H|ghyeX-HcEsf?`TBr5O^Soq6$d~JRsRIrh=tsXdcB^oDfc4R@$F8e>jyYZF zC#Y@CU|g2k%DsPLaX;=3;_)bKdMbazhDAJ%)%D+MK>8Z2=kXnEWcTy&McjTxJpY8# zd|Bz;aN5&OBflMk-GlUHd{{rnEcLByza@RxS3lSOS=T=r+reBfAEf=MHubmlv3rE~ z8@Zm+52QUp$?cezPTz-;yJ?&P^Xy(6&sK>kkiv@|wf)${x=8h;aT-W$gT ztg~B>&mGFfdA-s&|MSq|J1g=pN71%#*DdU8l#Puos+PIi{gahRzIPEHr zWL$&}2Nz|Tvo&rO9auQ_(70(KS7034jR*Yi+FLGrtbCBlv)K06bNl;>-%LB@Z*crk z?fMW79pqiGI}ZCd_AjpICeTlIIL>>^Qxt!KacdXibTIv%Ud8@u#);oc@w<=tZ(FuM z8vjPyn&tHir}irULu}qw{`!j7p}w|;JRYZev2RHGnaPTGJE*vy&>QD_Y5!8+`WvyI zLjBT=LyGivG|mrT_XhXlG&bD3y1&)*uicaTH?mQCY&|}9EPKyU8fRg|?Zw`ax*tee zJjvb?s+65mwv4mO|2NM3hc^{z^D5oPGVF)aj_Hh>c~0G*ddKzHt;4SNEk)eR>3nS7 zByNY`aGShB9)xr2TZ{bWBY6t8M;%+#Jsfdc5f4kSX^Ydt+ACzU28X&zKTPY%@091N zXg&K$<*&f;Sn8Ah)cSUX?ym;z?mwVC(&HM3^|3ig>rX9gwH^h<<138gydCkp633}G zaz9)8qO2R-KG?6R`z>pT*>ilweQ2E z&zI5u{sru3(f{rPl|M)0V=1;1u=^c{OR@h(Uc(xfcjo^6qgwOS9`VL{^zSLe-I>^Q zQTjDU(mq|)exq>wobvsLIGl^kZP;!v2b{j*++-%mN8tPh&j0H3TYn!>T+cs^eJ_=# zx#LzHTExxA#N#hG&cUhiK}9^z!?rJTFCSPs$Klva=eUos=}Ub}bJg64U4P=KGq!7$ zzrXwx*E0#nat9aJ56f|`f$b%3$eTAI(Za43ELx{a=P zDf214sOw$I+7kvILVdIr`L2iKdx-nHu*sUw`_cYkG>)5ydym~DoV6Ypm1pK_KHqW} z?LQPphwZKZb!_ur+qyX4M|$_R)`PiRFFvmO-<$eHhhsyHmx|bSR{htiJxBi6c_p@0 zmA)zCE2zAn@oKv0dT$Xg*>9`)(O&U^*fqy~K8{+CQ+4K>SAY5=<-4d~7@+xhit0NL zyLlQf-)lZ^wLk1swr2fZI?sHiv+E4ag>8Q*0|8U#@{v0suNE&aBPe7$Fz;_h3zc0)rH)jE4AJ8 zILslQFI4-?raZG5htj(E2IqP^i2B>J>5F_d_ZQyLIsZ%DulmYVr+w_H#O(!Y?-i71 zM#@|DSJkrbkxT0L_`pQ=up4U>bIyMIT;aT?tnOR-y-lgt%YKijl(v=Dhtlt>S7SZ% zzr0f9_0tY0$}*?QGqHc}BQ{;cVjOMb;~SI4HuK5f2Tq>xbjq{b_4C&BX2+;{FjFO5f*kq~iL%k2Jq-k>7WJf%_ec^Qtk0 z(+K%NZ0^9R3AUY-fAjOyZ%-UP!DeEeqWm}-`xe*@$MK8W)PJb*zqw!G)L(9eO>1ly z+9K`_lSksxShEXialNICJ$LjzlxIlqDq~mrzLwd07xB=PxF3&W!1*QYm*6y2uCMlx z!(Z49CvN`4zV%*3`Q{b*$~_CaVK|?O{b@Kg$99vhcQ{V7aj1s#>#E;xHH-4HzW>O) zfJ@(pD6mN{<+vK+um42hHw{8#0Ye0S6kJw(O`oD;M7aZ#<{rNatw>$TH7)~{@ ztAO*TyA^T!E7vn!aQqzm^}81Fv@7E={UO)V_}s+WVj5~bXst<4Vl#tyI2)&`y54B! 
zpWA@bDCWDJiv4QV3tx}+@ZV^?_#3;AS#R7?^rv5pQyq|W}I70DCUFh#dx%{ zwMS`W5bKdTwDZR!1Ux|VmKX4Idjxh-p( zK06aTmi5(^b$D*s|MV{{=}JA1`>RN64(e9eSHWolE|rykz^+vG2tH?E4ko>SSD!oR zr2Lz39Q$`se*6QwqyFJ~v*ibHcp3XIWUu^P))(p1sY<^H$2K_h+(>y>$;;&@u&MuV z5f63v+=IJJZja4CT=x4*#r?Zw*j7(MSacHLedsZmoX0Gn%0r>-*|CaxjZ>vasx|82_-xjaJxpQTFP7nJ3 z^eXsG>}F%P3!m$Va(EYqPn3QgwpF$(@_RYoj`JtP^VitDf?bX6i}a?0;!AMYAN#pG zP@hUT^xLtpRou?NQJ-r~Rdy=kL7%hD<*VV1H$Km=MdLylS4#!)y4*jC~+=g8T?5-hy zs4ZWlb#Z&u_jvgk?l0`7^xNxx55{?{+M_Er*JIn4>lwATe@XS5%6JIBs(ue@e5}Q} zi97+@gK+MtcsU%;P`n}z3STAKa_*{WyGzV<#NdK4u*2TZ)vQH?ls)7P{WY*uRF&EsDRZ^uO~tuiOXQOK`jy zySj>>D~B$uH|no^mb`%a<$hNd@%)79dk#($uscM48HcreZY^|?U&QfbZ1%vp3AU^F zT%5f{@po~0O7VU4xw%Jie3Z}K*(0dGABod&Y=+^mT=#b`HdoQ#u03|I$ea0`Us{Qi zK4%>Ia&E}S>T{r<;`AGz8#E0$cciJf*xSnXw9;PZH6Lz8*w;>_BD&J>8$pD6_0Q+5Z*j89KO6f|YVQxK7WMZ@@wzyzP<$2ZssBmd8@tzW)OwsR!?_RZ zv;CR-^=q}Be}dgj*vY0IPL;`@hO-}pZrGoO)85=qK0y1&1=#N)chmjOVgE^2D}E3A zS-4L=0^7f^nO}+i`$hBP8k{d@KAN|+Kb|f($M$IKcalF~e#S}cuU57LaImD0=dvG% z8&%#=?e8P8ZGhcmxwp;-GjKWq$D6R9$N9o0>Tkcpeun0A4a!S5(VoW2x2S!G>AbQC z$2FX1>}c92&ELfNrV{1(YjA8UAEWp^ILY}_UH^LGc~90O_c3-?X??2C_2M)fZ^f=L z?U`Scr{FMF-iZC}*jA%GOcNZoReSxz_3TedAF&^R?VhTSY`Q4kkhtlC<9>2eY=%-_ z`>@W3cj*2`W8W70JFz`Y_tzDh>Nx+)dDQNR!xz|Bz-AiG>(rhT6<>`_#O`VOP=o#rCu7@4UZ&@b?%2J7&0E+!iS0q!ugBtW9}Ye7 z*3UoH|L^ZNZ~8;A_ojx6X{%S>VXuC*?5tO=Y_s{lzTZ55GM9aN66rx)jt{#({z*BpmgTF2{Pzd!4|`|^__y+0r4YO@QQ&lUe^7Jdzfao8@WywDkk zYqc-zh*JY>@BEnhJVJfq>DYI}_F$ZD(!Q|+Hs#3gYAW7L^{FOLSAAdqi1IJjx%o64 zhT%N_!y@jswC$E>(_Yx$gWc;N6mj2D@l)mVaNPJl`FEE;#%6}vV=NA9sIPfg>F-zj zcU1aW>aWT;H&B0#f3GOtjHZ9nP;9%(k1JkY{XbT&$Ud1qz^)bhp#4no2iVu*1RVF2 zyD9xW+UJg!UtnL$m*Q|c`&fEWzJq-#?MwaBKkQ4UKJArG#r`z4Z+-Tu^ft~tv`OK0q1tKpLq|5K{yS-CMv#+cSvRowtuZF)8GqM;-)(eM`7C(=jFc^@w8a=X@mVz)#s^IMcnpK`5)r^ zl=44;-Fw*A!RAQDpR2Tz`W=q#=|2~a^R!+y#LlsvYok^i|fV5)ZY7iS2#7$c&xFga5#wZ9xlYLirfq5J4v5j z$Fa5M!&#Z~2jX}XXCb@IFGYQ_&PL{Bd0*mrG7j?=Q=ffT)BfL6p8ZJPr0ZY1hW7Xr zhuxUp_FL?~!e%G(`vI&EsiMl;hxNoSR{GZNn` zk*c5Gd&fE77wzdkChp5&*FgC{p#R-;90uWBh3mO9uvtXhEMffE*C{VQi2Wdy-%{gi z9Jb#w{!%NI-%jIoHnw%JyO!(u2>$qMV&w6XF zRDBw2J^qdQ#|m^-P@eU@^rKCTsxRdT@wbIYP@e0Z_9k_p|`n&$LS6Za{o~H3p zm-e(?z7^;DuwAG2-bwxa4gD9s!TA^L>N8&QzKjQdA2uIpziq4cT<>c9Ji+rrSWA9W zmFJQCjK;@3*fdo9aGqbbexBXA?B|Q6?|F(Fd$PBT)!wFe8s?I+vtzlkXSWZxJizg9 z>*wqEU03#%y*X$4q1gSPbLK$B=VAY#;&W)p)x#4k@_kf-=CM$k~{DtD>#v9r#33;*jyqC(WuktG^{W_gnE6JZ=zY7j;;M`F8pT+hn`lZeCnce zaBmzQ$9WLWW3iut?Q=>$7rUAABJAH#{xvw=qWCf#F30X2`68vCt@Jx%UxlAndOdfV zL$KSQcsd4$ReJ8KhVuwLC)}>`uEV(t_B$xv8t3!$T-H$O*YO-;&%~}b_J88|FHW*M zf&8wouHQk=Ekjh_#)|8Eox%>-UQXQqNqO;H<^LR;{p3fnd!6gsn{@qqvAGoIjySc& zerKHbz@{FKm9T4z!$LiWYCPD1^7X`RU7UuiygPX=jrx2{7=mr(?(82+v9mb+rSvDs z8?kAN?HZ-;iQN($F2H#@w#Vb3=VH4??N>wTf57Hfo-<>{;eKr9;n)#}kL11eTsjlG z&3bMfhy5z~QJh}EX%fz7%BvM`i_O8**YwA^i|YThe4RW2TYYa>`cdvj-0w{N{5W|x z99QD>Jn7wUINyfNze?X3hs%1fzdS5IgY&1#|1@?(<$gFEfU~|Q#gFB^gXygFPs*)S zUR%AFxKR0<$#*M#J#0r}vz_vPi&HtAce^I|l5A_MBa~6yr;dBwUH{v)9n|;-O6P5mK>?h-V1rBdw z_n_iSvDNr1ojr3Gjn8$&(*TW!?^WKD*nXk-yNc_3`NCbyk6cstHwUK`>YqKB-*y)E zLG!&P&gEFI!uJ|q6S4mSo6fAK@im+lYJGhiyDF^LM&sYerO&5Xr8i${{(MV*Hx%1{ zRGvQnW>2F$cQ|o#wf48F%Krln$Klw8{nekZ^!?bcQ$K7!)_GwN4&8O0xl`%8a^8t0 z&%^FKrN4mlSKb%rl{%lT)_Pe{=gs%E-rj=!{fd{<`SlLP@6mgj%jGk49^U%%id%i2 zahI~+vn+i+-Me+g?+WB4i;8zdp$osWnoch(Ys+)Xv3h7J`%hY+?+X~OdN1SmN&PHr zKEp-cE!hsnW=mORv;+D5Rx^rvr|JCus~Mwn@N2sEu9R1*iy1{bhcAiy*LCh{tlv-m zi+;;96@P_(_3y};aS(sTK{gNQ_i+>H-AvV|x5mW)`7g$CJV)1C&A9j5>GyRH(7bw@ zxLd?LH+^w<75j-Q?`qbKbgtUtcHWnUSLKSt^MSOVyC26Na0q(8dN2KvH_>V zmuUmGn{v_qrM`AMlHZp4em#zBN$>a2^?${&E9*|W6q_5dlkIlech6V+RJD)$y{M0C 
zjNK_Xv@Xqi)&FapKi2rUl>Q3MvE4!QvZ~^D;#5cN|FrtwVSgLrBixMRk2uay`;EhP z12z|s-#Y5!@1g(8o{x(9`=!(;pG^JjdDzOazv_3A?sqKfggqS_J-2N=f6BgUq`WKr ze$aJy6nj{>gSIjoZ!Vl3FPj@%=3&#a|CRnJ=36pv@`1OIcQNxO>i0^+FxDS?q~dEe zk4nGy^rh-~H}SMTF8v(Lhejv($|(KvKUueP@OyCwck z;&Fw_e*=e$_`N!R4Cmo+3AP<@T&nVS;d*YFte@l9W~2`XXg} z;yfFhHjG_=Jn8*~*!08pC(?&X)Zebe?ilPe_T35`8x>8N?%dy@~0}Vvc}WZ zIR2>da|JH_Tr-YS`p!!KlHxbuR9^Yt(RiLhdzi|s6KN=RJ7A~3BNJ-Lhp0VUX+C|S z{=5mtdvK08_r-C0=3^X)!_V?iY?_qL572GYW)%@PF zmTXx=wpu@SE&F}=pmiu*c~jB%ZXee8-1Az-*5I29yj;KEI|s*3*gVU)az|nNpvKkGK1E&fDVhfhuv;eU?_JsMj02N< zQ}w9iof9$v8`cdm{o^-?i+PbWX?c?$ulpmVPW5{og$KeK*-w)4+szfAvymhxTL-Hzj_ijTx$ z4*l=j;q;c#V%D2~G^APq|;;j3LXX11^_wNqGv60$$ zH|1}J?Ud__>$|3;&&SK#F}}=xIOx4u-W#XVbIsc8iv0d%+RIJCaXL0V<+UwLP_8xG@{yZ&FwE6rthC$`rUw_1mz{{BRsL;IMfy585=oh=Vjd>@?7#Q8v+ zPr<1xHuaSLK^(R3xF4}Ul(?;<_R>CT>Z`qeRDT|e^9$&I`DXHU}lvJU6U ziq|B)dr0qrT43w3>4C$cINq=H|L8e+xZ>Ynw+Q=_a5$3j;TE!X#vA2HIzK-x>%ER& zihb!lPe;bLdtc}4S=jun@=nltwG!u%TF+in{k~Fsx$5_W)}spCkMF{InzzO28SH;j z{%7T-IKPDBP#nf%_dVCQ`ul(3be#0}nY`X>#M0ky(q7_bupi{Ur1$f*f4r#ti?OZD z^-}ptV=7whgMR(f#oP>! zq+-vs-TN0#f0xajE&Jd|ZGvV0NwuF~9nkw(^Tp$Z;|Rv79fH&OI964>wZ`?V$BOj% za;3it2c56NzmIb5R?L%l8@A^%kKNRViJzu<{>?*_H&yfe?*|L}Ep;!eZ+rn4_p+t_ zov?|@zc04UO6iA@zmewgB{(jjzF`NPWi$POBE2b3dVex@uaiFSIJAg|pXdiOZgAnW zR{ee^j+bITZcq`A9W-vP#bI}BR}3uTex;tX8^{-6w=K4NE57mmBE5O%2il_|@#J_8 zP8$^eSkKL0;5ZnYyKy)lr}j7>E4P+E;kn#(!S)QjW4RfpM&B3Lcd~o@yTYb9@$k{Y z!eQ-w+<*L*cmr(uE+}kg-OK%#!~QaCmVQ&jUBdz7ANMu+kHlf-d_3%)BA#o1S=g_< zyRhr`dEwj*r-$bi4(G^?aPH0AFcs$(@zPw0AALst5t=)#uxmp8@bjmn|3uy%=YO%i zXigFLm8hS8adu%-9h*wn9f9Lpvx<1ECcpDhVb?;QgyRtGx+=a0?cwhKko+&Iy?TC7 zIQ@<7C+`;yr_;XiqW22hJJjB_aGHv(!FdACliw}U`-gDa2Al3U{5_L+z`g-aZ(`f~ z?ILbA%bQc-e5=OCXdD;e(%R+LGM>yCma36uS}F&QttP94+(Pj-h?s71*_){ljXl2Yb>#xzUhfJlcrE#W)=E zG4(qS8%O{9J8`PY_y`k~|5=2!sbMr_R;$KAddIqv>kwq__5Mg)_&Sk9*@&_ z*3Wo2`>#6)r`FiW?m8Tr63?=)OL=y@;;+k#a9WDZo;nZwj?*}7#wh(-Y_Gi@NT61LktMSG4`yd!oExnBGlhX>?cbk>=z`W%Q; zCHA9eu<42Orz-zzY!_hLlKsrC#_0>3rYZlo?2mpK_MZK=bk>TWvmd8lmA)4HwS5M= z9oX+(8@crJ)m~iB{Jg9fkKq9MfFBv3D`_7uhc~fXPkMi^+E3@oa3PL+kl*h{`@4#q zU(#Iq$JLj&I{)on_Fi|4&XMLf?PX^&*Ua9$i*a?z?xkGW;j2~I|4RQ3Am(3TUYXl* zJPX@%Z!P*Y9E-#I*jLaz-1~OM$)(KO&>Q;&%xkksz7D6CdGD2*Vt z>x^T6Y;`Z;H5_kHd^z?5xqd#SUyu_Zn+bO7C8Kd^94Ar!{#pR1M|=i#_l1U z?v?++@fPKu!#wokvC%v;cVOQ}^Qct5$9}BRe=Cnu`rmM>!q`gnnP;XNHqB(C_#hlU zBEQ{_b7)$L<25)>lWQ_h?Q;1NY!+i*mw6i(;?NDd&#_sJ?NY_7YF?{8aS%53s9!97 zX7VH)4%NB6zwW22)&c!}X5S5$=2jZ0_&cQkKhK|EYCLS2Yh`oH)y84H)}ikhkMTF! 
z$Bo0OmD+bOHVd^bUW-G-=`h86VLL_h<5{i4cWXWzj&pm(Z`SzO7AK8Y_qyiaG>vzU za|4a{KbWuPGv<%$ruW#-XgzqI`JMX9O*P*YH&5SI%rCz^<>g&*IvvMHw4QXr;bfeL zVmpKND2&Hm-}mR`cq?(!hq$>~>Dwv&WjLRq^7dE!H|kqj8^f3KR_oy&##DGr>uTE2 zy||ZD_pPFDeT7R4M_;zqY*|-_mKN2rzpMJpl|?KZ$+~2o#kQ{2#l5hrgQFapu&$MU zpV=OWO)t`$?>JAH+myevo`<^PIG6L7uZPpaIKA7M`gD;;;Mg3;`>=gp=fmzw-%UOp z`*zsws`QhwTX02DzWWvD(%*SHLFJb`3g?GNpAMw{@p9#VO83(ehi|aI2*<{>hg00Y zuJ$@g=`T@xuIfa6CSms{&Kc*;*dD5VV;zncV5{$+h{v#x1pQsKqOZ%k*{|+clyB-1 zkC$LuL+;$6h^LcrIu?7`Z}3IjexUJjz5J)@H~+FC9#7T%e1*duy5A#Ip7udI3)>Ub zKKi>~exUmM3+m@@mup^LlphwVe;Z@dS^X#HXZ8GW8F71(JXGn+%d-@JUC+;p6z_rU zY@BM~G+gOB@;sAzVSg-k^>98$@tv{hspp@sxu0;G((CUUhFul^fc7v!&sWFbG>zxA zR8j5yF%H{ezmn&>R1N30*t|^pTR9&ow;`Svw=de$)Wp6%w!L_MG_6=G!YZCO(~j8B zz@@dsUx3YV#M3=`UfoaSRlue*j$?Vgb(3%yjQtMOFX{Q%ou~1$O792s_hiy2>=zLC z58=>I}~RYiS0IO4}Jd9b;G5-EbK%37=3=uZB%>d^KtPf-WP@D zlxIGqe_~Z#?`--nZ=inmUAgrAPbV@S;+dom585{fhd2ELWZN zFTbhs253DjeP7l*`3B{m!TM@W!08G0H>dtI&uM*sNBup4_1_M|t||LPI!zARKYr<2 zj34(TcK6HI;BYRs=i#`o;&l~2`6}|)P<%4uIseG@!=>2EuA;m%>9;=b)GYh{j%qqL z<*!~S%FZ*NEyiZn&k|C@vc4);wwW&a-?_=$z;mv>i+;~L%bOV|W+m@2Qh98jl(%Kv zrCvC{q;>5!?C;RJHXEDka9)I6Q=HzG4`tlv?{Uh^i(H%fgsX5oMfDrQyviqIGnjd3 z^q$4+j&on-pUu2=cjDMaRy=>Nd493V>x0AJ)W}j?-H2ncorV2ic>zv$ z>AlgmT31HNA7Jy0(s$!}?r!W4(>l{e>HA|}9h+wre_8Lr?#A{6xt!9!sh=ZltMYHh z;W(~e8XIPm?q@jhxJK(}9m)$|Ol2(XtNOKm124vQ=9I!|1MTJQYlY1u+A}{nsjyp( zT`L?9da@X+_81&4!r?r{@5AmzoF?G-fzl5fQRGi8CKu($pRt*U^YKp+--y#toVTNX zZkFP!6yJc|gV@!6y2$T##HQrYTtELx`goG+GYXqOCQ_ekasB`Y*$v0(F5>Aj`6i|R z>s8wCI2`W4aV_^3+TfgU?1cSQiceR(y1X8{uV}w4hYRHm#M4{quTIp*tk?XUgMDkw z&+T;o*K0iPC4a?uF?-{9ug2pV@`sK%O~Up>Y)@AC2Vys$`h+IRe?N{5xLzJKp=cl9 z9=ldJ+>BEtY~?)k71~pdr{mC$^!BmwMcm2e6daZi4`EypPphz>F}860LDy@D!&DqM zy-51!<MaJ@HiZi!*v(| z=Fx@y_oUCK;Cw9oX)0iM0pr0`#kM`;r?i*YTFeJOhrK226^r?i|Hl3*Y%A${Q*hW; z*V}1ik>0K#y*(ehib{VIHtprXI4n{A$K+?ZKbLSmSnV?f$Cu?Nuw5_TgI!13*FK>9 z%dvkQn>zG|{YmK^PKS>w>K~t%pTQ~7zxG-AAjX61it{v$mxhY}EbolnLXDqyb-mK~ z8iD;B;z`a`7>{-gar+YX4=a6-mx}&&FQ~lv*e;N(Grn_2t{)p|{QGB$`|~GYQ|H;j z?qBlz4{;c!@(&(Of9{C$McAyNKIRVWcOahnVAoIXB(KM@FU~!=zWo-*b6#frzVKXe zJ(HNfsq}s33$eTJdFo$U52oVyA?rbw%`U7b_7tVxr1hu)4ntV)!(y)I9%FwCE3y9p z+jXS3Q`ry0O6C6(n}={}{AN-AtnXJh!*NpF{2{+bdRLwOHtGA)O)YFzj-fw#W4kwJ zJv#x%l54Vm+h0f@Pi231Yq9@X=Yx-BotNA#*c{0Dsr3El`F!lpQu=Kn&!8l(00poQy#pmL12zF<1 z-nN5eJ!jcRmHxDkiu|b`jveG<6(5Jq_iFD2*gvE8m2-dWrxDK;6<^KubJBS2i__y8 zf0tl$nZ~c2>MH$YrLQg@M)`3Q^CLZk!<#s)#wOsnSJ|^j*<*-nsq|Z(O}4m}((jGK zH_U(gF8RII6Z^cb*NC{kTG!iE>02uOYOP=Gae734UFi?idbSPMv&&ik^3FI9#&$XB z^I?h)#%3t%r)_}!Cic5jYOj0QFZ1s7zgf+GYVOl`dLKJEOjLXd@iT|R{J z;-%QOraxSB9EYg?wtBwXv+VP4uarLjwqK88Ey;HcqU{f9Q#hVjwicBu>x_wI|115g z-hp+eJ?m&X$dmRE>_+lFD-YH>+oydIH~Zmm#HEEpbDg`I;#8M)J^a~@^sBTF-Hz>{ z@&PzMuYF?hB}IDo3=Y>~-y6FwitnWKLvVhSeaU@+Lp_}J_sHW{?6dwirT5yW$1DAU z7mKzs9poW6eSL9JpWI6BBmeyZ>35WeVE^Mqw8w$ieuv{LV~X@?98SIDF7mgH_%!IDV-3`jJK4Tk2z0$<1(@ zgYCC+x2W{oU%u_>!toH@|36O^PP@x@Vz-w3xhD1-h?AV&98CJtiKh-Y-61a=RK)H3*glB8(fw6&jRVt*3zJ1(KTI2D`uIIqL@57lRH){l5L`R!HM9?t!m$F;uvi~VGrr_+9U zuJWHxd%JIOnu<+%*2}m8+eTVHw;firk8g?nsW@GT(?A@W;Lwi#O{d|wkH*tbrQehB z7oJf(;y47m-5I}Wl+t&=;YS>%;=D%rYdl)iFUqbhcG@r8{+b`xu)nzJIMiZ4veV?6 z>{n?&>>77u{W{?BqI|Re<%R8YIE=^nvnPspzN1qS&(ntE7qDBx{LihgDB|f6)|1=} z$APRjLEqC{|e`mb-X&d zKMld|C&k~zX};?Jh3x{In$iEfEA{huY-ixmhVk*ubw6E>-5b~}Wqi#)+@FI!4@+Ng zf49>$9}d@Zc@tg#bLAH_KgQy4?D=KBxWlynsQ$k4VK}VPvBYE5C*aVPczOt@9poPs zUlZHwI+gn2AC8x|=*;{)O65E8`Geo_i87yk7o1PVRyMEVJeas?_GBp^*3#!`pJBI` zK2OvBE!E@mk^D1p|D`;0Y^k5F;PV%|_tP9-d(Sv*9A2vS8&9CTejkEahRr5w&+^>u z^!r-U9TQ9WIG4Cz&U1H~MSEp0TiL_f9y;F}^;|xR@i8A#->=X3nN8K+vs|yZ8ctv6 
zdXJ&MaewBA{e|}kre=S@*~tKY}sdOppySx)ZHmki@t?_QZpbt;{^th&jr{NEnL z(s1@A?iOsW!sc1k-9nx%@2b4?0nQWIXTdQDYm_|4_W`A(w=L9{S~tI zHRdNA2WX$t>|w^XmCE0MgZ43YrQs#+uU7jNlE{$kz-xG%BM`x842#~&Uq?WK2Bzx|kE^SC_!30$pz@sq`Q z4bDHe8cX~)Z0>)mIPJ%H#uHf^>2b!px>oY8%m=%x;*V-yaVPb|MC|Hf*O>X_KIC}u zKAgVBp&j!v@1*)W3@&-Y9fQpj=Cjk^+e~LN|I_=_Pd#zC6T2buUD#g8J|Q(%dBb`# z!x@h_N#0C0TAzn9esMDK@F3%Bx5KtM_8a5;(C9LLuB+lBxIX@A)qh&=i#lVoCw8q= z{zqN!!PtK-cT#^Es{CKrx5eggJ@?e*cy1Ntr@L`}7Tc)!Gujtv{)8^tr`%2dLu>XK zZa$7%vaiUEIA6CZ`-m_Mr{Clzj91p@8TLi&KGHs8p5i;P4+t+|ujkr-|K8yu_5sNe zH!Zl&rzds%OXdA>T#>6Q={dHhzjh7c@i)#le!#xLw!yxI_8kKmzx)^di+Vqp4#bsq zSTBt^zofsHS=~3LZd@N@wQs9~;~6;3XFb~njAuB5xP6}X%+@MDK*!hL!?Xi&?neIb ze^B0iMZPgN;J7OJ(`=*bIfQ(hJ8-?@lghsjGe4~Iv+pbKruc9irzyYRq5D_O{ciLB z^ZlRUp=o7o{LfdFu@3{s727(MYrEpU`G3Fv^PSgAEY8uqUGi#ieh=rx*u2R)4T}{2 zjCCI#!Tx0Bfn8O8ZQbYcE2TX%fxHnq%958k{sQH(SFt$+ryJzG6(9N%{cEc{ zcQ_8W;{4|e#HT6m{W!7MAE*21kt@=^^NXnZweQ^gI-@m>%oPu5F>!|;P+JE+1 zyhP>K#o=7e$KMo6JYFe}?o*uh$M*W(#qI^QH@_$4U)S+|>rrg#sr*nJ_F(+nP1p~> zu@$zPFy3MJ?xlVnC~tF3aafGwuUFH4Z?1n>iMXAJV_lVBh|N!wkH>Sp%`dDqqxn!> zd-ee4qwPoe@POv$7dqa)FLJ#b(!N`Q?bg_Q!2FK=)t~oS4|aRb$7((K$8~;BXnpOX z^ZyB_zi~W<^%e$7mw4=<_#)vF#lkeA!tQo^>Dd`w1Ky3}JkF z$yZ~)fqb;~d1t*>&NmI`erW$;eB4glm)(mRzt?b@heLht(|#oNV=wIVcUseQ>89KYP_SM0m~=6c6(h^IT45Ajj$ zQ${ks%oW;4KE?d=)qSSsV_x_W{ylfuAFHvroOKrU{b0U7>(CA+p6+9vhCT16KgZ#u?~iqpS=VObkP^=` zWJmw)oAQ}BcE@%Hjo&pmy@CBW?Dy074OjhzIIV^A>BGwL>@K`-FdM{TH-Nm8Mqqm* z&g*FZ{W^BH;_w=_4IeJ`O<%<=cFWn<*+w`_B+tcDasE~BBPL-#1;=`N-|+`?&peE) zbJ6rud_Ci^L*`nptk>wD$nWFpEt0V-&S%+e44#| zJmC7`yjLY>t{8_t{~ZUn`JfUDACe!!gqw@gSnaF2V7E;9WEbquQT|z1<;UQB$3WV9 zRpocb<|wr{>ZTI+UvfNu9(MQY-nbRcbro-j%@Fp1<{;I7Q2WZk1L$uz?NgiLygAOV z-&o>#f9-3J!|7i3z3!|2)L+29JUoNLXWF-4jpM_54!BCbM$Zjxuv>6L*~f&9`s2I@^{t+R&6+sP4CVNFP(RMMCyq<|7Q0KZ zxe>?7*p0ycUfPdCaNddjgtKM!Fa3H$X)h-A_dFct;Jg;jildzb;TIqZmQ{Fs}?Rz*M z%=m>RIG!x8to9?ef6<<=dW?=YU*#KO^CLFDb}#*jy54p}#Si8BnRRe}QrGh(%G=Lz zcn-UNu>HK+p62tB)UW>Dsc;px?G*2Y!_7FJrt(@J_6OSY^;kb)U7Rn#=2dcun~STt z#kOUAr;RBe50dIG?8ZbQ4bPG~WiR{JqS_Gzq(f*z9r(>!&&EGu6fJP}XnyoAR+K>p#l&Qykt< zd+o>_&84M%E#)teMgPoIoNi_ zxt-!KsQfKBZh31N|Fk;!-|UC;A-W%IhuwG_=TYB`sNS!L`zGv{b4S(xf&H|59vvS)^Z>#;S6>qKfXRE$d{7ZQ!o&TpeuP#r-c0=|5 zBplbkes}p|U7tSsJo6kJ_K`KeY){TF{G<7B4&zaKf47ytpSGm>{ia*E;kvxUb#iOqd@?qZhnKkjhx>B+3fp^dJcRQ0BIZflPhMa5 z<*Mq@&X?n;=cqUZ`?WP*k1IY6`xnV0@z@b%yiC+SpbJhN$wRp# zb{~h+8EB!Ttu$#yEMman!w^e=pT#I{!_VY!= z?HxM)bUoKTjqMFM$>vOKPE))A4o2-g$8&RPio@;lIPCR(+V)nB&*8*l8^+ICT)Cf@ zxoZzn{iC>^_Im6)C{GT;Zcojpzj%)K58$Ndp!})aPVWU?*8DjhhhaDzsOP4Rv~RxG zdxhWWpZQg8qWi{bs(&iyQ~kL>H;M5!4e5`)iSu_>{aab%^*i7wuYvN%T_dVAQ z$D_C}2EC_n`n!Y^et_eftF-UfUFS1{eL)(< z`I`IXBXJ(6XONAt`&sdK_1yT8_8oU%H<#mu!{pEDpWQ)zk$pj`i){};aI&_tlq7}Q)A*`Tvy7sRs9Co zMb&?_ONqN~950-K<0kY!)s;_>2VSoBaQ^O6tlw{&re9KQMydRai;MjHjHyY0!SdXDE&Tr_wPJPBdw`BeLbFiI( zLt9;+tH~F3RqQ@h{@76U7t5F6^cIem>+QByydU!;yr=fpWd8U`IKP73W|U7aTvpB} zAA`gC%)fZ6+WSTE&#`|VC#!t;F*Y|VUyq=^`2f3raM+&wn~z|=huv`+i>>l|I-B+5 zM-mS|VDlX|XI{bh&*1)M?_@p3i@D$C#n|nk`?qKPnwIPbLUnzAr2R}s)_>fP{e(G- zd>uMy|1v?|SNoT1l+VXt+Zp?tN0Kk=lW)?1N15*jli#g=ZZVC}{YF3c=r$u?`Bg?y zKjL^k&hP4e@G|zz6hHKF+W(gJ%p@FIs{9k9OWd|2Kf24t6#EX!pKCoq{5j>@$FSR( z{Or0tS>j==++u8T{1nH&*lw-;%zZfPer}$_X$En7{!`T7pZkAkfWtG|58R>l_tSns zw%u@kO6}jm{=&_|ZWZ=JVGG)`OSL~5fOGXd)>Cq`{$>8SHJ&d0$^EsTJ`e{z=cT)_ zkL=guo7jAd?fTD@`u<()_Q&BjoG0Pf^k(MkRGdG+W;Rax+`>PET?^hThG$j(EqP_0 zUHl`rmG-=zWz6Td7u(Y)U!G+uk1_F6o!@zPaK3HS{|hOfkJj}WNBeme*Wd4fV@Kvo z-dM-4ulbY|pP>1=1g8fc8S zj&HM$;yrRJ){XnlgdkHa?D z?ZG;6PjP(T7KhI`A9EzmN6N=wyNY}P_N_iB{k0RZn=-YyT$>e_WzqY^rn|~N{%*0; zbFJSS+l%R+T?@w-IiFBRalLP{TVTH#=Wpb=Cia`D{(wMe)_#>-yVo7zwfKGZ)fVhl5svx 
z`|yc4+`{_EJyiZq-S>{hW@p{E9QM!ZzIiE*$71(^;tdpEtoR$;ckNx+Y=CoXoIcg} zh5g9+gxNTsuk(9ap7BOGpVSVgW3YLEeSv$I+>!RvK0>)9HfNvV-zFZPV_#yoq`v!_ zeM{OE``_8;$$>i!TE;~#M5ya zpCjo{4A}jlebuiTuYuZEUBP@yAK|F^mu|%A3*uoHm0#oSGJa+j$20rm_?i0K58I&g z`vzBYv%81+o(B^TTWbCMh*LMMm%p(6Sn=w3HPia;O5BXrdLF9sm%c~;CTYCKV|y~^ zpPP_x{H@rvR{l9h?uPAZDu27;vo*eR6|YA;?Lt0F6IA~KY))gm{6xhc()it?`^g5{ zx4uEX49_v&yzDLOrS^W+xbpM#)%%{AK8Lm0wq?oXzA^XZa(+0 zQr66uhaXZL_9svK8?YH7-;P}e`Wc6DEHerF6L9_uTlK}Y;Jy(i6AxXn8$o;d3Hb+X z+j8IW4G%5-v7>Y!+7nmrTV@D$e-O9*IiGO6_5e)|XDrUZ_DLK!)qU}@BZzN-!(&Gl zM~Cy-#}vE66`$S;ucrDf+fcp>#}8+;ElyivGvWl|J7B-xiN*Or`CS}0SNmV%Fb4Y` z*e{S*!fy4M*3;X+U+}oGq z#Q`|ne|B*-7r4LAp??!p|NRd1uRXS#o?9GNQ~iI4+dJuB?16nFY#+d}Bj@KMHj^|y z{c!4|@gA@G1F^e-S`5uuKlVawW@x?5#Q9e29#{FDapm`PQvVUw zb39G1kMkQkp7Nu8oBHlF<;NAdxpMq?9r-nEj{S$^W4{OW{R7-L@}}5r%zY=0*83)X zzj3}w=i7$+U^rIRzBu1}0sVbh_tDF+TbcWEZlU;k+~@5{*ldYoOWHF#vv0_UW4{mk zko*bPBQ(~&r6IQ3XU5F+4)+sJ>)`y6{OEa%M?2cLr(<)n_9aK)uoL|?Lsfn^_7QF{ zPOY*3koAyu#&N3F;}JMqt@XBp>et0_0qy(suxW|ICD>i8eNYFSCu<)yo%~=2X?!lg zW*82m$S3|Pc^i2Txhwf6e#&_G{^Y0d9X4-~&)g?Cww4DdzKDEjn{n^=?QnH3;2Oz$ zE54L`YbH=XuZh!a9PU1e>+u%0kCCrq8|jBphbp_}OtK?!RVy+$i}7&4>SBGf>xK3C{0hJ3{66(R?`?hZ}I(58Icq zeWX=6UND*ueX%(er_UANj`d^5)8BME&iiOTpyxz?IX2B%kG3A;VUEG3vBu{<`B`kg z!Rc?Dj%VNMn&|%i5)PMPUtjyux3RfU`_i{m{(ku)oa>NJ{MI`D+wyk09-k@SbY*)%QKmKT+>eg=dn$gF%I}EdAvnJ=lKNX=zYR81xlf1xj39n3c5mX?s(l%^{0mN9$y0Wj z;vXyT43l@j>0X=%k!M0%#S?kNZjHkmN5Uzk=zoylPPabm)D?ue=QD;uoS}+L$2p%hvDpEeZ&dy; z>?1a};5blu^gES58K>Pxm-hS(%DYF5q5a>L*C*j>E{IF9eNK7)4|%q7#SGOyTXLzmJ`M{yc81_1Zx7+u{^y-}J@yCLHg< z;X3SJ!fp`Gi?F{%@gG$G8`Xc9eBq4xvkmzs{7O9Z!s&CB|3Ufa9kq8c`77V8@~dj! zbON^Hu-ONj!R)h~oL1IzTwBHO#!>ZiFY;?Vm$+|0K6dR?{u1pY9d^%QzZuSd;P4;j zYuuasZl1+vAh!45@VVl>a6VA?i)~fDBeplN9&B@*kJ5TMUGbXx+cuT=LzVwmzVfKw zSSwR`$Kj?fDNe6c>aMs?4yD|R|HdgxODqo6Jl^uBV*8Zl`7_wvuX!!|^|bC6{!H6z zVfP)b?srUsUugd!_7ia<9Bw_j+y}!&l#c_jxe(`uiZA5&X*R}pW*6$awbh@NYOgW&gA_jqn>%s5SiV5cVztC7T8b0 zN#*@7IJY347b(x~f#X)%FJ6hmb=bY3_HI&qU*?{z$NULLV|R(%1$*VyaIfNniRWRe z-$C=Owc;PhXUe0QZ{Eo*$eZCDY{uYpAxt*T;6L{Eo)&VDehlc%@gA2Zw4ryOQVP6r7&Lp$YBfaX1as@#iZq zYOR|V`s$+ zRrvupf1>$Yy&tTq@}DZM_pNC$HuEW;Hs|6-n+w*)K9;0FZbtDJ~sYO$@i|Q>UYNG zc=Dg?g3}c3D-XiCE#>V|^7Cr%Lisp3sQxkX%Q(!VJu@2{JqP)@*sZDfAM#x757y&+ zou1S3E;vnB{4i`D!|5!QzZ%D@a5xLw`>{P9=Vw&^1o=<-CF~8?Cr*`D)Ad@8({m*dfj-!qjd#Zj^`O8)Q4xG2e?i%cyD82&@%4haB?tft! 
z=i`@fzpUNAZClv~?pu9dqPVW-S>~rj; ztc!fF+!crIu%G_}=e`%tvKy!OTE8wX@iYNPIqoiB`hAJp-EbI${g3S1{7v$6+Q+rU z?oOPW$-{Bl35Wlv{yK`!RD1($2V?gG{qq+qz68euasF6wIlYR_Ma08!Y-M|=;=2$x zFDSk;wsQJ}eXM^-@fYQuIL^{O_YCa+z@{zEOK~~@y9p|PiON5N?QPihRr!7@f4<@> zA6sF!8}Zy-@m*z=k4LM$W$gR&{@A>Y<5oCM!(j`>$K$j;cKsCJ9H*|Tzp~1A#CF|0`UzUnXGJ*jVj?bq1I>1lZv)sHx>i-R2A{g(Osy2^ip z;{&{BHHRxc1m|OL-a_wXUm;h-^%Q>%+g^v2Tw-6xp&tG>x5z=cCU4B=_SJaUiCkpY zSj2cKH&vZR5|6(UHxcJ0D!(@Eg+FoF5W7w2zp10+ZG*$AIBqBJgY!y^SJL+x_)gf| zq4v7s{GQ4`jNLpOo>lqHIe+&yw*53d6LESBrx$TtsPUSv_%AsA2j}^!zYK>qjDPw< z@t)YIewdBjEVWmc>+Qdn*U@2eEQ0GkKpt5p9l?C-^H zW99Q_a6T8O8MvCi(?a=d;(iu3Tg$&IUJtv5-2c)lYHxQO7jS%ci2Ni@XJa=K$8&M$ zg-ut*FI4%DaB8CRKVrW`~uy3 z@iBQ8${obJT-LbW$VJ{;#Y_T_xG!{#__SI2Q2PEV-+Z((yj&VR_QusdeI z(w_aiPZ`g&QDy8a!*1WhrU~{9uw8L&D)I0`Z^r*voTgwukM{B;#m~UzdK}s+elyOm z(Vx&7r$N~M*sHW32V*}IyJqrgDt{&8laC-C?&tca_2lh!J!bSQ?b)+&{#O10yD#Pb zv=<-2v4h6{5bRD;e0~qwy9uX9N>a=$~2Go%55^2iRPw_&f5^n$O>0 zx0259Th*VnSGgW>hT`vFH&Ff=+g8|)m5;>cMb=XueGTU`4#$!5-db-1aQ=CZa=g41 z4s&pbS93leVRHsHS7N&>&Tpyy-&d9LW}M1@hvVfczY)$`V_#oh1G{x`+I4s4qrv8= z-58HKSJI!acP+MqaQGJIs}w&GhYmQsxl1XZ24HtDHdo+$Yg5`EitUd(7n|XVUy4%; z?9W#G=WeAxemfky$!G7x@y3(;LZ=;R?|B?g$MFg5THt?k5|hpV|Bc5UK$h_27i zsy|0=spI_z=Yz5Tl=*croFsQ*{^pL@-HDy@ zrRyobOTP2(sJ$Sy!ex>%v5N6i$QW6@97nAzy&w zGS%N!?vHI99L~n>wXWq{TqA7G!)bk-26f?F2jI{L+ny?a9ri~lJ{sq_I-kLczlZbQ zI2=Ryye5vlaD4Xia=f%q?cam_PuQM`?PQhT3!Bfd|MRj^Kd#4k_&+W!w!aY1H(|3A zb{FCFr~11d_T90WcS))5&X7mq)C}9Zv9E{YwKz;w9yu4AA8lwC!1)ZEZ^mYC9Hwqv=7af-x$5r1=|^l njRPH0@pyQXR{ZB-m^ z!09xcR;9mb7o3k%|MyY(-55XD%+lUTIDWk){drL1dm+v*%QtSp@lN9URo7a$4Ci{p z(>>U?#BP1fk591q2!|fD?+?`cT8cw!>>4#H#|!5u{yI)W6rW1}^J_R=&iR>{D!)+Y zzcTB^?8*4Wr*Rm9{aLIx|24LwaoUaX57V*jA#b4d-<0cV&&GB-4*eBhQ~5->shV$^ z7*ppl<*oGU2W2j$wN@?X>ifN3Y{pc^qE01Q?4hK!;{U8Qt;E8hH7WZHb^UGl-?koI zuf%P&?O7WZr_0`@ZP~4X^C!gp)l+DHuDl11S8r76o2J;^fbEtz_i9+;p)NMZZ9@5p zZw32fHFak_pkPj>IiW`okceW&_!ee2V| z-q`GiV=wFm>iYD={)BZY-=5>8HrUGk>g#Vq*n4Uxy#!0ryu3RHP~OT z`a_v7p}*$ZeVR|2d-p-u2$mtNyXQ~w%eQKdQ|6f>Y(vmo4K9z z{pwcE<8(dl*8FXt`J?_={oHE4m$noV2&f?HX^LFBe9IHJ}n`3hs z_HSHJ;z3UBu)SR6KkZ!N`3jC7XUM;>j?BB*)W_~t?0YiL^U>I~WnH>kI+gb9`C8YT z;dHC!?F$`=&&07C4xcOD3Y$A{I8o)-#py`wZ)Bd^ov_;wyRR7gd^S!GV%r&4uZ_Kh zb>VstcW-K4yobYE^2+MZ)^Zme|47z>U4X+^4Oj=maa@Q)ADq5b{01EU#<4#AiS?K- z{&(zTdnxNY&Q%8% z9A~av;y&Uqoci9(EphV{4oh-zFgP##lJ?%GemZboabB$BwZd*E?2p2+XPt7q(Cq6{ zJ_elk#OZcJ|94kh*E%o9<}u3q*S;zB&AD~y|E>#ZuL1puO%@fq8#G?m;Pi>ccl&RN z_htP31K97S>;C}`_pVXecXJi*qCB@Nb1}_O9?{&)dy*HzF&xjFt30s-{d1QpuZ*Gp z{#qOd;P3$DNc4<6M>BAGH`9`%AU|ZLIZ? 
zC~x|)e*6&fUY`Fs^T#Xi?T7R4YJV*2H@=0#AFRjpj^Y>MJVwWRQu$y-E~}(6_dWT- z9-!xg7nDCfp}xIE?x(yymwaPy(S4&S`N$68z7jW6J{-dM)aJ*XRvc6N!D_6UE2Dl# zj%!kx>vbvxRj%^?Gu5)yJe#inka1a2wi2(_wXK*(z9tXX*5A2OXI8cS>NDJq$}!p# zn-zQB75&(_@~`?;Evx6f+Wl|s{y5d_pZ{lnU9y-MacHId+y3nmw@b*6 zaf5e?gZl1H#l9=&k`H>f#Pb5>tFbsfM}7A)Hq+JK7VpvCCLAx{huvy8z9V0X&HM6T z@}Fz*eyQ*GBmcRFussQ<<~%RB8On!e$ghzP?XNg}i))`JnpQq%?4;}Fc47||Z}_#P zTXB6(J&SYguj_WxivN+Xcx;Cfi-Z4Y=_>z|zY?b|IF7~UI-LHL8!jv5^AYD#zYn(S zoL8LM|5@U3XY7aIus3$y|0;1K+oS(3PVI>2IdX&ZOM7;8_8hS>4kut+_14_C@?I)G zQTMTfWXpXk?I>S^?aDZ3?3Qu-cnbHmbRcolg!^b3qVjuc9=(R+rCKAlW9hHEk9iXg z$F{%bMH_7Hkvrh{I?f~HA90+C{Z7oIJO|sSv0q#J`3}sZaD==F+naFct$ofU#edN} zTZEm{zNij!Cta_3wh6Xx;p*J--(WkR`gud{(|)Adn?pP~Y<43an<@S%Hg|G9;acUV zQ*d4}S4|}+e#`i}EwG)X>(g1~Z^r3Av}g5q;lp4YPE!1I9dCd6bNZKmW&Y-`JF)GkczYZ?cH7{15{|DizIJV#uES|M>nF6rahTRy9hL8n^V_<9{jnP?zpV1@ zarzG1U2$lp{;ZGVW$Mr0%ojTco9A%YP~&kpj;|5-R^^Aw?U;|1-(OMYx4)hFoleH7 zGxOio#r9CvPiS}!>vu!eSKjsP;(W8#-{+qAMC>Qxa3(en6r} zc_a?c>HOEgt~ujlJ|{n>jKgKvUCZ_L6UeXbIk_VaR@vs4&Ax`n1=E-zjOU&w=eT2^}%K+4u4^LB{mJH?{>jX*4?6Tq>P~F$myc0+b*i)Bx2sQiS+V>09vY{b) zw)Xu(?fZw?`ny*Cj)GM`(p}mcZu|{%@P*N34%&K^oV%hQuT}o(%ic1s#G+-s=L2y1 zhWQ-_K2zfUGwgqSy4W?t=>=??YdzeF{S~Tz{ZplUn1XY2Y`4?;`fO~8o5!>sFT>$k z#pgUx;&B3wZ{g5iUgJsnvndW=6HlW!UKp$Ly>WU7d-XSLO8qpE>l+$ja|m|7QQq99 z`O^=_RdL=1+v%F`)5mbWi!~oTwoNG?64xR(QvF%FHvLq;daYj5{QsM{X^i7Cm0$8p zOINvuVJ41!aQp)MW99WYzPae{4^d}6$`2fzx|AoVkI=>5_W&XaX@wf!X{y&uZxepGrae7$s#W+sG`6-p3 zClAJ9jS2MkPHdZCtMaC|e21=IAM6iWQjTx#!)c`a33lUgT!Q0m*suItIiCGZ*I$2^ z+AY`hcf{jmluwsoKZyC{?pFNhrRDf>n0yxw*W-pZu2hc)qls&ckV<+yR@ZFA#5|e0ai(^k-!pr()lR_QTKe z20C8Tmr8l_4gK{UaoCD@KH%jN&s}xAk@9aiEyZqY?JtjfrIfcTc3vf zr*3j*oOi%!0rrQf{yCHA|7dJJ!S))R&pK~#{JXJp*k6V1HQ4T~^ZgFHaXQ{LZak|D<;BmgBj1WbI}A`aCbi z<&<{~cz%nY(I5AvK1Vo7{lAgtzx+PuoA==PFxC6_*2CrtJ>Pz!^|@T*dlPmIi06ZG+)?kFer0_} zyA36?Dof5?^WGz z%9m&AdL4z`ZK{7Tj+@|ciQ;!^{5HaNFyj-ulFwZWjaOgYAKt-XTkfy^aoRK2X}|C* z$Ir{yUxX%d!uA8o=iZd}uXB9!4t9^yzI#?4#CUuCodWwT*Qfe-1pF_0U(lNNQ*Rs| z!Er;J8_}LQf%TUA;h4zBwfq0wD&J34{k?5wr@zWtG_O$BoU^pp?p3*VE1n}N4;|I7 zy!oHR^d2z&{l|aHRtou*mKCS^8kg@CKTYE}8Rw?hKaSHb*xn~k!frS=dY|rY!(ln` z&=q^b@$AVszezmqirqwP_WZr{FW%S=aro-{}8PU8l>ii#YCs!xuO$|Fx7i zC+j-ji&J0CyT5)ZaqDn60EdTlyp?_~aW_}*mCpRBIL~4pn2}tIcoYuRYtu+^IV@!! z`1>dyTFKwzH29yEu5x`+7ut87uzeWYqj3BV2fYW+&D6il+z8Wfb*`koI==;)JJ)jl zLGPu~HjG!SL;GfXjsG8dkM#r&de3Ek!gh)J*PZKU4%0kemHCxlAf7*yv-;ng`I+~k zyjzL69V6pgo$K}ou16Y0`E;`8Yt8)Ly|PZK&)e0{s@*@F>%X66ulVeG#X8xl@=ss? 
z#;Tul>vM3^j(*su=e0|fviTXVWBLjErn;{6zal<^^RR2l{cv2W@<-!vE4CZp++Xot zdOtWx-T}vrX|H-N=2&dkA|ATS?d5*h%vAlh^GpBio7kNz4^zB94lm17uog3BfW2n9Ud;Fj8Uva~}Jv~*8Ecf{{ww9 z0~Z$COL(qyyU=&P4$pPr9vsit@pNwSH=g6{&ML3>4DR_w9KR0FZRrl|ug397Y`)NQ z+eXw+U$PIie-IBxu@4Q`tAE>RAKnzZZ;7W(71!s6@p5eRp2j|`^6P6KESuGMPRutl z9(e-iZ<=HOwVvZ=e9d^>jQu0nJdQ(uoYvKI`fWIWDSwFVri^EPMV^h#v)CNM_}U+^ zAFb=xivFa=dTzc|*RK)J$@Y82H`H_Qr;6XJeg4(*V#e1Wgx!XEZaG43rRTnlaNb<` z{yp{2<1}3TUon>}Q#o#r^AXtozMza(evS70k2pTc@zO@B{|Eg^YpMNxahyTCG_2--1%IB4POjz&JG8b+8(~HYH z{}p+*Q{|t&YQO&=)qEaM`*}g_=Lxl+H`IO}QTusC?dKV_pLfI$+SAvOjI%!mhdT0O zXO(!mS?gS%_gIa)xr}%|h;3L1`Fw1~$VVE zN8r2$HWO6;?FAg~UF^HbALG~%`&rn0IKPxPpQ!#=?BB%B<2+vVH&OgyobUXq)X%bC zJ`evv+#)`LiPe{WR_sD8d2);^rMc@}FqH9iRUs`P!uee1Bw zKYdlp2G4cM?;cjl`l;F<9)ZIVTnE<$n=f&0CV!!E|KZS5-)+day6v#le%+jXNQv8< zRKAb=I_<^1aoAhFtR;QFRL9#w{vNw^akxj=w{)%(2oO2j`F#T(eV^4Xm z=0VU{dByu+8;ILIvFV9pYnA_x&gVAl`lvq%hoi8YqWVkdzd0WJ2|AzcaM~QFH?iG` z>tP$y-?TQ46NrbMwazZbt`V+Y`#6$y>UJO=|6v{bUN}Fhb>30+k5~L#arsW+!4o3m6vrq_o?o?PZ3Xt z;;>Nd4Q4Hd=9-UMoB8_|%+C*q+a=g_rM^)-97lgD-`-iun?tc#uK8bw@iE`ZvvAsx z^7&xK-+qnV%{X3-eSNNHYQ=g@1GxVAMy>x_aOL-(mE-4j%&*$~v3F&SEnKDYpURil z9y4gqmX@}o@8c_LXGI>`v=l4W*t@L%pb=|Y_Pha^S%G!N2->P*dYM*|M+UmjlbDynR+Rl1k&XeD6BMo8WX1Hm#Ze>7(W4 zI@@EFA5X(+rq;_K9QRfIJFpo_|GZQE(OlQKXV#C;DOA5eb&RP)8*cs=u}I=Az2nr}A~ul$@`nICyG+Rsz5 zIaue{n)&K(7UYb92R9 zTd~fDulOJN3a=POI?t1F+JJeH4#vI_^CsQVhjHwJ!-C$$cBbNOae9w=7p~}4;-Me&(tXsE z`pvZNe!+Ht=H+#YKaF!UoUX%uRqS+p|6q^OUaIb6F2?>jmABX}!EU$irMziF|Lj9o z)4!L9hi$IHZ);vmy|Oq?!1faC*JQlhj#rd;_MA^z(6!hpuY^P8zQp~~F0|KIdGl4A zzfzuk7~6Aoyj7`h*3wa?XC0O zQ}Nw(en-pi(7rzb=iwYb^k!Y>CO930?Y%mmi{`a-l_?$vW3vkO-6`+R;(defB(E=T zuJ-ESe7x%4io-fM?1`&uEB2XFju+;!*32*cNFRyaAXzc~iG^SIvmR~@f4$4~cY{Ho`7t=wQ%Ilenv<9W%)#c>+8 zOJ)?CA+#4($MH&zSIxCyq54;kc-Zio(~~jAzM%56h=)ltOMBrImEYr&;EeXdB# zS9>$wac8*`>pM2b_HV8KiP&}4eQs6d!`{U6yEv|^>)TxUwLK0OD!)#_{(Kw`q&;&6 z`P}@b`_*RT`}_fR!{yyoekeAF>iAb<_ayD7#k7~+;r!jHI6k29`IzII-i%+GrtvzK z`5VrWXEOeN5$B&9Xui$FaUI1Q;?Pj>wYYw9W7ea+6o>w-pV%LpdRkwNt7iR$?y7&D z=5H&FZ#TIOP9vCK;dRAF$=7mz{to$moNlFlx*NL%953I5^Re{Dk5K)2*nO??7tsH3 z54PuWe)$a5@1y*48&2zyKmAiGzn${uI2^Xcc5AMWTT}V%D(p|g?rm(pr@iU(Kd``)^%-g~!ZT?hS~lugPLuM&4J{{If&%xmdZ^g-pl9511K-eX=%R|)67*tgMj zcmtcKlt(|t;U%tX{0FBWx$bdw-rKvAG!OJ1-<-`nGVO_nG0c0j&N=N;<+x!(t&_*G zeM;+k{SGBw9ow|gnf3@5QDLHs|R0cVT-jwzuK9RP(wwcFUMo z{vfWc`xU3=I4{sV)3wiwbl>=Zc-V^T6CXIg^fxIF+cwxb;;t1Ao_(y@MCW&|yb&&Q zsWMKr^Jl-x|MvSTmH$+}s-IcePt^Abyf2D7RQ9kd=Hce0SozQYd|p-adDg$5FI7M9 zs`)(3)_h)OYd%l=pU>NBK98&Yyw25po>%*MU+w3CwVxOM&*zCXpEuTi9_ea6ue2BI zea?35_tW~gj9q2K;xfk4Z${i}Z1c}N52WXb+mXsQFJjY-d6Czky{vg<^m%LL_kXl> zmG-SZPpw|Nu)`V5n}x({uh;&@tXBOw|9soxQr7K0zO)rCdZsvRP{~~@u6z9z|KZCH zT1dNhv%XDBdAaiN@^5IX(Y%(fQY8<=W@Vh3$-h%RfAV!HZ?-2MFT?&7oafFj@%%it z^JV4fd-UCBIebmHO^U z@>94Pha<5c!tv4&@?ZLp_T70n_Qt6`#ur7;u)Le^e2zSZh+ih@hKcH?S_4|f752rUeh_{eA8AqU8;P4*heLv=Q3Vy z^oRIh-H(<}r~V(hUN__X1)RZz*@@dc-Af5g*O*<1`%i#%5E>r+w&ex|sIO zc^aQp=udh@;=H>`K^=f3uu#xSQu<_sko318fhLpX9kVA0h99(;S@7)pK!w+Vg*ZSjIn$ z#_6N!_)HvUeNdd&q`p~iTCv%6QaQfu__8S=bDvzwrs|I&psX9~>9( zjyBe(Ju{r~G85F^5sYu>jKk(QEm!{stNb8r+u+dbWya%njsM3l7Q407-lH!R`?Y!Q z^JhL!`5l=L>5YlSVQp;g!sblPhtse>ThEOXo-5_;H1%)(1lk`jx5vRyKHv9jiN`Bx zFSo(@1?I0GJHEutw#N#oc`w*b^SNO_G#uv*hlBH9rG!lqyE1A7V|+r*Bj1#6CXo7 zjhs|$p67hR4~h75@|&;XJt&_~$L=)s?*<$$qCf61mH$TLJ^K~fKb-56Z^fxD*U!ED za*5l7;~{cy=0kSae8GH4Z@g5>r{T<}bQ+F_yi~47KIX*|&-*g}(t$5f{!E+)JYVcI zKjY|$#o-do*S&CFs`=jOIpW8&KEh5o-ho}`2_wk{q*8sjod9vd|iNi@bKsa;Z$E^bF|*}Bk>GD&F$ZMSmkHh7w-y#KR5-!rCj zp6Ar_Icx2;*Is+A_1pX3|NJvuna@mx{Rr574hQ*@kD_0tD(SxnyMFS2k^0?g)*Cm7 
z2Pi(v#arNTv-;s+;_KQ-ejxF;A7MX?)&4X=Za;y;FgWbae&!?eywd)M<*<22`EnA^ z1GhW*ls3U(9rAoA&olSE^aqQNSN^@K=UuJxxiiljKl_!kKe0W}@AQXx@ON;&L_C7$ zeclfaYxR4ZyTSPY&Ie%z><$*Mpq^q|*e_U6<4ooJcMfcq!M-Qw55ItVOuae3q}!-h zJBss<8v~m~oR@r~*NQ#!FV0hbDe_?6Dftqb!116Jh0Sl7FRp>pfO%zn{AOj5$8E&R z9nbkOo}&7@tXu_@igZ%J{&>*g#??!;rv`C9t*{{e^@xZ44YQmm%dR_!lu*5#h%~5dfgka&5B3cPl`MaBHr#zwh&iQuc?0g=tbDi_@5$dKG1sZ;(V5t!+zWHqHnGc z_tbf6EAfcCuPE{`1hzWg_}@vsb!CyqJBep%^lssXuSZl@URPaI|9aNk{v~$yT6~X3 zUPvtK-h;f8F{V@HTVD;|ubk#ccM0>vS>lV~5MVnGzwBT*?E$;r!?6$RvbVr_4eO2* zB>#fABynC0+qaOrX0SV`FKfAmbC%m6d#j0ao-ZDubMXw=-wOLNu(=m@H;6aFW&j*( zHU20#&4TM==h|?t^WBh#Wt_{smwtEH*1+aZoa5an*dE6@(B2KZPjwD_6!w1-zoPM1 zz<#9G|EA*d6*dDeD*A4<@_KB) z!tr|8pGyEkRm3!`X>i>^aC8Ohx6ye)BYZ| z3)zo!IUKf-AHM$cvc`~qu8r1nhw7}s`{z@G_t!Uk|9!*v=Qn)+e#7_oH>{^)Ds^d8 zm#%4b?^LO~Je{?<4xI|8t1GotSC{*%C@X*cTlSjj;2m_$BGrx9jSvs|O|fNflfF2g z0_SIt=ab?19PCypPhW!LDCOzV|ATL@UOWJvT@Gogwb3et7_#Yhl|4PTSbm{0;t@Vd_ga$o~`Ht#Orj+Wo|J zv954@f%S%yVe^FeKJ~FHV80Uk<~!o=kB8GB*lxuiQ?3134>#PuV=M2IKSX(u&r%Nv zyjGBAo>yutud1xQE)Qxd-}-V*dQ*?BiIZ;zoB6~s><-)e;q+5Dt6!$w;Pf-(uCeqR z!$Plr=u z@ffjk$*$0TKBL@v9?lzKI~evSF+R_rPV;Q>I-X~_-v4%b-Zdw_d7=98ad3J-edGq% zjTP^u{apk5--_45@o4fRj)zSn^3ybfLpAK5qR)hD#LucPEr#7^dL5->pR(Wnzw|tO z0d}f2yBD<=ry}?5;H2kqeQlasm@oevxxE5^Y_)2)De*}9eK5ZO4$WnMC-uFb!sbKb z9shwd^rT6qq-E1s-zl^=!c&xL=0 z{VwMf4(G%1NH|~mMA7$8qn}@zRXDyY{(ffRFb;k5=NZWFIKS9)Z$Aci?OE8L5BqQ7 zxEZ#G_d@<5oZ9y(oLW7K{p&6)YumggR`F^7G<+2tzo{*3@916pv-5TDo~ZcNaxYI; zT~OrdO75*;2K!<9ac}h-+0SIS2dD1rcgZbNIkzk$pW;*K$Cu!=C+sG`{y6OWP2{HE zjz9JY*o~f3{H@R3bn&xrSLx4VJ?0+R4u|7m%I5*%%VGEOGi7`_4GvE$KDWSm4f)-$ z9-J||)}M#-FXzxxmX9d5!hH`EHmxeY*45Sfm2Z9d{T}~OR{2?Ya(ZDmn|1i{j}?yX z;Mh_8j{N@TqeUM2ipRia0PBb+!0r;(;Vy%HmDbe)dwKFxHLfy$dPerAuuj`e{=7;Z zhf84VU^@(sqsS|(&s7cmm3L11SHt#6*}q2e>oxx*IBb`{+h8*UwjaXrWcfcrJW%@0 z;qkc!|vLtMc-D#=1|xyq)vT%*ms5VdD8DeUAyDq z{G$5OJ{rH9`ce}(by45=atizVAbrJ8fL%8@T?E_iuLX=*+O8fx5H<^7 zzY82T!LGyPq95jK{5KC5&dcC57!GUH2e(c_{yZF}!tn*!UpcYJV@t;SPU4Pmo*I#V zrTMF1(-TfBCKS0j2DSs?cmbUEfW!ZZ=Zr7<_UwmCzS{P%>piisuNL1J3%fzEUp1j{ z>?VGHeBrR}p<*x1e+XU$+Z)F*{%qJyc#!ea#ufc&#v*?RPIui0p8PddyhSMRFRv< zq(AzO!fCsDQ)@V0jQ#wNk;s?8u_bc99X8`&dkX&M6&gQH>v;`MU%+mx^g9tBezv{jAF7n5{fIKaOy<>cSPrMWM z&2r`2-(bH{@_BH0nEdrs(%+zb-W|@J$^URMY=1(%xJ$JkC&G3z9AAOMQrPvR{@gFv zU-O3ePBHkaiu2%nlL-q0tK8HHpE`A2KH;C_?Nxgjh0RFuS z`x{{U5gb?Gzd36b_2I-{!~Q|)(d;oB`5CIuX0SPo`iz}nf9XBskJ#-AXMHYu{$BNY z3ieG;I2{7Vlc_IT4cm?6bDj&Q>)^_7eX_nwl>duh`<>*T&&_sT^}0d-K0i_UqR)xX z7peYl{rfuUN#puWxlvK>~6%KxeN{)cpgWwzj!kK1nj5xcz*j{usxge zfxSikY=+H8a9pnQPao{tD$YyhRoJwJ?PlKpVFz*VGDCU)hrJH=f7km@4ufM`-v5!C z!C~~X7bH`8-C(8UFT+{)HlN_2duSQ~yUEkb`usn!XYYBuaF{Rq>)^Noc5OIMI^7LS z)vTgle^;=>;j}k*hrARvKVv@I<_YX4**_P~Ggx05$ay`zPyPAJVSkM3aWHIa74JVX zJ`ROL2k}*KuH`%*YT)!M*smg={0#ZqLHV_9Ch?d?ewzDW`*Y>*-Hdnph=*uB>YwQk z_?N|%eJ=J=fAqsi+*jhwu)j$6G}V(|D?Uj2zve!aS5mL8M%)Wd|3Kemo)zPeaeVrWN+Pur2>&aja@chEgzGydDd;jje`HTdKHN<7j@>SJQte^QyR z;eORre5n6_yYu($h}|Of#8cZA_WM?RsoVEmE8qHxlI0fo=}tp8X>8goTd!lQ-gZ0` zzkTcdiFHmppFz6eDB@u@ u|A%fx9w+R`;UU=Hir@9N^OKWmTt(lHK46Er>wSrn@WpCAb13>d%>VzCG>ZNJ literal 0 HcmV?d00001 diff --git a/tests/data/coutwildrnp.shx b/tests/data/coutwildrnp.shx new file mode 100644 index 0000000000000000000000000000000000000000..7eae7002fa8fbb935b0f012c4aec5b9a918ce079 GIT binary patch literal 636 zcmaiwPe@c@6vcngZu!aCkF z>;Djm?q?h3oL}KPE$k-!#Wwe5p=jQ~JxhN?>sOh?|GMmc=f7*Tj^KSfZwHpU)H#2p z#yqX?DQ7MJ*}U<)u0Dcl_pJT0zX4y&6G&}*bWRhLe_9mb>r?yUH~m$%Hro^bmNj|U HlBaM88B%7g literal 0 HcmV?d00001 diff --git a/tests/test_fiona.py b/tests/test_fiona.py new file mode 100644 index 00000000..51934a72 --- /dev/null +++ 
b/tests/test_fiona.py @@ -0,0 +1,10 @@ +import unittest + +import fiona +import pandas as pd + +class TestFiona(unittest.TestCase): + def test_read(self): + with fiona.open("/input/tests/data/coutwildrnp.shp") as source: + self.assertEqual(67, len(source)) + From 9b813778b74d6b55d6565a6d6be47a9d044115b9 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 11 Oct 2018 04:56:51 +0000 Subject: [PATCH 085/251] add tests for geopandas --- tests/test_geopandas.py | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 tests/test_geopandas.py diff --git a/tests/test_geopandas.py b/tests/test_geopandas.py new file mode 100644 index 00000000..10603fa0 --- /dev/null +++ b/tests/test_geopandas.py @@ -0,0 +1,8 @@ +import unittest + +import geopandas + +class TestGeopandas(unittest.TestCase): + def test_read(self): + df = geopandas.read_file(geopandas.datasets.get_path('nybb')) + self.assertTrue(df.size > 1) From 1c5c6542b39ff3636c28645313c79497229bfff4 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 11 Oct 2018 04:57:07 +0000 Subject: [PATCH 086/251] add tests for geoviews --- tests/test_geoviews.py | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 tests/test_geoviews.py diff --git a/tests/test_geoviews.py b/tests/test_geoviews.py new file mode 100644 index 00000000..36ce8882 --- /dev/null +++ b/tests/test_geoviews.py @@ -0,0 +1,11 @@ +import unittest + +import geoviews.feature as gf + +from cartopy import crs + +class TestGeoviews(unittest.TestCase): + def test_viz(self): + (gf.ocean + gf.land + gf.ocean * gf.land * gf.coastline * gf.borders).options( + 'Feature', projection=crs.Geostationary(), global_extent=True, height=325 + ).cols(3) From 7ec3690bb337e46fd48e8c32fcbd7d2a01f6d682 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 11 Oct 2018 04:57:35 +0000 Subject: [PATCH 087/251] add tests for holoviews --- tests/test_holoviews.py | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 tests/test_holoviews.py diff --git a/tests/test_holoviews.py b/tests/test_holoviews.py new file mode 100644 index 00000000..88671590 --- /dev/null +++ b/tests/test_holoviews.py @@ -0,0 +1,10 @@ +import unittest + +import holoviews as hv + +class TestHoloviews(unittest.TestCase): + def test_curve(self): + xs = range(-10,11) + ys = [100-x**2 for x in xs] + + hv.Curve((xs, ys)) From 71a80e877eddcffac8b3402360be96d556ea2c37 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 11 Oct 2018 04:57:57 +0000 Subject: [PATCH 088/251] reduce log spamming for fbprophet tests --- tests/test_fbprophet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_fbprophet.py b/tests/test_fbprophet.py index f8badbd6..69caa984 100644 --- a/tests/test_fbprophet.py +++ b/tests/test_fbprophet.py @@ -12,5 +12,5 @@ def test_fit(self): 'y': np.array([38.23, 21.25]) }) - forecaster = Prophet() + forecaster = Prophet(mcmc_samples=1) forecaster.fit(train) From b8ef08e46b69bf7d9e375b865abbfefab71f63d2 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 11 Oct 2018 04:58:18 +0000 Subject: [PATCH 089/251] install holoviews and geoviews with pip --- Dockerfile | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index 2d010414..7f4b40e7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -274,7 +274,6 @@ RUN pip install fancyimpute && \ pip install descartes && \ pip install geojson && \ pip install pysal && \ - #conda install -y gdal && \ pip install pyflux && \ pip install terminalplot && \ pip install 
raccoon && \ @@ -292,11 +291,8 @@ RUN pip install fancyimpute && \ pip install sklearn-pandas && \ pip install stemming && \ pip install fbprophet && \ - conda install -y -c conda-forge -c ioam holoviews geoviews && \ - #Temp fix: After installing holoviews and geoviews, deps for fiona and geopandas get really messed up. This is a very unelegant fix. - conda uninstall -y fiona geopandas && \ - pip uninstall -y fiona geopandas && \ - apt-get install -y libgdal1-dev && GDAL_CONFIG=/usr/bin/gdal-config pip install fiona && pip install geopandas && \ + pip install holoviews && \ + pip install geoviews && \ pip install hypertools && \ # Nxviz has been causing an installation issue by trying unsuccessfully to remove setuptools. #pip install nxviz && \ From 77bc7cf171fe7deec1f10f2d8ecf9d87a2f18403 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Thu, 11 Oct 2018 19:55:32 +0000 Subject: [PATCH 090/251] install feather-format and add tests --- Dockerfile | 4 ++++ tests/data/feather-0_3_1.feather | Bin 0 -> 672 bytes tests/test_pandas.py | 7 ++++++- 3 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 tests/data/feather-0_3_1.feather diff --git a/Dockerfile b/Dockerfile index 7f4b40e7..b093a493 100644 --- a/Dockerfile +++ b/Dockerfile @@ -425,6 +425,10 @@ RUN pip install bcolz && \ pip install wcwidth && \ pip install webencodings && \ pip install widgetsnbextension && \ + # Latest version of pyarrow conflicts with pandas + # https://github.com/pandas-dev/pandas/issues/23053 + pip install pyarrow==0.10.0 && \ + pip install feather-format && \ cd /usr/local/src && git clone --depth=1 https://github.com/fastai/fastai && \ cd fastai && python setup.py install && \ # clean up pip cache diff --git a/tests/data/feather-0_3_1.feather b/tests/data/feather-0_3_1.feather new file mode 100644 index 0000000000000000000000000000000000000000..5a2c7b3dcc684b3676a94287702ea01aefa057af GIT binary patch literal 672 zcmZvaF;2r!42GRH6%j;Hh9YHPs1O4KL)q8}5_IMO2&n~CAXRkf$jr*b0gw=fKs^*E zAbkIOF+@;D_Otzd|8|^AF3ye(Nn|8HPIP-QYtvcl*z0)x_%bv*zkYri?8fZpX?t6H zxz_sISSiy5(TliUEtb!#> zF9YhAU;SJ@TFyNZ>fxdp=8NTY#aTB^!?9gsBtEC}yvO@q#oZA)Lc3hT-$In}#Q8{= z_3vU`Iu7A~UhS%)Ca0=oYemE*>a##cXt$5{aKpsvqrCR3eU%_L>}5wh5`Q0$OWWUs wPyBryFa9V6J>vJ&m(`r=t6g;GlRsN7H9B~0T=zM4NF3)??l1odi3$JaHx?%;)c^nh literal 0 HcmV?d00001 diff --git a/tests/test_pandas.py b/tests/test_pandas.py index f9163c57..abf70398 100644 --- a/tests/test_pandas.py +++ b/tests/test_pandas.py @@ -6,4 +6,9 @@ class TestPandas(unittest.TestCase): def test_read_csv(self): data = pd.read_csv("/input/tests/data/train.csv") - self.assertEqual(2, len(data.shape)) + self.assertEqual(14915, data.size) + + def test_read_feather(self): + data = pd.read_feather("/input/tests/data/feather-0_3_1.feather") + + self.assertEqual(10, data.size) From 7b7dfd6ee8e58e8371953e9c43dc7ccd1aa8d5fd Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Sat, 13 Oct 2018 01:02:20 +0000 Subject: [PATCH 091/251] Expand fastai tests --- tests/test_fastai.py | 30 ++++++++++++++++++++++++++++++ tests/test_pandas.py | 2 +- 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/tests/test_fastai.py b/tests/test_fastai.py index 275d2776..7d28b347 100644 --- a/tests/test_fastai.py +++ b/tests/test_fastai.py @@ -1,9 +1,39 @@ import unittest +import fastai +import pandas as pd +import torch + +from fastai.docs import * +from fastai.tabular import * from fastai.core import partition +from fastai.torch_core import tensor class TestFastAI(unittest.TestCase): def test_partition(self): result = 
partition([1,2,3,4,5], 2) self.assertEqual(3, len(result)) + + def test_has_version(self): + self.assertGreater(len(fastai.__version__), 1) + + # based on https://github.com/fastai/fastai/blob/master/tests/test_torch_core.py#L17 + def test_torch_tensor(self): + a = tensor([1, 2, 3]) + b = torch.tensor([1, 2, 3]) + + self.assertTrue(torch.all(a == b)) + + def test_tabular(self): + df = pd.read_csv("/input/tests/data/train.csv") + + train_df, valid_df = df[:-5].copy(),df[-5:].copy() + dep_var = "label" + cont_names = [] + for i in range(784): + cont_names.append("pixel" + str(i)) + + data = tabular_data_from_df("", train_df, valid_df, dep_var, cont_names=cont_names, cat_names=[]) + learn = get_tabular_learner(data, layers=[200, 100]) + learn.fit(epochs=1) diff --git a/tests/test_pandas.py b/tests/test_pandas.py index abf70398..f533a043 100644 --- a/tests/test_pandas.py +++ b/tests/test_pandas.py @@ -6,7 +6,7 @@ class TestPandas(unittest.TestCase): def test_read_csv(self): data = pd.read_csv("/input/tests/data/train.csv") - self.assertEqual(14915, data.size) + self.assertEqual(19, len(data.index)) def test_read_feather(self): data = pd.read_feather("/input/tests/data/feather-0_3_1.feather") From d74125053ba6218aed13cf1cbcb8b9efc61e42b6 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Sat, 13 Oct 2018 19:17:51 +0000 Subject: [PATCH 092/251] Pin fastai to 0.7.0 --- Dockerfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index b093a493..a485c1d9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -429,8 +429,7 @@ RUN pip install bcolz && \ # https://github.com/pandas-dev/pandas/issues/23053 pip install pyarrow==0.10.0 && \ pip install feather-format && \ - cd /usr/local/src && git clone --depth=1 https://github.com/fastai/fastai && \ - cd fastai && python setup.py install && \ + pip install fastai=0.7.0 && \ # clean up pip cache rm -rf /root/.cache/pip/* && \ cd && rm -rf /usr/local/src/* From c83eaeeabf742e520569c1faa94fb5f84129d26d Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Sun, 14 Oct 2018 12:00:05 -0700 Subject: [PATCH 093/251] fix pip command typo --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index a485c1d9..641cc92d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -429,7 +429,7 @@ RUN pip install bcolz && \ # https://github.com/pandas-dev/pandas/issues/23053 pip install pyarrow==0.10.0 && \ pip install feather-format && \ - pip install fastai=0.7.0 && \ + pip install fastai==0.7.0 && \ # clean up pip cache rm -rf /root/.cache/pip/* && \ cd && rm -rf /usr/local/src/* From ecd64a15e9866229dfba2b222dc05b41351c7683 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 15 Oct 2018 03:49:46 +0000 Subject: [PATCH 094/251] simplify fastai tests imports --- tests/test_fastai.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_fastai.py b/tests/test_fastai.py index 7d28b347..31236e98 100644 --- a/tests/test_fastai.py +++ b/tests/test_fastai.py @@ -4,8 +4,7 @@ import pandas as pd import torch -from fastai.docs import * -from fastai.tabular import * +from fastai.tabular import tabular_data_from_df, get_tabular_learner from fastai.core import partition from fastai.torch_core import tensor From 79e96f06edab69109b40d1a400d5efc2a6058698 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 15 Oct 2018 17:14:06 +0000 Subject: [PATCH 095/251] Install fastai from source and pinned version --- Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) 
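
Note: a minimal way to verify the pinned source install once the image builds —
this is a sketch for verification, not part of the patch itself:

    pip show fastai             # should report the version built from the v0.7.2 tag
    pip freeze | grep -i fastai # the requirement pip recorded for the git install
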
diff --git a/Dockerfile b/Dockerfile index 641cc92d..f7b0e8ef 100644 --- a/Dockerfile +++ b/Dockerfile @@ -429,7 +429,9 @@ RUN pip install bcolz && \ # https://github.com/pandas-dev/pandas/issues/23053 pip install pyarrow==0.10.0 && \ pip install feather-format && \ - pip install fastai==0.7.0 && \ + # This version is not available in pypi + # Also, installing from source to prevent static TLS (thread local storage) issue + pip install git+git://github.com/fastai/fastai.git@v0.7.2 && \ # clean up pip cache rm -rf /root/.cache/pip/* && \ cd && rm -rf /usr/local/src/* From bcda2d4f154a1498b0c53e3257e6e35c7cc969b1 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 15 Oct 2018 18:48:47 +0000 Subject: [PATCH 096/251] Prevent fastai install from downgrading pytorch --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index f7b0e8ef..c549ced6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -429,9 +429,9 @@ RUN pip install bcolz && \ # https://github.com/pandas-dev/pandas/issues/23053 pip install pyarrow==0.10.0 && \ pip install feather-format && \ - # This version is not available in pypi - # Also, installing from source to prevent static TLS (thread local storage) issue - pip install git+git://github.com/fastai/fastai.git@v0.7.2 && \ + # Don't install dependencies for fastai because it requires pytorch<0.4. + # which downgrades pytorch. fastai does work with pytorch 0.4. + pip install fastai==0.7.0 --no-deps && \ # clean up pip cache rm -rf /root/.cache/pip/* && \ cd && rm -rf /usr/local/src/* From 8a430bb21e2e4928ec16ef370f7e4b4019b24ff4 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 15 Oct 2018 20:39:32 +0000 Subject: [PATCH 097/251] Remove fastai tests using 1.x api --- Dockerfile | 1 + tests/test_fastai.py | 45 ++++++++++++++++++++++++-------------------- 2 files changed, 26 insertions(+), 20 deletions(-) diff --git a/Dockerfile b/Dockerfile index c549ced6..500441ab 100644 --- a/Dockerfile +++ b/Dockerfile @@ -432,6 +432,7 @@ RUN pip install bcolz && \ # Don't install dependencies for fastai because it requires pytorch<0.4. # which downgrades pytorch. fastai does work with pytorch 0.4. 
pip install fastai==0.7.0 --no-deps && \ + pip install torchtext && \ # clean up pip cache rm -rf /root/.cache/pip/* && \ cd && rm -rf /usr/local/src/* diff --git a/tests/test_fastai.py b/tests/test_fastai.py index 31236e98..94223fe3 100644 --- a/tests/test_fastai.py +++ b/tests/test_fastai.py @@ -4,9 +4,8 @@ import pandas as pd import torch -from fastai.tabular import tabular_data_from_df, get_tabular_learner from fastai.core import partition -from fastai.torch_core import tensor +from fastai.layer_optimizer import LayerOptimizer class TestFastAI(unittest.TestCase): def test_partition(self): @@ -14,25 +13,31 @@ def test_partition(self): self.assertEqual(3, len(result)) - def test_has_version(self): - self.assertGreater(len(fastai.__version__), 1) - - # based on https://github.com/fastai/fastai/blob/master/tests/test_torch_core.py#L17 - def test_torch_tensor(self): - a = tensor([1, 2, 3]) - b = torch.tensor([1, 2, 3]) + # based on https://github.com/fastai/fastai/blob/0.7.0/tests/test_layer_optimizer.py + def test_layer_optimizer(self): + lo = LayerOptimizer(FakeOpt, fastai_params_('A', 'B', 'C'), 1e-2, 1e-4) + fast_check_optimizer_(lo.opt, [(nm, 1e-2, 1e-4) for nm in 'ABC']) - self.assertTrue(torch.all(a == b)) - def test_tabular(self): - df = pd.read_csv("/input/tests/data/train.csv") + +class Par(object): + def __init__(self, x, grad=True): + self.x = x + self.requires_grad = grad + def parameters(self): return [self] - train_df, valid_df = df[:-5].copy(),df[-5:].copy() - dep_var = "label" - cont_names = [] - for i in range(784): - cont_names.append("pixel" + str(i)) + +class FakeOpt(object): + def __init__(self, params): self.param_groups = params + + +def fastai_params_(*names): return [Par(nm) for nm in names] + +def fast_check_optimizer_(opt, expected): + actual = opt.param_groups + assert len(actual) == len(expected) + for (a, e) in zip(actual, expected): fastai_check_param_(a, *e) + +def fastai_check_param_(par, nm, lr, wd): + assert par['params'][0].x == nm + assert par['lr'] == lr + assert par['weight_decay'] == wd From 2aa9b9c1b85630b64cb787e51d1bca7b52188628 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 15 Oct 2018 15:25:23 -0700 Subject: [PATCH 098/251] Prune images previously built to prevent permanent gpu worker from filling up disk --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 702898fb..c8ab54da 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -65,7 +65,7 @@ pipeline { slackSend color: 'none', message: "*<${env.BUILD_URL}console|${JOB_NAME} docker build>* ${GIT_COMMIT_SUMMARY}", channel: env.SLACK_CHANNEL sh '''#!/bin/bash set -exo pipefail - docker image prune -a # remove previously built image to prevent disk from filling up + docker image prune -a -f # remove previously built image to prevent disk from filling up ./build --gpu | ts ''' } From 6eb31c1581767691aa766dc3633d118ee3e9e9b2 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 15 Oct 2018 15:58:09 -0700 Subject: [PATCH 099/251] Update README to mention that GCR is the source of truth for our docker images.
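For context on [PATCH 098] above: `docker image prune` is interactive by default, so without `-f`/`--force` it stops at a y/N confirmation prompt and a non-interactive Jenkins shell step aborts. A hedged sketch of the CI-safe form — the trailing `docker system df` is an illustrative extra, not part of the patch:

```
set -exo pipefail
docker image prune -a -f   # -a removes all unreferenced images, -f skips the y/N prompt
docker system df           # optional: report how much disk space was reclaimed
```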
--- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 90faac2e..655c3977 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ ![example script](http://i.imgur.com/yrWycNA.png) -This is the Dockerfile (etc.) used for building the image that runs python scripts on Kaggle. [Here's](https://registry.hub.docker.com/u/kaggle/python/) the Docker image on Dockerhub. +This is the Dockerfile (etc.) used for building the image that runs python scripts on Kaggle. [Here's](https://https://gcr.io/kaggle-images/python) the Docker image on Google Container Registry. ## Getting started @@ -17,7 +17,7 @@ We can merge your request quickly if you check that it builds correctly. Here's Start by running this image on your system: ``` -me@my-computer:/home$ docker run --rm -it kaggle/python +me@my-computer:/home$ docker run --rm -it gcr.io/kaggle-images/python root@d72b81a003e1:/# ``` From 549f89ac7f3a15070de428199b5619eb53eac485 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 19 Oct 2018 17:20:35 -0400 Subject: [PATCH 100/251] Prevent upgrade of matplotlib plotnine 0.5 is depending on matplotlib >= 3.0 which is not compatible with basemap. --- Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 500441ab..07ed960a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -311,7 +311,9 @@ RUN pip install fancyimpute && \ pip install folium && \ pip install scikit-plot && \ pip install dipy && \ - pip install plotnine && \ + # plotnine 0.5 is depending on matplotlib >= 3.0 which is not compatible with basemap. + # once basemap support matplotlib, we can unpin this package. + pip install plotnine==0.4.0 && \ pip install git+https://github.com/dvaida/hallucinate.git && \ pip install scikit-surprise && \ pip install pymongo && \ From 8048a37411e7bd7e032a132358c1bca7552e2f2f Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 19 Oct 2018 19:33:41 +0000 Subject: [PATCH 101/251] Upgrade anaconda base to 5.2.0 --- Dockerfile | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/Dockerfile b/Dockerfile index 07ed960a..2351c92d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ FROM gcr.io/kaggle-images/python-tensorflow-whl:1.11.0-py36 as tensorflow_whl -FROM continuumio/anaconda3:5.0.1 +FROM continuumio/anaconda3:5.2.0 # This is necessary for apt to access HTTPS sources RUN apt-get update && \ @@ -30,13 +30,13 @@ RUN pip install seaborn python-dateutil dask pytagcloud pyyaml joblib \ conda install -y -c conda-forge spacy && python -m spacy download en && \ python -m spacy download en_core_web_lg && \ # The apt-get version of imagemagick is out of date and has compatibility issues, so we build from source - apt-get -y install dbus fontconfig fontconfig-config fonts-dejavu-core fonts-droid ghostscript gsfonts hicolor-icon-theme \ + apt-get -y install dbus fontconfig fontconfig-config fonts-dejavu-core fonts-droid-fallback ghostscript gsfonts hicolor-icon-theme \ libavahi-client3 libavahi-common-data libavahi-common3 libcairo2 libcap-ng0 libcroco3 \ libcups2 libcupsfilters1 libcupsimage2 libdatrie1 libdbus-1-3 libdjvulibre-text libdjvulibre21 libfftw3-double3 libfontconfig1 \ libfreetype6 libgdk-pixbuf2.0-0 libgdk-pixbuf2.0-common libgomp1 libgraphite2-3 libgs9 libgs9-common libharfbuzz0b libijs-0.35 \ - libilmbase6 libjasper1 libjbig0 libjbig2dec0 libjpeg62-turbo liblcms2-2 liblqr-1-0 libltdl7 libmagickcore-6.q16-2 \ - libmagickcore-6.q16-2-extra 
libmagickwand-6.q16-2 libnetpbm10 libopenexr6 libpango-1.0-0 libpangocairo-1.0-0 libpangoft2-1.0-0 \ - libpaper-utils libpaper1 libpixman-1-0 libpng12-0 librsvg2-2 librsvg2-common libthai-data libthai0 libtiff5 libwmf0.2-7 \ + libilmbase12 libjbig0 libjbig2dec0 libjpeg62-turbo liblcms2-2 liblqr-1-0 libltdl7 libmagickcore-6.q16-3 \ + libmagickcore-6.q16-3-extra libmagickwand-6.q16-3 libnetpbm10 libopenexr22 libpango-1.0-0 libpangocairo-1.0-0 libpangoft2-1.0-0 \ + libpaper-utils libpaper1 libpixman-1-0 libpng16-16 librsvg2-2 librsvg2-common libthai-data libthai0 libtiff5 libwmf0.2-7 \ libxcb-render0 libxcb-shm0 netpbm poppler-data p7zip-full && \ cd /usr/local/src && \ wget http://transloadit.imagemagick.org/download/ImageMagick.tar.gz && \ @@ -126,13 +126,7 @@ RUN apt-get -y install zlib1g-dev liblcms2-dev libwebp-dev libgeos-dev && \ cd /usr/local/src && git clone https://github.com/matplotlib/basemap.git && \ cd basemap && \ git checkout v1.1.0 && \ - # Install geos - cd geos-3.3.3 && \ - export GEOS_DIR=/usr/local && \ - ./configure --prefix=$GEOS_DIR && \ - make && make install && \ - # Install basemap - cd .. && python setup.py install && \ + python setup.py install && \ pip install basemap --no-binary basemap # sasl is apparently an ibis dependency From eb7b055c5ba92087074d948e34ac5e5f65de1bde Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 26 Oct 2018 18:33:39 +0000 Subject: [PATCH 102/251] install fasttext using pip git feature --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 2351c92d..a5a54e68 100644 --- a/Dockerfile +++ b/Dockerfile @@ -341,7 +341,7 @@ RUN pip install --upgrade cython && \ pip install --upgrade cysignals && \ pip install pyfasttext && \ pip install ktext && \ - cd /usr/local/src && git clone --depth=1 https://github.com/facebookresearch/fastText.git && cd fastText && pip install . && \ + pip install git+git://github.com/facebookresearch/fastText.git && \ apt-get install -y libhunspell-dev && pip install hunspell && \ pip install annoy && \ pip install category_encoders && \ From 736bad3134ee889e5cadfcb96f546e425a28c06f Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Fri, 26 Oct 2018 18:19:09 +0000 Subject: [PATCH 103/251] Don't preload tensorflow and torch --- patches/sitecustomize.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/patches/sitecustomize.py b/patches/sitecustomize.py index f0dfab45..9221ba59 100644 --- a/patches/sitecustomize.py +++ b/patches/sitecustomize.py @@ -1,13 +1,6 @@ -# Monkey patches BigQuery client creation to use proxy. - -# Import tensorflow and torch before anything else. This is a hacky workaround to an error on dlopen -# reporting a limit on static TLS, tracked in: -# tensorflow: https://github.com/tensorflow/tensorflow/issues/19010 -# torch: https://github.com/pytorch/pytorch/issues/2575 -import tensorflow -import torch import os +# Monkey patches BigQuery client creation to use proxy.
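A note on the `pip install git+...` form adopted in [PATCH 095] and [PATCH 102]: pip can install directly from a git ref, which replaces the clone-and-`setup.py install` dance. A sketch of the common variants — the https form is an equivalent spelling (GitHub has since retired the unauthenticated `git://` protocol used in the patch):

```
pip install git+git://github.com/facebookresearch/fastText.git      # form used in PATCH 102
pip install git+https://github.com/facebookresearch/fastText.git    # same install over https
pip install "git+https://github.com/fastai/fastai.git@v0.7.2"       # pin to a tag, as in PATCH 095
```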
kaggle_proxy_token = os.getenv("KAGGLE_DATA_PROXY_TOKEN") if kaggle_proxy_token: from google.auth import credentials From f9c4830c3bbe42cfcce579081568e4c45b0463fa Mon Sep 17 00:00:00 2001 From: Chris Gorgolewski Date: Sun, 28 Oct 2018 11:22:37 -0700 Subject: [PATCH 104/251] Add libGl1 to satisfy vtk dependencies --- Dockerfile | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 2351c92d..17367a23 100644 --- a/Dockerfile +++ b/Dockerfile @@ -210,11 +210,19 @@ RUN pip install scipy && \ conda clean -i -l -t -y && \ rm -rf /usr/local/src/* + +# vtk with dependencies +RUN apt-get install -y libgl1-mesa-glx && \ + pip install vtk && \ + # ~~~~ CLEAN UP ~~~~ + rm -rf /root/.cache/pip/* && \ + apt-get autoremove -y && apt-get clean + + RUN pip install --upgrade mpld3 && \ pip install mplleaflet && \ pip install gpxpy && \ pip install arrow && \ - pip install vtk && \ pip install nilearn && \ pip install nibabel && \ pip install pronouncing && \ From 5389746d750a50f3f31e12dad402f6121343a931 Mon Sep 17 00:00:00 2001 From: Chris Gorgolewski Date: Sun, 28 Oct 2018 14:33:26 -0700 Subject: [PATCH 105/251] adding xvfb --- Dockerfile | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/Dockerfile b/Dockerfile index 2351c92d..89ae6c49 100644 --- a/Dockerfile +++ b/Dockerfile @@ -210,6 +210,15 @@ RUN pip install scipy && \ conda clean -i -l -t -y && \ rm -rf /usr/local/src/* + +# xvfbwrapper with dependencies +RUN apt-get install -y xvfb && \ + pip install xvfbwrapper && \ + # ~~~~ CLEAN UP ~~~~ + rm -rf /root/.cache/pip/* && \ + apt-get autoremove -y && apt-get clean + + RUN pip install --upgrade mpld3 && \ pip install mplleaflet && \ pip install gpxpy && \ From 035f3d13c1cd3079e4adfab6932fd47e901147ca Mon Sep 17 00:00:00 2001 From: Chris Gorgolewski Date: Mon, 29 Oct 2018 09:49:45 -0700 Subject: [PATCH 106/251] adding xvfbwrapper test --- tests/test_xvfbwrapper.py | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 tests/test_xvfbwrapper.py diff --git a/tests/test_xvfbwrapper.py b/tests/test_xvfbwrapper.py new file mode 100644 index 00000000..0b8dac45 --- /dev/null +++ b/tests/test_xvfbwrapper.py @@ -0,0 +1,11 @@ +import unittest +import os.path +from xvfbwrapper import Xvfb + +class TestXvfbwrapper(unittest.TestCase): + def test_xvfb(self): + vdisplay = Xvfb() + vdisplay.start() + display_var = ':{}'.format(vdisplay.new_display) + self.assertEqual(display_var, os.environ['DISPLAY']) + vdisplay.stop() From 8b04aaee44e3e297ddc08c9354862fed69a58f31 Mon Sep 17 00:00:00 2001 From: Chris Gorgolewski Date: Mon, 29 Oct 2018 09:58:18 -0700 Subject: [PATCH 107/251] adding test for dipy renderer --- tests/test_dipy.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 tests/test_dipy.py diff --git a/tests/test_dipy.py b/tests/test_dipy.py new file mode 100644 index 00000000..de12ef74 --- /dev/null +++ b/tests/test_dipy.py @@ -0,0 +1,19 @@ +import unittest +import os.path +from dipy.viz import window +from xvfbwrapper import Xvfb + +class TestDipy(unittest.TestCase): + out_file = 'test.png' + def test_renderer(self): + vdisplay = Xvfb() + vdisplay.start() + + ren = window.Renderer() + window.record(ren, n_frames=1, out_path=self.out_file, size=(600, 600)) + self.assertTrue(os.path.exists(self.out_file)) + + vdisplay.stop() + + def tearDown(self): + os.remove(self.out_file) From 1e73362cc332ea7103de410df5551f64cb6f2411 Mon Sep 17 00:00:00 2001 From: Chris Gorgolewski Date: Mon, 29 Oct 2018 10:01:23 -0700 Subject: 
[PATCH 108/251] adding vtk import test --- tests/test_vtk.py | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 tests/test_vtk.py diff --git a/tests/test_vtk.py b/tests/test_vtk.py new file mode 100644 index 00000000..7100223a --- /dev/null +++ b/tests/test_vtk.py @@ -0,0 +1,6 @@ +import unittest + + +class TestVTK(unittest.TestCase): + def test_import(self): + import vtk From 67b8d337101972ee9ace1d4d187c0160dd611b81 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 30 Oct 2018 11:01:22 -0400 Subject: [PATCH 109/251] Preload tensorflow If fastai is used prior to tensorflow, an older version of libcudnn is loaded and tensorflow fails with the following error message: ``` tensorflow/stream_executor/cuda/cuda_dnn.cc:343] Loaded runtime CuDNN library: 7.1.2 but source was compiled with: 7.2.1. CuDNN library major and minor version needs to match or have higher minor version in case of CuDNN 7.0 or later version. If using a binary install, upgrade your CuDNN library. If building from sources, make sure the library loaded at runtime is compatible with the version specified during compile configuration. ``` Preloading for now (like before) until we find how to fix the issue with fastai --- patches/sitecustomize.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/patches/sitecustomize.py b/patches/sitecustomize.py index 9221ba59..7447f9cc 100644 --- a/patches/sitecustomize.py +++ b/patches/sitecustomize.py @@ -1,3 +1,5 @@ +# TODO(rosbo): Remove this once we fix the issue with fastai importing older libcudnn if imported prior to tensorflow +import tensorflow import os # Monkey patches BigQuery client creation to use proxy. From ff8079a18b7988efb03bf0694933fb771ceb607d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joaqu=C3=ADn=20Ruales?= Date: Tue, 30 Oct 2018 23:36:39 -0700 Subject: [PATCH 110/251] Fix broken link in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 655c3977..453a4ca8 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ ![example script](http://i.imgur.com/yrWycNA.png) -This is the Dockerfile (etc.) used for building the image that runs python scripts on Kaggle. [Here's](https://https://gcr.io/kaggle-images/python) the Docker image on Google Container Registry. +This is the Dockerfile (etc.) used for building the image that runs python scripts on Kaggle. [Here's](https://gcr.io/kaggle-images/python) the Docker image on Google Container Registry. ## Getting started From 4c6ed0fec1b8385169ec30eaef0a55b055d9972d Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 12 Nov 2018 19:30:01 +0000 Subject: [PATCH 111/251] Install latest version of seaborn --- Dockerfile | 5 +++-- tests/test_seaborn.py | 7 +++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 8e0430aa..ea4f47f4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,8 +25,9 @@ RUN conda install -y python=3.6.6 && \ pip install keras_applications==1.0.4 --no-deps && \ pip install keras_preprocessing==1.0.2 --no-deps -RUN pip install seaborn python-dateutil dask pytagcloud pyyaml joblib \ - husl geopy ml_metrics mne pyshp && \ +# The anaconda base images includes outdated versions of these packages. Update them to include the latest version. 
+RUN pip install --upgrade seaborn python-dateutil dask && \ + pip install pyyaml joblib pytagcloud husl geopy ml_metrics mne pyshp && \ conda install -y -c conda-forge spacy && python -m spacy download en && \ python -m spacy download en_core_web_lg && \ # The apt-get version of imagemagick is out of date and has compatibility issues, so we build from source diff --git a/tests/test_seaborn.py b/tests/test_seaborn.py index 3986bc8c..b20cbe27 100644 --- a/tests/test_seaborn.py +++ b/tests/test_seaborn.py @@ -1,7 +1,14 @@ import unittest +from distutils.version import StrictVersion + import seaborn as sns class TestSeaborn(unittest.TestCase): + # Fails if seaborn gets downgraded by other package installations. + def test_version(self): + self.assertGreaterEqual(StrictVersion(sns.__version__), StrictVersion("0.9.0")) + + def test_option(self): sns.set(style="darkgrid") From 3a2c83983c866f82bb8b563a90d6a290698f3b41 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Mon, 12 Nov 2018 20:20:16 +0000 Subject: [PATCH 112/251] fix-typo-comment --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index ea4f47f4..3525c526 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,7 +25,7 @@ RUN conda install -y python=3.6.6 && \ pip install keras_applications==1.0.4 --no-deps && \ pip install keras_preprocessing==1.0.2 --no-deps -# The anaconda base images includes outdated versions of these packages. Update them to include the latest version. +# The anaconda base image includes outdated versions of these packages. Update them to include the latest version. RUN pip install --upgrade seaborn python-dateutil dask && \ pip install pyyaml joblib pytagcloud husl geopy ml_metrics mne pyshp && \ conda install -y -c conda-forge spacy && python -m spacy download en && \ From 429dec5b0e53c0e43b1664c14a7061083e5a9d25 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 13 Nov 2018 02:36:45 +0000 Subject: [PATCH 113/251] Install h2o dependencies --- Dockerfile | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 3525c526..71f236c3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -142,12 +142,11 @@ RUN apt-get -y install libsasl2-dev && \ pip install cartopy && \ # MXNet pip install mxnet && \ - # h2o - # Temporary sync of conda's numpy with pip's, needed to avoid an install error - #conda upgrade -y numpy && \ + # h2o (requires java) # Upgrade numpy with pip to avoid install errors pip install --upgrade numpy && \ - # This requires python-software-properties and Java, which were installed above. 
+ # requires java + apt-get install -y default-jdk && \ cd /usr/local/src && mkdir h2o && cd h2o && \ wget http://h2o-release.s3.amazonaws.com/h2o/latest_stable -O latest && \ wget --no-check-certificate -i latest -O h2o.zip && rm latest && \ From 5b9224e5cd500abd961db834e67b688db3f9e2e0 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 13 Nov 2018 02:37:22 +0000 Subject: [PATCH 114/251] Add tests for h2o --- tests/test_h2o.py | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 tests/test_h2o.py diff --git a/tests/test_h2o.py b/tests/test_h2o.py new file mode 100644 index 00000000..7b6b212d --- /dev/null +++ b/tests/test_h2o.py @@ -0,0 +1,9 @@ +import unittest + +import h2o + +class TestH2o(unittest.TestCase): + def test_init_read(self): + h2o.init() + train = h2o.import_file("/input/tests/data/train.csv", destination_frame="train") + self.assertEqual(19, train.nrow) From 0020fba526ec69378d5ae59e50d31796fb289269 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 13 Nov 2018 19:41:22 +0000 Subject: [PATCH 115/251] close h2o session in test --- tests/test_h2o.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/test_h2o.py b/tests/test_h2o.py index 7b6b212d..94fc57b8 100644 --- a/tests/test_h2o.py +++ b/tests/test_h2o.py @@ -3,6 +3,9 @@ import h2o class TestH2o(unittest.TestCase): + def tearDown(self): + h2o.cluster().shutdown(False) + def test_init_read(self): h2o.init() train = h2o.import_file("/input/tests/data/train.csv", destination_frame="train") From 8fcae6112175c2a1eef8f1c7246cb7b43fffadeb Mon Sep 17 00:00:00 2001 From: Wendy Kan Date: Mon, 19 Nov 2018 14:10:35 -0800 Subject: [PATCH 116/251] adding kaggle api --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 71f236c3..a8212c59 100644 --- a/Dockerfile +++ b/Dockerfile @@ -475,6 +475,7 @@ RUN pip install flashtext && \ pip install PDPbox && \ pip install ggplot && \ pip install cesium && \ + pip install kaggle && \ ##### ^^^^ Add new contributions above here ^^^^ ##### # clean up pip cache rm -rf /root/.cache/pip/* From 40499323b7c83503ccfe849120e8cf1e1cbf78bd Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 20 Nov 2018 09:38:46 -0800 Subject: [PATCH 117/251] Update ImageMagick binary url --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index a8212c59..abda466c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -40,7 +40,7 @@ RUN pip install --upgrade seaborn python-dateutil dask && \ libpaper-utils libpaper1 libpixman-1-0 libpng16-16 librsvg2-2 librsvg2-common libthai-data libthai0 libtiff5 libwmf0.2-7 \ libxcb-render0 libxcb-shm0 netpbm poppler-data p7zip-full && \ cd /usr/local/src && \ - wget http://transloadit.imagemagick.org/download/ImageMagick.tar.gz && \ + wget https://imagemagick.org/download/ImageMagick.tar.gz && \ tar xzf ImageMagick.tar.gz && cd `ls -d ImageMagick-*` && pwd && ls -al && ./configure && \ make -j $(nproc) && make install && \ # clean up ImageMagick source files From 922c5a9030748ebab4da23d7dbdd4cb89a7b86a1 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Wed, 21 Nov 2018 09:06:11 -0800 Subject: [PATCH 118/251] Remove the kaggle api package This breaks the twosigma competition because the code for both lives in a python package called `kaggle`. The code for the twosigma competition under `kaggle.competitions` should probably be moved. Additionally, the kaggle api client package requires an API secret key. 
We don't offer a way to manage secrets for now and we don't want to encourage people adding their secret in the code (easy to leak if ever made public). --- Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index abda466c..0aff17f6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -475,7 +475,6 @@ RUN pip install flashtext && \ pip install PDPbox && \ pip install ggplot && \ pip install cesium && \ - pip install kaggle && \ ##### ^^^^ Add new contributions above here ^^^^ ##### # clean up pip cache rm -rf /root/.cache/pip/* From 5976f3c19b36060790863af361b769786643a72c Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 18 Sep 2018 22:22:43 +0000 Subject: [PATCH 119/251] Create process to prebuild tensorflow wheels --- tensorflow-whl/CHANGELOG.md | 1 + tensorflow-whl/Dockerfile | 100 ++++++++++++++++++++++++++++++++++++ tensorflow-whl/README.md | 22 ++++++++ tensorflow-whl/build | 11 ++++ tensorflow-whl/push | 25 +++++++++ 5 files changed, 159 insertions(+) create mode 100644 tensorflow-whl/CHANGELOG.md create mode 100644 tensorflow-whl/Dockerfile create mode 100644 tensorflow-whl/README.md create mode 100755 tensorflow-whl/build create mode 100755 tensorflow-whl/push diff --git a/tensorflow-whl/CHANGELOG.md b/tensorflow-whl/CHANGELOG.md new file mode 100644 index 00000000..52ab3b13 --- /dev/null +++ b/tensorflow-whl/CHANGELOG.md @@ -0,0 +1 @@ +1.11.0-py36: Tensorflow 1.11.0 wheels built with python 3.6 diff --git a/tensorflow-whl/Dockerfile b/tensorflow-whl/Dockerfile new file mode 100644 index 00000000..db5bc8df --- /dev/null +++ b/tensorflow-whl/Dockerfile @@ -0,0 +1,100 @@ +FROM nvidia/cuda:9.1-cudnn7-devel-ubuntu16.04 AS nvidia +FROM continuumio/anaconda3:5.0.1 + +# Avoid interactive configuration prompts/dialogs during apt-get. +ENV DEBIAN_FRONTEND=noninteractive + +# This is necessary for apt to access HTTPS sources +RUN apt-get update && \ + apt-get install apt-transport-https + +# Cuda support +COPY --from=nvidia /etc/apt/sources.list.d/cuda.list /etc/apt/sources.list.d/ +COPY --from=nvidia /etc/apt/sources.list.d/nvidia-ml.list /etc/apt/sources.list.d/ +COPY --from=nvidia /etc/apt/trusted.gpg /etc/apt/trusted.gpg.d/cuda.gpg + +ENV CUDA_VERSION=9.1.85 +ENV CUDA_PKG_VERSION=9-1=$CUDA_VERSION-1 +LABEL com.nvidia.volumes.needed="nvidia_driver" +LABEL com.nvidia.cuda.version="${CUDA_VERSION}" +ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH} +# The stub is useful to us both for build-time linking and run-time linking, on CPU-only systems. +# When intended to be used with actual GPUs, make sure to (besides providing access to the host +# CUDA user libraries, either manually or through the use of nvidia-docker) exclude them. One +# convenient way to do so is to obscure its contents by a bind mount: +# docker run .... -v /non-existing-directory:/usr/local/cuda/lib64/stubs:ro ...
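To make the bind-mount trick in the comment above concrete, here is a hedged example of running the image on a GPU host — the image name is taken from the push script later in this series, and the exact flags are illustrative:

```
# Mounting a non-existent directory over the stubs hides the stub libcuda.so,
# so the real driver library exposed by nvidia-docker is resolved instead.
docker run --rm -it \
  -v /non-existing-directory:/usr/local/cuda/lib64/stubs:ro \
  kaggle/python-tensorflow-whl /bin/bash
```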
+ENV LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/local/nvidia/lib64:/usr/local/cuda/lib64:/usr/local/cuda/lib64/stubs" +ENV NVIDIA_VISIBLE_DEVICES=all +ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility +ENV NVIDIA_REQUIRE_CUDA="cuda>=9.0" +RUN apt-get update && apt-get install -y --no-install-recommends \ + cuda-cudart-$CUDA_PKG_VERSION \ + cuda-libraries-$CUDA_PKG_VERSION \ + cuda-libraries-dev-$CUDA_PKG_VERSION \ + cuda-nvml-dev-$CUDA_PKG_VERSION \ + cuda-minimal-build-$CUDA_PKG_VERSION \ + cuda-command-line-tools-$CUDA_PKG_VERSION \ + libcudnn7=7.0.5.15-1+cuda9.1 \ + libcudnn7-dev=7.0.5.15-1+cuda9.1 \ + libnccl2=2.2.12-1+cuda9.1 \ + libnccl-dev=2.2.12-1+cuda9.1 && \ + ln -s /usr/local/cuda-9.1 /usr/local/cuda && \ + ln -s /usr/local/cuda/lib64/stubs/libcuda.so /usr/local/cuda/lib64/stubs/libcuda.so.1 && \ + rm -rf /var/lib/apt/lists/* + +# Install bazel +RUN apt-get update && apt-get install -y python-software-properties zip && \ + echo "deb http://ppa.launchpad.net/webupd8team/java/ubuntu precise main" | tee -a /etc/apt/sources.list && \ + echo "deb-src http://ppa.launchpad.net/webupd8team/java/ubuntu precise main" | tee -a /etc/apt/sources.list && \ + apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys EEA14886 C857C906 2B90D010 && \ + apt-get update && \ + echo debconf shared/accepted-oracle-license-v1-1 select true | debconf-set-selections && \ + echo debconf shared/accepted-oracle-license-v1-1 seen true | debconf-set-selections && \ + apt-get install -y oracle-java8-installer && \ + echo "deb [arch=amd64] http://storage.googleapis.com/bazel-apt stable jdk1.8" | tee /etc/apt/sources.list.d/bazel.list && \ + curl https://bazel.build/bazel-release.pub.gpg | apt-key add - && \ + apt-get update && apt-get install -y bazel && \ + apt-get upgrade -y bazel + +# Tensorflow doesn't support python 3.7 yet. See https://github.com/tensorflow/tensorflow/issues/20517 +RUN conda install -y python=3.6.6 && \ + # Another fix for TF 1.10 https://github.com/tensorflow/tensorflow/issues/21518 + pip install keras_applications==1.0.4 --no-deps && \ + pip install keras_preprocessing==1.0.2 --no-deps + +# Fetch tensorflow +RUN cd /usr/local/src && \ + git clone https://github.com/tensorflow/tensorflow && \ + cd tensorflow && \ + git checkout r1.11 + +# Create a tensorflow wheel for CPU +RUN cd /usr/local/src/tensorflow && \ + cat /dev/null | ./configure && \ + bazel build --config=opt //tensorflow/tools/pip_package:build_pip_package && \ + bazel-bin/tensorflow/tools/pip_package/build_pip_package /tmp/tensorflow_cpu && \ + bazel clean + +# Create a tensorflow wheel for GPU/cuda +ENV TF_NEED_CUDA=1 +ENV TF_CUDA_VERSION=9.1 +ENV TF_CUDA_COMPUTE_CAPABILITIES=3.7,6.0 +ENV TF_CUDNN_VERSION=7 +ENV TF_NCCL_VERSION=2 +ENV NCCL_INSTALL_PATH=/usr/ + +RUN cd /usr/local/src/tensorflow && \ + # TF_NCCL_INSTALL_PATH is used for both libnccl.so.2 and libnccl.h. Make sure they are both accessible from the same directory. 
+ ln -s /usr/lib/x86_64-linux-gnu/libnccl.so.2 /usr/lib/ && \ + cat /dev/null | ./configure && \ + echo "/usr/local/cuda-${TF_CUDA_VERSION}/targets/x86_64-linux/lib/stubs" > /etc/ld.so.conf.d/cuda-stubs.conf && ldconfig && \ + bazel build --config=opt \ + --config=cuda \ + --cxxopt="-D_GLIBCXX_USE_CXX11_ABI=0" \ + //tensorflow/tools/pip_package:build_pip_package && \ + rm /etc/ld.so.conf.d/cuda-stubs.conf && ldconfig && \ + bazel-bin/tensorflow/tools/pip_package/build_pip_package /tmp/tensorflow_gpu && \ + bazel clean + +# Print out the built .whl files +RUN ls -R /tmp/tensorflow* diff --git a/tensorflow-whl/README.md b/tensorflow-whl/README.md new file mode 100644 index 00000000..ba0e0003 --- /dev/null +++ b/tensorflow-whl/README.md @@ -0,0 +1,22 @@ +# Build new Tensorflow wheels + +``` +./build +``` + +# Push the new wheels + +1. Add an entry in the [CHANGELOG](CHANGELOG.md) with an appropriate `LABEL`. +2. Push the new image using the `LABEL` you picked above. + + ``` + ./push LABEL + ``` + +# Use the new wheels + +Update the line below in the [CPU Dockerfile](../Dockerfile) and the [GPU Dockerfile](../gpu.Dockerfile) to use the new `LABEL`. + +``` +FROM gcr.io/kaggle-images/python-tensorflow-whl: as tensorflow_whl +``` diff --git a/tensorflow-whl/build b/tensorflow-whl/build new file mode 100755 index 00000000..b5678526 --- /dev/null +++ b/tensorflow-whl/build @@ -0,0 +1,11 @@ +#!/bin/bash +set -e +set -x + +# Default behavior is to do everything from scratch. +# The --use-cache option is useful if you're iterating on a broken build. +if [[ "$1" == "--use-cache" ]]; then + docker build --rm -t kaggle/python-tensorflow-whl . +else + docker build --pull --rm --no-cache -t kaggle/python-tensorflow-whl . +fi diff --git a/tensorflow-whl/push b/tensorflow-whl/push new file mode 100755 index 00000000..a27de8a1 --- /dev/null +++ b/tensorflow-whl/push @@ -0,0 +1,25 @@ +#!/bin/bash +# +# Push a newly-built image with the given label to gcr.io and DockerHub. +# +# Usage: +# ./push LABEL +# +# Description: +# LABEL: Image label. See CHANGELOG.md +# +set -e +set -x + +SOURCE_IMAGE="kaggle/python-tensorflow-whl" +TARGET_IMAGE="gcr.io/kaggle-images/python-tensorflow-whl" + +LABEL=$1 + +if [[ -z "$LABEL" ]]; then + echo "You must provide a label for the image" + exit 1 +fi + +docker tag $SOURCE_IMAGE:latest $TARGET_IMAGE:$LABEL +gcloud docker -- push $TARGET_IMAGE:$LABEL From 377387914cd323bda8163c1800e81c627d1474e0 Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 27 Nov 2018 02:39:59 +0000 Subject: [PATCH 120/251] Add TODOs and improve build/push script --- gpu.Dockerfile | 2 ++ tensorflow-whl/Dockerfile | 8 +++++-- tensorflow-whl/build | 47 ++++++++++++++++++++++++++++++++------- tensorflow-whl/push | 45 ++++++++++++++++++++++++++++--------- 4 files changed, 81 insertions(+), 21 deletions(-) diff --git a/gpu.Dockerfile b/gpu.Dockerfile index 247c7dd4..164b0d25 100644 --- a/gpu.Dockerfile +++ b/gpu.Dockerfile @@ -7,6 +7,8 @@ COPY --from=nvidia /etc/apt/sources.list.d/cuda.list /etc/apt/sources.list.d/ COPY --from=nvidia /etc/apt/sources.list.d/nvidia-ml.list /etc/apt/sources.list.d/ COPY --from=nvidia /etc/apt/trusted.gpg /etc/apt/trusted.gpg.d/cuda.gpg +# Ensure the cuda libraries are compatible with the custom Tensorflow wheels. +# TODO(b/120050292): Use templating to keep in sync. 
ENV CUDA_VERSION=9.1.85 ENV CUDA_PKG_VERSION=9-1=$CUDA_VERSION-1 LABEL com.nvidia.volumes.needed="nvidia_driver" diff --git a/tensorflow-whl/Dockerfile b/tensorflow-whl/Dockerfile index db5bc8df..5b7b421b 100644 --- a/tensorflow-whl/Dockerfile +++ b/tensorflow-whl/Dockerfile @@ -13,6 +13,8 @@ COPY --from=nvidia /etc/apt/sources.list.d/cuda.list /etc/apt/sources.list.d/ COPY --from=nvidia /etc/apt/sources.list.d/nvidia-ml.list /etc/apt/sources.list.d/ COPY --from=nvidia /etc/apt/trusted.gpg /etc/apt/trusted.gpg.d/cuda.gpg +# Ensure the cuda libraries are compatible with the GPU image. +# TODO(b/120050292): Use templating to keep in sync. ENV CUDA_VERSION=9.1.85 ENV CUDA_PKG_VERSION=9-1=$CUDA_VERSION-1 LABEL com.nvidia.volumes.needed="nvidia_driver" @@ -28,14 +30,16 @@ ENV NVIDIA_VISIBLE_DEVICES=all ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility ENV NVIDIA_REQUIRE_CUDA="cuda>=9.0" RUN apt-get update && apt-get install -y --no-install-recommends \ + cuda-cupti-$CUDA_PKG_VERSION \ cuda-cudart-$CUDA_PKG_VERSION \ + cuda-cudart-dev-$CUDA_PKG_VERSION \ cuda-libraries-$CUDA_PKG_VERSION \ cuda-libraries-dev-$CUDA_PKG_VERSION \ cuda-nvml-dev-$CUDA_PKG_VERSION \ cuda-minimal-build-$CUDA_PKG_VERSION \ cuda-command-line-tools-$CUDA_PKG_VERSION \ - libcudnn7=7.0.5.15-1+cuda9.1 \ - libcudnn7-dev=7.0.5.15-1+cuda9.1 \ + libcudnn7=7.2.1.38-1+cuda9.0 \ + libcudnn7-dev=7.2.1.38-1+cuda9.0 \ libnccl2=2.2.12-1+cuda9.1 \ libnccl-dev=2.2.12-1+cuda9.1 && \ ln -s /usr/local/cuda-9.1 /usr/local/cuda && \ diff --git a/tensorflow-whl/build b/tensorflow-whl/build index b5678526..dc705f17 100755 --- a/tensorflow-whl/build +++ b/tensorflow-whl/build @@ -1,11 +1,42 @@ #!/bin/bash set -e -set -x -# Default behavior is to do everything from scratch. -# The --use-cache option is useful if you're iterating on a broken build. -if [[ "$1" == "--use-cache" ]]; then - docker build --rm -t kaggle/python-tensorflow-whl . -else - docker build --pull --rm --no-cache -t kaggle/python-tensorflow-whl . -fi +usage() { +cat << EOF +Usage: $0 [OPTIONS] +Build new Tensorflow wheels for use in the Kaggle Docker Python base images. + +Options: + -c, --use-cache Use layer cache when building a new image. +EOF +} + +CACHE_FLAG="--no-cache" +IMAGE_TAG="kaggle/python-build" + +while :; do + case "$1" in + -h|--help) + usage + exit + ;; + -c|--use-cache) + CACHE_FLAG="" + ;; + -?*) + usage + printf 'ERROR: Unknown option: %s\n' "$1" >&2 + exit + ;; + *) + break + esac + + shift +done + +readonly CACHE_FLAG +readonly IMAGE_TAG + +set -x +docker build --rm --pull $CACHE_FLAG -t "$IMAGE_TAG" . diff --git a/tensorflow-whl/push b/tensorflow-whl/push index a27de8a1..dfb64ec8 100755 --- a/tensorflow-whl/push +++ b/tensorflow-whl/push @@ -1,19 +1,37 @@ #!/bin/bash -# -# Push a newly-built image with the given label to gcr.io and DockerHub. -# -# Usage: -# ./push LABEL -# -# Description: -# LABEL: Image label. See CHANGELOG.md -# set -e + +usage() { +cat << EOF +Usage: $0 [LABEL] +Push a newly-built image with the given LABEL to gcr.io and DockerHub. +See CHANGELOG.md file for LABEL naming convention. 
+EOF +} + set -x SOURCE_IMAGE="kaggle/python-tensorflow-whl" TARGET_IMAGE="gcr.io/kaggle-images/python-tensorflow-whl" +while :; do + case "$1" in + -h|--help) + usage + exit + ;; + -?*) + usage + printf 'ERROR: Unknown option: %s\n' "$1" >&2 + exit + ;; + *) + break + esac + + shift +done + LABEL=$1 if [[ -z "$LABEL" ]]; then @@ -21,5 +39,10 @@ if [[ -z "$LABEL" ]]; then exit 1 fi -docker tag $SOURCE_IMAGE:latest $TARGET_IMAGE:$LABEL -gcloud docker -- push $TARGET_IMAGE:$LABEL +readonly SOURCE_IMAGE +readonly TARGET_IMAGE +readonly LABEL + +set -x +docker tag "$SOURCE_IMAGE:latest" "$TARGET_IMAGE:$LABEL" +gcloud docker -- push "$TARGET_IMAGE:$LABEL" From 52b051e44b53a816f62080ed17228d699787b1fb Mon Sep 17 00:00:00 2001 From: Vincent Roseberry Date: Tue, 27 Nov 2018 02:45:17 +0000 Subject: [PATCH 121/251] fix typo --- tensorflow-whl/README.md | 2 +- tensorflow-whl/build | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tensorflow-whl/README.md b/tensorflow-whl/README.md index ba0e0003..509591a2 100644 --- a/tensorflow-whl/README.md +++ b/tensorflow-whl/README.md @@ -18,5 +18,5 @@ Update the line below in the [CPU Dockerfile](../Dockerfile) and the [GPU Dockerfile](../gpu.Dockerfile) to use the new `LABEL`. ``` -FROM gcr.io/kaggle-images/python-tensorflow-whl: as tensorflow_whl +FROM gcr.io/kaggle-images/python-tensorflow-whl: