From deb1454ed765fc77fb9a35d4b2d06ace6c15f97a Mon Sep 17 00:00:00 2001 From: alon Date: Sun, 17 Oct 2021 15:50:23 +0300 Subject: [PATCH] reset state --- .circleci/config.yml | 59 ++- Dockerfile.arm | 83 +++++ Dockerfile.jetson | 102 ++++++ get_deps.sh | 133 ++++--- opt/build/bazel/Dockerfile | 32 ++ opt/build/dockerparts/base_image.tmpl | 22 ++ opt/build/dockerparts/go.yml | 9 + opt/build/libtorch/Dockerfile.arm | 53 +++ opt/build/libtorch/Dockerfile.jetson | 49 +++ opt/build/libtorch/Dockerfile.x64 | 38 ++ opt/build/libtorch/Makefile | 140 ++++++++ opt/build/libtorch/collect.py | 82 +++++ opt/build/libtorch/repack.sh | 94 +++++ opt/build/tensorflow-2.x/Dockerfile.jetson | 99 +++++ opt/build/tensorflow-2.x/Makefile.jetson | 149 ++++++++ opt/build/tensorflow-2.x/collect.py | 80 +++++ opt/build/tensorflow/.gitignore | 2 + opt/build/tensorflow/Dockerfile.arm | 59 +++ opt/build/tensorflow/Dockerfile.x64 | 81 +++++ opt/build/tensorflow/Makefile | 25 ++ opt/build/tensorflow/dockerfile.tmpl | 61 ++++ opt/build/tensorflow/pack.sh | 21 ++ opt/build/tensorflow/tf-1.14.0-arm64v8.patch | 86 +++++ opt/build/tflite/Dockerfile.x64 | 29 ++ opt/build/tflite/Makefile | 139 +++++++ opt/build/tflite/build | 20 ++ opt/build/tflite/build.macos | 12 + opt/build/tflite/collect.py | 80 +++++ opt/cmake/modules/FindTensorFlow.cmake | 359 +++++++++++++++++++ 29 files changed, 2129 insertions(+), 69 deletions(-) create mode 100644 Dockerfile.arm create mode 100644 Dockerfile.jetson create mode 100644 opt/build/bazel/Dockerfile create mode 100644 opt/build/dockerparts/base_image.tmpl create mode 100644 opt/build/dockerparts/go.yml create mode 100644 opt/build/libtorch/Dockerfile.arm create mode 100644 opt/build/libtorch/Dockerfile.jetson create mode 100644 opt/build/libtorch/Dockerfile.x64 create mode 100644 opt/build/libtorch/Makefile create mode 100644 opt/build/libtorch/collect.py create mode 100644 opt/build/libtorch/repack.sh create mode 100644 opt/build/tensorflow-2.x/Dockerfile.jetson 
create mode 100644 opt/build/tensorflow-2.x/Makefile.jetson create mode 100644 opt/build/tensorflow-2.x/collect.py create mode 100644 opt/build/tensorflow/.gitignore create mode 100644 opt/build/tensorflow/Dockerfile.arm create mode 100644 opt/build/tensorflow/Dockerfile.x64 create mode 100644 opt/build/tensorflow/Makefile create mode 100644 opt/build/tensorflow/dockerfile.tmpl create mode 100644 opt/build/tensorflow/pack.sh create mode 100644 opt/build/tensorflow/tf-1.14.0-arm64v8.patch create mode 100644 opt/build/tflite/Dockerfile.x64 create mode 100644 opt/build/tflite/Makefile create mode 100644 opt/build/tflite/build create mode 100644 opt/build/tflite/build.macos create mode 100644 opt/build/tflite/collect.py create mode 100644 opt/cmake/modules/FindTensorFlow.cmake diff --git a/.circleci/config.yml b/.circleci/config.yml index 367b5505f..c5f106df1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -221,11 +221,6 @@ jobs: platform: debian platforms-build: - machine: - enabled: true - docker_layer_caching: true - resource_class: small - image: ubuntu-2004:202107-02 parameters: lite: # LITE value during make type: string @@ -233,23 +228,18 @@ jobs: type: string target: # CPU|GPU type: string + docker: + - image: redisfab/rmbuilder:6.2.5-x64-buster steps: - abort_for_docs - abort_for_noci - early_return_for_forked_pull_requests + - setup_remote_docker: + docker_layer_caching: true - checkout-all - - run: - name: Relocate docker overlay2 dir - command: | - sudo systemctl stop docker - sudo mkdir -p /var2/lib/docker - sudo mv /var/lib/docker/overlay2 /var2/lib/docker - sudo mkdir /var/lib/docker/overlay2 - sudo mount --bind /var2/lib/docker/overlay2 /var/lib/docker/overlay2 - sudo systemctl start docker - restore_cache: keys: - - v1.2.5-deps-{{ checksum "get_deps.sh" }}-<>-<> + - v1-dependencies-{{ checksum "get_deps.sh" }}-<>-<> - setup-automation # since we run in parallel, we need to generate docker files with different suffixes hence the 
DOCKER_SUFFIX @@ -352,6 +342,45 @@ jobs: steps: - valgrind-general-steps +# build-macos: +# macos: +# xcode: 11.3.0 +# steps: +# - abort_for_docs +# - run: +# name: Fix macOS Python installation +# command: | +# brew reinstall -f python2 +# - build-steps: +# platform: macos +# +# build-multiarch-docker: +# machine: +# enabled: true +# image: cimg/base:2020.01 +# steps: +# - abort_for_docs +# - checkout-all +# - run: +# name: Checkout LFS +# command: | +# curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | sudo bash +# sudo apt-get install -y git-lfs +# git lfs install +# git lfs pull +# - run: +# name: Setup Docker client experimental features +# command: | +# sudo ./opt/readies/bin/getdocker --just-enable-exp +# docker version +# - run: +# name: Build +# command: | +# sudo docker login -u redisfab -p $DOCKER_REDISFAB_PWD +# cd opt/build/docker +# make build +# sudo make publish + # internal PRs execute build-and-test either in a workflow or # via a github action trigger build-and-test-gpu: diff --git a/Dockerfile.arm b/Dockerfile.arm new file mode 100644 index 000000000..22e609e7f --- /dev/null +++ b/Dockerfile.arm @@ -0,0 +1,83 @@ +# BUILD redisfab/redisai:${VERSION}-cpu-${ARCH}-${OSNICK} + +ARG REDIS_VER=6.2.4 + +# OSNICK=bionic|stretch|buster +ARG OSNICK=buster + +# OS=debian:buster-slim|debian:stretch-slim|ubuntu:bionic +ARG OS=debian:buster-slim + +# ARCH=arm64v8|arm32v7 +ARG ARCH=arm64v8 + +ARG PACK=0 +ARG REDISAI_LITE=0 +ARG TEST=0 + +#---------------------------------------------------------------------------------------------- +FROM redisfab/redis:${REDIS_VER}-${ARCH}-${OSNICK} AS redis +FROM redisfab/xbuild:${ARCH}-${OS} AS builder + +ARG OSNICK +ARG OS +ARG ARCH +ARG REDIS_VER + +RUN [ "cross-build-start" ] + +RUN echo "Building for ${OSNICK} (${OS}) for ${ARCH}" + +WORKDIR /build +COPY --from=redis /usr/local/ /usr/local/ + +COPY ./opt/ opt/ +COPY ./tests/flow/tests_setup/test_requirements.txt tests/flow + +RUN 
./opt/readies/bin/getpy3 +RUN ./opt/system-setup.py + +ARG DEPS_ARGS="" +COPY ./get_deps.sh . +RUN if [ "$DEPS_ARGS" = "" ]; then ./get_deps.sh cpu; else env $DEPS_ARGS ./get_deps.sh cpu; fi + +ARG BUILD_ARGS="" +ADD ./ /build +RUN set -e ;\ + . ./opt/readies/bin/sourced ./profile.d ;\ + make -C opt build $BUILD_ARGS SHOW=1 + +ARG PACK +ARG TEST + +RUN if [ "$PACK" = "1" ]; then make -C opt pack; fi +RUN if [ "$TEST" = "1" ]; then TEST= make -C opt test $BUILD_ARGS NO_LFS=1; fi + +RUN [ "cross-build-end" ] + +#---------------------------------------------------------------------------------------------- +FROM redisfab/redis-xbuild:${REDIS_VER}-${ARCH}-${OSNICK} + +RUN [ "cross-build-start" ] + +ARG OSNICK +ARG OS +ARG ARCH +ARG REDIS_VER +ARG PACK + +RUN if [ ! -z $(command -v apt-get) ]; then apt-get -qq update; apt-get -q install -y libgomp1; fi +RUN if [ ! -z $(command -v yum) ]; then yum install -y libgomp; fi + +ENV REDIS_MODULES /usr/lib/redis/modules +ENV LD_LIBRARY_PATH $REDIS_MODULES + +RUN mkdir -p $REDIS_MODULES/ + +COPY --from=builder /build/install-cpu/ $REDIS_MODULES/ + +WORKDIR /data +EXPOSE 6379 +CMD ["--loadmodule", "/usr/lib/redis/modules/redisai.so"] + +RUN [ "cross-build-end" ] diff --git a/Dockerfile.jetson b/Dockerfile.jetson new file mode 100644 index 000000000..34c544b71 --- /dev/null +++ b/Dockerfile.jetson @@ -0,0 +1,102 @@ +# BUILD redisfab/redisai:${VERSION}-jetson-${ARCH}-${OSNICK} + +ARG REDIS_VER=6.2.4 + +# OSNICK=bionic|centos7|centos6 +ARG OSNICK=bionic + +# OS=ubuntu18.04|ubuntu16.04|centos7 +ARG OS=ubuntu18.04 + +# ARCH=arm64v8|arm32v7 +ARG ARCH=arm64v8 + +ARG CUDA_VER=10.2-cudnn7 + +ARG L4T_VER=r32.4.4 + +ARG PACK=0 +ARG REDISAI_LITE=0 +ARG TEST=0 + +#---------------------------------------------------------------------------------------------- +FROM redisfab/redis:${REDIS_VER}-${ARCH}-${OSNICK} AS redis +FROM redisfab/jetpack:4.4.1-arm64v8-l4t as builder + +ARG OSNICK +ARG OS +ARG ARCH +ARG REDIS_VER +ARG CUDA_VER +ARG L4T_VER + 
+RUN echo "Building for ${OSNICK} (${OS}) for ${ARCH} [with Redis ${REDIS_VER}]" + +ENV NVIDIA_VISIBLE_DEVICES all +ENV NVIDIA_DRIVER_CAPABILITIES compute,utility + +WORKDIR /build +COPY --from=redis /usr/local/ /usr/local/ + +COPY ./opt/ opt/ +COPY ./tests/flow/tests_setup/test_requirements.txt tests/flow/ + +RUN FORCE=1 ./opt/readies/bin/getpy3 +RUN ./opt/system-setup.py + +ARG DEPS_ARGS="GPU=1 JETSON=1 WITH_PT=1 WITH_TF=1 WITH_TFLITE=0 WITH_ORT=0" +COPY ./get_deps.sh . +# RUN if [ "$DEPS_ARGS" = "" ]; then ./get_deps.sh gpu; else env $DEPS_ARGS ./get_deps.sh gpu; fi +RUN set -e ;\ + env $DEPS_ARGS ./get_deps.sh + +ARG BUILD_ARGS="GPU=1 JETSON=1 WITH_TF=1 WITH_PT=1 WITH_TFLITE=0 WITH_ORT=0" + +ADD ./ /build +RUN bash -c "set -e ;\ + . ./opt/readies/bin/sourced ./profile.d ;\ + make -C opt build $BUILD_ARGS SHOW=1" + +ARG PACK +ARG REDISAI_LITE +ARG TEST + +RUN mkdir -p bin/artifacts +RUN set -e ;\ + if [ "$PACK" = "1" ]; then make -C opt pack GPU=1; fi + +RUN set -e ;\ + if [ "$TEST" = "1" ]; then \ + TEST= make -C opt test GPU=1 $BUILD_ARGS NO_LFS=1 ;\ + if [[ -d test/logs ]]; then \ + tar -C test/logs -czf bin/artifacts/test-logs-gpu.tgz . ;\ + fi ;\ + fi + +#---------------------------------------------------------------------------------------------- +# FROM nvidia/cuda:${CUDA_VER}-runtime-${OS} +# FROM nvidia/cuda-arm64:11.1-runtime-ubuntu18.04 +# FROM nvcr.io/nvidia/l4t-base:${L4T_VER} +FROM redisfab/jetpack:4.4.1-arm64v8-l4t + +ARG OS +ARG L4T_VER + +RUN if [ ! -z $(command -v apt-get) ]; then apt-get -qq update; apt-get -q install -y libgomp1; fi +RUN if [ ! 
-z $(command -v yum) ]; then yum install -y libgomp; fi + +ENV REDIS_MODULES /usr/lib/redis/modules +RUN mkdir -p $REDIS_MODULES/ + +COPY --from=redis /usr/local/ /usr/local/ +COPY --from=builder /build/install-gpu/ $REDIS_MODULES/ + +COPY --from=builder /build/bin/artifacts/ /var/opt/redislabs/artifacts + +# COPY --from=builder /usr/local/etc/redis/redis.conf /usr/local/etc/redis/ + +WORKDIR /data +EXPOSE 6379 +# CMD ["/usr/local/bin/redis-server", "/usr/local/etc/redis/redis.conf", "--loadmodule", "/usr/lib/redis/modules/redisai.so"] +RUN rm -rf /root/.cache /usr/local/cuda/lib64/*.a /usr/local/cuda/doc /usr/local/cuda/samples +CMD ["/usr/local/bin/redis-server", "--loadmodule", "/usr/lib/redis/modules/redisai.so"] diff --git a/get_deps.sh b/get_deps.sh index c50a8fbbd..af75ffbf7 100755 --- a/get_deps.sh +++ b/get_deps.sh @@ -8,6 +8,11 @@ TF_VERSION="2.6.0" TFLITE_VERSION="2.0.0" PT_VERSION="1.9.0" +if [[ $JETSON == 1 ]]; then + PT_VERSION="1.7.0" + TF_VERSION="2.4.0" +fi + ###### END VERSIONS ###### error() { @@ -26,12 +31,13 @@ if [[ $1 == --help || $1 == help ]]; then Argument variables: CPU=1 Get CPU dependencies GPU=1 Get GPU dependencies + JETSON=1 Get Jetson dependencies VERBOSE=1 Print commands FORCE=1 Download even if present WITH_DLPACK=0 Skip dlpack - WITH_TF=0 Skip Tensorflow or download official version - WITH_TFLITE=0 Skip TensorflowLite or download official version - WITH_PT=0 Skip PyTorch or download official version + WITH_TF=0 Skip Tensorflow or download from S3 repo + WITH_TFLITE=0 Skip TensorflowLite or download from S3 repo + WITH_PT=0 Skip PyTorch or download from S3 repo WITH_ORT=0 Skip OnnxRuntime or download from S3 repo OS= Set, to override the platform OS ARCH=0 Set, to override the platform ARCH @@ -80,18 +86,16 @@ DEPS_DIR=$HERE/deps/$OS-$ARCH-$DEVICE mkdir -p ${DEPS_DIR} cd ${DEPS_DIR} - -# Get the backend from its URL if is not already found and unpack it clean_and_fetch() { product=$1 archive=$2 - src_url=$3 - no_fetch=$4 + 
srcurl=$3 + nofetch=$4 [[ $FORCE == 1 ]] && rm -rf ${product} # FORCE is from the env [[ $FORCE != 1 ]] && [[ -d ${product} ]] && echo "${product} is in place, skipping. Set FORCE=1 to override. Continuing." && return - echo "Installing ${product} from ${src_url} in `pwd`..." - [[ ! -e ${archive} ]] && [[ -z ${no_fetch} ]] && wget -q ${src_url} + echo "Installing ${product} from ${srcurl} in `pwd`..." + [[ ! -e ${archive} ]] && [[ -z ${nofetch} ]] && wget -q ${srcurl} rm -rf ${product}.x mkdir ${product}.x tar xzf ${archive} --no-same-owner --strip-components=1 -C ${product}.x @@ -99,21 +103,6 @@ clean_and_fetch() { echo "Done." } -# This is for torch backend, which comes in a zip file -clean_and_fetch_torch() { - archive=$1 - src_url=$2 - - [[ $FORCE == 1 ]] && rm -rf libtorch # FORCE is from the env - [[ $FORCE != 1 ]] && [[ -d libtorch ]] && echo "libtorch is in place, skipping. Set FORCE=1 to override. Continuing." && return - echo "Installing libtorch from ${src_url} in `pwd`..." - LIBTORCH_ZIP=libtorch-${DEVICE}-${PT_VERSION}.zip - wget -q -O ${LIBTORCH_ZIP} ${src_url} - unzip -q -o ${LIBTORCH_ZIP} -} - - - ######################################################################################### DLPACK if [[ $WITH_DLPACK != 0 ]]; then if [[ ! 
-d dlpack ]]; then @@ -129,7 +118,7 @@ else fi ################################################################################## LIBTENSORFLOW - +# if [[ $OS == linux ]]; then TF_OS="linux" @@ -140,12 +129,27 @@ if [[ $OS == linux ]]; then fi if [[ $ARCH == x64 ]]; then TF_ARCH=x86_64 + LIBTF_URL_BASE=https://storage.googleapis.com/tensorflow/libtensorflow + elif [[ $ARCH == arm64v8 ]]; then + TF_ARCH=arm64 + if [[ $JETSON == 1 ]]; then + TF_BUILD="gpu-jetson" + fi + LIBTF_URL_BASE=https://s3.amazonaws.com/redismodules/tensorflow + elif [[ $ARCH == arm32v7 ]]; then + TF_ARCH=arm + LIBTF_URL_BASE=https://s3.amazonaws.com/redismodules/tensorflow + fi +elif [[ $OS == macos ]]; then + TF_OS=darwin + TF_BUILD=cpu + TF_ARCH=x86_64 + if [[ $WITH_TF == S3 ]]; then + LIBTF_URL_BASE=https://s3.amazonaws.com/redismodules/tensorflow else - echo "Only x64 is supported currently" + LIBTF_URL_BASE=https://storage.googleapis.com/tensorflow/libtensorflow fi -else - echo "Only Linux OS is supported currently" fi LIBTF_ARCHIVE=libtensorflow-${TF_BUILD}-${TF_OS}-${TF_ARCH}-${TF_VERSION}.tar.gz @@ -153,25 +157,28 @@ LIBTF_ARCHIVE=libtensorflow-${TF_BUILD}-${TF_OS}-${TF_ARCH}-${TF_VERSION}.tar.gz if [[ $WITH_TF != 0 ]]; then clean_and_fetch libtensorflow ${LIBTF_ARCHIVE} ${LIBTF_URL_BASE}/${LIBTF_ARCHIVE} else - echo "Skipping TensorFlow." + echo "Skipping TensorFlow." 
fi # WITH_TF ################################################################################## LIBTFLITE +# LIBTF_URL_BASE=https://s3.amazonaws.com/redismodules/tensorflow if [[ $OS == linux ]]; then TFLITE_OS="linux" if [[ $ARCH == x64 ]]; then TFLITE_ARCH=x86_64 - else - echo "Only x64 is supported currently" + elif [[ $ARCH == arm64v8 ]]; then + TFLITE_ARCH=arm64 + elif [[ $ARCH == arm32v7 ]]; then + TFLITE_ARCH=arm fi -else - echo "Only Linux OS is supported currently" +elif [[ $OS == macos ]]; then + TFLITE_OS=darwin + TFLITE_ARCH=x86_64 fi LIBTFLITE_ARCHIVE=libtensorflowlite-${TFLITE_OS}-${TFLITE_ARCH}-${TFLITE_VERSION}.tar.gz - if [[ $WITH_TFLITE != 0 ]]; then clean_and_fetch libtensorflow-lite ${LIBTFLITE_ARCHIVE} ${LIBTF_URL_BASE}/${LIBTFLITE_ARCHIVE} else @@ -179,36 +186,50 @@ else fi # WITH_TFLITE ####################################################################################### LIBTORCH - +PT_REPACK=0 PT_BUILD=cpu +PT_ARCH=x86_64 if [[ $OS == linux ]]; then PT_OS=linux if [[ $GPU == 1 ]]; then PT_BUILD=cu111 fi + if [[ $ARCH == x64 ]]; then - PT_ARCH=x86_64 - else - echo "Only x64 is supported currently" + PT_REPACK=1 + elif [[ $ARCH == arm64v8 ]]; then + PT_ARCH=arm64 + elif [[ $ARCH == arm32v7 ]]; then + PT_ARCH=arm fi -else - echo "Only Linux OS is supported currently" + + if [[ $JETSON == 1 ]]; then + PT_BUILD=cu102-jetson + PT_ARCH=arm64 + fi + +elif [[ $OS == macos ]]; then + PT_OS=macos + PT_REPACK=1 fi -if [[ $GPU != 1 ]]; then - LIBTORCH_ARCHIVE=libtorch-cxx11-abi-shared-with-deps-${PT_VERSION}%2B${PT_BUILD}.zip -else - LIBTORCH_ARCHIVE=libtorch-cxx11-abi-shared-with-deps-${PT_VERSION}%2B${PT_BUILD}.zip +LIBTORCH_ARCHIVE=libtorch-${PT_BUILD}-${PT_OS}-${PT_ARCH}-${PT_VERSION}.tar.gz +LIBTORCH_URL=https://s3.amazonaws.com/redismodules/pytorch/$LIBTORCH_ARCHIVE + +if [[ $PT_REPACK == 1 ]]; then + echo "Using repack.sh from ${HERE}/opt/build/libtorch/repack.sh" + PT_VERSION=$PT_VERSION GPU=$GPU OS=${OS} ARCH=${ARCH} 
$HERE/opt/build/libtorch/repack.sh fi -LIBTORCH_URL=https://download.pytorch.org/libtorch/$PT_BUILD/$LIBTORCH_ARCHIVE -if [[ $WITH_PT != 0 ]]; then - clean_and_fetch_torch ${LIBTORCH_ARCHIVE} ${LIBTORCH_URL} +if [[ $WITH_PT != 0 ]] && [ $PT_REPACK != 1 ]; then + clean_and_fetch libtorch ${LIBTORCH_ARCHIVE} ${LIBTORCH_URL} +elif [[ $PT_REPACK == 1 ]]; then + clean_and_fetch libtorch ${LIBTORCH_ARCHIVE} ${LIBTORCH_URL} 1 else - echo "Skipping libtorch." + echo "SKipping libtorch." fi # WITH_PT -############################################################################# ONNX +############################################################################# ORT_URL_BASE=https://s3.amazonaws.com/redismodules/onnxruntime ORT_BUILD="" @@ -219,16 +240,20 @@ if [[ $OS == linux ]]; then fi if [[ $ARCH == x64 ]]; then ORT_ARCH=x64 - else - echo "Only x64 is supported currently" + elif [[ $ARCH == arm64v8 ]]; then + ORT_ARCH=arm64 + elif [[ $ARCH == arm32v7 ]]; then + ORT_ARCH=arm fi -else - echo "Only Linux OS is supported currently" +elif [[ $OS == macos ]]; then + ORT_OS=osx + ORT_ARCH=x64 + ORT_URL_BASE=https://github.com/microsoft/onnxruntime/releases/download/v${ORT_VERSION} fi ORT_ARCHIVE=onnxruntime-${ORT_OS}-${ORT_ARCH}${ORT_BUILD}-${ORT_VERSION}.tgz if [[ $WITH_ORT != 0 ]]; then - clean_and_fetch onnxruntime ${ORT_ARCHIVE} ${ORT_URL_BASE}/${ORT_ARCHIVE} + clean_and_fetch onnxruntime ${ORT_ARCHIVE} ${ORT_URL_BASE}/${ORT_ARCHIVE} else echo "Skipping ONNXRuntime." 
fi # WITH_ORT diff --git a/opt/build/bazel/Dockerfile b/opt/build/bazel/Dockerfile new file mode 100644 index 000000000..7748006b7 --- /dev/null +++ b/opt/build/bazel/Dockerfile @@ -0,0 +1,32 @@ +# BUILD redisfab/bazel-${ARCH}:$(BAZ_VER) + +ARG OSNICK=buster +ARG ARCH=arm64v8 + +FROM redisfab/${ARCH}-xbuild:${OSNICK} + +RUN [ "cross-build-start" ] + +ARG BAZ_VER=0.24.1 + +WORKDIR /build + +RUN set -e ;\ + apt-get -qq update ;\ + apt-get -q install -y pkg-config g++ zlib1g-dev zip unzip python wget ;\ + apt-get -q install -y openjdk-11-jdk + +RUN set -e ;\ + wget -q https://github.com/bazelbuild/bazel/releases/download/0.24.1/bazel-0.24.1-dist.zip ;\ + unzip bazel-0.24.1-dist.zip + +RUN set -e ;\ + env EXTRA_BAZEL_ARGS="--host_javabase=@local_jdk//:jdk" bash ./compile.sh ;\ + copy bazel to /usr/local/bin + +RUN [ "cross-build-end" ] + +# FROM insready/bazel:latest as baz +# FROM ubuntu:latest +# COPY --from=baz /usr/bin/bazel* /usr/local/bin/ + diff --git a/opt/build/dockerparts/base_image.tmpl b/opt/build/dockerparts/base_image.tmpl new file mode 100644 index 000000000..0af08efdc --- /dev/null +++ b/opt/build/dockerparts/base_image.tmpl @@ -0,0 +1,22 @@ +{% if REDIS_OSNICK == "xenial" %} +{% set cuda_suffix_os = "ubuntu16.04" %} +{% elif REDIS_OSNICK == "bionic" %} +{% set cuda_suffix_os = "ubuntu18.04" %} +{% endif %} + +{% if REDIS_ARCH == 'jetson' %} +FROM nvcr.io/nvidia/deepstream-l4t:5.1-21.02-base +{% endif %} + +{% if REDIS_ARCH == 'x64' %} +{% if REDIS_GPU is defined %} +FROM nvidia/cuda:{{REDIS_CUDA_VERSION}}-devel-{{cuda_suffix_os}} +{% else %} +{% if REDIS_OSNICK == "xenial" %} +FROM ubuntu:xenial +{% elif REDIS_OSNICK == "bionic" %} +FROM ubuntu:bionic +{% endif %} +{% endif %} +{% endif %} + diff --git a/opt/build/dockerparts/go.yml b/opt/build/dockerparts/go.yml new file mode 100644 index 000000000..4275c6298 --- /dev/null +++ b/opt/build/dockerparts/go.yml @@ -0,0 +1,9 @@ +{% if REDIS_ARCH == 'x64' %} +{% set archsuffix = 'amd64' %} +{% elif 
REDIS_ARCH == 'jetson' %} +{% set archsuffix = 'arm64' %} +{% endif %} + +RUN wget -q https://golang.org/dl/go1.16.5.linux-{{archsuffix}}.tar.gz -O /tmp/go.tar.gz +RUN tar -C /usr/local -zxf /tmp/go.tar.gz + diff --git a/opt/build/libtorch/Dockerfile.arm b/opt/build/libtorch/Dockerfile.arm new file mode 100644 index 000000000..d0d305aab --- /dev/null +++ b/opt/build/libtorch/Dockerfile.arm @@ -0,0 +1,53 @@ +# BUILD redisfab/libtorch-cpu-${ARCH}:$(PT_VER) + +# stretch|bionic|buster +ARG OSNICK=buster + +# arm32v7|arm64v8 +ARG ARCH=arm64v8 + +#---------------------------------------------------------------------------------------------- +FROM redisfab/${ARCH}-xbuild:${OSNICK} as builder + +# redeclare +ARG OSNICK +ARG ARCH + +ARG PT_VER=1.2.0 + +RUN [ "cross-build-start" ] + +WORKDIR /build + +RUN set -e ;\ + apt-get -qq update ;\ + apt-get -q install -y git build-essential ninja-build cmake python3-pip python3-cffi +RUN pip3 install setuptools pyyaml typing +RUN pip3 install numpy +# RUN pip3 install mkl mkl-include + +RUN git clone --single-branch --branch v${PT_VER} --recursive https://github.com/pytorch/pytorch.git + +ENV BUILD_PYTHON=0 +ENV USE_GLOO=1 +ENV USE_OPENCV=0 +ENV BUILD_TORCH=ON +ENV BUILD_BINARY=ON +ENV BUILD_CAFFE2_OPS=ON +ENV BUILD_TEST=0 +ENV NO_CUDA=1 +ENV NO_DISTRIBUTED=1 +ENV NO_MKLDNN=1 +ENV NO_NNPACK=1 +ENV NO_QNNPACK=1 + +RUN set -e ;\ + cd pytorch ;\ + python3 setup.py install + +ADD ./opt/readies/ /build/readies/ +ADD ./opt/build/libtorch/collect.py /build/ + +RUN ./collect.py + +RUN [ "cross-build-end" ] diff --git a/opt/build/libtorch/Dockerfile.jetson b/opt/build/libtorch/Dockerfile.jetson new file mode 100644 index 000000000..06cdbe1a3 --- /dev/null +++ b/opt/build/libtorch/Dockerfile.jetson @@ -0,0 +1,49 @@ +# BUILD redisfab/libtorch-cpu-${ARCH}:$(PT_VER) + +# stretch|bionic|buster +ARG OSNICK=buster + +# arm32v7|arm64v8 +ARG ARCH=aarch64 + +#---------------------------------------------------------------------------------------------- 
+ARG OS=nvcr.io/nvidia/deepstream-l4t:5.1-21.02-base as builder + +FROM ${OS} + +ADD ./opt/readies/ /build/readies/ +ADD ./opt/build/libtorch/collect.py /build/ + +ARG PT_VER=1.7.0 + +RUN apt-get -qq update && apt-get -q install -y git build-essential ninja-build cmake python3.7 python3-pip +RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.7 1 + +RUN apt-get -q install -y python3-cffi python3-numpy +RUN pip3 install setuptools pyyaml typing +# RUN pip3 install mkl mkl-include + +RUN git clone --single-branch --branch v${PT_VER} --recursive https://github.com/pytorch/pytorch.git /pytorch + +ENV BUILD_PYTHON=0 +ENV USE_GLOO=1 +ENV USE_OPENCV=0 +ENV BUILD_TORCH=ON +ENV BUILD_BINARY=ON +ENV BUILD_CAFFE2_OPS=ON +ENV BUILD_TEST=0 +ENV NO_CUDA=1 +ENV NO_DISTRIBUTED=1 +ENV NO_MKLDNN=1 +ENV NO_NNPACK=1 +ENV NO_QNNPACK=1 + +RUN mkdir /pytorch/build_libtorch +WORKDIR /pytorch/build_libtorch +RUN python3 ../tools/build_libtorch.py + +ENV BUILD_SHARED_LIBS=OFF +RUN python3 ../tools/build_libtorch.py + +WORKDIR /build +RUN ./collect.py --pytorch /pytorch --pytorch-ver ${PT_VER} --dest /pytorch/dest diff --git a/opt/build/libtorch/Dockerfile.x64 b/opt/build/libtorch/Dockerfile.x64 new file mode 100644 index 000000000..4420a5866 --- /dev/null +++ b/opt/build/libtorch/Dockerfile.x64 @@ -0,0 +1,38 @@ +ARG OS=debian:buster + +#---------------------------------------------------------------------------------------------- +FROM ${OS} + +ARG PT_VER=1.2.0 + +WORKDIR /build + +RUN set -e ;\ + apt-get -qq update ;\ + apt-get -qq install -y git build-essential ninja-build cmake python3-pip python3-cffi +RUN pip3 install setuptools pyyaml typing +RUN pip3 install numpy +RUN pip3 install mkl mkl-include + +RUN git clone --single-branch --branch v${PT_VER} --recursive https://github.com/pytorch/pytorch.git + +ENV BUILD_PYTHON=0 +ENV USE_GLOO=1 +ENV USE_OPENCV=0 +ENV BUILD_TORCH=ON +ENV BUILD_BINARY=ON +ENV BUILD_CAFFE2_OPS=ON +ENV NO_CUDA=1 +ENV NO_DISTRIBUTED=1 +ENV 
NO_MKLDNN=1 +ENV NO_NNPACK=1 +ENV NO_QNNPACK=1 + +RUN set -e ;\ + cd pytorch ;\ + python3 setup.py install + +ADD ./opt/readies/ /build/readies/ +ADD ./opt/build/libtorch/collect.py /build/ + +RUN ./collect.py diff --git a/opt/build/libtorch/Makefile b/opt/build/libtorch/Makefile new file mode 100644 index 000000000..2b6ed42c3 --- /dev/null +++ b/opt/build/libtorch/Makefile @@ -0,0 +1,140 @@ + +ROOT=../../.. + +VERSION ?= 1.7.0 +OSNICK ?= bionic + +REDIS_VER=6.2.5 + +#---------------------------------------------------------------------------------------------- + +S3_URL=redismodules/pytorch + +OS:=$(shell $(ROOT)/opt/readies/bin/platform --os) +OSNICK:=$(shell $(ROOT)/opt/readies/bin/platform --osnick) +ARCH:=$(shell $(ROOT)/opt/readies/bin/platform --arch) +DEVICE ?= cpu + +STEM=libtorch + +DOCKER_OS.bionic=ubuntu:bionic +DOCKER_OS.stretch=debian:stretch-slim +DOCKER_OS.buster=debian:buster-slim +DOCKER_OS=$(DOCKER_OS.$(OSNICK)) + +TAR_ARCH.x64=x86_64 +TAR_ARCH.arm64v8=arm64 +TAR_ARCH.arm32v7=arm + +#---------------------------------------------------------------------------------------------- + +define targets # (1=OP, 2=op) +$(1)_TARGETS := +$(1)_TARGETS += $(if $(findstring $(X64),1),$(2)_native) +ifeq ($$(CROSS),1) +$(1)_TARGETS += $(if $(findstring $(ARM7),1),$(2)_arm32v7) +$(1)_TARGETS += $(if $(findstring $(ARM8),1),$(2)_arm64v8) +endif + +ifeq ($$(CROSS),1) +$(1)_TARGETS += $$(if $$(strip $$($(1)_TARGETS)),,$(2)_native $(2)_arm32v7 $(2)_arm64v8) +else +$(1)_TARGETS += $$(if $$(strip $$($(1)_TARGETS)),,$(2)_native) +endif +endef + +$(eval $(call targets,BUILD,build)) +$(eval $(call targets,PUBLISH,publish)) + +#---------------------------------------------------------------------------------------------- + +DOCKER_BUILD_ARGS += \ + PT_VER=$(VERSION) \ + OSNICK=$(OSNICK) \ + OS=$(DOCKER_OS) \ + ARCH=$(ARCH) + +define build_native # (1=arch, 2=tar-arch) +IID_$(1)=$(1)_$(VERSION).iid +CID_$(1)=$(1)_$(VERSION).cid + +build_native: + @docker build --iidfile 
$$(IID_$(1)) -t redisfab/$(STEM):$(VERSION)-$(DEVICE)-$(1) -f Dockerfile.$(VARIANT) \ + $(foreach A,$(DOCKER_BUILD_ARGS),--build-arg $(A)) $(ROOT) + @docker create --cidfile $$(CID_$(1)) `cat $$(IID_$(1))` + @docker cp `cat $$(CID_$(1))`:/pytorch/dest/$(STEM)-$(DEVICE)-$(OS)-$(2)-$(VERSION).tar.gz . + +.PHONY: build_native +endef + +ifeq ($(CROSS),1) + +define build_arm # (1=arch, 2=tar-arch) +IID_$(1)=$(1)_$(VERSION).iid +CID_$(1)=$(1)_$(VERSION).cid + +build_$(1): + @docker build --iidfile $$(IID_$(1)) -t redisfab/$(STEM):$(VERSION)-$(DEVICE)-$(1) -f Dockerfile.$(VARIANT) \ + $(foreach A,$(DOCKER_BUILD_ARGS),--build-arg $(A)) ../../.. + @docker create --cidfile $$(CID_$(1)) `cat $$(IID_$(1))` + @docker cp `cat $$(CID_$(1))`:/build/$(STEM)-$(2)-$(VERSION).tar.gz . + +.PHONY: build_$(1) +endef + +endif # CROSS + +#---------------------------------------------------------------------------------------------- + +define publish_native # (1=arch, 2=tar-arch) +publish_native: + @aws s3 cp $(STEM)-$(2)-$(VERSION).tar.gz s3://$(S3_URL)/ --acl public-read + +.PHONY: publish_native +endef + +ifeq ($(CROSS),1) + +define publish_arm # (1=arch, 2=tar-arch) +publish_$(1): + @aws s3 cp $(STEM)-$(2)-$(VERSION).tar.gz s3://$(S3_URL)/ --acl public-read + +.PHONY: publish_$(1) +endef + +endif # CROSS + +#---------------------------------------------------------------------------------------------- + +all: build publish + +build: $(BUILD_TARGETS) + +$(eval $(call build_native,$(ARCH),$(TAR_ARCH.$(ARCH)))) +ifeq ($(CROSS),1) +$(eval $(call build_arm,arm64v8,arm64)) +$(eval $(call build_arm,arm32v7,arm)) +endif + +ifneq ($(filter publish,$(MAKECMDGOALS)),) +ifeq ($(wildcard $(HOME)/.aws/credentials),) +$(error Please run 'aws configure' and provide it with access credentials) +endif +endif + +publish: $(PUBLISH_TARGETS) + +$(eval $(call publish_x64,x64,x86_64)) +ifeq ($(CROSS),1) +$(eval $(call publish_arm,arm64v8,arm64)) +$(eval $(call publish_arm,arm32v7,arm)) +endif + +repack: + 
@PT_VERSION=$(VERSION) GPU=$(GPU) ./repack.sh + +help: + @echo "make [build|repack|publish] [X64=1|ARM7=1|ARM8=1] [CROSS=1]" + +.PHONY: all build repack publish help + diff --git a/opt/build/libtorch/collect.py b/opt/build/libtorch/collect.py new file mode 100644 index 000000000..a6076cb05 --- /dev/null +++ b/opt/build/libtorch/collect.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python3 + +import os +import sys +import argparse +from pathlib import Path +import shutil +import tarfile + +# this refers to deps directory inside a container +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "readies")) +import paella + +#---------------------------------------------------------------------------------------------- + +PYTORCH_VERSION = '1.7.0' + +parser = argparse.ArgumentParser(description='Prepare RedisAI dependant distribution packages.') +parser.add_argument('--pytorch', default='pytorch', help='root of pytorch repository') +parser.add_argument('--pytorch-ver', default=PYTORCH_VERSION, help='pytorch version') +parser.add_argument('--dest', default='dest', help='destination directory') +parser.add_argument('-n', '--nop', action="store_true", help='no operation') +args = parser.parse_args() + +#---------------------------------------------------------------------------------------------- + +pytorch = Path(args.pytorch).resolve() +dest = Path(args.dest).resolve() + +#---------------------------------------------------------------------------------------------- + +pt_build='cpu' + +platform = paella.Platform() + +pt_os = platform.os +if pt_os == 'macos': + pt_os = 'darwin' + +pt_arch = platform.arch +if pt_arch == 'x64': + pt_arch = 'x86_64' +elif pt_arch == 'arm64v8': + pt_arch = 'arm64' +elif pt_arch == 'arm32v7': + pt_arch = 'arm' + +pt_ver = args.pytorch_ver + +#---------------------------------------------------------------------------------------------- + +def copy_p(src, dest): + f = dest/src + paella.mkdir_p(os.path.dirname(f)) + shutil.copy(src, f, 
follow_symlinks=False) + +def create_tar(name, basedir, dir='.'): + def reset_uid(tarinfo): + tarinfo.uid = tarinfo.gid = 0 + tarinfo.uname = tarinfo.gname = "root" + return tarinfo + with cwd(basedir): + with tarfile.open(name, 'w:gz') as tar: + tar.add(dir, filter=reset_uid) + +def collect_pytorch(): + d_pytorch = dest/'libtorch' + with cwd(pytorch/'torch/include'): + for f in Path('.').glob('**/*.h'): + copy_p(f, d_pytorch/'include') + with cwd(pytorch/'torch/lib'): + for f in Path('.').glob('*.a'): + copy_p(f, d_pytorch/'lib') + for f in Path('.').glob('*.so*'): + copy_p(f, d_pytorch/'lib') + with cwd(pytorch/'torch'): + shutil.copytree('share', d_pytorch/'share', ignore_dangling_symlinks=True) + create_tar('libtorch-{}-{}-{}-{}.tar.gz'.format(pt_build, pt_os, pt_arch, pt_ver), dest, 'libtorch') + +#---------------------------------------------------------------------------------------------- + +collect_pytorch() diff --git a/opt/build/libtorch/repack.sh b/opt/build/libtorch/repack.sh new file mode 100644 index 000000000..c8f8a15a4 --- /dev/null +++ b/opt/build/libtorch/repack.sh @@ -0,0 +1,94 @@ +#!/usr/bin/env bash + +set -e +[[ $VERBOSE == 1 ]] && set -x + +HERE="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" + +ROOT=$HERE/../../.. +#. 
$ROOT/opt/readies/shibumi/functions +ROOT=$(realpath $ROOT) + +if [[ "$1" == "cpu" || $CPU == 1 ]]; then + GPU=0 + DEVICE=cpu +elif [[ "$1" == "gpu" || $GPU == 1 ]]; then + GPU=1 + DEVICE=gpu +else + GPU=${GPU:-0} + if [[ $GPU == 1 ]]; then + DEVICE=gpu + else + DEVICE=cpu + fi +fi + +# set them internally or externally +if [ -z ${OS} ]; then + OS=$(python3 $ROOT/opt/readies/bin/platform --os) +fi +if [ -z ${ARCH} ]; then + ARCH=$(python3 $ROOT/opt/readies/bin/platform --arch) +fi + +TARGET_DIR=$ROOT/deps/$OS-$ARCH-$DEVICE + +# avoid wget warnings on macOS +[[ $OS == macos ]] && export LC_ALL=en_US.UTF-8 + +if [[ -z $PT_VERSION ]]; then + PT_VERSION="latest" +fi + +if [[ $OS == linux ]]; then + PT_OS=shared-with-deps + if [[ $GPU != 1 ]]; then + PT_BUILD=cpu + else + PT_BUILD=cu111 + fi + if [[ $ARCH == x64 ]]; then + PT_ARCH=x86_64 + fi +elif [[ $OS == macos ]]; then + PT_OS=macos + PT_ARCH=x86_64 + PT_BUILD=cpu +fi + +[[ "$PT_VERSION" == "latest" ]] && PT_BUILD=nightly/${PT_BUILD} + +if [[ $OS == linux ]]; then + if [[ $PT_VERSION == 1.2.0 ]]; then + LIBTORCH_ARCHIVE=libtorch-${PT_OS}-${PT_VERSION}.zip + elif [[ $PT_VERSION == latest ]]; then + LIBTORCH_ARCHIVE=libtorch-shared-with-deps-latest.zip + else + if [[ $GPU != 1 ]]; then + LIBTORCH_ARCHIVE=libtorch-cxx11-abi-shared-with-deps-${PT_VERSION}%2B${PT_BUILD}.zip + else + LIBTORCH_ARCHIVE=libtorch-cxx11-abi-shared-with-deps-${PT_VERSION}%2B${PT_BUILD}.zip + fi + fi +elif [[ $OS == macos ]]; then + LIBTORCH_ARCHIVE=libtorch-${PT_OS}-${PT_VERSION}.zip +fi + +[[ -z $LIBTORCH_URL ]] && LIBTORCH_URL=https://download.pytorch.org/libtorch/$PT_BUILD/$LIBTORCH_ARCHIVE + +LIBTORCH_ZIP=libtorch-${PT_BUILD}-${PT_OS}-${PT_ARCH}-${PT_VERSION}.zip +if [ ! 
-f $LIBTORCH_ARCHIVE ]; then + echo "Downloading libtorch ${PT_VERSION} ${PT_BUILD}" + wget -q -O $LIBTORCH_ZIP $LIBTORCH_URL +fi + +if [[ $OS == linux ]]; then + PT_OS=linux +fi + +unzip -q -o $LIBTORCH_ZIP +dest=$TARGET_DIR/libtorch-${PT_BUILD}-${PT_OS}-${PT_ARCH}-${PT_VERSION}.tar.gz +mkdir -p $(dirname $dest) +tar czf $dest libtorch/ +rm -rf libtorch/ $LIBTORCH_ZIP diff --git a/opt/build/tensorflow-2.x/Dockerfile.jetson b/opt/build/tensorflow-2.x/Dockerfile.jetson new file mode 100644 index 000000000..b715dc447 --- /dev/null +++ b/opt/build/tensorflow-2.x/Dockerfile.jetson @@ -0,0 +1,99 @@ + +ARG OS=ubuntu18.04 +ARG TF_VERSION=2.3.1 +ARG CUDA_VER=10.2 +ARG CUDNN_VERSION=8 +ARG TENSORRT_VERSION=7.2 +ARG MACH=aarch64 + +#---------------------------------------------------------------------------------------------- +FROM redisfab/jetpack:4.4.1-arm64v8-l4t + +ARG TF_VERSION +ARG CUDA_VERSION +ARG CUDNN_VERSION +ARG TENSORRT_VERSION + +RUN if [ ! -z $(command -v apt-get) ]; then apt-get -qq update; apt-get -q install -y git python3-h5py; fi +RUN if [ ! 
-z $(command -v yum) ]; then yum install -y git; fi + +WORKDIR /build + +#---------------------------------------------------------------------------------------------- + +RUN git clone https://github.com/RedisLabsModules/readies.git +RUN PIP=1 ./readies/bin/getpy3 +RUN ./readies/bin/getbazel + +#---------------------------------------------------------------------------------------------- + +ENV LANG=C.UTF-8 + +RUN ln -s $(command -v python3) /usr/local/bin/python +RUN python3 -m pip --no-cache-dir install virtualenv Pillow h5py keras_preprocessing matplotlib \ + mock 'numpy<1.19.0' future enum34 + +#---------------------------------------------------------------------------------------------- + +RUN git clone --recursive https://github.com/tensorflow/tensorflow.git + +WORKDIR /build/tensorflow/ +RUN git checkout v${TF_VERSION} + +#---------------------------------------------------------------------------------------------- + +ENV TF_NEED_CUDA=1 \ + TF_CUDA_COMPUTE_CAPABILITIES=6.1,5.3,3.5,7.2 \ + TF_NEED_TENSORRT=0 \ + \ + TF_NEED_GCP=0 \ + TF_NEED_HDFS=0 \ + TF_NEED_OPENCL=0 \ + TF_NEED_JEMALLOC=1 \ + TF_ENABLE_XLA=0 \ + TF_NEED_VERBS=0 \ + TF_CUDA_CLANG=0 \ + TF_NEED_MKL=0 \ + TF_DOWNLOAD_MKL=0 \ + TF_NEED_AWS=0 \ + TF_NEED_MPI=0 \ + TF_NEED_GDR=0 \ + TF_NEED_S3=0 \ + TF_NEED_OPENCL_SYCL=0 \ + TF_SET_ANDROID_WORKSPACE=0 \ + TF_NEED_COMPUTECPP=0 \ + TF_NEED_KAFKA=0 \ + +ENV TF_CUDA_VERSION=${CUDA_VER} \ + TF_CUDNN_VERSION=${CUDNN_VERSION} \ + TF_CUBLAS_VERSION=${TF_CUDA_VERSION} \ + TF_TENSORRT_VERSION=${TENSORRT_VERSION} \ + \ + CUDA_TOOLKIT_PATH="/usr/local/cuda-${TF_CUDA_VERSION}" \ + TF_CUDA_PATHS="/usr/include,/usr/include/cuda,/opt/cuda/include,/usr/local/cuda-${TF_CUDA_VERSION}/include,/usr/local/cuda-${TF_CUDA_VERSION}/lib64,/usr/local/cuda-${TF_CUDA_VERSION}/targets/${MACH}-linux/,/usr/local/cuda-${TF_CUDA_VERSION}/targets/${MACH}-linux/lib/,/usr/lib/${MACH}-linux-gnu,/usr/local/lib,/usr/local/cuda-${TF_CUDA_VERSION}," + \ + 
CUDNN_INSTALL_PATH=/usr/lib/${MACH}-linux-gnu \ + TENSORRT_INSTALL_PATH=/usr/lib/${MACH}-linux-gnu + \ + PATH="/usr/local/cuda-${TF_CUDA_VERSION}/bin:${PATH}" \ + LD_LIBRARY_PATH="/usr/local/cuda-${TF_CUDA_VERSION}/lib64:/usr/lib/${MACH}-linux-gnu:${LD_LIBRARY_PATH}" + +RUN yes "" | ./configure +RUN set -e ;\ + . /etc/profile.d/golang.sh ;\ + bazel build --jobs `/build/readies/bin/nproc` \ + --action_env=LD_LIBRARY_PATH=${LD_LIBRARY_PATH} \ + --config=v2 \ + --config=noaws \ + --config=nogcp \ + --config=nonccl \ + --config=nohdfs \ + --config=opt \ + --config=cuda \ + //tensorflow:libtensorflow.so + +#---------------------------------------------------------------------------------------------- + +WORKDIR /build +ADD collect.py /build/ +RUN python ./collect.py --version ${TF_VERSION} --variant gpu-jetson diff --git a/opt/build/tensorflow-2.x/Makefile.jetson b/opt/build/tensorflow-2.x/Makefile.jetson new file mode 100644 index 000000000..72133ef29 --- /dev/null +++ b/opt/build/tensorflow-2.x/Makefile.jetson @@ -0,0 +1,149 @@ + +ROOT=../../.. 
+ +VERSION ?= 2.3.1 +OSNICK ?= bionic + +VARIANT=jetson + +#---------------------------------------------------------------------------------------------- + +OS:=$(shell $(ROOT)/opt/readies/bin/platform --os) +# OSNICK:=$(shell $(ROOT)/opt/readies/bin/platform --osnick) +OSNICK:=bionic +ARCH:=$(shell $(ROOT)/opt/readies/bin/platform --arch) +MACH:=$(shell uname -m) + +DEVICE=gpu + +DOCKER_OS.xenial=ubuntu:xenial +DOCKER_OS.bionic=ubuntu:bionic +DOCKER_OS.focal=ubuntu:focal +DOCKER_OS.stretch=debian:stretch-slim +DOCKER_OS.buster=debian:buster-slim +DOCKER_OS.centos7=centos:7.8.2003 +DOCKER_OS=$(DOCKER_OS.$(OSNICK)) + +TAR_ARCH.x64=x86_64 +TAR_ARCH.arm64v8=arm64 +TAR_ARCH.arm32v7=arm + +#---------------------------------------------------------------------------------------------- + +S3_URL=redismodules/tensorflow + +STEM=libtensorflow + +#---------------------------------------------------------------------------------------------- + +define targets # (1=OP, 2=op) +$(1)_TARGETS := +$(1)_TARGETS += $(if $(findstring $(X64),1),$(2)_native) +ifeq ($$(CROSS),1) +$(1)_TARGETS += $(if $(findstring $(ARM7),1),$(2)_arm32v7) +$(1)_TARGETS += $(if $(findstring $(ARM8),1),$(2)_arm64v8) +endif + +ifeq ($$(CROSS),1) +$(1)_TARGETS += $$(if $$(strip $$($(1)_TARGETS)),,$(2)_native $(2)_arm32v7 $(2)_arm64v8) +else +$(1)_TARGETS += $$(if $$(strip $$($(1)_TARGETS)),,$(2)_native) +endif +endef + +$(eval $(call targets,BUILD,build)) +$(eval $(call targets,PUBLISH,publish)) + +#---------------------------------------------------------------------------------------------- + +DOCKER_BUILD_ARGS += \ + TF_VER=$(VERSION) \ + OSNICK=$(OSNICK) \ + OS=$(DOCKER_OS) \ + ARCH=$(ARCH) \ + MACH=$(MACH) \ + REDIS_VER=$(REDIS_VER) \ + TEST=$(TEST) \ + PACK=$(PACK) + +define build_native # (1=arch, 2=tar-arch) +IID_$(1)=$(1)_$(VERSION).iid +CID_$(1)=$(1)_$(VERSION).cid + +build_native: + @docker build --iidfile $$(IID_$(1)) -t redisfab/$(STEM):$(VERSION)-$(DEVICE)-$(VARIANT)-$(1)-$(OSNICK) -f 
Dockerfile.$(VARIANT) \ + $(foreach A,$(DOCKER_BUILD_ARGS),--build-arg $(A)) . + @docker create --cidfile $$(CID_$(1)) `cat $$(IID_$(1))` + @docker cp `cat $$(CID_$(1))`:/build/dest/$(STEM)-$(DEVICE)-$(VARIANT)-$(OS)-$(2)-$(VERSION).tar.gz . + +.PHONY: build_native +endef + +ifeq ($(CROSS),1) + +define build_arm # (1=arch, 2=tar-arch) +IID_$(1)=$(1)_$(VERSION).iid +CID_$(1)=$(1)_$(VERSION).cid + +build_$(1): + @docker build --iidfile $$(IID_$(1)) -t redisfab/$(STEM):$(VERSION)-$(DEVICE)-$(1)-$(OSNICK) -f Dockerfile.$(VARIANT) \ + $(foreach A,$(DOCKER_BUILD_ARGS),--build-arg $(A)) . + @docker create --cidfile $$(CID_$(1)) `cat $$(IID_$(1))` + @docker cp `cat $$(CID_$(1))`:/build/$(STEM)-$(DEVICE)-$(VARIANT)-$(OS)-$(2)-$(VERSION).tar.gz . + +.PHONY: build_$(1) +endef + +endif # CROSS + +#---------------------------------------------------------------------------------------------- + +define publish_native # (1=arch, 2=tar-arch) +publish_native: + @aws s3 cp $(STEM)-$(DEVICE)-$(VARIANT)-$(OS)-$(2)-$(VERSION).tar.gz s3://$(S3_URL)/ --acl public-read + +.PHONY: publish_native +endef + +ifeq ($(CROSS),1) + +define publish_arm # (1=arch, 2=tar-arch) +publish_$(1): + @aws s3 cp $(STEM)-$(DEVICE)-$(VARIANT)-$(OS)-$(2)-$(VERSION).tar.gz s3://$(S3_URL)/ --acl public-read + +.PHONY: publish_$(1) +endef + +endif # CROSS + +#---------------------------------------------------------------------------------------------- + +all: build publish + +build: $(BUILD_TARGETS) + +$(eval $(call build_native,$(ARCH),$(TAR_ARCH.$(ARCH)))) +ifeq ($(CROSS),1) +$(eval $(call build_arm,arm64v8,arm64)) +$(eval $(call build_arm,arm32v7,arm)) +endif + +ifneq ($(filter publish,$(MAKECMDGOALS)),) +ifeq ($(wildcard $(HOME)/.aws/credentials),) +$(error Please run 'aws configure' and provide it with access credentials) +endif +endif + +publish: $(PUBLISH_TARGETS) + +$(eval $(call publish_native,$(ARCH),$(TAR_ARCH.$(ARCH)))) +ifeq ($(CROSS),1) +$(eval $(call publish_arm,arm64v8,arm64)) +$(eval $(call 
publish_arm,arm32v7,arm)) +endif + + +help: + @echo "make [build|publish] [X64=1|ARM7=1|ARM8=1] [CROSS=1]" + +.PHONY: all build publish help diff --git a/opt/build/tensorflow-2.x/collect.py b/opt/build/tensorflow-2.x/collect.py new file mode 100644 index 000000000..4fd1246fd --- /dev/null +++ b/opt/build/tensorflow-2.x/collect.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 + +import os +import sys +import argparse +from pathlib import Path +import shutil +import tarfile + +# this refers to deps directory inside a container +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "readies")) +import paella + +#---------------------------------------------------------------------------------------------- + +TENSORFLOW_VERSION = '2.3.1' +VARIANT='gpu-jetson' + +parser = argparse.ArgumentParser(description='Prepare RedisAI dependant distribution packages.') +parser.add_argument('--root', default='tensorflow', help='root of tensorflow repository') +parser.add_argument('--version', default=TENSORFLOW_VERSION, help='tensorflow version') +parser.add_argument('--variant', default=VARIANT, help='build variant') +parser.add_argument('--dest', default='dest', help='destination directory') +parser.add_argument('-n', '--nop', action="store_true", help='no operation') +args = parser.parse_args() + +#---------------------------------------------------------------------------------------------- + +tensorflow = Path(args.root).resolve() +dest = Path(args.dest).resolve() + +#---------------------------------------------------------------------------------------------- + +tf_build=args.variant + +platform = paella.Platform() + +tf_os = platform.os +if tf_os == 'macos': + tf_os = 'darwin' + +tf_arch = platform.arch +if tf_arch == 'x64': + tf_arch = 'x86_64' +elif tf_arch == 'arm64v8': + tf_arch = 'arm64' + +tf_ver = args.version + +#---------------------------------------------------------------------------------------------- + +def copy_p(src, dest): + f = dest/src + 
paella.mkdir_p(os.path.dirname(f)) + shutil.copy(src, f, follow_symlinks=False) + +def create_tar(name, basedir, dir='.'): + def reset_uid(tarinfo): + tarinfo.uid = tarinfo.gid = 0 + tarinfo.uname = tarinfo.gname = "root" + return tarinfo + with cwd(basedir): + with tarfile.open(name, 'w:gz') as tar: + tar.add(dir, filter=reset_uid) + +def collect_tensorflow(): + d_tensorflow = dest #/'tensorflow' + with cwd(tensorflow): + for f in Path('tensorflow/c').glob('**/*.h'): + copy_p(f, d_tensorflow/'include') + with cwd(tensorflow/'bazel-bin'/'tensorflow'): + for f in Path('.').glob('*.so*'): + if str(f).endswith(".params"): + continue + copy_p(f, d_tensorflow/'lib') + create_tar(dest/f'libtensorflow-{tf_build}-{tf_os}-{tf_arch}-{tf_ver}.tar.gz', dest) + +#---------------------------------------------------------------------------------------------- + +collect_tensorflow() diff --git a/opt/build/tensorflow/.gitignore b/opt/build/tensorflow/.gitignore new file mode 100644 index 000000000..f4199305e --- /dev/null +++ b/opt/build/tensorflow/.gitignore @@ -0,0 +1,2 @@ +Dockerfile +*.cid diff --git a/opt/build/tensorflow/Dockerfile.arm b/opt/build/tensorflow/Dockerfile.arm new file mode 100644 index 000000000..e8fa9fce5 --- /dev/null +++ b/opt/build/tensorflow/Dockerfile.arm @@ -0,0 +1,59 @@ +# BUILD redisfab/libtensorflow:${TF_VER}-cpu-${OSNICK}-${ARCH} + +# stretch|bionic|buster +ARG OSNICK=buster + +# arm32v7|arm64v8 +ARG ARCH=arm64v8 + +#---------------------------------------------------------------------------------------------- +FROM redisfab/${ARCH}-jdk-xbuild:${OSNICK} + +RUN [ "cross-build-start" ] + +ARG ARCH + +ARG TF_BRANCH=r1.15 +ARG TF_VER=1.15.0 + +ENV DEBIAN_FRONTEND=noninteractive +ENV TF_IGNORE_MAX_BAZEL_VERSION=1 + +WORKDIR /build + +RUN apt-get -qq update +RUN apt-get -q install -y git build-essential python3 python3-dev python3-pip wget + +RUN set -e ;\ + wget -q -O /usr/local/bin/bazel https://s3.amazonaws.com/redismodules/bazel/bazel-0.24.1.${ARCH} ;\ 
+ chmod +x /usr/local/bin/bazel + +RUN pip3 install wheel setuptools +RUN pip3 install six numpy mock +RUN pip3 install keras_applications==1.0.6 --no-deps +RUN pip3 install keras_preprocessing==1.0.5 --no-deps + +# RUN set -e ;\ +# mkdir -p /usr/share/man/man1 ;\ +# apt-get -q install -y openjdk-11-jdk + +RUN set -e ;\ + git clone --single-branch --branch ${TF_BRANCH} --recursive https://github.com/tensorflow/tensorflow.git ;\ + cd tensorflow ;\ + git checkout v${TF_VER} + +RUN set -e ;\ + cd tensorflow ;\ + patch -p1 -i ../tf-1.15.0-arm64v8.patch + +RUN set -e ;\ + cd tensorflow ;\ + ./configure ;\ + bazel build --jobs $(ncpus) --config=opt //tensorflow:libtensorflow.so + +ADD ./opt/readies/ /build/readies/ +ADD ./opt/build/tensorflow/collect.py /build/ + +RUN ./collect.py + +RUN [ "cross-build-end" ] diff --git a/opt/build/tensorflow/Dockerfile.x64 b/opt/build/tensorflow/Dockerfile.x64 new file mode 100644 index 000000000..77a12f6e3 --- /dev/null +++ b/opt/build/tensorflow/Dockerfile.x64 @@ -0,0 +1,81 @@ +# BUILD redisfab/tensorflow:${TF_VER}-x64-${OSNICK} + +ARG OS=debian:buster + +#---------------------------------------------------------------------------------------------- +# FROM insready/bazel:latest as bazel +FROM ${OS} + + +ARG TF_BRANCH=r1.15 +ARG TF_VER=1.15.0 + +ENV DEBIAN_FRONTEND=noninteractive +ENV X_NPROC "cat /proc/cpuinfo|grep processor|wc -l" +ENV TF_IGNORE_MAX_BAZEL_VERSION=1 + +WORKDIR /build + +# COPY --from=bazel /usr/bin/bazel* /usr/local/bin/ + +RUN apt-get -qq update +RUN apt-get -q install -y git build-essential python3-dev python3-pip wget +RUN ln -s /usr/bin/python3 /usr/bin/python + +RUN set -e ;\ + wget -q -O /usr/local/bin/bazel https://s3.amazonaws.com/redismodules/bazel/bazel-0.24.1.x64 ;\ + chmod +x /usr/local/bin/bazel + +RUN pip3 install wheel setuptools +RUN pip3 install six numpy mock +RUN pip3 install keras_applications==1.0.6 --no-deps +RUN pip3 install keras_preprocessing==1.0.5 --no-deps + +RUN set -e ;\ + mkdir -p 
/usr/share/man/man1 ;\ + apt-get -q install -y openjdk-11-jdk + +RUN set -e ;\ + git clone --single-branch --branch ${TF_BRANCH} --recursive https://github.com/tensorflow/tensorflow.git ;\ + cd tensorflow ;\ + git checkout v${TF_VER} + +# from https://gist.github.com/PatWie/0c915d5be59a518f934392219ca65c3d +# ENV PYTHON_BIN_PATH=$(which ${python_version}) +# ENV PYTHON_LIB_PATH="$($PYTHON_BIN_PATH -c 'import site; print(site.getsitepackages()[0])')" +# ENV PYTHONPATH=${TF_ROOT}/lib +# ENV PYTHON_ARG=${TF_ROOT}/lib + +# ENV CUDA_TOOLKIT_PATH=${opt}/cuda/toolkit_8.0/cuda +# ENV CUDNN_INSTALL_PATH=${opt}/cuda/cudnn/6/cuda + +ENV TF_NEED_GCP=0 +ENV TF_NEED_CUDA=0 +# ENV TF_CUDA_COMPUTE_CAPABILITIES=6.1,5.2,3.5 +ENV TF_NEED_HDFS=0 +ENV TF_NEED_OPENCL=0 +ENV TF_NEED_JEMALLOC=1 +ENV TF_ENABLE_XLA=0 +ENV TF_NEED_VERBS=0 +ENV TF_CUDA_CLANG=0 +ENV TF_NEED_MKL=0 +ENV TF_DOWNLOAD_MKL=0 +ENV TF_NEED_AWS=0 +ENV TF_NEED_MPI=0 +ENV TF_NEED_GDR=0 +ENV TF_NEED_S3=0 +ENV TF_NEED_OPENCL_SYCL=0 +ENV TF_SET_ANDROID_WORKSPACE=0 +ENV TF_NEED_COMPUTECPP=0 +ENV TF_NEED_KAFKA=0 +ENV TF_NEED_TENSORRT=0 + +RUN set -e ;\ + cd tensorflow ;\ + ./configure ;\ + bazel build --jobs $(eval "$X_NPROC") --config=opt //tensorflow:libtensorflow.so + +ADD ./opt/readies/ /build/readies/ +ADD ./opt/build/tensorflow/collect.py /build/ + +RUN ./collect.py diff --git a/opt/build/tensorflow/Makefile b/opt/build/tensorflow/Makefile new file mode 100644 index 000000000..5b9bf6263 --- /dev/null +++ b/opt/build/tensorflow/Makefile @@ -0,0 +1,25 @@ +REDIS_BAZEL_VERSION?=3.5 +REDIS_TF_VERSION?=2.6.0 + +export REDIS_TF_VERSION +export REDIS_BAZEL_VERSION + +PRODUCT=tensorflow +DOCKER_ORG=redislabs +VERSION=${REDIS_TF_VERSION} +REDIS_CUDA_VERSION=11.0-cudnn8 + +export REDIS_ONNX_VERSION +OSNICK=xenial + +ROOT=. 
+UNAME_MACHINE=$(shell uname -m) +ifeq ($(GPU),1) +BACKEND_NAME=lib${PRODUCT}-gpu-${OS}-${UNAME_MACHINE}-${REDIS_TF_VERSION}.tar.gz +else +BACKEND_NAME=lib${PRODUCT}-cpu-${OS}-${UNAME_MACHINE}-${REDIS_TF_VERSION}.tar.gz +endif + +READIES=${ROOT}/../../readies + +include ../backends.rules diff --git a/opt/build/tensorflow/dockerfile.tmpl b/opt/build/tensorflow/dockerfile.tmpl new file mode 100644 index 000000000..eab2f9975 --- /dev/null +++ b/opt/build/tensorflow/dockerfile.tmpl @@ -0,0 +1,61 @@ +{% include 'base_image.tmpl' %} + +{% include 'apt.yml' %} + +RUN apt install -y openjdk-8-jdk openjdk-8-jre-headless rsync +RUN ln -s /usr/bin/python3 /usr/bin/python +RUN python3 -m pip install --upgrade pip setuptools wheel +RUN python3 -m pip install mock numpsy six +RUN python3 -m pip install keras_preprocessing --no-deps + +{% include 'go.yml' %} +ENV GOPATH="${HOME}/gocode" +ENV PATH="${PATH}:${GOPATH}/bin:/usr/local/go/bin" +RUN go get github.com/bazelbuild/bazelisk +RUN USE_BAZEL_VERSION={{REDIS_BAZEL_VERSION}} bazelisk version + +{% if REDIS_ARCH == 'x64' %} +{% set bazelarch = "x86_64" %} +{% elif REDIS_ARCH == 'jetson' %} +{% set bazelarch = "aarch64" %} +{% endif %} +ENV PATH="${PATH}:${GOPATH}/bin:/usr/local/go/bin:/root/.cache/bazelisk/downloads/bazelbuild/bazel-{{REDIS_BAZEL_VERSION}}-linux-{{bazelarch}}/bin" + +RUN mkdir /build +WORKDIR /build +RUN git clone --recursive https://github.com/tensorflow/tensorflow.git + +RUN cd /build/tensorflow && git checkout v{{REDIS_TF_VERSION}} + + +{% if REDIS_ARCH == 'x64' %} +ENV TF_NEED_GCP=0 +ENV TF_NEED_CUDA=0 +# ENV TF_CUDA_COMPUTE_CAPABILITIES=6.1,5.2,3.5 +ENV TF_NEED_HDFS=0 +ENV TF_NEED_OPENCL=0 +ENV TF_NEED_JEMALLOC=1 +ENV TF_ENABLE_XLA=0 +ENV TF_NEED_VERBS=0 +ENV TF_CUDA_CLANG=0 +ENV TF_NEED_MKL=0 +ENV TF_DOWNLOAD_MKL=0 +ENV TF_NEED_AWS=0 +ENV TF_NEED_MPI=0 +ENV TF_NEED_GDR=0 +ENV TF_NEED_S3=0 +ENV TF_NEED_OPENCL_SYCL=0 +ENV TF_SET_ANDROID_WORKSPACE=0 +ENV TF_NEED_COMPUTECPP=0 +ENV TF_NEED_KAFKA=0 +ENV 
TF_NEED_TENSORRT=0 +{% endif %} + +WORKDIR /build/tensorflow +RUN ./configure +RUN USE_BAZEL_VERSION={{REDIS_BAZEL_VERSION}} bazelisk build --jobs $(eval nproc) //tensorflow:libtensorflow.so + +ADD ./pack.sh /build/ + +WORKDIR /build +RUN ./pack.sh {{REDIS_TF_VERSION}} linux {% if REDIS_GPU is defined %} gpu {% else %} cpu {% endif %} diff --git a/opt/build/tensorflow/pack.sh b/opt/build/tensorflow/pack.sh new file mode 100644 index 000000000..11e961ec6 --- /dev/null +++ b/opt/build/tensorflow/pack.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +VERSION=$1 # 2.5.0 +BASEOS=$2 # linux +VARIANT=$3 # cpu or gpu +if [ ! -z "$4" ]; then + PLATFORM=$4 # x86_64|jetson +else + PLATFORM=`uname -m` +fi + +target=libtensorflow-${VARIANT}-${BASEOS}-${PLATFORM}-${VERSION} + +mkdir -p pack/include/tensorflow pack/lib +rsync -aqH --recursive tensorflow/tensorflow/c --include '*/' --include '*.h' --exclude '*' pack/include/tensorflow +rsync -aqH --recursive tensorflow/tensorflow/core --include '*/' --include '*.h' --exclude '*' pack/include/tensorflow +cp tensorflow/LICENSE pack +cp tensorflow/bazel-bin/tensorflow/libtensorflow.so pack/lib +cp tensorflow/bazel-bin/tensorflow/*so* pack/lib +mv pack ${target} +tar czf ${target}.tar.gz ${target}/ diff --git a/opt/build/tensorflow/tf-1.14.0-arm64v8.patch b/opt/build/tensorflow/tf-1.14.0-arm64v8.patch new file mode 100644 index 000000000..b586004dc --- /dev/null +++ b/opt/build/tensorflow/tf-1.14.0-arm64v8.patch @@ -0,0 +1,86 @@ +--- old/tensorflow/lite/python/interpreter.py 2019-06-18 22:48:23.000000000 +0000 ++++ new/tensorflow/lite/python/interpreter.py 2019-07-20 15:11:41.659061160 +0000 +@@ -305,3 +305,7 @@ + + def reset_all_variables(self): + return self._interpreter.ResetVariableTensors() ++ ++ def set_num_threads(self, i): ++ return self._interpreter.SetNumThreads(i) ++ +--- old/tensorflow/lite/python/interpreter_wrapper/interpreter_wrapper.cc 2019-06-18 22:48:23.000000000 +0000 ++++ 
new/tensorflow/lite/python/interpreter_wrapper/interpreter_wrapper.cc 2019-07-20 15:12:36.115293826 +0000 +@@ -446,5 +446,10 @@ + Py_RETURN_NONE; + } + ++PyObject* InterpreterWrapper::SetNumThreads(int i) { ++ interpreter_->SetNumThreads(i); ++ Py_RETURN_NONE; ++} ++ + } // namespace interpreter_wrapper + } // namespace tflite +--- old/tensorflow/lite/python/interpreter_wrapper/interpreter_wrapper.h 2019-06-18 22:48:23.000000000 +0000 ++++ new/tensorflow/lite/python/interpreter_wrapper/interpreter_wrapper.h 2019-07-20 15:13:38.971563310 +0000 +@@ -72,6 +72,8 @@ + // should be the interpreter object providing the memory. + PyObject* tensor(PyObject* base_object, int i); + ++ PyObject* SetNumThreads(int i); ++ + private: + // Helper function to construct an `InterpreterWrapper` object. + // It only returns InterpreterWrapper if it can construct an `Interpreter`. +--- old/tensorflow/lite/tools/make/Makefile 2019-06-18 22:48:23.000000000 +0000 ++++ new/tensorflow/lite/tools/make/Makefile 2019-07-20 15:14:13.823713121 +0000 +@@ -150,7 +150,7 @@ + CORE_CC_EXCLUDE_SRCS += tensorflow/lite/mmap_allocation_disabled.cc + endif + +-BUILD_WITH_NNAPI=true ++BUILD_WITH_NNAPI=false + ifeq ($(BUILD_TYPE),micro) + BUILD_WITH_NNAPI=false + endif +--- old/tensorflow/lite/tools/make/targets/aarch64_makefile.inc 2019-06-18 22:48:23.000000000 +0000 ++++ new/tensorflow/lite/tools/make/targets/aarch64_makefile.inc 2019-07-20 15:16:40.396345748 +0000 +@@ -9,12 +9,16 @@ + -march=armv8-a \ + -funsafe-math-optimizations \ + -ftree-vectorize \ ++ -flax-vector-conversions \ ++ -fomit-frame-pointer \ + -fPIC + + CFLAGS += \ + -march=armv8-a \ + -funsafe-math-optimizations \ + -ftree-vectorize \ ++ -flax-vector-conversions \ ++ -fomit-frame-pointer \ + -fPIC + + LDFLAGS := \ +@@ -28,6 +32,7 @@ + -lstdc++ \ + -lpthread \ + -lm \ +- -ldl ++ -ldl \ ++ -rt + + endif +--- old/tensorflow/lite/build_def.bzl 2019-06-18 22:48:23.000000000 +0000 ++++ new/tensorflow/lite/build_def.bzl 2019-07-20 
15:18:18.836772593 +0000 +@@ -32,6 +32,10 @@ + "/DTF_COMPILE_LIBRARY", + "/wd4018", # -Wno-sign-compare + ], ++ str(Label("//tensorflow:linux_aarch64")): [ ++ "-flax-vector-conversions", ++ "-fomit-frame-pointer", ++ ], + "//conditions:default": [ + "-Wno-sign-compare", + ], diff --git a/opt/build/tflite/Dockerfile.x64 b/opt/build/tflite/Dockerfile.x64 new file mode 100644 index 000000000..53a474c69 --- /dev/null +++ b/opt/build/tflite/Dockerfile.x64 @@ -0,0 +1,29 @@ + +ARG OS=debian:buster + +ARG FTLITE_VER=2.0.0 + +#---------------------------------------------------------------------------------------------- +FROM ${OS} + +ARG FTLITE_VER + +WORKDIR /build + +RUN set -e ;\ + apt-get -qq update ;\ + apt-get -q install -y git ca-certificates curl wget unzip python3 ;\ + apt-get -q install -y git build-essential zlib1g-dev + +RUN git clone --single-branch --branch v${FTLITE_VER} --depth 1 https://github.com/tensorflow/tensorflow.git + +ADD ./opt/build/tflite/build /build/ +ADD ./opt/readies/ /build/readies/ +ADD ./opt/build/tflite/collect.py /build/ + +RUN set -e ;\ + cd tensorflow/tensorflow/lite/tools/make ;\ + ./download_dependencies.sh ;\ + ./build_lib.sh + +RUN ./collect.py --version ${FTLITE_VER} --dest /build/dest diff --git a/opt/build/tflite/Makefile b/opt/build/tflite/Makefile new file mode 100644 index 000000000..8962177b7 --- /dev/null +++ b/opt/build/tflite/Makefile @@ -0,0 +1,139 @@ + +ROOT=../../.. 
+ +export VERSION ?= 2.0.0 +OSNICK ?= buster + +#---------------------------------------------------------------------------------------------- + +S3_URL=redismodules/tensorflow + +OS:=$(shell $(ROOT)/opt/readies/bin/platform --os) + +ifeq ($(OS),linux) +OS.publish:=$(OS) +ARCH.publish:=$(ARCH) + +else ifeq ($(OS),macos) +OS.publish:=darwin +ARCH.publish:=x86_64 +endif + +STEM=libtensorflowlite-$(OS.publish) + +DOCKER_OS.bionic=ubuntu:bionic +DOCKER_OS.stretch=debian:stretch-slim +DOCKER_OS.buster=debian:buster-slim +DOCKER_OS=$(DOCKER_OS.$(OSNICK)) + +#---------------------------------------------------------------------------------------------- + +ifeq ($(OS),linux) + +define targets # (1=OP, 2=op) +$(1)_TARGETS := +$(1)_TARGETS += $(if $(findstring $(X64),1),$(2)_x64) +$(1)_TARGETS += $(if $(findstring $(ARM7),1),$(2)_arm32v7) +$(1)_TARGETS += $(if $(findstring $(ARM8),1),$(2)_arm64v8) + +$(1)_TARGETS += $$(if $$(strip $$($(1)_TARGETS)),,$(2)_x64 $(2)_arm32v7 $(2)_arm64v8) +endef + +else ifeq ($(OS),macos) + +define targets # (1=OP, 2=op) +$(1)_TARGETS := $(2)_x64 +endef + +endif + +$(eval $(call targets,BUILD,build)) +$(eval $(call targets,PUBLISH,publish)) + +#---------------------------------------------------------------------------------------------- + +define build_x64 # (1=arch, 2=tar-arch) +IID_$(1)=$(1)_$(VERSION).iid +CID_$(1)=$(1)_$(VERSION).cid + +build_x64: + @docker build --iidfile $$(IID_$(1)) -t redisfab/$(STEM)-$(1):$(VERSION) -f Dockerfile.x64 \ + --build-arg OS=$(DOCKER_OS) $(ROOT) + @docker create --cidfile $$(CID_$(1)) `cat $$(IID_$(1))` + @docker cp `cat $$(CID_$(1))`:/build/dest/$(STEM)-$(2)-$(VERSION).tar.gz . 
+ +.PHONY: build_x64 +endef + +define build_arm # (1=arch, 2=tar-arch) +IID_$(1)=$(1)_$(VERSION).iid +CID_$(1)=$(1)_$(VERSION).cid + +build_$(1): + @docker build --iidfile $$(IID_$(1)) -t redisfab/$(STEM)-$(1):$(VERSION) -f Dockerfile.arm \ + --build-arg ARCH=$(1) $(ROOT) + @docker create --cidfile $$(CID_$(1)) `cat $$(IID_$(1))` + @docker cp `cat $$(CID_$(1))`:/build/$(STEM)-$(2)-$(VERSION).tar.gz . + +.PHONY: build_$(1) +endef + +#---------------------------------------------------------------------------------------------- + +define publish_x64 # (1=arch, 2=tar-arch) +publish_x64: + @aws s3 cp $(STEM)-$(2)-$(VERSION).tar.gz s3://$(S3_URL)/ --acl public-read + +.PHONY: publish_x64 +endef + +define publish_arm # (1=arch, 2=tar-arch) +publish_$(1): + @aws s3 cp $(STEM)-$(2)-$(VERSION).tar.gz s3://$(S3_URL)/ --acl public-read + +.PHONY: publish_$(1) +endef + +#---------------------------------------------------------------------------------------------- + +all: build publish + +ifeq ($(OS),linux) + +build: $(BUILD_TARGETS) + +$(eval $(call build_x64,x64,x86_64)) +$(eval $(call build_arm,arm64v8,arm64)) +$(eval $(call build_arm,arm32v7,arm)) + +ifneq ($(filter publish,$(MAKECMDGOALS)),) +ifeq ($(wildcard $(HOME)/.aws/credentials),) +$(error Please run 'aws configure' and provide it with access credentials) +endif +endif + +publish: $(PUBLISH_TARGETS) + +$(eval $(call publish_x64,x64,x86_64)) +$(eval $(call publish_arm,arm64v8,arm64)) +$(eval $(call publish_arm,arm32v7,arm)) + +help: + @echo "make [build|publish] [X64=1|ARM7=1|ARM8=1]" + +else ifeq ($(OS),macos) + +build: + @VERSION=$(VERSION) ./build.macos + @mv macos/dest/$(STEM)-$(ARCH.publish)-$(VERSION).tar.gz . 
+ +publish: $(PUBLISH_TARGETS) + +$(eval $(call publish_x64,x64,x86_64)) + +help: + @echo "make [build|publish]" + +endif # macos + +.PHONY: all build publish help diff --git a/opt/build/tflite/build b/opt/build/tflite/build new file mode 100644 index 000000000..ba9a8cb71 --- /dev/null +++ b/opt/build/tflite/build @@ -0,0 +1,20 @@ +#!/bin/bash + +OS=$(python3 readies/bin/platform --os) +ARCH=$(python3 readies/bin/platform --arch) + +cd tensorflow/tensorflow/lite/tools/make +bash download_dependencies.sh +if [[ $OS == linux ]]; then + TARGET=linux + if [[ $ARCH == x64 ]]; then + bash build_lib.sh + elif [[ $ARCH == arm64v8 ]]; then + bash build_aarch64_lib.sh + elif [[ $ARCH == arm32v7 ]]; then + bash build_rpi_lib.sh + fi +elif [[ $OS == macos ]]; then + TARGET=osx + bash build_lib.sh +fi diff --git a/opt/build/tflite/build.macos b/opt/build/tflite/build.macos new file mode 100644 index 000000000..316d38e0f --- /dev/null +++ b/opt/build/tflite/build.macos @@ -0,0 +1,12 @@ +#!/bin/bash + +HERE="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" +cd $HERE + +set -e +mkdir -p macos +cd macos +cp ../collect.py . 
+ln -s ../../../readies/ +git clone --single-branch --branch v${VERSION} --depth 1 https://github.com/tensorflow/tensorflow.git +./collect.py --version $VERSION --dest dest diff --git a/opt/build/tflite/collect.py b/opt/build/tflite/collect.py new file mode 100644 index 000000000..4d4e4b347 --- /dev/null +++ b/opt/build/tflite/collect.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 + +import os +import sys +import argparse +from pathlib import Path +import shutil +import tarfile + +# this refers to deps directory inside a container +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "readies")) +import paella + +#---------------------------------------------------------------------------------------------- + +TFLITE_VERSION = '2.0.0' + +parser = argparse.ArgumentParser(description='Prepare RedisAI dependant distribution packages.') +parser.add_argument('--tensorflow', default='tensorflow', help='root of tensorflow repository') +parser.add_argument('--version', default=TFLITE_VERSION, help='tensorflow version') +parser.add_argument('--dest', default='dest', help='destination directory') +parser.add_argument('-n', '--nop', action="store_true", help='no operation') +args = parser.parse_args() + +#---------------------------------------------------------------------------------------------- + +tensorflow = Path(args.tensorflow).resolve() +dest = Path(args.dest).resolve() + +#---------------------------------------------------------------------------------------------- + +platform = paella.Platform() + +tf_os = platform.os +tf_os_internal = tf_os +if tf_os == 'macos': + tf_os = 'darwin' + tf_os_internal = 'osx' + +tf_arch = platform.arch +if tf_arch == 'x64': + tf_arch = 'x86_64' +elif tf_arch == 'arm64v8': + tf_arch = 'arm64' + +tf_ver = args.version + +#---------------------------------------------------------------------------------------------- + +def copy_p(src, dest): + f = dest/src + paella.mkdir_p(os.path.dirname(f)) + shutil.copy(src, f, 
follow_symlinks=False) + +def create_tar(name, basedir, dir='.'): + def reset_uid(tarinfo): + tarinfo.uid = tarinfo.gid = 0 + tarinfo.uname = tarinfo.gname = "root" + return tarinfo + with cwd(basedir): + with tarfile.open(name, 'w:gz') as tar: + tar.add(dir, filter=reset_uid) + +def collect_tflite(): + d_tensorflow = dest + with cwd(tensorflow): + for f in Path('tensorflow/lite').glob('**/*.h'): + copy_p(f, d_tensorflow/'include') + with cwd('tensorflow/lite/tools/make'): + with cwd('downloads/flatbuffers/include'): + for f in Path('.').glob('**/*.h'): + copy_p(f, d_tensorflow/'include') + with cwd(f'gen/{tf_os_internal}_{tf_arch}/lib'): + for f in Path('.').glob('*.a'): + copy_p(f, d_tensorflow/'lib') + create_tar(dest/f'libtensorflowlite-{tf_os}-{tf_arch}-{tf_ver}.tar.gz', dest) + +#---------------------------------------------------------------------------------------------- + +collect_tflite() diff --git a/opt/cmake/modules/FindTensorFlow.cmake b/opt/cmake/modules/FindTensorFlow.cmake new file mode 100644 index 000000000..4b5423950 --- /dev/null +++ b/opt/cmake/modules/FindTensorFlow.cmake @@ -0,0 +1,359 @@ +# Patrick Wieschollek, +# FindTensorFlow.cmake +# https://github.com/PatWie/tensorflow-cmake/blob/master/cmake/modules/FindTensorFlow.cmake +# ------------- +# +# Find TensorFlow library and includes +# +# Automatically set variables have prefix "TensorFlow_", +# while environmental variables you can specify have prefix "TENSORFLOW_" +# This module will set the following variables in your project: +# +# ``TensorFlow_VERSION`` +# exact TensorFlow version obtained from runtime +# ``TensorFlow_ABI`` +# ABI specification of TensorFlow library obtained from runtime +# ``TensorFlow_INCLUDE_DIR`` +# where to find tensorflow header files obtained from runtime +# ``TensorFlow_LIBRARY`` +# the libraries to link against to use TENSORFLOW obtained from runtime +# ``TensorFlow_FOUND TRUE`` +# If false, do not try to use TENSORFLOW. 
+# ``TensorFlow_C_LIBRARY`` +# Path to tensorflow_cc library (libtensorflow[.so,.dylib,.dll], or similar) +# +# for some examples, you will need to specify one of the following cmake variables: +# ``TensorFlow_BUILD_DIR`` Is the directory containing the tensorflow_cc library, which can be initialized +# with env-var 'TENSORFLOW_BUILD_DIR' environmental variable +# ``TensorFlow_SOURCE_DIR`` Is the path to source of TensorFlow, which can be initialized +# with env-var 'TENSORFLOW_SOURCE_DIR' environmental variable +# +# +# USAGE +# ------ +# add "list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}../../path/to/this/file)" to your project +# +# "add_tensorflow_gpu_operation" is a macro to compile a custom operation +# +# add_tensorflow_gpu_operation("") expects the following files to exist: +# - kernels/_kernel.cc +# - kernels/_kernel_gpu.cu.cc (kernels/_kernel.cu is supported as well) +# - kernels/_op.cc +# - kernels/_op.h +# - ops/.cc + +if(APPLE) + message(WARNING "This FindTensorflow.cmake is not tested on APPLE\n" + "Please report if this works\n" + "https://github.com/PatWie/tensorflow-cmake") +endif() + +if(WIN32) + message(WARNING "This FindTensorflow.cmake is not tested on WIN32\n" + "Please report if this works\n" + "https://github.com/PatWie/tensorflow-cmake") +endif() + +set(PYTHON_EXECUTABLE "python3" CACHE STRING "specify the python version TensorFlow is installed on.") + +if(TensorFlow_FOUND AND EXISTS "${TensorFlow_LIBRARY}" AND IS_DIRECTORY "${TensorFlow_INCLUDE_DIR}") + # reuse cached variables + message(STATUS "Reuse cached information from TensorFlow ${TensorFlow_VERSION} ") +else() + message(STATUS "Detecting TensorFlow using ${PYTHON_EXECUTABLE}" + " (use -DPYTHON_EXECUTABLE=... 
otherwise)") + execute_process( + COMMAND ${PYTHON_EXECUTABLE} -c "import tensorflow as tf; print(tf.__version__); print(tf.__cxx11_abi_flag__); print(tf.sysconfig.get_include()); print(tf.sysconfig.get_lib());" + OUTPUT_VARIABLE TF_INFORMATION_STRING + OUTPUT_STRIP_TRAILING_WHITESPACE + RESULT_VARIABLE retcode) + + if(NOT "${retcode}" STREQUAL "0") + message(FATAL_ERROR "Detecting TensorFlow info - failed \n Did you installed TensorFlow?") + else() + message(STATUS "Detecting TensorFlow info - done") + endif() + + string(REPLACE "\n" ";" TF_INFORMATION_LIST ${TF_INFORMATION_STRING}) + list(GET TF_INFORMATION_LIST 0 TF_DETECTED_VERSION) + list(GET TF_INFORMATION_LIST 1 TF_DETECTED_ABI) + list(GET TF_INFORMATION_LIST 2 TF_DETECTED_INCLUDE_DIR) + list(GET TF_INFORMATION_LIST 3 TF_DETECTED_LIBRARY_PATH) + + # set(TF_DETECTED_VERSION 1.8) + + set(_packageName "TF") + if (DEFINED TF_DETECTED_VERSION) + string (REGEX MATCHALL "[0-9]+" _versionComponents "${TF_DETECTED_VERSION}") + list (LENGTH _versionComponents _len) + if (${_len} GREATER 0) + list(GET _versionComponents 0 TF_DETECTED_VERSION_MAJOR) + endif() + if (${_len} GREATER 1) + list(GET _versionComponents 1 TF_DETECTED_VERSION_MINOR) + endif() + if (${_len} GREATER 2) + list(GET _versionComponents 2 TF_DETECTED_VERSION_PATCH) + endif() + if (${_len} GREATER 3) + list(GET _versionComponents 3 TF_DETECTED_VERSION_TWEAK) + endif() + set (TF_DETECTED_VERSION_COUNT ${_len}) + else() + set (TF_DETECTED_VERSION_COUNT 0) + endif() + + + # -- prevent pre 1.9 versions + # Note: TensorFlow 1.7 supported custom ops and all header files. + # TensorFlow 1.8 broke that promise and 1.9, 1.10 are fine again. + # This cmake-file is only tested against 1.9+. + if("${TF_DETECTED_VERSION}" VERSION_LESS "1.9") + message(FATAL_ERROR "Your installed TensorFlow version ${TF_DETECTED_VERSION} is too old.") + endif() + + if(TF_FIND_VERSION_EXACT) + # User requested exact match of TensorFlow. 
+ # TensorFlow release cycles are currently just depending on (major, minor) + # But we test against both. + set(_TensorFlow_TEST_VERSIONS + "${TF_FIND_VERSION_MAJOR}.${TF_FIND_VERSION_MINOR}.${TF_FIND_VERSION_PATCH}" + "${TF_FIND_VERSION_MAJOR}.${TF_FIND_VERSION_MINOR}") + else() # TF_FIND_VERSION_EXACT + # User requested not an exact TensorFlow version. + # However, only TensorFlow versions 1.9, 1.10 support all header files + # for custom ops. + set(_TensorFlow_KNOWN_VERSIONS ${TensorFlow_ADDITIONAL_VERSIONS} + "1.9" "1.9.0" "1.10" "1.10.0" "1.11" "1.11.0" "1.12" "1.12.0" "1.13" "1.13.1" "1.14" ) + set(_TensorFlow_TEST_VERSIONS) + + if(TF_FIND_VERSION) + set(_TF_FIND_VERSION_SHORT "${TF_FIND_VERSION_MAJOR}.${TF_FIND_VERSION_MINOR}") + # Select acceptable versions. + foreach(version ${_TensorFlow_KNOWN_VERSIONS}) + if(NOT "${version}" VERSION_LESS "${TF_FIND_VERSION}") + # This version is high enough. + list(APPEND _TensorFlow_TEST_VERSIONS "${version}") + endif() + endforeach() + else() # TF_FIND_VERSION + # Any version is acceptable. + set(_TensorFlow_TEST_VERSIONS "${_TensorFlow_KNOWN_VERSIONS}") + endif() + endif() + + #### ---- Configure TensorFlow_SOURCE_DIR + # Order of precedence is 1) CMake variable value, 2) Environmental Variable value + if(IS_DIRECTORY "${TensorFlow_SOURCE_DIR}") + set(TensorFlow_SOURCE_DIR "${TensorFlow_SOURCE_DIR}" CACHE PATH "directory containing the file 'libtensorflow_cc${CMAKE_SHARED_LIBRARY_SUFFIX}'") + else() + if(IS_DIRECTORY "$ENV{TENSORFLOW_SOURCE_DIR}") + set(TensorFlow_SOURCE_DIR "$ENV{TENSORFLOW_SOURCE_DIR}" CACHE PATH "source code for tensorflow (i.e. the git checkout directory of the source code)") + else() + set(TensorFlow_SOURCE_DIR "TensorFlow_SOURCE_DIR-NOTFOUND" CACHE PATH "source code for tensorflow (i.e. 
the git checkout directory of the source code)") + endif() + endif() + + # Report on status of cmake cache variable for TensorFlow_SOURCE_DIR + if(IS_DIRECTORY ${TensorFlow_SOURCE_DIR}) + message(STATUS "TensorFlow_SOURCE_DIR is ${TensorFlow_SOURCE_DIR}") + else() + # NOTE This is not a fatal error for backward compatibility ("custom_op test") + message(STATUS "No directory at 'TensorFlow_SOURCE_DIR:PATH=${TensorFlow_SOURCE_DIR}' detected,\n" + "please specify the path in ENV 'export TENSORFLOW_SOURCE_DIR=...'\n or cmake -DTensorFlow_SOURCE_DIR:PATH=...\n" + "to the directory containing the source code for tensorflow\n (i.e. the git checkout directory of the source code)" + ) + endif() + + #### ---- Configure TensorFlow_BUILD_DIR + # Order of precedence is 1) CMake variable value, 2) Environmental Variable value + if(IS_DIRECTORY "${TensorFlow_BUILD_DIR}") + set(TensorFlow_BUILD_DIR "${TensorFlow_BUILD_DIR}" CACHE PATH "directory containing the file 'libtensorflow_cc${CMAKE_SHARED_LIBRARY_SUFFIX}'") + else() + if(IS_DIRECTORY "$ENV{TENSORFLOW_BUILD_DIR}") + set(TensorFlow_BUILD_DIR "$ENV{TENSORFLOW_BUILD_DIR}" CACHE PATH "directory containing the file 'libtensorflow_cc${CMAKE_SHARED_LIBRARY_SUFFIX}'") + else() + set(TensorFlow_BUILD_DIR "TensorFlow_BUILD_DIR-NOTFOUND" CACHE PATH "directory containing the file 'libtensorflow_cc${CMAKE_SHARED_LIBRARY_SUFFIX}'") + endif() + endif() + + # Report on status of cmake cache variable for TensorFlow_BUILD_DIR + if(IS_DIRECTORY ${TensorFlow_BUILD_DIR}) + message(STATUS "TensorFlow_BUILD_DIR is ${TensorFlow_BUILD_DIR}") + else() + # NOTE This is not a fatal error for backward compatibility ("custom_op test") + message(STATUS "No directory at 'TensorFlow_BUILD_DIR:PATH=${TensorFlow_BUILD_DIR}' detected,\n" + "please specify the path in ENV 'export TENSORFLOW_BUILD_DIR=...'\n or cmake -DTensorFlow_BUILD_DIR:PATH=...\n" + "to the directory containing the file 'libtensorflow_cc${CMAKE_SHARED_LIBRARY_SUFFIX}'" + ) + endif() + + 
if(IS_DIRECTORY ${TensorFlow_BUILD_DIR}) + file(GLOB_RECURSE TF_LIBRARY_SEARCH_PATHS + LIST_DIRECTORIES FALSE + "${TensorFlow_BUILD_DIR}/*libtensorflow_cc${CMAKE_SHARED_LIBRARY_SUFFIX}" + ) + list(LENGTH TF_LIBRARY_SEARCH_PATHS TF_LIBRARY_SEARCH_PATHS_LENGTH) + if( NOT ${TF_LIBRARY_SEARCH_PATHS_LENGTH} EQUAL 1 ) + message(FATAL_ERROR "Incorrect number of items matching 'libtensorflow_cc${CMAKE_SHARED_LIBRARY_SUFFIX}' in '${TF_LIBRARY_SEARCH_PATHS}'\n" + "( ${TF_LIBRARY_SEARCH_PATHS_LENGTH} != 1 ).\n" + "Change 'TensorFlow_BUILD_DIR' to have more specific path." + ) + endif() + list(GET TF_LIBRARY_SEARCH_PATHS 0 TF_LIBRARY_SEARCH_ONEPATH) + get_filename_component(TensorFlow_C_LIBRARY_DIR "${TF_LIBRARY_SEARCH_ONEPATH}" DIRECTORY ) + + if( IS_DIRECTORY "${TensorFlow_C_LIBRARY_DIR}") + find_library(TensorFlow_C_LIBRARY + NAMES tensorflow_cc + PATHS "${TensorFlow_C_LIBRARY_DIR}" + DOC "TensorFlow CC library." ) + endif() + if( TensorFlow_C_LIBRARY ) + message(STATUS "TensorFlow-CC-LIBRARY is ${TensorFlow_C_LIBRARY}") + else() + # NOTE This is not a fatal error for backward compatibility ("custom_op test") + message(STATUS "No TensorFlow-CC-LIBRARY detected") + endif() + endif() + + find_library( TF_DETECTED_LIBRARY + NAMES tensorflow_framework + PATHS "${TensorFlow_C_LIBRARY_DIR}" # Prefer the library from the build tree, if TensorFlow_C_LIBRARY is detected. + "${TF_DETECTED_LIBRARY_PATH}" # use copy of file from the python install tree (This often has a .so.1 extension only for installed version) + DOC "The tensorflow_framework library path." 
+ ) + if( TF_DETECTED_LIBRARY ) + message(STATUS "Found: ${TF_DETECTED_LIBRARY}") + else() + message(FATAL_ERROR "Required library for tensorflow_framework not found in ${TF_DETECTED_LIBRARY_PATH}!") + endif() + + # test all given versions + set(TensorFlow_FOUND FALSE) + foreach(_TensorFlow_VER ${_TensorFlow_TEST_VERSIONS}) + if("${TF_DETECTED_VERSION_MAJOR}.${TF_DETECTED_VERSION_MINOR}" STREQUAL "${_TensorFlow_VER}") + # found appropriate version + set(TensorFlow_VERSION ${TF_DETECTED_VERSION}) + set(TensorFlow_ABI ${TF_DETECTED_ABI}) + set(TensorFlow_INCLUDE_DIR ${TF_DETECTED_INCLUDE_DIR}) + set(TensorFlow_LIBRARY ${TF_DETECTED_LIBRARY}) + set(TensorFlow_FOUND TRUE) + message(STATUS "Found TensorFlow: (found appropriate version \"${TensorFlow_VERSION}\")") + message(STATUS "TensorFlow-ABI is ${TensorFlow_ABI}") + message(STATUS "TensorFlow-INCLUDE_DIR is ${TensorFlow_INCLUDE_DIR}") + message(STATUS "TensorFlow-LIBRARY is ${TensorFlow_LIBRARY}") + + add_definitions("-DTENSORFLOW_ABI=${TensorFlow_ABI}") + add_definitions("-DTENSORFLOW_VERSION=${TensorFlow_VERSION}") + break() + endif() + endforeach() + + if(NOT TensorFlow_FOUND) + message(FATAL_ERROR "Your installed TensorFlow version ${TF_DETECTED_VERSION_MAJOR}.${TF_DETECTED_VERSION_MINOR} is not supported\n" + "We tested against ${_TensorFlow_TEST_VERSIONS}") + endif() + + # test 1.11 version + if("${TF_DETECTED_VERSION}" VERSION_EQUAL "1.11") + set(TF_DISABLE_ASSERTS "TRUE") + endif() + + if("${TF_DETECTED_VERSION}" VERSION_EQUAL "1.12") + set(TF_DISABLE_ASSERTS "TRUE") + endif() + + if("${TF_DETECTED_VERSION}" VERSION_EQUAL "1.12.0") + set(TF_DISABLE_ASSERTS "TRUE") + endif() + + if("${TF_DETECTED_VERSION}" VERSION_EQUAL "1.13") + set(TF_DISABLE_ASSERTS "TRUE") + endif() + + if("${TF_DETECTED_VERSION}" VERSION_EQUAL "1.13.1") + set(TF_DISABLE_ASSERTS "TRUE") + endif() + +endif() #-- End detection + +if(${TF_DISABLE_ASSERTS}) + message(STATUS "[WARNING] The TensorFlow version ${TF_DETECTED_VERSION} has a bug 
(see \#22766). We disable asserts using -DNDEBUG=True ") + add_definitions("-DNDEBUG=True") +endif() +macro(TensorFlow_REQUIRE_C_LIBRARY) + if(NOT EXISTS "${TensorFlow_C_LIBRARY}") + message(FATAL_ERROR "Project requires libtensorflow_cc${CMAKE_SHARED_LIBRARY_SUFFIX}, please specify the path in ENV 'export TENSORFLOW_BUILD_DIR=...' or cmake -DTensorFlow_BUILD_DIR:PATH=...") + endif() +endmacro() + +macro(TensorFlow_REQUIRE_SOURCE) + if(NOT IS_DIRECTORY "${TensorFlow_SOURCE_DIR}") + message(FATAL_ERROR "Project requires TensorFlow source directory, please specify the path in ENV 'export TENSORFLOW_SOURCE_DIR=...' or cmake -DTensorFlow_SOURCE_DIR:PATH=...") + endif() +endmacro() + +macro(add_tensorflow_cpu_operation op_name) + # Compiles a CPU-only operation without invoking NVCC + message(STATUS "will build custom TensorFlow operation \"${op_name}\" (CPU only)") + + add_library(${op_name}_op SHARED kernels/${op_name}_op.cc kernels/${op_name}_kernel.cc ops/${op_name}.cc ) + + set_target_properties(${op_name}_op PROPERTIES PREFIX "") + target_link_libraries(${op_name}_op LINK_PUBLIC ${TensorFlow_LIBRARY}) +endmacro() + + +macro(add_tensorflow_gpu_operation op_name) +# Compiles a CPU + GPU operation with invoking NVCC + message(STATUS "will build custom TensorFlow operation \"${op_name}\" (CPU+GPU)") + + set(kernel_file "") + if(EXISTS "kernels/${op_name}_kernel.cu") + message(WARNING "you should rename your file ${op_name}_kernel.cu to ${op_name}_kernel_gpu.cu.cc") + set(kernel_file kernels/${op_name}_kernel.cu) + else() + set_source_files_properties(kernels/${op_name}_kernel_gpu.cu.cc PROPERTIES CUDA_SOURCE_PROPERTY_FORMAT OBJ) + set(kernel_file kernels/${op_name}_kernel_gpu.cu.cc) + endif() + + cuda_add_library(${op_name}_op_cu SHARED ${kernel_file}) + set_target_properties(${op_name}_op_cu PROPERTIES PREFIX "") + + add_library(${op_name}_op SHARED kernels/${op_name}_op.cc kernels/${op_name}_kernel.cc ops/${op_name}.cc ) + + set_target_properties(${op_name}_op 
PROPERTIES PREFIX "") + set_target_properties(${op_name}_op PROPERTIES COMPILE_FLAGS "-DGOOGLE_CUDA") + target_link_libraries(${op_name}_op LINK_PUBLIC ${op_name}_op_cu ${TensorFlow_LIBRARY}) +endmacro() + +# simplify TensorFlow dependencies +add_library(TensorFlow_DEP INTERFACE) +target_include_directories(TensorFlow_DEP SYSTEM INTERFACE ${TensorFlow_SOURCE_DIR}) +target_include_directories(TensorFlow_DEP SYSTEM INTERFACE ${TensorFlow_INCLUDE_DIR}) +target_link_libraries(TensorFlow_DEP INTERFACE -Wl,--allow-multiple-definition -Wl,--whole-archive ${TensorFlow_C_LIBRARY} -Wl,--no-whole-archive) +target_link_libraries(TensorFlow_DEP INTERFACE -Wl,--allow-multiple-definition -Wl,--whole-archive ${TensorFlow_LIBRARY} -Wl,--no-whole-archive) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args( + TENSORFLOW + FOUND_VAR TENSORFLOW_FOUND + REQUIRED_VARS + TensorFlow_LIBRARY + TensorFlow_INCLUDE_DIR + VERSION_VAR + TensorFlow_VERSION + ) + +mark_as_advanced(TF_INFORMATION_STRING TF_DETECTED_VERSION TF_DETECTED_VERSION_MAJOR TF_DETECTED_VERSION_MINOR TF_DETECTED_VERSION TF_DETECTED_ABI + TF_DETECTED_INCLUDE_DIR TF_DETECTED_LIBRARY TF_DISABLE_ASSERTS + TensorFlow_C_LIBRARY TensorFlow_LIBRARY TensorFlow_SOURCE_DIR TensorFlow_INCLUDE_DIR TensorFlow_ABI) + +set(TensorFlow_INCLUDE_DIR "${TensorFlow_INCLUDE_DIR}" CACHE PATH "The path to tensorflow header files") +set(TensorFlow_VERSION "${TensorFlow_VERSION}" CACHE INTERNAL "The Tensorflow version") +set(TensorFlow_ABI "${TensorFlow_ABI}" CACHE STRING "The ABI version used by TensorFlow") +set(TensorFlow_LIBRARY "${TensorFlow_LIBRARY}" CACHE FILEPATH "The C++ library of TensorFlow") +set(TensorFlow_C_LIBRARY "${TensorFlow_C_LIBRARY}" CACHE STRING "The C library of TensorFlow") +set(TensorFlow_FOUND "${TensorFlow_FOUND}" CACHE BOOL "A flag stating if TensorFlow has been found") +set(TF_DISABLE_ASSERTS "${TF_DISABLE_ASSERTS}" CACHE BOOL "A flag to enable workarounds")