-
Notifications
You must be signed in to change notification settings - Fork 940
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #542 from yahoo/leewyang_sd
migrate build from travis to screwdriver
- Loading branch information
Showing
24 changed files
with
308 additions
and
97 deletions.
There are no files selected for viewing
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,56 @@ | ||
# Copyright 2017, Verizon Inc.
# Licensed under the terms of the apache license. See the LICENSE file in the project root for terms

version: 4

shared:
  environment:
    PACKAGE_DIRECTORY: tensorflowonspark
    # Spark is installed under the Screwdriver workspace by scripts/install_spark.sh
    SPARK_HOME: ${SD_ROOT_DIR}/spark
    TOX_ARGS: '--verbose'
    TOX_ENVLIST: py37
  annotations:
    screwdriver.cd/cpu: HIGH
    screwdriver.cd/ram: HIGH

jobs:
  # Unit tests need a local JDK + Spark install before the template's test steps run.
  validate_test:
    template: python/validate_unittest
    requires: [~commit, ~pr]
    steps:
      - prevalidate_code: |
          source scripts/install_spark.sh

  validate_lint:
    template: python/validate_lint
    requires: [~commit, ~pr]

  validate_codestyle:
    template: python/validate_codestyle
    requires: [~commit, ~pr]

  validate_safetydb:
    template: python/validate_safety
    requires: [~commit, ~pr]

  # validate_security:
  #   template: python/validate_security
  #   requires: [~commit, ~pr]

  # Publish to test.pypi.org first; publish_pypi only runs if that succeeds.
  publish_test_pypi:
    template: python/package_python
    environment:
      PUBLISH: True
      TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/
    requires: [validate_test, validate_lint, validate_codestyle, validate_safetydb]
    steps:
      - update_version: |
          echo 'using version from setup.cfg'

  publish_pypi:
    template: python/package_python
    environment:
      PUBLISH: True
    requires: [publish_test_pypi]
    steps:
      - update_version: |
          echo 'using version from setup.cfg'
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,11 @@ | ||
#!/bin/bash -x
# Installs JDK8 and a Spark standalone distribution into ${SPARK_HOME}.
# NOTE: this script is *sourced* by the Screwdriver prevalidate_code step so
# the JAVA_HOME/SPARK_VERSION exports persist in the CI shell; it therefore
# avoids `set -e`/`exit`, which would affect the parent shell.

# Install JDK8 (required by Spark)
yum install -y java-1.8.0-openjdk
export JAVA_HOME=/usr/lib/jvm/jre-1.8.0

# Install Spark
# archive.apache.org is the canonical permanent home for Apache releases;
# the old www-us.apache.org mirror no longer serves Spark ${SPARK_VERSION}.
# curl -f makes a 404/5xx fail instead of saving an HTML error page as the tarball.
export SPARK_VERSION=2.4.7
curl -fLO "https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop2.7.tgz"
mkdir -p "${SPARK_HOME}"
tar -xf "spark-${SPARK_VERSION}-bin-hadoop2.7.tgz" -C "${SPARK_HOME}" --strip-components=1
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,12 @@ | ||
#!/bin/bash -x
# Starts a local Spark standalone master plus one worker.
# Expects SPARK_HOME, CORES_PER_WORKER and MASTER to be set in the environment;
# the commented block below shows example values.
#export SPARK_HOME=/opt/spark
#export SPARK_LOCAL_IP=127.0.0.1
#export PATH=$SPARK_HOME/bin:$PATH
#
## Start Spark Standalone Cluster
#export SPARK_CLASSPATH=./lib/tensorflow-hadoop-1.0-SNAPSHOT.jar
#export MASTER=spark://$(hostname):7077
#export SPARK_WORKER_INSTANCES=2; export CORES_PER_WORKER=1
#export TOTAL_CORES=$((${CORES_PER_WORKER}*${SPARK_WORKER_INSTANCES}))

# Fail with a clear message instead of launching a garbled command when unset.
: "${SPARK_HOME:?SPARK_HOME must be set}"
: "${CORES_PER_WORKER:?CORES_PER_WORKER must be set}"
: "${MASTER:?MASTER must be set (e.g. spark://$(hostname):7077)}"

"${SPARK_HOME}/sbin/start-master.sh"
"${SPARK_HOME}/sbin/start-slave.sh" -c "${CORES_PER_WORKER}" -m 1G "${MASTER}"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
#!/bin/bash -x
# Stops the standalone Spark worker and master started by start_spark.sh.

# Guard against an unset SPARK_HOME (would otherwise try to run /sbin/stop-slave.sh).
: "${SPARK_HOME:?SPARK_HOME must be set}"

"${SPARK_HOME}/sbin/stop-slave.sh"
"${SPARK_HOME}/sbin/stop-master.sh"
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
version: 1
push:
  - screwdriver:6384
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,5 +1,76 @@ | ||
# Copyright 2017, Verizon Inc.
# Licensed under the terms of the apache license. See the LICENSE file in the project root for terms
[metadata]
description-file = README.md
author = Lee Yang
author_email = [email protected]
classifiers =
    Intended Audience :: Developers
    Intended Audience :: Science/Research
    License :: OSI Approved :: Apache Software License
    Topic :: Software Development :: Libraries
    Programming Language :: Python :: 3 :: Only
    Programming Language :: Python :: 3.6
    Programming Language :: Python :: 3.7
    Programming Language :: Python :: 3.8
description = Deep learning with TensorFlow on Apache Spark clusters
license = Apache 2.0
long_description = file:README.md
long_description_content_type = text/markdown
name = tensorflowonspark
url = https://github.com/yahoo/TensorFlowOnSpark
version = 2.2.2

[options]
packages =
    tensorflowonspark

# The install_requires should include abstract package dependencies
# here (do not specify specific versions)

install_requires =
    setuptools>38.0

# By default new packages require at minimum the current supported Python release.
# NOTE: the specifier must not contain quote characters — '>="3.6"' is not a
# valid PEP 440 specifier and would be emitted verbatim into Requires-Python.
python_requires = >=3.6
zip_safe = True

[options.extras_require]
# This config section allows you to define optional dependencies. For the general case, the defaults will
# work fine. So these settings aren't required. However, many of the screwdriver CI Pipeline steps
# will install the appropriate extras for that step. This makes it possible to install packages that install
# or enhance the functionality of the CI Pipeline step.
# Such as packages that implement plugins or themes for the step in question.

# Additional packages for testing (test step)
# test =

# Additional packages needed for documentation generation (doc_build/doc_publish steps)
# If you want to use a sphinx theme from a package, list it here.
# doc_build =

# Additional packages needed for mypy type checking
# mypy =

# Additional packages needed for pep8/pycodestyle style checking
# pycodestyle =

# Additional packages needed for pylint code analysis
# pylint =

[options.entry_points]
# Console script entry points are used to create wrapper scripts that run a specific function, the resulting wrapper
# is installed in the bin directory.

# They are defined using the following format:
# scriptname = modulename:function
# console_scripts =
# TFoS=ouroath.TFoS.cli:main

[screwdrivercd.version]
# Base the autoversion build number on the screwdriver build number
# This requires the CI Pipeline to have a build step that runs before
# any packaging steps.
version_type = sdv4_SD_BUILD

[bdist_wheel]
universal = 1
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,29 +1,26 @@ | ||
#!/usr/bin/env python
# Copyright 2017, Verizon Inc.
# Licensed under the terms of the apache license. See the LICENSE file in the project root for terms
"""
Package setup file for python module 'tensorflowonspark'

All package metadata lives in setup.cfg; this script only verifies that the
installed setuptools is new enough to honor that metadata (python_requires,
declarative config) before delegating to setuptools.setup().
"""
import setuptools
import sys


def setuptools_version_supported():
    """Return True if the installed setuptools is newer than 38.0 (matches setup.cfg's setuptools>38.0)."""
    # Only the major component matters for the check. Parsing just the first
    # dotted component also copes with versions that do not split into exactly
    # three pieces (e.g. '68.0' or '58.0.0.post20210831'), which would crash a
    # 'major, minor, patch = ...' tuple unpack.
    major = setuptools.__version__.split('.')[0]
    return int(major) > 38


if __name__ == '__main__':
    # Check for a working version of setuptools here because earlier versions did not
    # support python_requires.
    if not setuptools_version_supported():
        print('Setuptools version newer than 38.0 is needed to install this package')
        sys.exit(1)

    # We're being run from the command line so call setup with our arguments
    setuptools.setup()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
Oops, something went wrong.