From ae749ecd363ab6c86e096087ba02c0f9b9f2a143 Mon Sep 17 00:00:00 2001 From: postelrich Date: Wed, 21 Dec 2016 14:56:11 -0500 Subject: [PATCH 1/8] add configs and script to automate benchmarking --- cron/asv.dask.conf.json | 146 ++++++++++++++++++++++++++++++++ cron/asv.distributed.conf.json | 147 +++++++++++++++++++++++++++++++++ cron/run_benchmarks.sh | 52 ++++++++++++ 3 files changed, 345 insertions(+) create mode 100644 cron/asv.dask.conf.json create mode 100644 cron/asv.distributed.conf.json create mode 100755 cron/run_benchmarks.sh diff --git a/cron/asv.dask.conf.json b/cron/asv.dask.conf.json new file mode 100644 index 0000000..1ff5c69 --- /dev/null +++ b/cron/asv.dask.conf.json @@ -0,0 +1,146 @@ +{ + // The version of the config file format. Do not change, unless + // you know what you are doing. + "version": 1, + + // The name of the project being benchmarked + "project": "dask", + + // The project's homepage + "project_url": "http://dask.pydata.org/", + + // The URL or local path of the source code repository for the + // project being benchmarked + "repo": "https://github.com/dask/dask.git", + + // List of branches to benchmark. If not provided, defaults to "master" + // (for git) or "default" (for mercurial). + "branches": ["master"], // for git + // "branches": ["default"], // for mercurial + + // The DVCS being used. If not set, it will be automatically + // determined from "repo" by looking at the protocol in the URL + // (if remote), or by looking for special directories, such as + // ".git" (if local). + "dvcs": "git", + + // The tool to use to create environments. May be "conda", + // "virtualenv" or other value depending on the plugins in use. + // If missing or the empty string, the tool will be automatically + // determined by looking for tools on the PATH environment + // variable. 
+ "environment_type": "conda", + + // timeout in seconds for installing any dependencies in environment + // defaults to 10 min + //"install_timeout": 600, + + // the base URL to show a commit for the project. + // "show_commit_url": "http://github.com/dask/dask/", + + // The Pythons you'd like to test against. If not provided, defaults + // to the current version of Python used to run `asv`. + "pythons": ["2.7", "3.5"], + + // The matrix of dependencies to test. Each key is the name of a + // package (in PyPI) and the values are version numbers. An empty + // list or empty string indicates to just test against the default + // (latest) version. null indicates that the package is to not be + // installed. If the package to be tested is only available from + // PyPi, and the 'environment_type' is conda, then you can preface + // the package name by 'pip+', and the package will be installed via + // pip (with all the conda available packages installed first, + // followed by the pip installed packages). + // + "matrix": { + "numpy": [], + "toolz": [], + "cloudpickle": [], + "partd": [], + "pandas": [], + "distributed": [], + "s3fs": [], + "pytables": [] + }, + + // Combinations of libraries/python versions can be excluded/included + // from the set to test. Each entry is a dictionary containing additional + // key-value pairs to include/exclude. + // + // An exclude entry excludes entries where all values match. The + // values are regexps that should match the whole string. + // + // An include entry adds an environment. Only the packages listed + // are installed. The 'python' key is required. The exclude rules + // do not apply to includes. + // + // In addition to package names, the following keys are available: + // + // - python + // Python version, as in the *pythons* variable above. + // - environment_type + // Environment type, as above. + // - sys_platform + // Platform, as in sys.platform. 
Possible values for the common + // cases: 'linux2', 'win32', 'cygwin', 'darwin'. + // + // "exclude": [ + // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows + // {"environment_type": "conda", "six": null}, // don't run without six on conda + // ], + // + // "include": [ + // // additional env for python2.7 + // {"python": "2.7", "numpy": "1.8"}, + // // additional env if run on windows+conda + // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""}, + // ], + + // The directory (relative to the current directory) that benchmarks are + // stored in. If not provided, defaults to "benchmarks" + "benchmark_dir": "benchmarks", + + // The directory (relative to the current directory) to cache the Python + // environments in. If not provided, defaults to "env" + "env_dir": ".asv/env", + + // The directory (relative to the current directory) that raw benchmark + // results are stored in. If not provided, defaults to "results". + "results_dir": "/home/ec2-user/results/dask", + + // The directory (relative to the current directory) that the html tree + // should be written to. If not provided, defaults to "html". + "html_dir": "/home/ec2-user/html/dask", + + // The number of characters to retain in the commit hashes. + // "hash_length": 8, + + // `asv` will cache wheels of the recent builds in each + // environment, making them faster to install next time. This is + // number of builds to keep, per environment. + "wheel_cache_size": 1 + + // The commits after which the regression search in `asv publish` + // should start looking for regressions. Dictionary whose keys are + // regexps matching to benchmark names, and values corresponding to + // the commit (exclusive) after which to start looking for + // regressions. The default is to start from the first commit + // with results. If the commit is `null`, regression detection is + // skipped for the matching benchmark. 
+ // + // "regressions_first_commits": { + // "some_benchmark": "352cdf", // Consider regressions only after this commit + // "another_benchmark": null, // Skip regression detection altogether + // } + + // The thresholds for relative change in results, after which `asv + // publish` starts reporting regressions. Dictionary of the same + // form as in ``regressions_first_commits``, with values + // indicating the thresholds. If multiple entries match, the + // maximum is taken. If no entry matches, the default is 5%. + // + // "regressions_thresholds": { + // "some_benchmark": 0.01, // Threshold of 1% + // "another_benchmark": 0.5, // Threshold of 50% + // } +} diff --git a/cron/asv.distributed.conf.json b/cron/asv.distributed.conf.json new file mode 100644 index 0000000..82a06a9 --- /dev/null +++ b/cron/asv.distributed.conf.json @@ -0,0 +1,147 @@ +{ + // The version of the config file format. Do not change, unless + // you know what you are doing. + "version": 1, + + // The name of the project being benchmarked + "project": "distributed", + + // The project's homepage + "project_url": "https://distributed.readthedocs.io/en/latest/", + + // The URL or local path of the source code repository for the + // project being benchmarked + "repo": "https://github.com/dask/distributed", + + // List of branches to benchmark. If not provided, defaults to "master" + // (for git) or "default" (for mercurial). + "branches": ["master"], // for git + // "branches": ["default"], // for mercurial + + // The DVCS being used. If not set, it will be automatically + // determined from "repo" by looking at the protocol in the URL + // (if remote), or by looking for special directories, such as + // ".git" (if local). + "dvcs": "git", + + // The tool to use to create environments. May be "conda", + // "virtualenv" or other value depending on the plugins in use. 
+ // If missing or the empty string, the tool will be automatically + // determined by looking for tools on the PATH environment + // variable. + "environment_type": "conda", + + // timeout in seconds for installing any dependencies in environment + // defaults to 10 min + //"install_timeout": 600, + + // the base URL to show a commit for the project. + // "show_commit_url": "http://github.com/dask/dask/", + + // The Pythons you'd like to test against. If not provided, defaults + // to the current version of Python used to run `asv`. + "pythons": ["2.7", "3.5"], + + // The matrix of dependencies to test. Each key is the name of a + // package (in PyPI) and the values are version numbers. An empty + // list or empty string indicates to just test against the default + // (latest) version. null indicates that the package is to not be + // installed. If the package to be tested is only available from + // PyPi, and the 'environment_type' is conda, then you can preface + // the package name by 'pip+', and the package will be installed via + // pip (with all the conda available packages installed first, + // followed by the pip installed packages). + // + "matrix": { + "numpy": [], + "toolz": [], + "cloudpickle": [], + "partd": [], + "pandas": [], + "s3fs": [], + "pip+blosc": [], + "pip+lz4": [], + "pip+git+https://github.com/dask/dask.git": [], + }, + + // Combinations of libraries/python versions can be excluded/included + // from the set to test. Each entry is a dictionary containing additional + // key-value pairs to include/exclude. + // + // An exclude entry excludes entries where all values match. The + // values are regexps that should match the whole string. + // + // An include entry adds an environment. Only the packages listed + // are installed. The 'python' key is required. The exclude rules + // do not apply to includes. 
+ // + // In addition to package names, the following keys are available: + // + // - python + // Python version, as in the *pythons* variable above. + // - environment_type + // Environment type, as above. + // - sys_platform + // Platform, as in sys.platform. Possible values for the common + // cases: 'linux2', 'win32', 'cygwin', 'darwin'. + // + // "exclude": [ + // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows + // {"environment_type": "conda", "six": null}, // don't run without six on conda + // ], + // + // "include": [ + // // additional env for python2.7 + // {"python": "2.7", "numpy": "1.8"}, + // // additional env if run on windows+conda + // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""}, + // ], + + // The directory (relative to the current directory) that benchmarks are + // stored in. If not provided, defaults to "benchmarks" + "benchmark_dir": "/home/ec2-user/dask-benchmarks/distributed/benchmarks", + + // The directory (relative to the current directory) to cache the Python + // environments in. If not provided, defaults to "env" + "env_dir": ".asv/env", + + // The directory (relative to the current directory) that raw benchmark + // results are stored in. If not provided, defaults to "results". + "results_dir": "/home/ec2-user/results/distributed", + + // The directory (relative to the current directory) that the html tree + // should be written to. If not provided, defaults to "html". + "html_dir": "/home/ec2-user/html/distributed", + + // The number of characters to retain in the commit hashes. + // "hash_length": 8, + + // `asv` will cache wheels of the recent builds in each + // environment, making them faster to install next time. This is + // number of builds to keep, per environment. + "wheel_cache_size": 1 + + // The commits after which the regression search in `asv publish` + // should start looking for regressions. 
Dictionary whose keys are + // regexps matching to benchmark names, and values corresponding to + // the commit (exclusive) after which to start looking for + // regressions. The default is to start from the first commit + // with results. If the commit is `null`, regression detection is + // skipped for the matching benchmark. + // + // "regressions_first_commits": { + // "some_benchmark": "352cdf", // Consider regressions only after this commit + // "another_benchmark": null, // Skip regression detection altogether + // } + + // The thresholds for relative change in results, after which `asv + // publish` starts reporting regressions. Dictionary of the same + // form as in ``regressions_first_commits``, with values + // indicating the thresholds. If multiple entries match, the + // maximum is taken. If no entry matches, the default is 5%. + // + // "regressions_thresholds": { + // "some_benchmark": 0.01, // Threshold of 1% + // "another_benchmark": 0.5, // Threshold of 50% + // } +} diff --git a/cron/run_benchmarks.sh b/cron/run_benchmarks.sh new file mode 100755 index 0000000..4b5d900 --- /dev/null +++ b/cron/run_benchmarks.sh @@ -0,0 +1,52 @@ +#!/usr/bin/bash +echo "Running benchmark update `date`" +HOME=/home/ec2-user +BENCHMARK_REPO=$HOME/dask-benchmarks +DASK_DIR=$BENCHMARK_REPO/dask +DISTRIBUTED_DIR=$BENCHMARK_REPO/distributed + +source activate dask-asv + +echo "Updating benchmark repo..." +cd $BENCHMARK_REPO +git checkout master +git pull + +echo "Running dask benchmarks..." +cd $DASK_DIR +asv --config $HOME/asv.dask.conf.json run NEW +DASK_STATUS=$? +if [ "$DASK_STATUS" -eq "0" ]; then + echo "Generating dask html files..." + asv --config $HOME/asv.dask.conf.json publish +fi + +echo "Running distributed benchmarks..." +cd $DISTRIBUTED_DIR +asv --config $HOME/asv.distributed.conf.json run NEW +DISTRIBUTED_STATUS=$? +if [ "$DISTRIBUTED_STATUS" -eq "0" ]; then + echo "Generating distributed html files..." 
+ # Currently install dask dependency for distributed via pip install git+http to + # get current dask master. asv does not directly support this even though you + # can get it to work. However directory structure gets messed up and machine.json + # is not in the correct location to generate the graphs. Thus this hack to copy it + # to the right locations before running publish. + find /home/ec2-user/results/distributed/aws-ec2-c4.xlarge -type d -exec cp /home/ec2-user/results/distributed/aws-ec2-c4.xlarge/machine.json {} \; + asv --config $HOME/asv.distributed.conf.json publish +fi + +STATUSES=$(($DASK_STATUS + $DISTRIBUTED_STATUS)) +if [ "$STATUSES" -lt "2" ]; then + echo "Publishing results to github..." + cd $BENCHMARK_REPO + git checkout gh-pages + cp -r $HOME/html . + rm -rf results + mv html results + git add results + git commit -am "Auto-committed by benchmark script" + git push +else + echo "No updates to publish..." +fi From 2ead1e189b58b2bef770f9b6ccec5b9b525507aa Mon Sep 17 00:00:00 2001 From: postelrich Date: Wed, 21 Dec 2016 15:32:23 -0500 Subject: [PATCH 2/8] Add cron readme --- cron/README.md | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 cron/README.md diff --git a/cron/README.md b/cron/README.md new file mode 100644 index 0000000..0979360 --- /dev/null +++ b/cron/README.md @@ -0,0 +1,48 @@ +# Automating Benchmarking + +This directory contains a bash script and asv configuration files for automating benchmarking for dask and dask-distributed. The script takes advantage of an asv feature that allows it to benchmark all commits since the last benchmark. If no new commits were found for both repositories, the script will just exit. If commits were found for at least one, it will benchmark, generate html files, and publish to the gh-pages branch. + +Note that asv does have a built in feature for doing all the steps to publish to gh-pages. 
This was not used as we are benchmarking two packages and to achieve a custom url structure. + +## Setting up new machine (for CentOS, adapt as needed) + +Install requirements: + +``` +sudo yum update +sudo yum upgrade +sudo yum install wget git gcc gcc-c++ bzip2 +git clone git@github.com:dask/dask-benchmarks.git +wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh +bash Miniconda3-latest-Linux-x86_64.sh # install to default location, yes to append path in bashrc +source ~/.bashrc +``` + +Generate key and add to your github ssh keys: + +``` +ssh-keygen -t rsa -b 4096 +cat .ssh/id_rsa.pub +``` + +Set git username and email + +``` +git config --global user.name "My Name" +git config --global user.email "me@email.com" +``` + +## Configuring benchmark script + +The script pulls down the latest dask-benchmarks from the repository. If you want to autodeploy changes to the asv config files and the script, you can run the script directly from the cloned repository. Otherwise, copy the files to another location and configure cron accordingly. + +The script needs to know the location of the config files and the benchmark clone. It defaults to the common directory structure for an AWS EC2 instance but you can override by setting environment variables for `BENCHMARK_REPO`, `DASK_ASV_CONFIG`, `DISTRIBUTED_ASV_CONFIG`. 
+ +## Configuring cron + +Run `crontab -e` and add the following line: + +``` +0 12 * * * /path/to/run_benchmarks.sh > /path/to/benchmarking.log 2>&1 +``` + From a424c98477c332b5e428ed7126ec4c14af69bf29 Mon Sep 17 00:00:00 2001 From: postelrich Date: Wed, 21 Dec 2016 15:36:18 -0500 Subject: [PATCH 3/8] explain distributed dask dependency --- cron/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cron/README.md b/cron/README.md index 0979360..5a86019 100644 --- a/cron/README.md +++ b/cron/README.md @@ -4,6 +4,8 @@ This directory contains a bash script and asv configuration files for automating Note that asv does have a built in feature for doing all the steps to publish to gh-pages. This was not used as we are benchmarking two packages and to achieve a custom url structure. +Also note that distributed has a dependency on dask. The master branch for distributed has a strong dependency on dask master. This requires pip installing the dask dependency from github. As this is not directly supported by asv, a small hack is required as commented in the script. 
+ ## Setting up new machine (for CentOS, adapt as needed) Install requirements: From 5ef2b9c7d8c1ebe3fe33fb76a9dce5fbdb4dfd77 Mon Sep 17 00:00:00 2001 From: postelrich Date: Wed, 21 Dec 2016 15:51:43 -0500 Subject: [PATCH 4/8] Auto-committed by benchmark script --- cron/run_benchmarks.sh | 13 +- results/dask/asv.css | 158 ++ results/dask/asv.js | 428 ++++++ results/dask/asv_ui.js | 231 +++ results/dask/error.html | 23 + results/dask/flot/jquery.flot.axislabels.js | 140 ++ results/dask/flot/jquery.flot.orderBars.js | 192 +++ results/dask/graphdisplay.js | 1302 +++++++++++++++++ .../toolz/array.FancyIndexing.time_fancy.json | 82 ++ .../toolz/array.Rechunk.time_rechunk.json | 82 ++ .../array.Rechunk.time_rechunk_meta.json | 82 ++ ...MemoryDataFrame.time_boolean_indexing.json | 82 ++ ...ame.MemoryDataFrame.time_count_values.json | 82 ++ ...ataframe.MemoryDataFrame.time_groupby.json | 26 + ...aframe.MemoryDataFrame.time_reduction.json | 54 + ...emoryDataFrame.time_scalar_comparison.json | 82 ++ ...aframe.MemoryDataFrame.time_set_index.json | 66 + .../s3fs/toolz/io.CSV.time_read_csv.json | 162 ++ .../s3fs/toolz/io.CSV.time_read_csv_meta.json | 162 ++ .../s3fs/toolz/io.HDF5.time_read_hdf5.json | 162 ++ .../toolz/io.HDF5.time_read_hdf5_meta.json | 162 ++ .../ram-7.5GB/s3fs/toolz/summary.json | 230 +++ .../toolz/array.FancyIndexing.time_fancy.json | 82 ++ .../toolz/array.Rechunk.time_rechunk.json | 82 ++ .../array.Rechunk.time_rechunk_meta.json | 82 ++ ...MemoryDataFrame.time_boolean_indexing.json | 82 ++ ...ame.MemoryDataFrame.time_count_values.json | 82 ++ ...ataframe.MemoryDataFrame.time_groupby.json | 26 + ...aframe.MemoryDataFrame.time_reduction.json | 54 + ...emoryDataFrame.time_scalar_comparison.json | 82 ++ ...aframe.MemoryDataFrame.time_set_index.json | 66 + .../s3fs/toolz/io.CSV.time_read_csv.json | 162 ++ .../s3fs/toolz/io.CSV.time_read_csv_meta.json | 162 ++ .../s3fs/toolz/io.HDF5.time_read_hdf5.json | 162 ++ .../toolz/io.HDF5.time_read_hdf5_meta.json | 162 ++ 
.../ram-7.5GB/s3fs/toolz/summary.json | 224 +++ .../array.FancyIndexing.time_fancy.json | 82 ++ .../summary/array.Rechunk.time_rechunk.json | 82 ++ .../array.Rechunk.time_rechunk_meta.json | 82 ++ ...MemoryDataFrame.time_boolean_indexing.json | 82 ++ ...ame.MemoryDataFrame.time_count_values.json | 82 ++ ...ataframe.MemoryDataFrame.time_groupby.json | 26 + ...aframe.MemoryDataFrame.time_reduction.json | 54 + ...emoryDataFrame.time_scalar_comparison.json | 82 ++ ...aframe.MemoryDataFrame.time_set_index.json | 66 + .../graphs/summary/io.CSV.time_read_csv.json | 82 ++ .../summary/io.CSV.time_read_csv_meta.json | 82 ++ .../summary/io.HDF5.time_read_hdf5.json | 82 ++ .../summary/io.HDF5.time_read_hdf5_meta.json | 82 ++ results/dask/index.html | 182 +++ results/dask/index.json | 454 ++++++ results/dask/jquery.md5.js | 269 ++++ results/dask/regressions.css | 33 + results/dask/regressions.js | 449 ++++++ results/dask/regressions.json | 70 + results/dask/regressions.xml | 10 + results/dask/stupidtable.js | 154 ++ results/dask/summarygrid.js | 109 ++ results/dask/summarylist.css | 50 + results/dask/summarylist.js | 464 ++++++ results/dask/swallow.ico | Bin 0 -> 4286 bytes results/dask/swallow.png | Bin 0 -> 893 bytes results/distributed/asv.css | 158 ++ results/distributed/asv.js | 428 ++++++ results/distributed/asv_ui.js | 231 +++ results/distributed/error.html | 23 + .../flot/jquery.flot.axislabels.js | 140 ++ .../distributed/flot/jquery.flot.orderBars.js | 192 +++ results/distributed/graphdisplay.js | 1302 +++++++++++++++++ ...client.ClientSuite.time_trivial_tasks.json | 14 + .../protocol.ProtocolSuite.time_dumps.json | 41 + .../protocol.ProtocolSuite.time_loads.json | 41 + .../protocol.ProtocolSuite.track_size.json | 41 + .../ram-7.5GB/s3fs/toolz/summary.json | 252 ++++ ...client.ClientSuite.time_trivial_tasks.json | 14 + .../protocol.ProtocolSuite.time_dumps.json | 41 + .../protocol.ProtocolSuite.time_loads.json | 41 + .../protocol.ProtocolSuite.track_size.json | 41 + 
.../ram-7.5GB/s3fs/toolz/summary.json | 252 ++++ ...client.ClientSuite.time_trivial_tasks.json | 14 + .../protocol.ProtocolSuite.time_dumps.json | 14 + .../protocol.ProtocolSuite.time_loads.json | 14 + .../protocol.ProtocolSuite.track_size.json | 14 + results/distributed/index.html | 182 +++ results/distributed/index.json | 356 +++++ results/distributed/jquery.md5.js | 269 ++++ results/distributed/regressions.css | 33 + results/distributed/regressions.js | 449 ++++++ results/distributed/regressions.json | 3 + results/distributed/regressions.xml | 2 + results/distributed/stupidtable.js | 154 ++ results/distributed/summarygrid.js | 109 ++ results/distributed/summarylist.css | 50 + results/distributed/summarylist.js | 464 ++++++ results/distributed/swallow.ico | Bin 0 -> 4286 bytes results/distributed/swallow.png | Bin 0 -> 893 bytes 96 files changed, 14096 insertions(+), 6 deletions(-) create mode 100644 results/dask/asv.css create mode 100644 results/dask/asv.js create mode 100644 results/dask/asv_ui.js create mode 100644 results/dask/error.html create mode 100644 results/dask/flot/jquery.flot.axislabels.js create mode 100644 results/dask/flot/jquery.flot.orderBars.js create mode 100644 results/dask/graphdisplay.js create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/array.FancyIndexing.time_fancy.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/array.Rechunk.time_rechunk.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 
3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/array.Rechunk.time_rechunk_meta.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_boolean_indexing.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_count_values.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_groupby.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_reduction.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_scalar_comparison.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_set_index.json create mode 100644 
results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/io.CSV.time_read_csv.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/io.CSV.time_read_csv_meta.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/io.HDF5.time_read_hdf5.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/io.HDF5.time_read_hdf5_meta.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-2.7/ram-7.5GB/s3fs/toolz/summary.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/array.FancyIndexing.time_fancy.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/array.Rechunk.time_rechunk.json create mode 100644 
results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/array.Rechunk.time_rechunk_meta.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_boolean_indexing.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_count_values.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_groupby.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_reduction.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_scalar_comparison.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 
3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/dataframe.MemoryDataFrame.time_set_index.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/io.CSV.time_read_csv.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/io.CSV.time_read_csv_meta.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/io.HDF5.time_read_hdf5.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/io.HDF5.time_read_hdf5_meta.json create mode 100644 results/dask/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/distributed/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pytables/python-3.5/ram-7.5GB/s3fs/toolz/summary.json create mode 100644 results/dask/graphs/summary/array.FancyIndexing.time_fancy.json create mode 100644 results/dask/graphs/summary/array.Rechunk.time_rechunk.json create mode 100644 results/dask/graphs/summary/array.Rechunk.time_rechunk_meta.json create mode 100644 results/dask/graphs/summary/dataframe.MemoryDataFrame.time_boolean_indexing.json create mode 100644 results/dask/graphs/summary/dataframe.MemoryDataFrame.time_count_values.json create mode 100644 
results/dask/graphs/summary/dataframe.MemoryDataFrame.time_groupby.json create mode 100644 results/dask/graphs/summary/dataframe.MemoryDataFrame.time_reduction.json create mode 100644 results/dask/graphs/summary/dataframe.MemoryDataFrame.time_scalar_comparison.json create mode 100644 results/dask/graphs/summary/dataframe.MemoryDataFrame.time_set_index.json create mode 100644 results/dask/graphs/summary/io.CSV.time_read_csv.json create mode 100644 results/dask/graphs/summary/io.CSV.time_read_csv_meta.json create mode 100644 results/dask/graphs/summary/io.HDF5.time_read_hdf5.json create mode 100644 results/dask/graphs/summary/io.HDF5.time_read_hdf5_meta.json create mode 100644 results/dask/index.html create mode 100644 results/dask/index.json create mode 100644 results/dask/jquery.md5.js create mode 100644 results/dask/regressions.css create mode 100644 results/dask/regressions.js create mode 100644 results/dask/regressions.json create mode 100644 results/dask/regressions.xml create mode 100644 results/dask/stupidtable.js create mode 100644 results/dask/summarygrid.js create mode 100644 results/dask/summarylist.css create mode 100644 results/dask/summarylist.js create mode 100644 results/dask/swallow.ico create mode 100644 results/dask/swallow.png create mode 100644 results/distributed/asv.css create mode 100644 results/distributed/asv.js create mode 100644 results/distributed/asv_ui.js create mode 100644 results/distributed/error.html create mode 100644 results/distributed/flot/jquery.flot.axislabels.js create mode 100644 results/distributed/flot/jquery.flot.orderBars.js create mode 100644 results/distributed/graphdisplay.js create mode 100644 results/distributed/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pip+blosc/pip+git+https:/github.com/dask/dask.git/pip+lz4/python-2.7/ram-7.5GB/s3fs/toolz/client.ClientSuite.time_trivial_tasks.json create 
mode 100644 results/distributed/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pip+blosc/pip+git+https:/github.com/dask/dask.git/pip+lz4/python-2.7/ram-7.5GB/s3fs/toolz/protocol.ProtocolSuite.time_dumps.json create mode 100644 results/distributed/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pip+blosc/pip+git+https:/github.com/dask/dask.git/pip+lz4/python-2.7/ram-7.5GB/s3fs/toolz/protocol.ProtocolSuite.time_loads.json create mode 100644 results/distributed/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pip+blosc/pip+git+https:/github.com/dask/dask.git/pip+lz4/python-2.7/ram-7.5GB/s3fs/toolz/protocol.ProtocolSuite.track_size.json create mode 100644 results/distributed/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pip+blosc/pip+git+https:/github.com/dask/dask.git/pip+lz4/python-2.7/ram-7.5GB/s3fs/toolz/summary.json create mode 100644 results/distributed/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pip+blosc/pip+git+https:/github.com/dask/dask.git/pip+lz4/python-3.5/ram-7.5GB/s3fs/toolz/client.ClientSuite.time_trivial_tasks.json create mode 100644 results/distributed/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pip+blosc/pip+git+https:/github.com/dask/dask.git/pip+lz4/python-3.5/ram-7.5GB/s3fs/toolz/protocol.ProtocolSuite.time_dumps.json create mode 100644 
results/distributed/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pip+blosc/pip+git+https:/github.com/dask/dask.git/pip+lz4/python-3.5/ram-7.5GB/s3fs/toolz/protocol.ProtocolSuite.time_loads.json create mode 100644 results/distributed/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pip+blosc/pip+git+https:/github.com/dask/dask.git/pip+lz4/python-3.5/ram-7.5GB/s3fs/toolz/protocol.ProtocolSuite.track_size.json create mode 100644 results/distributed/graphs/arch-x86_64/branch-master/cloudpickle/cpu-Intel(R) Xeon(R) CPU E5-2666 v3 @ 2.90GHz/machine-aws-ec2-c4.xlarge/numpy/os-Linux 3.10.0-514.el7.x86_64/pandas/partd/pip+blosc/pip+git+https:/github.com/dask/dask.git/pip+lz4/python-3.5/ram-7.5GB/s3fs/toolz/summary.json create mode 100644 results/distributed/graphs/summary/client.ClientSuite.time_trivial_tasks.json create mode 100644 results/distributed/graphs/summary/protocol.ProtocolSuite.time_dumps.json create mode 100644 results/distributed/graphs/summary/protocol.ProtocolSuite.time_loads.json create mode 100644 results/distributed/graphs/summary/protocol.ProtocolSuite.track_size.json create mode 100644 results/distributed/index.html create mode 100644 results/distributed/index.json create mode 100644 results/distributed/jquery.md5.js create mode 100644 results/distributed/regressions.css create mode 100644 results/distributed/regressions.js create mode 100644 results/distributed/regressions.json create mode 100644 results/distributed/regressions.xml create mode 100644 results/distributed/stupidtable.js create mode 100644 results/distributed/summarygrid.js create mode 100644 results/distributed/summarylist.css create mode 100644 results/distributed/summarylist.js create mode 100644 results/distributed/swallow.ico create mode 100644 
results/distributed/swallow.png diff --git a/cron/run_benchmarks.sh b/cron/run_benchmarks.sh index 4b5d900..4864543 100755 --- a/cron/run_benchmarks.sh +++ b/cron/run_benchmarks.sh @@ -1,9 +1,10 @@ #!/usr/bin/bash echo "Running benchmark update `date`" -HOME=/home/ec2-user -BENCHMARK_REPO=$HOME/dask-benchmarks +BENCHMARK_REPO=${BENCHMARK_REPOSITORY:-$HOME/dask-benchmarks} DASK_DIR=$BENCHMARK_REPO/dask DISTRIBUTED_DIR=$BENCHMARK_REPO/distributed +DASK_CONFIG=${DASK_ASV_CONFIG:-$HOME/asv.dask.conf.json} +DISTRIBUTED_CONFIG=${DISTRIBUTED_ASV_CONFIG:-$HOME/asv.distributed.conf.json} source activate dask-asv @@ -14,16 +15,16 @@ git pull echo "Running dask benchmarks..." cd $DASK_DIR -asv --config $HOME/asv.dask.conf.json run NEW +asv --config $DASK_CONFIG run NEW DASK_STATUS=$? if [ "$DASK_STATUS" -eq "0" ]; then echo "Generating dask html files..." - asv --config $HOME/asv.dask.conf.json publish + asv --config $DASK_CONFIG publish fi echo "Running distributed benchmarks..." cd $DISTRIBUTED_DIR -asv --config $HOME/asv.distributed.conf.json run NEW +asv --config $DISTRIBUTED_CONFIG run NEW DISTRIBUTED_STATUS=$? if [ "$DISTRIBUTED_STATUS" -eq "0" ]; then echo "Generating distributed html files..." @@ -33,7 +34,7 @@ if [ "$DISTRIBUTED_STATUS" -eq "0" ]; then # is not in the correct location to generate the graphs. Thus this hack to copy it # to the right locations before running publish. 
find /home/ec2-user/results/distributed/aws-ec2-c4.xlarge -type d -exec cp /home/ec2-user/results/distributed/aws-ec2-c4.xlarge/machine.json {} \; - asv --config $HOME/asv.distributed.conf.json publish + asv --config $DISTRIBUTED_CONFIG publish fi STATUSES=$(($DASK_STATUS + $DISTRIBUTED_STATUS)) diff --git a/results/dask/asv.css b/results/dask/asv.css new file mode 100644 index 0000000..82096f1 --- /dev/null +++ b/results/dask/asv.css @@ -0,0 +1,158 @@ +/* Basic navigation */ + +.asv-navigation { + padding: 2px; +} + +nav ul li.active a { + height: 52px; +} + +nav li.active span.navbar-brand { + background-color: #e7e7e7; + height: 52px; +} + +nav li.active span.navbar-brand:hover { + background-color: #e7e7e7; +} + +.navbar-default .navbar-link { + color: #2458D9; +} + +.panel-body { + padding: 0; +} + +.panel { + margin-bottom: 4px; + -webkit-box-shadow: none; + box-shadow: none; + border-radius: 0; + border-top-left-radius: 3px; + border-top-right-radius: 3px; +} + +.panel-default>.panel-heading, +.panel-heading { + font-size: 12px; + font-weight:bold; + padding: 2px; + text-align: center; + border-top-left-radius: 3px; + border-top-right-radius: 3px; + background-color: #eee; +} + +.btn, +.btn-group, +.btn-group-vertical>.btn:first-child, +.btn-group-vertical>.btn:last-child:not(:first-child), +.btn-group-vertical>.btn:last-child { + border: none; + border-radius: 0px; + overflow: hidden; +} + +.btn-default:focus, .btn-default:active, .btn-default.active { + border: none; + color: #fff; + background-color: #99bfcd; +} + +#range { + font-family: monospace; + text-align: center; + background: #ffffff; +} + +.form-control { + border: none; + border-radius: 0px; + font-size: 12px; + padding: 0px; +} + +.tooltip-inner { + min-width: 100px; + max-width: 800px; + text-align: left; + white-space: pre; + font-family: monospace; +} + +/* Benchmark tree */ + +.nav-list { + font-size: 12px; + padding: 0; + padding-left: 15px; +} + +.nav-list>li { + overflow-x: hidden; +} + 
+.nav-list>li>a { + padding: 0; + padding-left: 5px; + color: #000; +} + +.nav-list>li>a:focus { + color: #fff; + background-color: #99bfcd; + box-shadow: inset 0 3px 5px rgba(0,0,0,.125); +} + +.nav-list>li>.nav-header { + white-space: nowrap; + font-weight: 500; + margin-bottom: 2px; +} + +.caret-right { + display: inline-block; + width: 0; + height: 0; + margin-left: 2px; + vertical-align: middle; + border-left: 4px solid; + border-bottom: 4px solid transparent; + border-top: 4px solid transparent; +} + +/* Summary page */ + +.benchmark-container { + float: left; + width: 300px; + height: 116px; + padding: 4px; + border-radius: 3px; +} + +.benchmark-container:hover { + background-color: #eee; +} + +.benchmark-plot { + width: 292px; + height: 88px; +} + +.benchmark-text { + font-size: 12px; + color: #000; + width: 292px; + overflow: hidden; +} + +#extra-buttons { + margin: 1em; +} + +#extra-buttons a { + border: solid 1px #ccc; +} diff --git a/results/dask/asv.js b/results/dask/asv.js new file mode 100644 index 0000000..5144fe3 --- /dev/null +++ b/results/dask/asv.js @@ -0,0 +1,428 @@ +'use strict'; + +$(document).ready(function() { + /* GLOBAL STATE */ + /* The index.json content as returned from the server */ + var master_json = {}; + /* Extra pages: {name: show_function} */ + var loaded_pages = {}; + /* Previous window scroll positions */ + var window_scroll_positions = {}; + /* Previous window hash location */ + var window_last_location = null; + /* Graph data cache */ + var graph_cache = {}; + var graph_cache_max_size = 5; + + var colors = [ + '#247AAD', + '#E24A33', + '#988ED5', + '#777777', + '#FBC15E', + '#8EBA42', + '#FFB5B8' + ]; + + var time_units = [ + ['ps', 'picoseconds', 0.000000000001], + ['ns', 'nanoseconds', 0.000000001], + ['μs', 'microseconds', 0.000001], + ['ms', 'milliseconds', 0.001], + ['s', 'seconds', 1], + ['m', 'minutes', 60], + ['h', 'hours', 60 * 60], + ['d', 'days', 60 * 60 * 24], + ['w', 'weeks', 60 * 60 * 24 * 7], + ['y', 'years', 
60 * 60 * 24 * 7 * 52], + ['C', 'centuries', 60 * 60 * 24 * 7 * 52 * 100] + ]; + + function pretty_second(x) { + for (var i = 0; i < time_units.length - 1; ++i) { + if (Math.abs(x) < time_units[i+1][2]) { + return (x / time_units[i][2]).toFixed(3) + time_units[i][0]; + } + } + + return 'inf'; + } + + /* Convert a flat index to permutation to the corresponding value */ + function param_selection_from_flat_idx(params, idx) { + var selection = []; + if (idx < 0) { + idx = 0; + } + for (var k = params.length-1; k >= 0; --k) { + var j = idx % params[k].length; + selection.unshift([j]); + idx = (idx - j) / params[k].length; + } + selection.unshift([null]); + return selection; + } + + /* Convert a benchmark parameter value from their native Python + repr format to a number or a string, ready for presentation */ + function convert_benchmark_param_value(value_repr) { + var match = Number(value_repr); + if (!isNaN(match)) { + return match; + } + + /* Python str */ + match = value_repr.match(/^'(.+)'$/); + if (match) { + return match[1]; + } + + /* Python unicode */ + match = value_repr.match(/^u'(.+)'$/); + if (match) { + return match[1]; + } + + /* Python class */ + match = value_repr.match(/^$/); + if (match) { + return match[1]; + } + + return value_repr; + } + + /* Convert loaded graph data to a format flot understands, by + treating either time or one of the parameters as x-axis, + and selecting only one value of the remaining axes */ + function filter_graph_data(raw_series, x_axis, other_indices, params) { + if (params.length == 0) { + /* Simple time series */ + return raw_series; + } + + /* Compute position of data entry in the results list, + and stride corresponding to plot x-axis parameter */ + var stride = 1; + var param_stride = 0; + var param_idx = 0; + for (var k = params.length - 1; k >= 0; --k) { + if (k == x_axis - 1) { + param_stride = stride; + } + else { + param_idx += other_indices[k + 1] * stride; + } + stride *= params[k].length; + } + + if (x_axis == 
0) { + /* x-axis is time axis */ + var series = new Array(raw_series.length); + for (var k = 0; k < raw_series.length; ++k) { + if (raw_series[k][1] === null) { + series[k] = [raw_series[k][0], null]; + } else { + series[k] = [raw_series[k][0], + raw_series[k][1][param_idx]]; + } + } + return series; + } + else { + /* x-axis is some parameter axis */ + var time_idx = null; + if (other_indices[0] === null) { + time_idx = raw_series.length - 1; + } + else { + /* Need to search for the correct time value */ + for (var k = 0; k < raw_series.length; ++k) { + if (raw_series[k][0] == other_indices[0]) { + time_idx = k; + break; + } + } + if (time_idx === null) { + /* No data points */ + return []; + } + } + + var x_values = params[x_axis - 1]; + var series = new Array(x_values.length); + for (var k = 0; k < x_values.length; ++k) { + if (raw_series[time_idx][1] === null) { + series[k] = [convert_benchmark_param_value(x_values[k]), + null]; + } + else { + series[k] = [convert_benchmark_param_value(x_values[k]), + raw_series[time_idx][1][param_idx]]; + } + param_idx += param_stride; + } + return series; + } + } + + function filter_graph_data_idx(raw_series, x_axis, flat_idx, params) { + var selection = param_selection_from_flat_idx(params, flat_idx); + var flat_selection = []; + $.each(selection, function(i, v) { + flat_selection.push(v[0]); + }); + return filter_graph_data(raw_series, x_axis, flat_selection, params); + } + + /* Given a specific group of parameters, generate the URL to + use to load that graph. 
*/ + function graph_to_path(benchmark_name, state) { + var parts = []; + $.each(state, function(key, value) { + if (value === null) { + parts.push(key + "-null"); + } else if (value) { + parts.push(key + "-" + value); + } else { + parts.push(key); + } + }); + parts.sort(); + parts.splice(0, 0, "graphs"); + parts.push(benchmark_name); + return parts.join('/') + ".json"; + } + + /* + Load and cache graph data (on javascript side) + */ + function load_graph_data(url, success, failure) { + var dfd = $.Deferred(); + if (graph_cache[url]) { + setTimeout(function() { + dfd.resolve(graph_cache[url]); + }, 1); + } + else { + $.ajax({ + url: url, + dataType: "json", + cache: false + }).done(function(data) { + if (Object.keys(graph_cache).length > graph_cache_max_size) { + $.each(Object.keys(graph_cache), function (i, key) { + delete graph_cache[key]; + }); + } + graph_cache[url] = data; + dfd.resolve(data); + }).fail(function() { + dfd.reject(); + }); + } + return dfd.promise(); + } + + /* + Parse hash string, assuming format similar to standard URL + query strings + */ + function parse_hash_string(str) { + var info = {location: [''], params: {}}; + + if (str && str[0] == '#') { + str = str.slice(1); + } + if (str && str[0] == '/') { + str = str.slice(1); + } + + var match = str.match(/^([^?]*?)\?/); + if (match) { + info['location'] = match[1].replace(/\/+/, '/').split('/'); + var rest = str.slice(match[1].length+1); + var parts = rest.split('&'); + for (var i = 0; i < parts.length; ++i) { + var part = parts[i].split('='); + if (part.length != 2) { + continue; + } + var key = part[0]; + var value = decodeURIComponent(part[1].replace(/\+/g, " ")); + if (value == '[none]') { + value = null; + } + if (info['params'][key] === undefined) { + info['params'][key] = [value]; + } + else { + info['params'][key].push(value); + } + } + } + else { + info['location'] = str.replace(/\/+/, '/').split('/'); + } + return info; + } + + /* + Generate a hash string, inverse of parse_hash_string 
+ */ + function format_hash_string(info) { + var parts = info['params']; + var str = '#' + info['location']; + + if (parts) { + str = str + '?'; + var first = true; + $.each(parts, function (key, values) { + $.each(values, function (idx, value) { + if (!first) { + str = str + '&'; + } + if (value === null) { + value = '[none]'; + } + str = str + key + '=' + encodeURIComponent(value); + first = false; + }); + }); + } + return str; + } + + /* + Dealing with sub-pages + */ + + function show_page(name, params) { + if (loaded_pages[name] !== undefined) { + $("#nav ul li.active").removeClass('active'); + $("#nav-li-" + name).addClass('active'); + $("#graph-display").hide(); + $("#summarygrid-display").hide(); + $("#summarylist-display").hide(); + $('#regressions-display').hide(); + $('.tooltip').remove(); + loaded_pages[name](params); + return true; + } + else { + return false; + } + } + + function hashchange() { + var info = parse_hash_string(window.location.hash); + + /* Keep track of window scroll position; makes the back-button work */ + var old_scroll_pos = window_scroll_positions[info.location.join('/')]; + window_scroll_positions[window_last_location] = $(window).scrollTop(); + window_last_location = info.location.join('/'); + + /* Redirect to correct handler */ + if (show_page(info.location, info.params)) { + /* show_page does the work */ + } + else { + /* Display benchmark page */ + info.params['benchmark'] = info.location[0]; + show_page('graphdisplay', info.params); + } + + /* Scroll back to previous position, if any */ + if (old_scroll_pos !== undefined) { + $(window).scrollTop(old_scroll_pos); + } + } + + function get_commit_hash(revision) { + var commit_hash = master_json.revision_to_hash[revision]; + if (commit_hash) { + // Return printable commit hash + commit_hash = commit_hash.slice(0, master_json.hash_length); + } + return commit_hash; + } + + function get_revision(commit_hash) { + var rev = null; + $.each(master_json.revision_to_hash, 
function(revision, full_commit_hash) { + if (full_commit_hash.startsWith(commit_hash)) { + rev = revision; + // break the $.each loop + return false; + } + }); + return rev; + } + + function init() { + /* Fetch the master index.json and then set up the page elements + based on it. */ + $.ajax({ + url: "index.json", + dataType: "json", + cache: false + }).done(function (index) { + master_json = index; + $.asv.master_json = index; + + /* Page title */ + var project_name = $("#project-name")[0]; + project_name.textContent = index.project; + project_name.setAttribute("href", index.project_url); + $("#project-name").textContent = index.project; + document.title = "airspeed velocity of an unladen " + index.project; + + $(window).on('hashchange', hashchange); + + $('#graph-display').hide(); + $('#regressions-display').hide(); + $('#summarygrid-display').hide(); + $('#summarylist-display').hide(); + + hashchange(); + }).fail(function () { + $.asv.ui.network_error(); + }); + } + + + /* + Set up $.asv + */ + + this.register_page = function(name, show_function) { + loaded_pages[name] = show_function; + } + this.parse_hash_string = parse_hash_string; + this.format_hash_string = format_hash_string; + + this.filter_graph_data = filter_graph_data; + this.filter_graph_data_idx = filter_graph_data_idx; + this.convert_benchmark_param_value = convert_benchmark_param_value; + this.param_selection_from_flat_idx = param_selection_from_flat_idx; + this.graph_to_path = graph_to_path; + this.load_graph_data = load_graph_data; + this.get_commit_hash = get_commit_hash; + this.get_revision = get_revision; + + this.master_json = master_json; /* Updated after index.json loads */ + + this.pretty_second = pretty_second; + this.time_units = time_units; + + this.colors = colors; + + $.asv = this; + + + /* + Launch it + */ + + init(); +}); diff --git a/results/dask/asv_ui.js b/results/dask/asv_ui.js new file mode 100644 index 0000000..49833b7 --- /dev/null +++ b/results/dask/asv_ui.js @@ -0,0 +1,231 
@@ +'use strict'; + +$(document).ready(function() { + function make_panel(nav, heading) { + var panel = $('
'); + nav.append(panel); + var panel_header = $( + '
' + heading + '
'); + panel.append(panel_header); + var panel_body = $('
'); + panel.append(panel_body); + return panel_body; + } + + function make_value_selector_panel(nav, heading, values, setup_callback) { + var panel_body = make_panel(nav, heading); + var vertical = false; + var buttons = $('
'); + + panel_body.append(buttons); + + $.each(values, function (idx, value) { + var button = $( + ''); + setup_callback(idx, value, button); + buttons.append(button); + }); + + return panel_body; + } + + function reflow_value_selector_panels(no_timeout) { + $('.panel').each(function (i, panel_obj) { + var panel = $(panel_obj); + panel.find('.btn-group').each(function (i, buttons_obj) { + var buttons = $(buttons_obj); + var width = 0; + + if (buttons.hasClass('reflow-done')) { + /* already processed */ + return; + } + + $.each(buttons.children(), function(idx, value) { + width += value.scrollWidth; + }); + + var max_width = panel_obj.clientWidth; + + if (width >= max_width) { + buttons.addClass("btn-group-vertical"); + buttons.css("width", "100%"); + buttons.css("max-height", "20ex"); + buttons.css("overflow-y", "auto"); + } + else { + buttons.addClass("btn-group-justified"); + } + + /* The widths can be zero if the UI is not fully layouted yet, + so mark the adjustment complete only if this is not the case */ + if (width > 0 && max_width > 0) { + buttons.addClass("reflow-done"); + } + }); + }); + + if (!no_timeout) { + /* Call again asynchronously, in case the UI was not fully layouted yet */ + setTimeout(function() { $.asv.ui.reflow_value_selector_panels(true); }, 0); + } + } + + function network_error(ajax, status, error) { + $("#error-message").text( + "Error fetching content. " + + "Perhaps web server has gone down."); + $("#error").modal('show'); + } + + function hover_graph(element, graph_url, benchmark_basename, parameter_idx, revisions) { + /* Show the summary graph as a popup */ + var plot_div = $('
'); + plot_div.css('width', '11.8em'); + plot_div.css('height', '7em'); + plot_div.css('border', '2px solid black'); + plot_div.css('background-color', 'white'); + + function update_plot() { + var markings = []; + + if (revisions) { + $.each(revisions, function(i, revs) { + var rev_a = revs[0]; + var rev_b = revs[1]; + + if (rev_a !== null) { + markings.push({ color: '#d00', lineWidth: 2, xaxis: { from: rev_a, to: rev_a }}); + markings.push({ color: "rgba(255,0,0,0.1)", xaxis: { from: rev_a, to: rev_b }}); + } + markings.push({ color: '#d00', lineWidth: 2, xaxis: { from: rev_b, to: rev_b }}); + }); + } + + $.asv.load_graph_data( + graph_url + ).done(function (data) { + var params = $.asv.master_json.benchmarks[benchmark_basename].params; + data = $.asv.filter_graph_data_idx(data, 0, parameter_idx, params); + var options = { + colors: ['#000'], + series: { + lines: { + show: true, + lineWidth: 2 + }, + shadowSize: 0 + }, + grid: { + borderWidth: 1, + margin: 0, + labelMargin: 0, + axisMargin: 0, + minBorderMargin: 0, + markings: markings, + }, + xaxis: { + ticks: [], + }, + yaxis: { + ticks: [], + min: 0 + }, + legend: { + show: false + } + }; + var plot = $.plot(plot_div, [{data: data}], options); + }).fail(function () { + // TODO: Handle failure + }); + + return plot_div; + } + + element.popover({ + placement: 'left auto', + trigger: 'hover', + html: true, + delay: 50, + content: $('
').append(plot_div) + }); + + element.on('show.bs.popover', update_plot); + } + + function hover_summary_graph(element, benchmark_basename) { + /* Show the summary graph as a popup */ + var plot_div = $('
'); + plot_div.css('width', '11.8em'); + plot_div.css('height', '7em'); + plot_div.css('border', '2px solid black'); + plot_div.css('background-color', 'white'); + + function update_plot() { + var markings = []; + + $.asv.load_graph_data( + 'graphs/summary/' + benchmark_basename + '.json' + ).done(function (data) { + var options = { + colors: $.asv.colors, + series: { + lines: { + show: true, + lineWidth: 2 + }, + shadowSize: 0 + }, + grid: { + borderWidth: 1, + margin: 0, + labelMargin: 0, + axisMargin: 0, + minBorderMargin: 0, + markings: markings, + }, + xaxis: { + ticks: [], + }, + yaxis: { + ticks: [], + min: 0 + }, + legend: { + show: false + } + }; + var plot = $.plot(plot_div, [{data: data}], options); + }).fail(function () { + // TODO: Handle failure + }); + + return plot_div; + } + + element.popover({ + placement: 'left auto', + trigger: 'hover', + html: true, + delay: 50, + content: $('
').append(plot_div) + }); + + element.on('show.bs.popover', update_plot); + } + + /* + Set up $.asv.ui + */ + + this.network_error = network_error; + this.make_panel = make_panel; + this.make_value_selector_panel = make_value_selector_panel; + this.reflow_value_selector_panels = reflow_value_selector_panels; + this.hover_graph = hover_graph; + this.hover_summary_graph = hover_summary_graph; + + $.asv.ui = this; +}); diff --git a/results/dask/error.html b/results/dask/error.html new file mode 100644 index 0000000..d55f9a8 --- /dev/null +++ b/results/dask/error.html @@ -0,0 +1,23 @@ + + + + airspeed velocity error + + + + +

+ swallow + Can not determine continental origin of swallow. +

+ +

+ One or more external (Javascript) dependencies of airspeed velocity failed to load. +

+ +

+ Make sure you have an active internet connection and enable 3rd-party scripts + in your browser the first time you load airspeed velocity. +

+ + diff --git a/results/dask/flot/jquery.flot.axislabels.js b/results/dask/flot/jquery.flot.axislabels.js new file mode 100644 index 0000000..e8017e8 --- /dev/null +++ b/results/dask/flot/jquery.flot.axislabels.js @@ -0,0 +1,140 @@ +/* +CAxis Labels Plugin for flot. :P +Copyright (c) 2010 Xuan Luo + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + */ +(function ($) { + var options = { }; + + function init(plot) { + // This is kind of a hack. There are no hooks in Flot between + // the creation and measuring of the ticks (setTicks, measureTickLabels + // in setupGrid() ) and the drawing of the ticks and plot box + // (insertAxisLabels in setupGrid() ). + // + // Therefore, we use a trick where we run the draw routine twice: + // the first time to get the tick measurements, so that we can change + // them, and then have it draw it again. 
+ var secondPass = false; + plot.hooks.draw.push(function (plot, ctx) { + if (!secondPass) { + // MEASURE AND SET OPTIONS + $.each(plot.getAxes(), function(axisName, axis) { + var opts = axis.options // Flot 0.7 + || plot.getOptions()[axisName]; // Flot 0.6 + if (!opts || !opts.axisLabel) + return; + + var w, h; + if (opts.axisLabelUseCanvas != false) + opts.axisLabelUseCanvas = true; + + if (opts.axisLabelUseCanvas) { + // canvas text + if (!opts.axisLabelFontSizePixels) + opts.axisLabelFontSizePixels = 14; + if (!opts.axisLabelFontFamily) + opts.axisLabelFontFamily = 'sans-serif'; + // since we currently always display x as horiz. + // and y as vertical, we only care about the height + w = opts.axisLabelFontSizePixels; + h = opts.axisLabelFontSizePixels; + + } else { + // HTML text + var elem = $('
' + opts.axisLabel + '
'); + plot.getPlaceholder().append(elem); + w = elem.outerWidth(true); + h = elem.outerHeight(true); + elem.remove(); + } + + if (axisName.charAt(0) == 'x') + axis.labelHeight += h; + else + axis.labelWidth += w; + opts.labelHeight = axis.labelHeight; + opts.labelWidth = axis.labelWidth; + }); + // re-draw with new label widths and heights + secondPass = true; + plot.setupGrid(); + plot.draw(); + + + } else { + // DRAW + $.each(plot.getAxes(), function(axisName, axis) { + var opts = axis.options // Flot 0.7 + || plot.getOptions()[axisName]; // Flot 0.6 + if (!opts || !opts.axisLabel) + return; + + if (opts.axisLabelUseCanvas) { + // canvas text + var ctx = plot.getCanvas().getContext('2d'); + ctx.save(); + ctx.font = opts.axisLabelFontSizePixels + 'px ' + + opts.axisLabelFontFamily; + var width = ctx.measureText(opts.axisLabel).width; + var height = opts.axisLabelFontSizePixels; + var x, y; + if (axisName.charAt(0) == 'x') { + x = plot.getPlotOffset().left + plot.width()/2 - width/2; + y = plot.getCanvas().height; + } else { + x = height * 0.72; + y = plot.getPlotOffset().top + plot.height()/2 - width/2; + } + ctx.translate(x, y); + ctx.rotate((axisName.charAt(0) == 'x') ? 0 : -Math.PI/2); + ctx.fillText(opts.axisLabel, 0, 0); + ctx.restore(); + + } else { + // HTML text + plot.getPlaceholder().find('#' + axisName + 'Label').remove(); + var elem = $('
' + opts.axisLabel + '
'); + if (axisName.charAt(0) == 'x') { + elem.css('left', plot.getPlotOffset().left + plot.width()/2 - elem.outerWidth()/2 + 'px'); + elem.css('bottom', '0px'); + } else { + elem.css('top', plot.getPlotOffset().top + plot.height()/2 - elem.outerHeight()/2 + 'px'); + elem.css('left', '0px'); + } + plot.getPlaceholder().append(elem); + } + }); + secondPass = false; + } + }); + } + + + + $.plot.plugins.push({ + init: init, + options: options, + name: 'axisLabels', + version: '1.0' + }); +})(jQuery); diff --git a/results/dask/flot/jquery.flot.orderBars.js b/results/dask/flot/jquery.flot.orderBars.js new file mode 100644 index 0000000..3157a71 --- /dev/null +++ b/results/dask/flot/jquery.flot.orderBars.js @@ -0,0 +1,192 @@ +/* + * Flot plugin to order bars side by side. + * + * Released under the MIT license by Benjamin BUFFET, 20-Sep-2010. + * + * This plugin is an alpha version. + * + * To activate the plugin you must specify the parameter "order" for the specific serie : + * + * $.plot($("#placeholder"), [{ data: [ ... 
], bars :{ order = null or integer }]) + * + * If 2 series have the same order param, they are ordered by the position in the array; + * + * The plugin adjust the point by adding a value depanding of the barwidth + * Exemple for 3 series (barwidth : 0.1) : + * + * first bar décalage : -0.15 + * second bar décalage : -0.05 + * third bar décalage : 0.05 + * + */ + +(function($){ + function init(plot){ + var orderedBarSeries; + var nbOfBarsToOrder; + var borderWidth; + var borderWidthInXabsWidth; + var pixelInXWidthEquivalent = 1; + var isHorizontal = false; + + /* + * This method add shift to x values + */ + function reOrderBars(plot, serie, datapoints){ + var shiftedPoints = null; + + if(serieNeedToBeReordered(serie)){ + checkIfGraphIsHorizontal(serie); + calculPixel2XWidthConvert(plot); + retrieveBarSeries(plot); + calculBorderAndBarWidth(serie); + + if(nbOfBarsToOrder >= 2){ + var position = findPosition(serie); + var decallage = 0; + + var centerBarShift = calculCenterBarShift(); + + if (isBarAtLeftOfCenter(position)){ + decallage = -1*(sumWidth(orderedBarSeries,position-1,Math.floor(nbOfBarsToOrder / 2)-1)) - centerBarShift; + }else{ + decallage = sumWidth(orderedBarSeries,Math.ceil(nbOfBarsToOrder / 2),position-2) + centerBarShift + borderWidthInXabsWidth*2; + } + + shiftedPoints = shiftPoints(datapoints,serie,decallage); + datapoints.points = shiftedPoints; + } + } + return shiftedPoints; + } + + function serieNeedToBeReordered(serie){ + return serie.bars != null + && serie.bars.show + && serie.bars.order != null; + } + + function calculPixel2XWidthConvert(plot){ + var gridDimSize = isHorizontal ? plot.getPlaceholder().innerHeight() : plot.getPlaceholder().innerWidth(); + var minMaxValues = isHorizontal ? 
getAxeMinMaxValues(plot.getData(),1) : getAxeMinMaxValues(plot.getData(),0); + var AxeSize = minMaxValues[1] - minMaxValues[0]; + pixelInXWidthEquivalent = AxeSize / gridDimSize; + } + + function getAxeMinMaxValues(series,AxeIdx){ + var minMaxValues = new Array(); + for(var i = 0; i < series.length; i++){ + if (typeof series[i].data[0] != "number") { + minMaxValues[0] = 0; + minMaxValues[1] = series.length; + return minMaxValues; + } + minMaxValues[0] = series[i].data[0][AxeIdx]; + minMaxValues[1] = series[i].data[series[i].data.length - 1][AxeIdx]; + } + return minMaxValues; + } + + function retrieveBarSeries(plot){ + orderedBarSeries = findOthersBarsToReOrders(plot.getData()); + nbOfBarsToOrder = orderedBarSeries.length; + } + + function findOthersBarsToReOrders(series){ + var retSeries = new Array(); + + for(var i = 0; i < series.length; i++){ + if(series[i].bars.order != null && series[i].bars.show){ + retSeries.push(series[i]); + } + } + + return retSeries.sort(sortByOrder); + } + + function sortByOrder(serie1,serie2){ + var x = serie1.bars.order; + var y = serie2.bars.order; + return ((x < y) ? -1 : ((x > y) ? 1 : 0)); + } + + function calculBorderAndBarWidth(serie){ + borderWidth = serie.bars.lineWidth ? 
serie.bars.lineWidth : 2; + borderWidthInXabsWidth = borderWidth * pixelInXWidthEquivalent; + } + + function checkIfGraphIsHorizontal(serie){ + if(serie.bars.horizontal){ + isHorizontal = true; + } + } + + function findPosition(serie){ + var pos = 0 + for (var i = 0; i < orderedBarSeries.length; ++i) { + if (serie == orderedBarSeries[i]){ + pos = i; + break; + } + } + + return pos+1; + } + + function calculCenterBarShift(){ + var width = 0; + + if(nbOfBarsToOrder%2 != 0) + width = (orderedBarSeries[Math.ceil(nbOfBarsToOrder / 2)].bars.barWidth)/2; + + return width; + } + + function isBarAtLeftOfCenter(position){ + return position <= Math.ceil(nbOfBarsToOrder / 2); + } + + function sumWidth(series,start,end){ + var totalWidth = 0; + + for(var i = start; i <= end; i++){ + totalWidth += series[i].bars.barWidth+borderWidthInXabsWidth*2; + } + + return totalWidth; + } + + function shiftPoints(datapoints,serie,dx){ + var ps = datapoints.pointsize; + var points = datapoints.points; + var j = 0; + for(var i = isHorizontal ? 1 : 0;i < points.length; i += ps){ + points[i] += dx; + //Adding the new x value in the serie to be abble to display the right tooltip value, + //using the index 3 to not overide the third index. + serie.data[j][3] = points[i]; + j++; + } + + return points; + } + + plot.hooks.processDatapoints.push(reOrderBars); + + } + + var options = { + series : { + bars: {order: null} // or number/string + } + }; + + $.plot.plugins.push({ + init: init, + options: options, + name: "orderBars", + version: "0.2" + }); + +})(jQuery); + diff --git a/results/dask/graphdisplay.js b/results/dask/graphdisplay.js new file mode 100644 index 0000000..65ad077 --- /dev/null +++ b/results/dask/graphdisplay.js @@ -0,0 +1,1302 @@ +'use strict'; + +$(document).ready(function() { + /* The state of the parameters in the sidebar. Dictionary mapping + strings to arrays containing the "enabled" configurations. */ + var state = null; + /* The name of the current benchmark being displayed. 
*/ + var current_benchmark = null; + /* An array of graphs being displayed. */ + var graphs = []; + var orig_graphs = []; + /* An array of commit revisions being displayed */ + var current_revisions = []; + /* True when log scaling is enabled. */ + var log_scale = false; + /* True when zooming in on the y-axis. */ + var zoom_y_axis = false; + /* True when reference scaling is enabled. */ + var reference_scale = false; + /* True when selecting a reference point */ + var select_reference = false; + /* The reference value */ + var reference = 1.0; + /* Is even commit spacing being used? */ + var even_spacing = false; + var even_spacing_revisions = []; + /* Is date scale being used ? */ + var date_scale = false; + var date_to_revision = {}; + /* A little div to handle tooltip placement on the graph */ + var tooltip = null; + /* X-axis coordinate axis in the data set; always 0 for + non-parameterized tests where revision and date are the only potential x-axis */ + var x_coordinate_axis = 0; + var x_coordinate_is_category = false; + /* List of lists of value combinations to plot (apart from x-axis) + in parameterized tests. */ + var benchmark_param_selection = [[null]]; + /* Highlighted revisions */ + var highlighted_revisions = null; + /* Whether benchmark graph display was set up */ + var benchmark_graph_display_ready = false; + + + /* UTILITY FUNCTIONS */ + function arr_remove_from(a, x) { + var out = []; + $.each(a, function(i, val) { + if (x !== val) { + out.push(val); + } + }); + return out; + } + + function obj_copy(obj) { + var newobj = {}; + $.each(obj, function(key, val) { + newobj[key] = val; + }); + return newobj; + } + + function obj_length(obj) { + var i = 0; + for (var x in obj) + ++i; + return i; + } + + function obj_get_first_key(data) { + for (var prop in data) + return prop; + } + + function no_data(ajax, status, error) { + $("#error-message").text( + "No data for this combination of filters. 
"); + $("#error").modal('show'); + } + + function get_x_from_revision(rev) { + if (date_scale) { + return $.asv.master_json.revision_to_date[rev]; + } else { + return rev; + } + } + + function get_commit_hash(x) { + // Return the commit hash in the current graph located at position x + if (date_scale) { + x = date_to_revision[x]; + } + return $.asv.get_commit_hash(x); + } + + + function display_benchmark(bm_name, state_selection, sub_benchmark_idx, highlight_revisions) { + setup_benchmark_graph_display(); + + $('#graph-display').show(); + $('#summarygrid-display').hide(); + $('#regressions-display').hide(); + $('.tooltip').remove(); + + if (reference_scale) { + reference_scale = false; + $('#reference').removeClass('active'); + reference = 1.0; + } + current_benchmark = bm_name; + highlighted_revisions = highlight_revisions; + $("#title").text(bm_name); + setup_benchmark_params(state_selection, sub_benchmark_idx); + replace_graphs(); + } + + function setup_benchmark_graph_display() { + if (benchmark_graph_display_ready) { + return; + } + benchmark_graph_display_ready = true; + + /* When the window resizes, redraw the graphs */ + $(window).resize(function() { + update_graphs(); + }); + + var nav = $("#graphdisplay-navigation"); + + /* Make the static tooltips look correct */ + $('[data-toggle="tooltip"]').tooltip({container: 'body'}); + + /* Add insertion point for benchmark parameters */ + var state_params_nav = $("
"); + nav.append(state_params_nav); + + /* Add insertion point for benchmark parameters */ + var bench_params_nav = $("
"); + nav.append(bench_params_nav); + + /* Benchmark panel */ + var panel_body = $.asv.ui.make_panel(nav, 'benchmark'); + + var tree = $('