From 9b7920aaf2938f157d2d4906d332715ef5a0055d Mon Sep 17 00:00:00 2001
From: vsoch
Date: Wed, 21 Feb 2024 18:50:59 -0700
Subject: [PATCH 1/3] finishing up
Signed-off-by: vsoch
---
.devcontainer/Dockerfile | 26 ++
.devcontainer/devcontainer.json | 17 ++
.github/workflows/main.yaml | 60 +++++
README.md | 202 ++++++++++++++-
compspec_ior/__init__.py | 2 +-
compspec_ior/defaults.py | 4 +
compspec_ior/plugin.py | 130 +++++++++-
compspec_ior/schema.json | 439 +++++++++++++++++++++++++++++++-
examples/singleton-run.py | 25 ++
examples/test/ior-data.json | 165 ++++++++++++
10 files changed, 1045 insertions(+), 25 deletions(-)
create mode 100644 .devcontainer/Dockerfile
create mode 100644 .devcontainer/devcontainer.json
create mode 100644 .github/workflows/main.yaml
create mode 100644 compspec_ior/defaults.py
create mode 100644 examples/singleton-run.py
create mode 100644 examples/test/ior-data.json
diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
new file mode 100644
index 0000000..6b556e8
--- /dev/null
+++ b/.devcontainer/Dockerfile
@@ -0,0 +1,26 @@
+FROM ghcr.io/converged-computing/metric-ior:latest
+
+LABEL maintainer="Vanessasaurus <@vsoch>"
+
+# Match the default user id of a typical single-user system so we aren't root
+ARG USERNAME=vscode
+ARG USER_UID=1000
+ARG USER_GID=1000
+ENV USERNAME=${USERNAME}
+ENV USER_UID=${USER_UID}
+ENV USER_GID=${USER_GID}
+USER root
+
+# extra interactive utilities
+RUN apt-get update \
+ && apt-get -qq install -y --no-install-recommends \
+ vim less sudo python3-pip
+
+# Ensure regular python is visible
+RUN ln -s /usr/bin/python3 /usr/bin/python
+
+# Add the group and user that match our ids
+RUN groupadd -g ${USER_GID} ${USERNAME} && \
+ adduser --disabled-password --uid ${USER_UID} --gid ${USER_GID} --gecos "" ${USERNAME} && \
+ echo "${USERNAME} ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers
+USER $USERNAME
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 0000000..49813f2
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,17 @@
+{
+ "name": "Compspec IOR Development Environment",
+ "dockerFile": "Dockerfile",
+ "context": "../",
+
+ "customizations": {
+ "vscode": {
+ "settings": {
+ "terminal.integrated.defaultProfile.linux": "bash"
+ },
+ "extensions": [
+ "ms-vscode.cmake-tools"
+ ]
+ }
+ },
+ "postStartCommand": "git config --global --add safe.directory /workspaces/compspec-ior"
+}
diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml
new file mode 100644
index 0000000..44188d6
--- /dev/null
+++ b/.github/workflows/main.yaml
@@ -0,0 +1,60 @@
+name: CI
+
+on:
+ push:
+ branches:
+ - main
+ pull_request: []
+
+jobs:
+ test:
+ name: Test IOR
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Repository
+ uses: actions/checkout@v4
+ - name: Install compspec
+ run: |
+ git clone -b refactor-compspec https://github.com/compspec/compspec /tmp/cs
+ cd /tmp/cs
+ pip install .
+
+ - name: Install compspec-ior
+ run: pip install .
+
+ - name: Test with loading data
+ run: compspec extract ior --ior-load ./examples/test/ior-data.json
+
+ - name: Test Python
+ run: python ./examples/singleton-run.py
+
+ validate-schema:
+ name: Validate schema
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Repository
+ uses: actions/checkout@v4
+ - name: Validate Schema
+ uses: compspec/actions/validate-schema@main
+ with:
+ schema: ./compspec_ior/schema.json
+
+ formatting:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Setup black linter
+ run: conda create --quiet --name black pyflakes
+
+ - name: Check Spelling
+ uses: crate-ci/typos@7ad296c72fa8265059cc03d1eda562fbdfcd6df2 # v1.9.0
+ with:
+ files: ./docs/getting_started/ ./docs/index.rst
+
+ - name: Lint and format Python code
+ run: |
+ export PATH="/usr/share/miniconda/bin:$PATH"
+ source activate black
+ pip install -r .github/dev-requirements.txt
+ pre-commit run --all-files
diff --git a/README.md b/README.md
index bcaa156..53f72b8 100644
--- a/README.md
+++ b/README.md
@@ -4,6 +4,8 @@
+[![PyPI version](https://badge.fury.io/py/compspec-ior.svg)](https://badge.fury.io/py/compspec-ior)
+
A compspec (Composition spec) is a specification and model for comparing things. Compspec IOR is
a plugin for extraction of [IOR](https://github.com/hpc/ior) metadata from applications, and packaging in compatibility specification
artifacts. This means that we also maintain the compatibility schema here. To learn more:
@@ -23,21 +25,205 @@ pip install compspec-ior
```
Then run an extraction with IOR. You can use defaults, or add any parameters to IOR after the plugin name "ior".
+Here is how to print to the terminal:
+
+```bash
+compspec extract ior
+```
+
+
+
+IOR output
+
+```console
+{
+ "version": "0.0.0",
+ "kind": "CompatibilitySpec",
+ "metadata": {
+ "name": "compat-experiment",
+ "schemas": {
+ "io.compspec.ior": "https://raw.githubusercontent.com/compspec/compspec-ior/main/compspec_ior/schema.json"
+ }
+ },
+ "compatibilities": [
+ {
+ "name": "io.compspec.ior",
+ "version": "0.0.0",
+ "attributes": {
+ "version": "4.0.0rc1",
+ "began": "Thu Feb 22 00:36:12 2024",
+ "machine": "Linux 2b3ee0c4c948",
+ "finished": "Thu Feb 22 00:36:12 2024",
+ "command_line": "ior -O summaryFormat=JSON",
+ "summary.write.operation": "write",
+ "summary.write.API": "POSIX",
+ "summary.write.TestID": 0,
+ "summary.write.ReferenceNumber": 0,
+ "summary.write.segmentCount": 1,
+ "summary.write.blockSize": 1048576,
+ "summary.write.transferSize": 262144,
+ "summary.write.numTasks": 1,
+ "summary.write.tasksPerNode": 1,
+ "summary.write.repetitions": 1,
+ "summary.write.filePerProc": 0,
+ "summary.write.reorderTasks": 0,
+ "summary.write.taskPerNodeOffset": 1,
+ "summary.write.reorderTasksRandom": 0,
+ "summary.write.reorderTasksRandomSeed": 0,
+ "summary.write.bwMaxMIB": 904.92,
+ "summary.write.bwMinMIB": 904.92,
+ "summary.write.bwMeanMIB": 904.92,
+ "summary.write.bwStdMIB": 0.0,
+ "summary.write.OPsMax": 3619.6798,
+ "summary.write.OPsMin": 3619.6798,
+ "summary.write.OPsMean": 3619.6798,
+ "summary.write.OPsSD": 0.0,
+ "summary.write.MeanTime": 0.0011,
+ "summary.write.xsizeMiB": 1.0,
+ "summary.read.operation": "read",
+ "summary.read.API": "POSIX",
+ "summary.read.TestID": 0,
+ "summary.read.ReferenceNumber": 0,
+ "summary.read.segmentCount": 1,
+ "summary.read.blockSize": 1048576,
+ "summary.read.transferSize": 262144,
+ "summary.read.numTasks": 1,
+ "summary.read.tasksPerNode": 1,
+ "summary.read.repetitions": 1,
+ "summary.read.filePerProc": 0,
+ "summary.read.reorderTasks": 0,
+ "summary.read.taskPerNodeOffset": 1,
+ "summary.read.reorderTasksRandom": 0,
+ "summary.read.reorderTasksRandomSeed": 0,
+ "summary.read.bwMaxMIB": 6615.6215,
+ "summary.read.bwMinMIB": 6615.6215,
+ "summary.read.bwMeanMIB": 6615.6215,
+ "summary.read.bwStdMIB": 0.0,
+ "summary.read.OPsMax": 26462.4858,
+ "summary.read.OPsMin": 26462.4858,
+ "summary.read.OPsMean": 26462.4858,
+ "summary.read.OPsSD": 0.0,
+ "summary.read.MeanTime": 0.0002,
+ "summary.read.xsizeMiB": 1.0,
+ "test.0.starttime": "Thu Feb 22 00:36:12 2024",
+ "test.0.capacity": "1.8 TiB",
+ "test.0.used_capacity": "20.2%",
+ "test.0.inodes": "116.4 Mi",
+ "test.0.used_inodes": "5.3%",
+ "test.0.parameters.testID": 0,
+ "test.0.parameters.refnum": 0,
+ "test.0.parameters.api": "POSIX",
+ "test.0.parameters.platform": "2b3ee0c4c(Linux)",
+ "test.0.parameters.testFileName": "testFile",
+ "test.0.parameters.deadlineForStonewall": 0,
+ "test.0.parameters.stoneWallingWearOut": 0,
+ "test.0.parameters.maxTimeDuration": 0,
+ "test.0.parameters.outlierThreshold": 0,
+ "test.0.parameters.options": "(null)",
+ "test.0.parameters.dryRun": 0,
+ "test.0.parameters.nodes": 1,
+ "test.0.parameters.memoryPerTask": 0,
+ "test.0.parameters.memoryPerNode": 0,
+ "test.0.parameters.tasksPerNode": 1,
+ "test.0.parameters.repetitions": 1,
+ "test.0.parameters.multiFile": 0,
+ "test.0.parameters.interTestDelay": 0,
+ "test.0.parameters.fsync": 0,
+ "test.0.parameters.fsyncperwrite": 0,
+ "test.0.parameters.useExistingTestFile": 0,
+ "test.0.parameters.uniqueDir": 0,
+ "test.0.parameters.singleXferAttempt": 0,
+ "test.0.parameters.readFile": 1,
+ "test.0.parameters.writeFile": 1,
+ "test.0.parameters.filePerProc": 0,
+ "test.0.parameters.reorderTasks": 0,
+ "test.0.parameters.reorderTasksRandom": 0,
+ "test.0.parameters.reorderTasksRandomSeed": 0,
+ "test.0.parameters.randomOffset": 0,
+ "test.0.parameters.checkWrite": 0,
+ "test.0.parameters.checkRead": 0,
+ "test.0.parameters.dataPacketType": 0,
+ "test.0.parameters.keepFile": 0,
+ "test.0.parameters.keepFileWithError": 0,
+ "test.0.parameters.warningAsErrors": 0,
+ "test.0.parameters.verbose": 0,
+ "test.0.parameters.data packet type": "g",
+ "test.0.parameters.setTimeStampSignature/incompressibleSeed": 0,
+ "test.0.parameters.collective": 0,
+ "test.0.parameters.segmentCount": 1,
+ "test.0.parameters.transferSize": 262144,
+ "test.0.parameters.blockSize": 1048576,
+ "test.0.options.api": "POSIX",
+ "test.0.options.apiVersion": "",
+ "test.0.options.test filename": "testFile",
+ "test.0.options.access": "single-shared-file",
+ "test.0.options.type": "independent",
+ "test.0.options.segments": 1,
+ "test.0.options.ordering in a file": "sequential",
+ "test.0.options.ordering inter file": "no tasks offsets",
+ "test.0.options.nodes": 1,
+ "test.0.options.tasks": 1,
+ "test.0.options.clients per node": 1,
+ "test.0.options.repetitions": 1,
+ "test.0.options.xfersize": "262144 bytes",
+ "test.0.options.blocksize": "1 MiB",
+ "test.0.options.aggregate filesize": "1 MiB",
+ "test.0.results.0.access": "write",
+ "test.0.results.0.bwMiB": 904.92,
+ "test.0.results.0.blockKiB": 1024.0,
+ "test.0.results.0.xferKiB": 256.0,
+ "test.0.results.0.iops": 3842.6972,
+ "test.0.results.0.latency": 0.0003,
+ "test.0.results.0.openTime": 0.0001,
+ "test.0.results.0.wrRdTime": 0.001,
+ "test.0.results.0.closeTime": 0.0,
+ "test.0.results.0.totalTime": 0.0011,
+ "test.0.results.1.access": "read",
+ "test.0.results.1.bwMiB": 6615.6215,
+ "test.0.results.1.blockKiB": 1024.0,
+ "test.0.results.1.xferKiB": 256.0,
+ "test.0.results.1.iops": 27962.0267,
+ "test.0.results.1.latency": 0.0,
+ "test.0.results.1.openTime": 0.0,
+ "test.0.results.1.wrRdTime": 0.0001,
+ "test.0.results.1.closeTime": 0.0,
+ "test.0.results.1.totalTime": 0.0002
+ }
+ }
+ ]
+}
+```
+
+
+
+And here is how to save the output to a file:
+
+```bash
+compspec extract --outfile ior-test.json ior
+```
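+
+If you already have IOR JSON output on hand (generated with `-O summaryFormat=JSON`), you can
+load it instead of running IOR. This is the same command the CI workflow runs against the test
+data shipped in this repository:
+
+```bash
+compspec extract ior --ior-load ./examples/test/ior-data.json
+```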
+
+You can also run the example script to see how to use compspec-ior directly in Python to
+generate the same artifact.
+
+```bash
+python ./examples/singleton-run.py
+```
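+
+That script is a minimal sketch of using the plugin directly (it mirrors
+`examples/singleton-run.py`; the data path below is relative to the repository root):
+
+```python
+import compspec.artifact
+
+from compspec_ior.plugin import Plugin
+
+# Flatten IOR JSON output (a file path or already-loaded dict) into attributes
+plugin = Plugin("ior")
+attributes = plugin.load_metadata("examples/test/ior-data.json")
+
+# Wrap the flattened attributes into a compatibility artifact and print it
+artifact = compspec.artifact.generate(plugin, "ior-compat-example", attributes)
+print(artifact.render())
+```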
+
+
+### Development
+
+If you open the [Development container](.devcontainer) in VSCode, you'll find ior on the path:
```bash
-compspec extract ior ...
+$ which ior
+/usr/bin/ior
```
-More coming soon!
+This allows us to easily develop and test the compatibility plugin.
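+For example, you can generate fresh IOR JSON inside the container and hand it back to the
+plugin. This is a sketch (the output path is just an example); it assumes IOR writes its JSON
+summary to stdout, which is the same `-O summaryFormat=JSON` behavior the plugin relies on when
+it runs IOR for you:
+
+```bash
+ior -O summaryFormat=JSON > /tmp/ior-output.json
+compspec extract ior --ior-load /tmp/ior-output.json
+```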
## TODO
-- Developer environment with IOR installed (for others and me too)
-- testing, etc with pre-commit and spell checking
-- implement run functionality
- - use reasonable defaults for when nothing provided
- - outputs should map to new schema.json attributes
- - main library compspec should have support for oras push, etc.
+- How to handle adding lists (with indices) to the schema?
## License
diff --git a/compspec_ior/__init__.py b/compspec_ior/__init__.py
index 6a49c28..3def8be 100644
--- a/compspec_ior/__init__.py
+++ b/compspec_ior/__init__.py
@@ -1,2 +1,2 @@
-from .plugin import ExtractorPlugin
+from .plugin import Plugin
from .version import __version__
diff --git a/compspec_ior/defaults.py b/compspec_ior/defaults.py
new file mode 100644
index 0000000..ee66130
--- /dev/null
+++ b/compspec_ior/defaults.py
@@ -0,0 +1,4 @@
+# These are required to be given to compspec
+spec_version = "0.0.0"
+schema_url = "https://raw.githubusercontent.com/compspec/compspec-ior/main/compspec_ior/schema.json"
+namespace = "io.compspec.ior"
diff --git a/compspec_ior/plugin.py b/compspec_ior/plugin.py
index d938094..969ea83 100644
--- a/compspec_ior/plugin.py
+++ b/compspec_ior/plugin.py
@@ -1,14 +1,27 @@
import argparse
+import json
+import logging
+import os
+import shlex
-from compspec.plugin import Plugin
+import compspec.utils as utils
+from compspec.plugin import PluginBase
+import compspec_ior.defaults as defaults
-class ExtractorPlugin(Plugin):
+logger = logging.getLogger("compspec-ior")
+
+
+class Plugin(PluginBase):
"""
The IOR extractor plugin
"""
+ # These metadata fields are required (and checked for)
description = "IOR parallel I/O benchmarks"
+ namespace = defaults.namespace
+ version = defaults.spec_version
+ schema = defaults.schema_url
def add_arguments(self, subparser):
"""
@@ -19,19 +32,116 @@ def add_arguments(self, subparser):
formatter_class=argparse.RawTextHelpFormatter,
description=self.description,
)
+ # Ensure these are namespaced to your plugin
ior.add_argument(
- "args",
+ "ior_args",
help="Arguments for IOR (defaults to reasonable set if not defined)",
- nargs="?",
+ nargs="*",
+ )
+ ior.add_argument(
+ "--ior-load",
+ dest="ior_load",
+            help="Load metadata from this file instead of extracting it from the system directly.",
)
- def run(self, args):
+ def run_ior(self, command):
+ """
+ Run IOR to generate json instead of loading from file.
+ """
+ if not isinstance(command, list):
+ command = shlex.split(command)
+
+ if "ior" not in command:
+ command = ["ior"] + command
+
+ # We must output to json
+ if "summaryFormat" not in command:
+ command += ["-O", "summaryFormat=JSON"]
+ logger.debug(" ".join(command))
+
+ result = utils.run_command(command)
+ if result["return_code"] != 0:
+ msg = " ".join(command)
+ raise ValueError(f"Issue with running {msg}: {result['message']}")
+
+ # Load the result
+ return json.loads(result["message"])
+
+ def extract(self, args, extra):
"""
Run IOR and map metadata into compspec schema.
+
+        Note that "extract" simply needs to return key-value
+        pairs of extraction metadata. Ideally, keep keys lowercase,
+        and represent flattened groups as dot-separated paths
+        (for example, "summary.write.blockSize"). Do not worry
+        about adding a top level namespace for the plugin; this
+        is handled by compspec.
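+
+        A minimal sketch of the returned mapping (abbreviated; keys and
+        values taken from the example output in the README):
+
+            {
+                "version": "4.0.0rc1",
+                "summary.write.bwMeanMIB": 904.92,
+                "test.0.parameters.api": "POSIX",
+            }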
"""
- # TODO
- print(args)
- print("RUN IOR HERE - choose defaults if")
- import IPython
+ if args.ior_load:
+ data = utils.read_json(args.ior_load)
+ else:
+ data = self.run_ior(args.ior_args)
+
+ # Prepare metadata, this is handled by another function for extended use
+ return self.load_metadata(data)
+
+ def load_metadata(self, data):
+ """
+ Load IOR metadata into a dictionary that can be given to compspec.
+ """
+ # Be forgiving if they provide a filepath
+ if isinstance(data, str) and os.path.exists(data):
+ data = utils.read_json(data)
+
+ meta = {}
+
+ # High level metadata
+ for key in ["Version", "Began", "Machine", "Finished", "Command line"]:
+ value = data.get(key)
+
+ # This handles a single entry, lowercase and removes spaces
+ key = utils.normalize_key(key)
+
+            # Do not add empty values
+ if value is not None:
+ meta[key] = value
+
+ # Add in summary - the operations (I think) should be unique
+ for entry in data["summary"]:
+ key = f"summary.{entry['operation']}"
+ for k, v in entry.items():
+ # This is more of the namespace
+ if k == "operation":
+ continue
+ meta[f"{key}.{k}"] = v
+
+ # Now add in tests (note it's not clear yet which of these we should keep)
+ for test in data["tests"]:
+ key = f"test.{test['TestID']}"
+ for a in [
+ "StartTime",
+ "Capacity",
+ "Used Capacity",
+ "Inodes",
+ "Used Inodes",
+ ]:
+ subkey = utils.normalize_key(a)
+ meta[f"{key}.{subkey}"] = test[a]
+
+ # Add in Parameters for tests
+ # These are in camel case, let's keep them as such
+ for k, v in (test.get("Parameters") or {}).items():
+ meta[f"{key}.parameters.{k}"] = v
+
+ # Now add in options
+ for k, v in (test.get("Options") or {}).items():
+            # Option keys are kept as-is (some contain spaces), matching the example output
+            meta[f"{key}.options.{k}"] = v
- IPython.embed()
+ # Add results from list
+ for i, result in enumerate(test.get("Results") or []):
+ for k, v in result.items():
+ meta[f"{key}.results.{i}.{k}"] = v
+ return meta
diff --git a/compspec_ior/schema.json b/compspec_ior/schema.json
index 60642e6..a8bd491 100644
--- a/compspec_ior/schema.json
+++ b/compspec_ior/schema.json
@@ -3,6 +3,10 @@
"id": "hpc.ior",
"type": "compspec",
"label": "compatibilities",
+ "metadata": {
+ "version": "0.0.0",
+ "source": "https://github.com/supercontainers/compspec"
+ },
"nodes": {
"modules": {
"label": "IOR modules"
@@ -138,9 +142,176 @@
},
"options.random_offset_seed": {
"label": "The seed for -z"
+ },
+ "version": {
+ "label": "The version of IOR"
+ },
+ "began": {
+ "label": "When the analysis began (human readable time)"
+ },
+ "finished": {
+ "label": "When the analysis finished (human readable time)"
+ },
+ "machine": {
+      "label": "IOR's representation of the machine it ran on"
+ },
+ "command_line": {
+ "label": "Command line provided to run IOR"
+ },
+ "summary.write.operation": {
+ "label": "A summary of a write operation"
+ },
+ "summary.write.API": {
+ "label": "Module API used"
+ },
+ "summary.write.TestID": {
+ "label": "ID of the test"
+ },
+ "summary.write.ReferenceNumber": {
+ "label": "Reference number of write"
+ },
+ "summary.write.segmentCount": {
+      "label": "Segment count of write"
+ },
+ "summary.write.blockSize": {
+ "label": "Block size of write"
+ },
+ "summary.write.transferSize": {
+ "label": "Transfer size of write"
+ },
+ "summary.write.numTasks": {
+ "label": "Number of tasks"
+ },
+ "summary.write.tasksPerNode": {
+      "label": "Tasks per node"
+ },
+ "summary.write.repetitions": {
+ "label": "Repetitions of write"
+ },
+ "summary.write.filePerProc": {
+ "label": "Files per process of write"
+ },
+ "summary.write.reorderTasks": {
+ "label": "Reorder of tasks for write"
+ },
+ "summary.write.taskPerNodeOffset": {
+ "label": "Task per node offset of write"
+ },
+ "summary.write.reorderTasksRandom": {
+ "label": "Reorder tasks random for write"
+ },
+ "summary.write.reorderTasksRandomSeed": {
+ "label": "Reorder task random seed of write"
+ },
+ "summary.write.bwMaxMIB": {
+ "label": "Bandwidth Max MIB of write"
+ },
+ "summary.write.bwMinMIB": {
+ "label": "Bandwidth Min MIB of write"
+ },
+ "summary.write.bwMeanMIB": {
+ "label": "Bandwidth mean MIB of write"
+ },
+ "summary.write.bwStdMIB": {
+      "label": "Bandwidth standard deviation MIB of write"
+ },
+ "summary.write.OPsMax": {
+ "label": "Operations max of write"
+ },
+ "summary.write.OPsMin": {
+ "label": "Operations min of write"
+ },
+ "summary.write.OPsMean": {
+ "label": "Operations mean of write"
+ },
+ "summary.write.OPsSD": {
+ "label": "Operations standard deviation of write"
+ },
+ "summary.write.MeanTime": {
+ "label": "Mean time of write"
+ },
+ "summary.write.xsizeMiB": {
+ "label": "Size MIB of write"
+ },
+ "summary.read.operation": {
+ "label": "A summary of a read operation"
+ },
+ "summary.read.API": {
+ "label": "Module API used"
+ },
+ "summary.read.TestID": {
+ "label": "ID of the test"
+ },
+ "summary.read.ReferenceNumber": {
+ "label": "Reference number of read"
+ },
+ "summary.read.segmentCount": {
+      "label": "Segment count of read"
+ },
+ "summary.read.blockSize": {
+ "label": "Block size of read"
+ },
+ "summary.read.transferSize": {
+ "label": "Transfer size of read"
+ },
+ "summary.read.numTasks": {
+ "label": "Number of tasks"
+ },
+ "summary.read.tasksPerNode": {
+      "label": "Tasks per node"
+ },
+ "summary.read.repetitions": {
+ "label": "Repetitions of read"
+ },
+ "summary.read.filePerProc": {
+ "label": "Files per process of read"
+ },
+ "summary.read.reorderTasks": {
+ "label": "Reorder of tasks for read"
+ },
+ "summary.read.taskPerNodeOffset": {
+ "label": "Task per node offset of read"
+ },
+ "summary.read.reorderTasksRandom": {
+ "label": "Reorder tasks random for read"
+ },
+ "summary.read.reorderTasksRandomSeed": {
+ "label": "Reorder task random seed of read"
+ },
+ "summary.read.bwMaxMIB": {
+ "label": "Bandwidth Max MIB of read"
+ },
+ "summary.read.bwMinMIB": {
+ "label": "Bandwidth Min MIB of read"
+ },
+ "summary.read.bwMeanMIB": {
+ "label": "Bandwidth mean MIB of read"
+ },
+ "summary.read.bwStdMIB": {
+      "label": "Bandwidth standard deviation MIB of read"
+ },
+ "summary.read.OPsMax": {
+ "label": "Operations max of read"
+ },
+ "summary.read.OPsMin": {
+ "label": "Operations min of read"
+ },
+ "summary.read.OPsMean": {
+ "label": "Operations mean of read"
+ },
+ "summary.read.OPsSD": {
+ "label": "Operations standard deviation of read"
+ },
+ "summary.read.MeanTime": {
+ "label": "Mean time of read"
+ },
+ "summary.read.xsizeMiB": {
+ "label": "Size MIB of read"
+ },
+ "test": {
+      "label": "List of tests"
}
},
-
"edges": [
{
"source": "modules",
@@ -356,11 +527,267 @@
"source": "options",
"target": "options.random_offset_seed",
"relation": "has-option"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.operation",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.API",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.TestID",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.ReferenceNumber",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+      "target": "summary.write.segmentCount",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.blockSize",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.transferSize",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.numTasks",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.tasksPerNode",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.repetitions",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.filePerProc",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.reorderTasks",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.taskPerNodeOffset",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.reorderTasksRandom",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.reorderTasksRandomSeed",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.bwMaxMIB",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.bwMinMIB",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.bwMeanMIB",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.bwStdMIB",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.OPsMax",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.OPsMin",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.OPsMean",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.OPsSD",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.MeanTime",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.write.xsizeMiB",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.operation",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.API",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.TestID",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.ReferenceNumber",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+      "target": "summary.read.segmentCount",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.blockSize",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.transferSize",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.numTasks",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.tasksPerNode",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.repetitions",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.filePerProc",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.reorderTasks",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.taskPerNodeOffset",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.reorderTasksRandom",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.reorderTasksRandomSeed",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.bwMaxMIB",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.bwMinMIB",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.bwMeanMIB",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.bwStdMIB",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.OPsMax",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.OPsMin",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.OPsMean",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.OPsSD",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.MeanTime",
+ "relation": "contains"
+ },
+ {
+ "source": "summary",
+ "target": "summary.read.xsizeMiB",
+ "relation": "contains"
}
- ],
- "metadata": {
- "version": "0.0.0",
- "source": "https://github.com/supercontainers/compspec"
- }
+ ]
}
}
diff --git a/examples/singleton-run.py b/examples/singleton-run.py
new file mode 100644
index 0000000..6f782fe
--- /dev/null
+++ b/examples/singleton-run.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# This is a singleton example of running the plugin without the compspec CLI,
+# primarily for testing. There is no validation of the schema or other checks.
+
+import os
+
+import compspec.artifact
+
+from compspec_ior.plugin import Plugin
+
+here = os.path.dirname(os.path.abspath(__file__))
+test_file = os.path.join(here, "test", "ior-data.json")
+
+
+def main():
+ # Load data we've generated with IOR
+ plugin = Plugin("ior")
+ attributes = plugin.load_metadata(test_file)
+ artifact = compspec.artifact.generate(plugin, "ior-compat-example", attributes)
+ print(artifact.render())
+
+
+if __name__ == "__main__":
+ main()
diff --git a/examples/test/ior-data.json b/examples/test/ior-data.json
new file mode 100644
index 0000000..c115236
--- /dev/null
+++ b/examples/test/ior-data.json
@@ -0,0 +1,165 @@
+{
+ "Version": "4.0.0rc1",
+ "Began": "Wed Feb 21 15:39:09 2024",
+ "Command line": "ior -O summaryFormat=JSON",
+ "Machine": "Linux 2b3ee0c4c948",
+ "tests": [
+ {
+ "TestID": 0,
+ "StartTime": "Wed Feb 21 15:39:09 2024" , "Path": "testFile","Capacity": "1.8 TiB", "Used Capacity": "20.0%","Inodes": "116.4 Mi", "Used Inodes" : "5.2%"
+,
+ "Parameters": {
+ "testID": 0,
+ "refnum": 0,
+ "api": "POSIX",
+ "platform": "2b3ee0c4c(Linux)",
+ "testFileName": "testFile",
+ "deadlineForStonewall": 0,
+ "stoneWallingWearOut": 0,
+ "maxTimeDuration": 0,
+ "outlierThreshold": 0,
+ "options": "(null)",
+ "dryRun": 0,
+ "nodes": 1,
+ "memoryPerTask": 0,
+ "memoryPerNode": 0,
+ "tasksPerNode": 1,
+ "repetitions": 1,
+ "multiFile": 0,
+ "interTestDelay": 0,
+ "fsync": 0,
+ "fsyncperwrite": 0,
+ "useExistingTestFile": 0,
+ "uniqueDir": 0,
+ "singleXferAttempt": 0,
+ "readFile": 1,
+ "writeFile": 1,
+ "filePerProc": 0,
+ "reorderTasks": 0,
+ "reorderTasksRandom": 0,
+ "reorderTasksRandomSeed": 0,
+ "randomOffset": 0,
+ "checkWrite": 0,
+ "checkRead": 0,
+ "dataPacketType": 0,
+ "keepFile": 0,
+ "keepFileWithError": 0,
+ "warningAsErrors": 0,
+ "verbose": 0,
+ "data packet type": "g",
+ "setTimeStampSignature/incompressibleSeed": 0,
+ "collective": 0,
+ "segmentCount": 1,
+ "transferSize": 262144,
+ "blockSize": 1048576
+ }
+,
+ "Options": {
+ "api": "POSIX",
+ "apiVersion": "",
+ "test filename": "testFile",
+ "access": "single-shared-file",
+ "type": "independent",
+ "segments": 1,
+ "ordering in a file": "sequential",
+ "ordering inter file": "no tasks offsets",
+ "nodes": 1,
+ "tasks": 1,
+ "clients per node": 1,
+ "repetitions": 1,
+ "xfersize": "262144 bytes",
+ "blocksize": "1 MiB",
+ "aggregate filesize": "1 MiB"
+ }
+,
+ "Results": [
+ {
+ "access": "write",
+ "bwMiB": 1377.4397,
+ "blockKiB": 1024.0000,
+ "xferKiB": 256.0000,
+ "iops": 5714.3106,
+ "latency": 0.0002,
+ "openTime": 0.0000,
+ "wrRdTime": 0.0007,
+ "closeTime": 0.0000,
+ "totalTime": 0.0007
+ }
+,
+ {
+ "access": "read",
+ "bwMiB": 14122.2357,
+ "blockKiB": 1024.0000,
+ "xferKiB": 256.0000,
+ "iops": 58867.4246,
+ "latency": 0.0000,
+ "openTime": 0.0000,
+ "wrRdTime": 0.0001,
+ "closeTime": 0.0000,
+ "totalTime": 0.0001
+ }
+]
+
+ }
+]
+,
+"summary": [
+ {
+ "operation": "write",
+ "API": "POSIX",
+ "TestID": 0,
+ "ReferenceNumber": 0,
+ "segmentCount": 1,
+ "blockSize": 1048576,
+ "transferSize": 262144,
+ "numTasks": 1,
+ "tasksPerNode": 1,
+ "repetitions": 1,
+ "filePerProc": 0,
+ "reorderTasks": 0,
+ "taskPerNodeOffset": 1,
+ "reorderTasksRandom": 0,
+ "reorderTasksRandomSeed": 0,
+ "bwMaxMIB": 1377.4397,
+ "bwMinMIB": 1377.4397,
+ "bwMeanMIB": 1377.4397,
+ "bwStdMIB": 0.0000,
+ "OPsMax": 5509.7589,
+ "OPsMin": 5509.7589,
+ "OPsMean": 5509.7589,
+ "OPsSD": 0.0000,
+ "MeanTime": 0.0007,
+ "xsizeMiB": 1.0000
+ }
+,
+ {
+ "operation": "read",
+ "API": "POSIX",
+ "TestID": 0,
+ "ReferenceNumber": 0,
+ "segmentCount": 1,
+ "blockSize": 1048576,
+ "transferSize": 262144,
+ "numTasks": 1,
+ "tasksPerNode": 1,
+ "repetitions": 1,
+ "filePerProc": 0,
+ "reorderTasks": 0,
+ "taskPerNodeOffset": 1,
+ "reorderTasksRandom": 0,
+ "reorderTasksRandomSeed": 0,
+ "bwMaxMIB": 14122.2357,
+ "bwMinMIB": 14122.2357,
+ "bwMeanMIB": 14122.2357,
+ "bwStdMIB": 0.0000,
+ "OPsMax": 56488.9428,
+ "OPsMin": 56488.9428,
+ "OPsMean": 56488.9428,
+ "OPsSD": 0.0000,
+ "MeanTime": 0.0001,
+ "xsizeMiB": 1.0000
+ }
+]
+,
+"Finished": "Wed Feb 21 15:39:09 2024"
+}
From c9a4ac389004676187e723b1ad44105a4dab16d6 Mon Sep 17 00:00:00 2001
From: vsoch
Date: Wed, 21 Feb 2024 18:58:18 -0700
Subject: [PATCH 2/3] updating typos
Signed-off-by: vsoch
---
.github/workflows/main.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml
index 44188d6..ff7356f 100644
--- a/.github/workflows/main.yaml
+++ b/.github/workflows/main.yaml
@@ -50,7 +50,7 @@ jobs:
- name: Check Spelling
uses: crate-ci/typos@7ad296c72fa8265059cc03d1eda562fbdfcd6df2 # v1.9.0
with:
- files: ./docs/getting_started/ ./docs/index.rst
+ files: ./README.md
- name: Lint and format Python code
run: |
From 1cfa6654322d816796c362dfeee46740b30511e5 Mon Sep 17 00:00:00 2001
From: vsoch
Date: Wed, 21 Feb 2024 19:24:08 -0700
Subject: [PATCH 3/3] update to use release
Signed-off-by: vsoch
---
.github/workflows/main.yaml | 8 +-------
1 file changed, 1 insertion(+), 7 deletions(-)
diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml
index ff7356f..2477c06 100644
--- a/.github/workflows/main.yaml
+++ b/.github/workflows/main.yaml
@@ -14,17 +14,11 @@ jobs:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Install compspec
- run: |
- git clone -b refactor-compspec https://github.com/compspec/compspec /tmp/cs
- cd /tmp/cs
- pip install .
-
+ run: pip install compspec
- name: Install compspec-ior
run: pip install .
-
- name: Test with loading data
run: compspec extract ior --ior-load ./examples/test/ior-data.json
-
- name: Test Python
run: python ./examples/singleton-run.py