
Transfer labels to the top of README.md
Signed-off-by: Julio Faracco <[email protected]>
jcfaracco committed Feb 12, 2024
1 parent 6243d6b commit b3a9ea1
Showing 2 changed files with 106 additions and 3 deletions.
5 changes: 2 additions & 3 deletions README.md
@@ -1,14 +1,13 @@
# DASF is an Accelerated and Scalable Framework

[![Continuous Test](https://github.com/discovery-unicamp/dasf-core/actions/workflows/ci.yaml/badge.svg)](https://github.com/discovery-unicamp/dasf-core/actions/workflows/ci.yaml)

DASF is a generic framework specialized in accelerating and scaling common
Machine Learning techniques. DASF reuses methods and functions from the most
common libraries to speed up most algorithms. Part of this is using Dask to
scale computation and RAPIDS AI algorithms to extend support to GPUs as well.

### CI Results
[![Continuous Test](https://github.com/discovery-unicamp/dasf-core/actions/workflows/ci.yaml/badge.svg)](https://github.com/discovery-unicamp/dasf-core/actions/workflows/ci.yaml)

## Installation

For now, the installation can be done using docker or singularity (if available).
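
The README excerpt above mentions Dask for scaling computation and RAPIDS AI for GPU support. As a rough, non-authoritative illustration of that underlying idea (plain Dask/CuPy here, not DASF's own API), a chunked array lets the same reduction run in parallel and, with RAPIDS installed, on GPUs:

```python
# Minimal sketch of the scaling idea, using plain Dask (an assumption for
# illustration; DASF builds on this kind of machinery rather than exposing it
# exactly like this).
import dask.array as da

# A chunked array: each 1000x1000 block can be processed by a separate worker.
x = da.random.random((10_000, 10_000), chunks=(1_000, 1_000))
print(x.mean().compute())  # evaluated lazily, block by block

# With RAPIDS/CuPy available, the same pattern can target GPUs
# (hypothetical, requires a CUDA-capable environment):
# import cupy as cp
# xg = x.map_blocks(cp.asarray)
# print(xg.mean().compute())
```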
104 changes: 104 additions & 0 deletions tests/datasets/test_generic.py
@@ -3,6 +3,8 @@
import os
import unittest

import numpy as np

from pytest import fixture
from parameterized import parameterized_class

@@ -45,3 +47,105 @@ def test_dataset_load(self):

        self.assertTrue(hasattr(dataset, '_metadata'))
        self.assertTrue("size" in dataset._metadata)


class TestDatasetArray(unittest.TestCase):
    def test_shape(self):
        # PYTEST_CURRENT_TEST holds the running test id, e.g.
        # "tests/datasets/test_generic.py::TestDatasetArray::test_shape (call)";
        # os.path.splitext() cuts at the ".py", so test_dir becomes the module
        # path ("tests/datasets/test_generic") that holds the fixture data.
        filename = os.getenv('PYTEST_CURRENT_TEST')
        test_dir, _ = os.path.splitext(filename)
        raw_path = os.path.join(test_dir, "simple", "Array.npy")

        dataset = DatasetArray(name="Array", root=raw_path, download=False)

        self.assertEqual(dataset.shape, (40, 40, 40))

    def test_add(self):
        filename = os.getenv('PYTEST_CURRENT_TEST')
        test_dir, _ = os.path.splitext(filename)
        raw_path = os.path.join(test_dir, "simple", "Array.npy")

        dataset1 = DatasetArray(name="Array", root=raw_path, download=False)
        dataset2 = DatasetArray(name="Array", root=raw_path, download=False)

        dataset1.load()
        dataset2.load()

        np1 = np.load(raw_path)
        np2 = np.load(raw_path)

        dataset3 = dataset1 + dataset2

        np3 = np1 + np2

        self.assertTrue(np.array_equal(dataset3, np3))

    def test_sub(self):
        filename = os.getenv('PYTEST_CURRENT_TEST')
        test_dir, _ = os.path.splitext(filename)
        raw_path = os.path.join(test_dir, "simple", "Array.npy")

        dataset1 = DatasetArray(name="Array", root=raw_path, download=False)
        dataset2 = DatasetArray(name="Array", root=raw_path, download=False)

        dataset1.load()
        dataset2.load()

        np1 = np.load(raw_path)
        np2 = np.load(raw_path)

        dataset3 = dataset1 - dataset2

        np3 = np1 - np2

        self.assertTrue(np.array_equal(dataset3, np3))

    def test_mul(self):
        filename = os.getenv('PYTEST_CURRENT_TEST')
        test_dir, _ = os.path.splitext(filename)
        raw_path = os.path.join(test_dir, "simple", "Array.npy")

        dataset1 = DatasetArray(name="Array", root=raw_path, download=False)
        dataset2 = DatasetArray(name="Array", root=raw_path, download=False)

        dataset1.load()
        dataset2.load()

        np1 = np.load(raw_path)
        np2 = np.load(raw_path)

        dataset3 = dataset1 * dataset2

        np3 = np1 * np2

        self.assertTrue(np.array_equal(dataset3, np3))
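
    # NOTE (assumption, not stated in the commit): the division variant below
    # is left disabled; since test_avg asserts the fixture's mean is 0.0, the
    # array likely contains zeros, so elementwise division would yield inf/nan
    # values and np.array_equal would no longer match.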

    # def test_div(self):
    #     filename = os.getenv('PYTEST_CURRENT_TEST')
    #     test_dir, _ = os.path.splitext(filename)
    #     raw_path = os.path.join(test_dir, "simple", "Array.npy")
    #
    #     dataset1 = DatasetArray(name="Array", root=raw_path, download=False)
    #     dataset2 = DatasetArray(name="Array", root=raw_path, download=False)
    #
    #     dataset1.load()
    #     dataset2.load()
    #
    #     np1 = np.load(raw_path)
    #     np2 = np.load(raw_path)
    #
    #     dataset3 = dataset1 / dataset2
    #
    #     np3 = np1 / np2
    #
    #     self.assertTrue(np.array_equal(dataset3, np3))

    def test_avg(self):
        filename = os.getenv('PYTEST_CURRENT_TEST')
        test_dir, _ = os.path.splitext(filename)
        raw_path = os.path.join(test_dir, "simple", "Array.npy")

        dataset = DatasetArray(name="Array", root=raw_path, download=False)

        dataset.load()

        self.assertEqual(dataset.avg(), 0.0)
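
A quick standalone sketch of what the new tests exercise: the import path and the throwaway fixture file are assumptions, while the constructor arguments, load(), and the arithmetic behaviour mirror the test code above.

```python
# Standalone usage sketch (assumptions: the DatasetArray import path and a
# locally generated "Array.npy"; everything else mirrors the tests above).
import numpy as np

from dasf.datasets.base import DatasetArray  # assumed import path

raw_path = "Array.npy"
np.save(raw_path, np.zeros((4, 4, 4)))  # tiny stand-in for the test fixture

d1 = DatasetArray(name="Array", root=raw_path, download=False)
d2 = DatasetArray(name="Array", root=raw_path, download=False)
d1.load()
d2.load()

ref = np.load(raw_path)
# After load(), DatasetArray instances combine with normal operators and can
# be compared directly against the equivalent NumPy results.
assert np.array_equal(d1 + d2, ref + ref)
assert np.array_equal(d1 * d2, ref * ref)
```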
