Include a Slice by percentile class
Signed-off-by: Julio Faracco <[email protected]>
jcfaracco committed Feb 15, 2024
1 parent efc902a commit 8c0f3f3
Showing 1 changed file with 83 additions and 1 deletion.
84 changes: 83 additions & 1 deletion dasf/transforms/operations.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3

from dasf.transforms.base import Transform
from dasf.transforms.base import Transform, ReductionTransform

# numpy is required by the SliceArrayByPercentile helpers below; cupy is only
# needed for the GPU code paths, so it is imported as an optional dependency.
import numpy as np

try:
    import cupy as cp
except ImportError:
    cp = None


class Reshape:
@@ -57,3 +57,85 @@ def transform(self, X):
            ]
        else:
            raise Exception("The dimension is not known")


class SliceArrayByPercentile(Transform):
    """Clips an array at the given percentile of its positive values and at
    the mirrored percentile of its negative values."""

    def __init__(self, percentile):
        self.p = percentile

    def __internal_chunk_array_positive(self, block, axis=None, keepdims=False, xp=np):
        # Percentile of the positive samples only (zeros and negatives are dropped).
        block = block[block > 0]
        return xp.array([xp.percentile(block, self.p)])

    def __internal_aggregate_array_positive(self, block, axis=None, keepdims=False, xp=np):
        # Keep the largest per-chunk cutoff as the global positive cutoff.
        return xp.array([xp.max(block)])

    def __internal_chunk_array_negative(self, block, axis=None, keepdims=False, xp=np):
        # Mirror the negative samples, take the same percentile, and negate it back.
        block = -block[block < 0]
        return xp.array([-xp.percentile(block, self.p)])

    def __internal_aggregate_array_negative(self, block, axis=None, keepdims=False, xp=np):
        # Keep the smallest (most negative) per-chunk cutoff as the global negative cutoff.
        return xp.array([xp.min(block)])

    def _lazy_transform_cpu(self, X):
        positive = ReductionTransform(func_chunk=self.__internal_chunk_array_positive,
                                      func_aggregate=self.__internal_aggregate_array_positive,
                                      output_size=[0])

        negative = ReductionTransform(func_chunk=self.__internal_chunk_array_negative,
                                      func_aggregate=self.__internal_aggregate_array_negative,
                                      output_size=[0])

        p = positive._lazy_transform_cpu(X, axis=[0])
        n = negative._lazy_transform_cpu(X, axis=[0])

        # Unfortunately, we need to compute first.
        pos_cutoff = p.compute()[0]
        neg_cutoff = n.compute()[0]

        X[X > pos_cutoff] = pos_cutoff
        X[X < neg_cutoff] = neg_cutoff

        return X

    def _lazy_transform_gpu(self, X):
        positive = ReductionTransform(func_chunk=self.__internal_chunk_array_positive,
                                      func_aggregate=self.__internal_aggregate_array_positive,
                                      output_size=[0])

        negative = ReductionTransform(func_chunk=self.__internal_chunk_array_negative,
                                      func_aggregate=self.__internal_aggregate_array_negative,
                                      output_size=[0])

        p = positive._lazy_transform_gpu(X, axis=[0])
        n = negative._lazy_transform_gpu(X, axis=[0])

        # Unfortunately, we need to compute first.
        pos_cutoff = p.compute()[0]
        neg_cutoff = n.compute()[0]

        X[X > pos_cutoff] = pos_cutoff
        X[X < neg_cutoff] = neg_cutoff

        return X

    def _transform_cpu(self, X):
        pos_cutoff = self.__internal_chunk_array_positive(X, xp=np)[0]
        neg_cutoff = self.__internal_chunk_array_negative(X, xp=np)[0]

        X[X > pos_cutoff] = pos_cutoff
        X[X < neg_cutoff] = neg_cutoff

        return X

    def _transform_gpu(self, X):
        pos_cutoff = self.__internal_chunk_array_positive(X, xp=cp)[0]
        neg_cutoff = self.__internal_chunk_array_negative(X, xp=cp)[0]

        X[X > pos_cutoff] = pos_cutoff
        X[X < neg_cutoff] = neg_cutoff

        return X
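
As a rough, standalone illustration of the clipping rule the new class applies (this sketch is not part of the commit; the toy values and the percentile of 90 are made up):

import numpy as np

X = np.array([-10.0, -2.0, -1.0, 0.0, 1.0, 2.0, 10.0])
p = 90

pos_cutoff = np.percentile(X[X > 0], p)    # 90th percentile of the positive samples
neg_cutoff = -np.percentile(-X[X < 0], p)  # mirrored 90th percentile of the negative samples

# Values beyond the cutoffs are clamped; everything in between is untouched.
X_clipped = np.clip(X, neg_cutoff, pos_cutoff)
print(pos_cutoff, neg_cutoff, X_clipped)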

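A minimal usage sketch, assuming a Dask array input for the lazy path (the chunk size, percentile value, and random data are illustrative only; in practice the transform would normally be driven through a DASF pipeline rather than by calling the private methods directly):

import numpy as np
import dask.array as da

from dasf.transforms.operations import SliceArrayByPercentile

slicer = SliceArrayByPercentile(percentile=95)

# Eager NumPy path; the clipping assignments modify the array in place.
X = np.random.randn(10000)
X_np = slicer._transform_cpu(X.copy())

# Lazy Dask path: per-chunk percentiles are reduced into global cutoffs first.
X_da = da.from_array(X, chunks=2500)
X_lazy = slicer._lazy_transform_cpu(X_da)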