Skip to content

Commit

Permalink
Merge branch 'unifyai:main' into rfft_jax
Browse files Browse the repository at this point in the history
  • Loading branch information
AbdullahSabry authored Sep 1, 2023
2 parents 28a4654 + a1db98a commit 2d304c6
Show file tree
Hide file tree
Showing 67 changed files with 3,020 additions and 860 deletions.
2 changes: 1 addition & 1 deletion .devcontainer/build_multiversion/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
"dockerfile": "../../docker/DockerfileMultiversion",
"context": "../..",
"args": {
"fw": ["numpy/1.24.2 tensorflow/2.11.0 tensorflow/2.12.0 jax/0.4.10 jax/0.4.8"]
"fw": ["numpy/1.24.2 tensorflow/2.11.0 tensorflow/2.12.0 jax/0.4.10 jax/0.4.8"]

}
},
Expand Down
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,6 @@ repos:
# Exclude everything in frontends except __init__.py, and func_wrapper.py
exclude: 'ivy/functional/(frontends|backends)/(?!.*/func_wrapper\.py$).*(?!__init__\.py$)'
- repo: https://github.com/unifyai/lint-hook
rev: 27646397c5390f644a645f439535b1061b9c0105
rev: 2ea80bc854c7f74b09620151028579083ff92ec2
hooks:
- id: ivy-lint
Empty file added `_
Empty file.
11 changes: 7 additions & 4 deletions docker/DockerfileMultiversion
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
FROM debian:buster
WORKDIR /ivy




ARG fw
ARG pycon=3.8.10
# Install miniconda
Expand All @@ -29,6 +26,7 @@ RUN apt-get update && \
apt-get install -y rsync && \
apt-get install -y libusb-1.0-0 && \
apt-get install -y libglib2.0-0 && \
pip3 install pip-autoremove && \
pip3 install --upgrade pip && \
pip3 install setuptools==58.5.3

Expand All @@ -42,10 +40,15 @@ RUN git clone --progress --recurse-submodules https://github.com/unifyai/ivy --d

COPY /docker/multiversion_framework_directory.py .
COPY /docker/requirement_mappings_multiversion.json .
COPY /docker/multiversion_testing_requirements.txt .

# requirement mappings directs which dependency to be installed and where
SHELL ["/bin/bash", "-c"]
RUN python3 multiversion_framework_directory.py $fw
RUN python3 multiversion_framework_directory.py $fw && \
pip install -r multiversion_testing_requirements.txt && \
pip-autoremove torch -y && \
pip-autoremove tensorflow -y && \
pip-autoremove jax -y


ENV PATH=/opt/miniconda/envs/multienv/bin:$PATH
Expand Down
10 changes: 5 additions & 5 deletions docker/multiversion_framework_directory.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,9 +46,7 @@ def install_deps(pkgs, path_to_json, base="/opt/fw/"):
# check to see if this pkg has specific version dependencies
with open(path_to_json, "r") as file:
json_data = json.load(file)
print(json_data.keys())
for keys in json_data[fw]:
print(keys, "here")
# check if key is dict
if isinstance(keys, dict):
# this is a dep with just one key
Expand All @@ -70,7 +68,8 @@ def install_deps(pkgs, path_to_json, base="/opt/fw/"):
)
else:
subprocess.run(
f"pip3 install {keys} --target"
"pip3 install "
f" {keys} {f'-f https://data.pyg.org/whl/torch-{ver}%2Bcpu.html' if keys=='torch-scatter' else ''} --target"
f" {path} --default-timeout=100 --no-cache-dir",
shell=True,
)
Expand All @@ -79,8 +78,9 @@ def install_deps(pkgs, path_to_json, base="/opt/fw/"):
if __name__ == "__main__":
arg_lis = sys.argv

json_path = ( # path to the json file storing version specific deps
"requirement_mappings_multiversion.json"
json_path = os.path.join( # path to the json file storing version specific deps
os.path.dirname(os.path.realpath(sys.argv[0])),
"requirement_mappings_multiversion.json",
)

directory_generator(arg_lis[1:])
Expand Down
17 changes: 13 additions & 4 deletions docker/multiversion_testing_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,7 @@ pymongo==4.3.3
redis==4.3.4
matplotlib==3.5.2
opencv-python==4.6.0.66 # mod_name=cv2
tensorflow-probability==0.17.0 # mod_name=tensorflow_probability
functorch==0.1.1
scipy==1.8.1
dm-haiku==0.0.6 # mod_name=haiku
pydriller
tqdm
coverage
Expand All @@ -20,4 +17,16 @@ colorama
packaging
nvidia-ml-py<=11.495.46 # mod_name=pynvml
paddle-bfloat
jsonpickle
jsonpickle
ml_dtypes
diskcache
google-auth # mod_name=google.auth
requests
pyvis
dill
scikit-learn # mod_name=sklearn
pandas
pyspark
autoflake # for backend generation
snakeviz # for profiling
cryptography
2 changes: 1 addition & 1 deletion docker/requirement_mappings_multiversion.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@

{
"tensorflow": [
"tensorflow-probability"
{"tensorflow-probability":{"2.12.0":"0.20.0","2.11.0":"0.19.0"}}
],
"jax": ["dm-haiku", "flax",{"jaxlib": {"0.4.10": "0.4.10","0.4.8": "0.4.7"}}],
"numpy": ["numpy"],
Expand Down
2 changes: 1 addition & 1 deletion docs/overview/contributing/the_basics.rst
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ c. Comment on the ToDo list issue with a reference to your new issue like so:
At some point after your comment is made, your issue will automatically be added to the ToDo list and the comment will be deleted.
No need to wait for this to happen before progressing to the next stage. Don’t comment anything else on these ToDo issues, which should be kept clean with comments only as described above.

d. Start working on the task, and create a PR as soon as you have a full or partial solution, and then directly reference the issue in the pull request by adding the following content to the description of the PR:
d. Start working on the task, and open a PR as soon as you have a full or partial solution. When you open the PR, make sure to follow the `conventional commits format <https://www.conventionalcommits.org/en/v1.0.0/>`_, and then directly reference the issue in the pull request by adding the following content to the description of the PR:

:code:`Close #Issue_number`

Expand Down
4 changes: 2 additions & 2 deletions docs/overview/design/building_blocks.rst
Original file line number Diff line number Diff line change
Expand Up @@ -218,10 +218,10 @@ The contents of this function are as follows:
# if no global backend exists, we try to infer the backend from the arguments
f = _determine_backend_from_args(list(args) + list(kwargs.values()))
if f is not None:
if verbosity.level > 0:
verbosity.cprint("Using backend from type: {}".format(f))
implicit_backend = f.current_backend_str()
return f
if verbosity.level > 0:
verbosity.cprint("Using backend from type: {}".format(f))
return importlib.import_module(_backend_dict[implicit_backend])
If a global backend framework has been previously set using for example :code:`ivy.set_backend('tensorflow')`, then this globally set backend is returned.
Expand Down
4 changes: 4 additions & 0 deletions ivy/data_classes/array/array.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,10 @@ def _init(self, data, dynamic_backend=None):
self._data = data
elif isinstance(data, np.ndarray):
self._data = ivy.asarray(data)._data
elif ivy.is_ivy_sparse_array(data):
self._data = data._data
elif ivy.is_native_sparse_array(data):
self._data = data._data
else:
raise ivy.utils.exceptions.IvyException(
"data must be ivy array, native array or ndarray"
Expand Down
42 changes: 42 additions & 0 deletions ivy/data_classes/array/experimental/losses.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,3 +145,45 @@ def smooth_l1_loss(
return ivy.smooth_l1_loss(
self._data, target, beta=beta, reduction=reduction, out=out
)

def soft_margin_loss(
    self: ivy.Array,
    target: Union[ivy.Array, ivy.NativeArray],
    /,
    *,
    reduction: Optional[str] = "mean",
    out: Optional[ivy.Array] = None,
) -> ivy.Array:
    """
    ivy.Array instance method variant of ivy.soft_margin_loss. This method simply
    wraps the function, and so the docstring for ivy.soft_margin_loss also applies
    to this method with minimal changes.

    Parameters
    ----------
    self
        input array containing true labels.
    target
        input array containing targeted labels.
    reduction
        ``'none'``: No reduction will be applied to the output.
        ``'mean'``: The output will be averaged.
        ``'sum'``: The output will be summed. Default: ``'mean'``.
    out
        optional output array, for writing the result to. It must have a shape
        that the inputs broadcast to.

    Returns
    -------
    ret
        The soft margin loss between the true and targeted labels.

    Examples
    --------
    >>> x = ivy.array([1.0, 1.0, 0.0])
    >>> y = ivy.array([0.7, 0.8, 0.2])
    >>> z = x.soft_margin_loss(y, reduction="none")
    >>> print(z)
    ivy.array([0.40318605, 0.37110066, 0.69314718])
    """
    return ivy.soft_margin_loss(self._data, target, reduction=reduction, out=out)
4 changes: 3 additions & 1 deletion ivy/data_classes/container/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -4199,7 +4199,9 @@ def __getstate__(self):
state_dict["_config_in"] = config_in
config = copy.copy(state_dict["_config"])
config["ivyh"] = (
config["ivyh"].current_backend_str() if config["ivyh"] is not None else None
config["ivyh"].current_backend_str()
if getattr(config, "ivyh", None) is not None
else None
)
state_dict["_config"] = config
return state_dict
Expand Down
118 changes: 118 additions & 0 deletions ivy/data_classes/container/experimental/losses.py
Original file line number Diff line number Diff line change
Expand Up @@ -494,3 +494,121 @@ def huber_loss(
map_sequences=map_sequences,
out=out,
)

@staticmethod
def _static_soft_margin_loss(
    input: Union[ivy.Container, ivy.Array, ivy.NativeArray],
    target: Union[ivy.Container, ivy.Array, ivy.NativeArray],
    /,
    *,
    reduction: Optional[Union[str, ivy.Container]] = "mean",
    key_chains: Optional[Union[List[str], Dict[str, str], ivy.Container]] = None,
    to_apply: Union[bool, ivy.Container] = True,
    prune_unapplied: Union[bool, ivy.Container] = False,
    map_sequences: Union[bool, ivy.Container] = False,
    out: Optional[ivy.Container] = None,
) -> ivy.Container:
    """
    ivy.Container static method variant of ivy.soft_margin_loss. This method simply
    wraps the function, and so the docstring for ivy.soft_margin_loss also applies
    to this method with minimal changes.

    Parameters
    ----------
    input
        input array or container containing input labels.
    target
        input array or container containing the target labels.
    reduction
        the reduction method. Default: "mean".
    key_chains
        The key-chains to apply or not apply the method to. Default is None.
    to_apply
        If True, the method will be applied to key_chains, otherwise key_chains
        will be skipped. Default is True.
    prune_unapplied
        Whether to prune key_chains for which the function was not applied.
        Default is False.
    map_sequences
        Whether to also map method to sequences (lists, tuples).
        Default is False.
    out
        optional output container, for writing the result to. It must have a shape
        that the inputs broadcast to.

    Returns
    -------
    ret
        The soft margin loss between the given distributions.
    """
    return ContainerBase.cont_multi_map_in_function(
        "soft_margin_loss",
        input,
        target,
        reduction=reduction,
        key_chains=key_chains,
        to_apply=to_apply,
        prune_unapplied=prune_unapplied,
        map_sequences=map_sequences,
        out=out,
    )

def soft_margin_loss(
    self: ivy.Container,
    target: Union[ivy.Container, ivy.Array, ivy.NativeArray],
    /,
    *,
    reduction: Optional[Union[str, ivy.Container]] = "mean",
    key_chains: Optional[Union[List[str], Dict[str, str], ivy.Container]] = None,
    to_apply: Union[bool, ivy.Container] = True,
    prune_unapplied: Union[bool, ivy.Container] = False,
    map_sequences: Union[bool, ivy.Container] = False,
    out: Optional[ivy.Container] = None,
) -> ivy.Container:
    """
    ivy.Container instance method variant of ivy.soft_margin_loss. This method
    simply wraps the function, and so the docstring for ivy.soft_margin_loss also
    applies to this method with minimal changes.

    Parameters
    ----------
    self
        input container containing input labels.
    target
        input array or container containing the target labels.
    reduction
        the reduction method. Default: "mean".
    key_chains
        The key-chains to apply or not apply the method to. Default is None.
    to_apply
        If True, the method will be applied to key_chains, otherwise key_chains
        will be skipped. Default is True.
    prune_unapplied
        Whether to prune key_chains for which the function was not applied.
        Default is False.
    map_sequences
        Whether to also map method to sequences (lists, tuples).
        Default is False.
    out
        optional output container, for writing the result to. It must have a shape
        that the inputs broadcast to.

    Returns
    -------
    ret
        The soft margin loss between the given distributions.
    """
    return self._static_soft_margin_loss(
        self,
        target,
        reduction=reduction,
        key_chains=key_chains,
        to_apply=to_apply,
        prune_unapplied=prune_unapplied,
        map_sequences=map_sequences,
        out=out,
    )
17 changes: 17 additions & 0 deletions ivy/functional/backends/jax/experimental/losses.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,3 +39,20 @@ def smooth_l1_loss(
return jnp.sum(loss)
else:
return loss


def soft_margin_loss(
    input: JaxArray,
    target: JaxArray,
    /,
    *,
    reduction: Optional[str] = "mean",
) -> JaxArray:
    """JAX backend implementation of the soft margin loss.

    The element-wise loss is ``log(1 + exp(-input * target))``; the chosen
    reduction is applied afterwards.

    Parameters
    ----------
    input
        array of predicted scores.
    target
        array of target labels (expected in {-1, 1}).
    reduction
        ``'none'`` returns the per-element losses, ``'mean'`` averages them,
        ``'sum'`` sums them. Default: ``'mean'``.
    """
    # BUG FIX: the previous version divided the summed loss by the element
    # count *before* applying the reduction, so reduction="none" (and "sum")
    # returned the scalar mean instead of the per-element losses. Compute the
    # element-wise loss first and reduce only afterwards.
    loss = jnp.log1p(jnp.exp(-input * target))

    if reduction == "mean":
        return jnp.mean(loss)
    elif reduction == "sum":
        return jnp.sum(loss)
    else:
        return loss
Loading

0 comments on commit 2d304c6

Please sign in to comment.