init mimm & move tests folder (#1804)
lvyufeng authored Nov 8, 2024
1 parent 6a2b4f1 commit a5cfaaf
Showing 875 changed files with 10,657 additions and 1,388 deletions.
7 changes: 6 additions & 1 deletion .github/pylint.conf
@@ -211,7 +211,12 @@ disable=raw-checker-failed,
 pointless-string-statement,
 redundant-keyword-arg,
 too-many-function-args,
-assignment-from-none
+assignment-from-none,
+use-dict-literal,
+consider-using-generator,
+fixme,
+use-a-generator,
+nested-min-max
 
 # Enable the message, report, category or checker with the given id(s). You can
 # either give multiple identifier separated by comma (,) or put this option
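For context, the checks newly added to the disable list correspond to pylint messages like the following; this is a hypothetical snippet (not from the repository) showing code each check would otherwise flag:

# use-dict-literal: calling dict(...) where a {...} literal is preferred
config = dict(lr=1e-3, epochs=10)

# consider-using-generator: list comprehension inside sum()/min()/max()
total = sum([v * v for v in range(10)])

# use-a-generator: list comprehension inside any()/all()
has_large = any([v > 5 for v in range(10)])

# nested-min-max: nested calls collapsible to min(1, 2, 3)
lowest = min(1, min(2, 3))

# fixme: flags TODO/FIXME comments such as this one
# TODO: revisit these thresholds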
39 changes: 5 additions & 34 deletions .github/workflows/ci_pipeline.yaml
@@ -8,9 +8,9 @@ on:
     branches: [ "master" ]
     paths:
       - 'mindnlp/**'
-      - 'tests/ut/**'
+      - 'tests/**'
       - '!mindnlp/dataset/**'
-      - '!tests/ut/dataset/**'
+      - '!tests/dataset/**'
       - '!docs/**'
       - '.github/workflows/**'
   push:
@@ -80,7 +80,7 @@ jobs:
           pip install -r download.txt
       - name: Test with pytest
         run: |
-          pytest -c pytest.ini -m 'not download and not gpu_only' --ignore=tests/ut/transformers tests/ut
+          pytest -c pytest.ini -m 'not download and not gpu_only' --ignore=tests/transformers tests/ut
   release-test:
     needs: pylint-check
@@ -104,7 +104,7 @@ jobs:
           pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/${{matrix.ms_version}}/MindSpore/unified/x86_64/mindspore-${{matrix.ms_version}}-cp39-cp39-linux_x86_64.whl --trusted-host ms-release.obs.cn-north-4.myhuaweicloud.com
       - name: Test with pytest
         run: |
-          pytest -c pytest.ini -m 'not download and not gpu_only' --ignore=tests/ut/transformers tests/ut
+          pytest -c pytest.ini -m 'not download and not gpu_only' --ignore=tests/transformers tests/ut
           # pytest -c pytest.ini -m 'not download and not gpu_only' tests/ut
   transformers-model-test:
@@ -133,36 +133,7 @@ jobs:
           pip install -r download.txt
       - name: Test with pytest
         run: |
-          pytest -vs tests/ut/transformers/models/${{ matrix.alpha }}*/test_modeling*
-  st-test:
-    needs: ut-test
-    strategy:
-      matrix:
-        os: [ubuntu-latest, macos-latest]
-        python: [3.9]
-    runs-on: ${{ matrix.os }}
-    steps:
-      - uses: actions/checkout@v3
-      - name: Set up Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python }}
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip==24.0
-          pip install -r requirements/requirements.txt
-      - name: Install MindSpore
-        shell: bash
-        env:
-          OS: ${{ matrix.os }}
-          PYTHON: ${{ matrix.python }}
-        run: |
-          python .github/install_mindspore.py
-          pip install -r download.txt
-      - name: Test ST with pytest
-        run: |
-          pytest -c pytest.ini tests/st
+          pytest -vs tests/transformers/models/${{ matrix.alpha }}*/test_modeling*
   kaggle-gpu-test:
     needs: pylint-check
4 changes: 4 additions & 0 deletions mindnlp/common/__init__.py
@@ -0,0 +1,4 @@
+"""
+Common modules shared by all submodules (transformers, mimm, peft, trl, diffusers, etc.),
+including: activations, optimization, etc.
+"""
@@ -17,27 +17,51 @@
 from collections import OrderedDict
 from mindspore import Tensor
 from mindnlp.core import nn, ops
+from mindnlp.core.nn import functional as F
 
+def gelu_tanh(x: Tensor, inplace: bool = False) -> Tensor:
+    return F.gelu(x, approximate='tanh')
 
-class QuickGELUActivation(nn.Module):
+def quick_gelu(x: Tensor, inplace: bool = False) -> Tensor:
+    return x * ops.sigmoid(1.702 * x)
 
+def hard_mish(x, inplace: bool = False):
+    """ Hard Mish
+    Experimental, based on notes by Mish author Diganta Misra at
+    https://github.com/digantamisra98/H-Mish/blob/0da20d4bc58e696b6803f2523c58d3c8a82782d0/README.md
+    """
+    if inplace:
+        return x.mul(0.5 * (x + 2).clamp(min=0, max=2))
+    else:
+        return 0.5 * x * (x + 2).clamp(min=0, max=2)
 
 
+class HardMish(nn.Module):
+    def __init__(self, inplace: bool = False):
+        super(HardMish, self).__init__()
+        self.inplace = inplace
 
+    def forward(self, x):
+        return hard_mish(x, self.inplace)
 
+class GELUTanh(nn.Module):
+    """Applies the Gaussian Error Linear Units function (w/ dummy inplace arg)
+    """
+    def __init__(self, inplace: bool = False):
+        super(GELUTanh, self).__init__()
 
+    def forward(self, input: Tensor) -> Tensor:
+        return F.gelu(input, approximate='tanh')
 
+class QuickGELU(nn.Module):
     """
     Applies GELU approximation that is fast but somewhat inaccurate. See: https://github.com/hendrycks/GELUs
     """
     def forward(self, input: Tensor) -> Tensor:
-        r"""
-        forwards the QuickGELU activation function.
-        Args:
-            self (QuickGELUActivation): The instance of the QuickGELUActivation class.
-            input (Tensor): The input tensor to apply the QuickGELU activation to.
-        Returns:
-            Tensor: The tensor resulting from applying the QuickGELU activation to the input tensor.
-        Raises:
-            None
-        """
-        return input * ops.sigmoid(1.702 * input)
+        return quick_gelu(input)
 
 
 class ClippedGELUActivation(nn.Module):
@@ -288,7 +312,7 @@ def __getitem__(self, key):
     "gelu_python": nn.GELU,
     "linear": nn.ReLU,
     "mish": nn.Mish,
-    "quick_gelu": QuickGELUActivation,
+    "quick_gelu": QuickGELU,
     "relu": nn.ReLU,
     "relu2": ReLUSquaredActivation,
     "relu6": nn.ReLU6,
@@ -313,7 +337,6 @@ def get_activation(activation_string):
 gelu_new = get_activation("gelu_new")
 gelu = get_activation("gelu")
 gelu_fast = get_activation("gelu_fast")
-quick_gelu = get_activation("quick_gelu")
 silu = get_activation("silu")
 mish = get_activation("mish")
 linear_act = get_activation("linear")
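The two approximations introduced above are standard: quick_gelu computes x * sigmoid(1.702 * x), and gelu_tanh defers to F.gelu(x, approximate='tanh'), the tanh-based GELU approximation. A standalone plain-Python sketch of the underlying math (reference formulas only; no MindSpore required, and none of these helper names exist in the repository):

import math

def gelu_exact(x):
    # exact GELU: 0.5 * x * (1 + erf(x / sqrt(2)))
    return 0.5 * x * (1.0 + math.erf(x / math.sqrt(2.0)))

def quick_gelu_ref(x):
    # mirrors quick_gelu: x * sigmoid(1.702 * x)
    return x / (1.0 + math.exp(-1.702 * x))

def gelu_tanh_ref(x):
    # the tanh approximation behind gelu_tanh / GELUTanh
    return 0.5 * x * (1.0 + math.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * x ** 3)))

for v in (-2.0, -0.5, 0.5, 2.0):
    print(f"x={v:+.1f} exact={gelu_exact(v):+.4f} quick={quick_gelu_ref(v):+.4f} tanh={gelu_tanh_ref(v):+.4f}")

The tanh form tracks exact GELU very closely, while quick_gelu trades roughly an order of magnitude more error (on the order of 1e-2 at worst) for a single cheap sigmoid.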
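hard_mish, added above, is piecewise: zero for x <= -2, the identity for x >= 0, and the quadratic 0.5 * x * (x + 2) in between. A scalar plain-Python sketch of that shape, plus a hedged sketch of registry usage (the import path is an assumption inferred from this commit; only get_activation and the "quick_gelu" key appear in the diff):

def hard_mish_ref(x):
    # scalar reference for hard_mish: 0.5 * x * clamp(x + 2, 0, 2)
    return 0.5 * x * min(max(x + 2.0, 0.0), 2.0)

assert hard_mish_ref(-3.0) == 0.0    # clamped region: zero for x <= -2
assert hard_mish_ref(-1.0) == -0.5   # quadratic region: 0.5 * (-1) * 1
assert hard_mish_ref(4.0) == 4.0     # identity region for x >= 0

# Hypothetical lookup through the registry changed above:
# from mindnlp.common.activations import get_activation  # path assumed
# act = get_activation("quick_gelu")  # resolves to QuickGELU after this change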
File renamed without changes.
