
Commit

add Built on top of AdapterLoRa packages
yunss-ML committed Aug 25, 2023
1 parent f40f78a commit c513ffe
Showing 7 changed files with 38 additions and 27 deletions.
Binary file modified assets/LoRa.png
Binary file removed assets/lora.png
Binary file not shown.
Empty file added exmpales/AdapterLoRa-OFA.ipynb
Empty file.
22 changes: 0 additions & 22 deletions exmpales/Usage.py

This file was deleted.

File renamed without changes.
22 changes: 22 additions & 0 deletions exmpales/transfomerEncoder.py
@@ -0,0 +1,22 @@
import os
import sys

import torch
import torch.nn as nn

# Make the repository root importable so core.Quantized can be resolved.
current_dir = os.path.dirname(__file__)
target_dir = os.path.abspath(os.path.join(current_dir, ".."))
sys.path.insert(0, target_dir)

from core.Quantized import AdapterLoRa

# Base model: a single Transformer encoder layer.
model = nn.TransformerEncoderLayer(d_model=512, nhead=8)

# Wrap the model and register the sublayers that should receive LoRA adapters.
adapter_model = AdapterLoRa(model, method="LoRa", Rank=4)
adapter_model.add_layer("self_attn")
adapter_model.add_layer("linear1")
adapter_model.add_layer("linear2")
adapter_model.reconstruct_model()
model = adapter_model.implement_lora(verbose=True)
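As a quick sanity check (not part of this commit), the adapted layer can be exercised like any other PyTorch module. The sketch below assumes implement_lora() returns a standard nn.Module wrapping the same nn.TransformerEncoderLayer; shapes and parameter counts are only illustrative.

# Hypothetical follow-up, assuming `model` above behaves like a regular nn.Module.
x = torch.randn(10, 32, 512)  # (sequence length, batch size, d_model)
out = model(x)
print(out.shape)  # expected: torch.Size([10, 32, 512])

# How many parameters remain trainable after the LoRA adaptation?
trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
total = sum(p.numel() for p in model.parameters())
print(f"trainable parameters: {trainable} / {total}")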



21 changes: 16 additions & 5 deletions setup.py
@@ -18,18 +18,29 @@

setuptools.setup(
name="AdapterLoRa",
version="1.1.4",
version="1.1.5",
author="Youness EL BRAG",
author_email="[email protected]",
description="A tool for adapting larger Transformer-based models with quantization, built on top of the LoRA and LoRA-Torch libraries",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/youness-elbrag/AdapterLoRa/",
packages=setuptools.find_packages(),
keywords=['Quantization', 'AdapterLLM', 'PEFT'],
install_requires=[
'git+https://github.com/Baijiong-Lin/LoRA-Torch',
'git+https://github.com/microsoft/LoRA',
],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
],
python_requires='>=3.7',
)
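Note that setuptools and pip generally reject bare VCS URLs inside install_requires. A PEP 508 direct-reference form, as in the sketch below, is the usual workaround; the distribution names "loratorch" and "loralib" are assumptions here and would need to be checked against each repository's own packaging metadata.

import setuptools

# Minimal sketch only: the same git dependencies expressed as PEP 508 direct
# references. The project names to the left of "@" are assumed, not verified.
setuptools.setup(
    name="AdapterLoRa",
    version="1.1.5",
    install_requires=[
        "loratorch @ git+https://github.com/Baijiong-Lin/LoRA-Torch",
        "loralib @ git+https://github.com/microsoft/LoRA",
    ],
)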
