Skip to content

Commit

Permalink
add basic scaffold
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Oct 27, 2020
1 parent f3048f6 commit 8685463
Show file tree
Hide file tree
Showing 3 changed files with 45 additions and 0 deletions.
28 changes: 28 additions & 0 deletions setup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
from setuptools import setup, find_packages

# Packaging metadata for the x-transformers library (initial scaffold release).
setup(
    name='x-transformers',
    packages=find_packages(),
    version='0.0.1',
    license='MIT',
    description='X-Transformers - Pytorch',
    author='Phil Wang',
    author_email='[email protected]',
    url='https://github.com/lucidrains/x-transformers',
    keywords=[
        'artificial intelligence',
        'attention mechanism',
        'transformers',
    ],
    install_requires=[
        'torch>=1.6',
        'einops>=0.3',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.6',
    ],
)
1 change: 1 addition & 0 deletions x_transformers/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
from x_transformers.x_transformers import XTransformer
16 changes: 16 additions & 0 deletions x_transformers/x_transformers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import torch
import torch.nn.functional as F
from torch import nn, einsum
from einops import rearrange

# helpers

def exists(val):
    """Return True when *val* holds a value, i.e. it is not None."""
    return not (val is None)

class XTransformer(nn.Module):
    """Scaffold for the X-Transformer model.

    Currently a placeholder: ``forward`` is the identity function and
    returns its input unchanged. Real attention/feed-forward layers are
    yet to be added.
    """

    def __init__(self):
        # No parameters or submodules yet — just initialize nn.Module state.
        super().__init__()

    def forward(self, x):
        # Identity pass-through until the actual architecture is implemented.
        return x

0 comments on commit 8685463

Please sign in to comment.