Forked from lucidrains/flash-attention-jax
-
Notifications
You must be signed in to change notification settings - Fork 9
/
setup.py
31 lines (30 loc) · 827 Bytes
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
from setuptools import setup, find_packages
# Distribution metadata for the flash-attention-jax package, collected in one
# mapping so it is easy to scan and extend before being handed to setuptools.
_PACKAGE_META = dict(
    name='flash-attention-jax',
    packages=find_packages(exclude=[]),
    version='0.2.0',
    license='MIT',
    description='Flash Attention - in Jax',
    author='Phil Wang',
    author_email='[email protected]',
    long_description_content_type='text/markdown',
    url='https://github.com/lucidrains/flash-attention-jax',
    # PyPI search keywords.
    keywords=[
        'artificial intelligence',
        'deep learning',
        'transformers',
        'attention mechanism',
        'jax',
    ],
    # Runtime dependencies pulled in on `pip install`.
    install_requires=[
        'einops',
        'jax>=0.2.20',
    ],
    # Trove classifiers shown on the PyPI project page.
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.6',
    ],
)

setup(**_PACKAGE_META)