We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 12aba87 · commit 99fbdf0 — Copy full SHA for 99fbdf0
setup.py
@@ -3,7 +3,7 @@
3
setup(
4
name = 'memory-efficient-attention-pytorch',
5
packages = find_packages(exclude=[]),
6
- version = '0.0.11',
+ version = '0.0.12',
7
license='MIT',
8
description = 'Memory Efficient Attention - Pytorch',
9
author = 'Phil Wang',
0 commit comments