Commit ec14765
1 parent baa0ce8
Showing 1 changed file with 2 additions and 2 deletions.
@@ -9,14 +9,14 @@
 setup(
   name = 'LongPegasus',
   packages = ['LongPegasus'],
-  version = '0.2',
+  version = '0.3',
   license='MIT',
   description = 'A Longer Version of Pegasus TF Model For Abstractive Summarization',
   long_description='This package is used for inducing Longformer self-attention over the base Pegasus abstractive summarization model to increase the token limit and performance. Pegasus is a large Transformer-based encoder-decoder model with a new pre-training objective adapted to abstractive summarization. More specifically, the pre-training objective, called "Gap Sentence Generation (GSG)", consists of masking important sentences from a document and generating these gap-sentences. The Longformer, on the other hand, is a Transformer which replaces the full-attention mechanism (quadratic dependency) with a novel attention mechanism which scales linearly with the input sequence length. Consequently, Longformer can process sequences up to 4,096 tokens long (8 times longer than BERT, which is limited to 512 tokens). This package plugs the Longformer attention mechanism into Pegasus in order to perform abstractive summarization on long documents. The base modules are built on the TensorFlow platform.',
   author = 'ABHILASH MAJUMDER',
   author_email = '[email protected]',
   url = 'https://github.com/abhilash1910/LongPegasus',
-  download_url = 'https://github.com/abhilash1910/LongPegasus/archive/v_02.tar.gz',
+  download_url = 'https://github.com/abhilash1910/LongPegasus/archive/v_03.tar.gz',
   keywords = ['Longformer','Self Attention','Global Attention','Gap sentence generation','Pegasus','Transformer','Encoder Decoder','Tensorflow'],
   install_requires=[
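The long_description above summarizes the technique: a Pegasus encoder-decoder pre-trained with Gap Sentence Generation, extended with Longformer's linearly scaling attention for long inputs. For context, below is a minimal sketch of the base Pegasus summarization flow using the Hugging Face transformers API; the google/pegasus-xsum checkpoint and the TFPegasusForConditionalGeneration class are standard transformers identifiers, not part of this commit, and the LongPegasus-specific attention swap is not shown.

# Minimal sketch, assuming the Hugging Face transformers package is installed.
# This shows the base Pegasus abstractive-summarization flow that LongPegasus
# extends; it does not use the LongPegasus API itself.
from transformers import PegasusTokenizer, TFPegasusForConditionalGeneration

model_name = "google/pegasus-xsum"  # a public Pegasus checkpoint, not part of this repo
tokenizer = PegasusTokenizer.from_pretrained(model_name)
model = TFPegasusForConditionalGeneration.from_pretrained(model_name)

document = "A long article to be summarized ..."

# Base Pegasus truncates input at its configured maximum length; raising that
# limit via Longformer-style attention is the stated goal of this package.
inputs = tokenizer(document, truncation=True, return_tensors="tf")
summary_ids = model.generate(**inputs)
print(tokenizer.batch_decode(summary_ids, skip_special_tokens=True)[0])

Since this commit bumps the package version to 0.3, the matching install would be pip install LongPegasus==0.3, assuming the 0.3 release was published to PyPI.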