From ec1476589ff0d2eeacdd68e42509d80d86a23bcf Mon Sep 17 00:00:00 2001
From: abhilash1910
Date: Wed, 1 Dec 2021 00:08:11 +0530
Subject: [PATCH] Modify setup version

---
 setup.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setup.py b/setup.py
index 3f4f04e..25d9f8c 100644
--- a/setup.py
+++ b/setup.py
@@ -9,14 +9,14 @@
 setup(
   name = 'LongPegasus',
   packages = ['LongPegasus'],
-  version = '0.2',
+  version = '0.3',
   license='MIT',
   description = 'A Longer Version of Pegasus TF Model For Abstractive Summarization',
   long_description='This package is used for inducing longformer self attention over base pegasus abstractive summarization model to increase the token limit and performance.The Pegasus is a large Transformer-based encoder-decoder model with a new pre-training objective which is adapted to abstractive summarization. More specifically, the pre-training objective, called "Gap Sentence Generation (GSG)", consists of masking important sentences from a document and generating these gap-sentences.On the other hand, the Longformer is a Transformer which replaces the full-attention mechanism (quadratic dependency) with a novel attention mechanism which scale linearly with the input sequence length. Consequently, Longformer can process sequences up to 4,096 tokens long (8 times longer than BERT which is limited to 512 tokens).This package plugs Longformers attention mechanism to Pegasus in order to perform abstractive summarization on long documents. The base modules are built on Tensorflow platform.',
   author = 'ABHILASH MAJUMDER',
   author_email = 'debabhi1396@gmail.com',
   url = 'https://github.com/abhilash1910/LongPegasus',
-  download_url = 'https://github.com/abhilash1910/LongPegasus/archive/v_02.tar.gz',
+  download_url = 'https://github.com/abhilash1910/LongPegasus/archive/v_03.tar.gz',
   keywords = ['Longformer','Self Attention','Global Attention','Gap sentence generation','Pegasus','Transformer','Encoder Decoder','Tensorflow'],
   install_requires=[