From 0401fff04f1e3fecd75c4d17281fcac796048356 Mon Sep 17 00:00:00 2001
From: geirawsm
Date: Fri, 6 Oct 2023 22:22:06 +0200
Subject: [PATCH] Updated test-files

---
 sausage_bot/test/datetime_handling_test.py | 48 ++++++++++++----------
 sausage_bot/test/feeds_core_test.py        | 25 +++--------
 sausage_bot/test/net_io_test.py            | 36 ----------------
 3 files changed, 31 insertions(+), 78 deletions(-)

diff --git a/sausage_bot/test/datetime_handling_test.py b/sausage_bot/test/datetime_handling_test.py
index 1227c1b..267665f 100755
--- a/sausage_bot/test/datetime_handling_test.py
+++ b/sausage_bot/test/datetime_handling_test.py
@@ -5,34 +5,38 @@
 
 
 def test_correct_date_converting():
-    short_assertion = '2022-05-17T00:00:00+02:00'
-    long_assertion = '2022-05-17T11:22:00+02:00'
-    assert str(dt.make_dt('17.05.22')) == short_assertion
-    assert str(dt.make_dt('17.05.20 22')) == short_assertion
-    assert str(dt.make_dt('17.05.2022 1122')) == long_assertion
-    assert str(dt.make_dt('17.05.2022, 11.22')) == long_assertion
-    assert str(dt.make_dt('17.05.2022, 1122')) == long_assertion
-    assert str(dt.make_dt('17.05.20 22, 11.22')) == long_assertion
+    assert str(dt.make_dt('17.05.22')) == '2022-05-17T02:00:00+02:00'
+    assert str(dt.make_dt('17.05.20 22')) == '2022-05-17T02:00:00+02:00'
+    assert str(dt.make_dt('17.05.2022 1322')) == '2022-05-17T15:22:00+02:00'
+    assert str(dt.make_dt('17.05.2022, 13.22')) == '2022-05-17T15:22:00+02:00'
+    assert str(dt.make_dt('17.05.2022, 1322')) == '2022-05-17T15:22:00+02:00'
+    assert str(dt.make_dt('17.05.20 22, 13.22')) == '2022-05-17T15:22:00+02:00'
 
 
 def test_change_dt():
-    orig_date = dt.make_dt('2022-05-17T11:22:00+02:00Z')
+    orig_date = dt.make_dt('17.05.2022, 13.22')
     # All OK
-    plus_nineteen_years = dt.make_dt('2041-05-17T11:22:00+02:00Z')
-    minus_four_months = dt.make_dt('2022-01-17T11:22:00+02:00Z')
-    plus_two_days = dt.make_dt('2022-05-19T11:22:00+02:00Z')
-    minus_three_hours = dt.make_dt('2022-05-17T08:22:00+02:00Z')
-    plus_thirty_minutes = dt.make_dt('2022-05-17T11:52:00+02:00Z')
-    plus_two_and_half_hours = dt.make_dt('2022-05-17T13:52:00+02:00Z')
+    plus_nineteen_years = dt.make_dt('17.05.2041, 13:22')
+    minus_four_months = dt.make_dt('17.01.2022, 14.22')
+    plus_two_days = dt.make_dt('19.05.2022, 13:22')
+    minus_three_hours = dt.make_dt('17.05.2022, 10.22')
+    plus_thirty_minutes = dt.make_dt('17.05.2022, 13.52')
     # All OK
-    assert dt.change_dt(orig_date, 'add', 19, 'years') == plus_nineteen_years
-    assert dt.change_dt(orig_date, 'remove', 4, 'months') == minus_four_months
-    assert dt.change_dt(orig_date, 'add', 2, 'days') == plus_two_days
-    assert dt.change_dt(orig_date, 'remove', 3, 'hours') == minus_three_hours
-    assert dt.change_dt(orig_date, 'add', 30, 'minutes') == plus_thirty_minutes
     assert dt.change_dt(
-        orig_date, 'add', 2.5, 'hours'
-    ) == plus_two_and_half_hours
+        orig_date, 'add', 19, 'years'
+    ) == plus_nineteen_years
+    assert dt.change_dt(
+        orig_date, 'remove', 4, 'months'
+    ) == minus_four_months
+    assert dt.change_dt(
+        orig_date, 'add', 2, 'days'
+    ) == plus_two_days
+    assert dt.change_dt(
+        orig_date, 'remove', 3, 'hours'
+    ) == minus_three_hours
+    assert dt.change_dt(
+        orig_date, 'add', 30, 'minutes'
+    ) == plus_thirty_minutes
     # Fails
     assert dt.change_dt(orig_date, 'add', 'two', 'days') is None

diff --git a/sausage_bot/test/feeds_core_test.py b/sausage_bot/test/feeds_core_test.py
index 9067d49..a5522e2 100755
--- a/sausage_bot/test/feeds_core_test.py
+++ b/sausage_bot/test/feeds_core_test.py
@@ -1,29 +1,14 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 import pytest
-import requests
 from ..util import feeds_core, file_io
 
 
 def test_check_similarity_return_number_or_none():
-    link1 = 'https://www.kode24.no/artikkel/ny-utviklingsavdeling-skal-reovlusjonere-mattilsynet-vi-ma-torre-a-vaere-mer-risikovillige/7619499'
-    link2 = 'https://www.kode24.no/artikkel/ny-utviklingsavdeling-skal-revolusjonere-mattilsynet-vi-ma-torre-a-vaere-mer-risikovillige/76194994'
+    link1 = 'https://www.kode24.no/artikkel/ny-utviklingsavdeling-skal-reo'\
+        'vlusjonere-mattilsynet-vi-ma-torre-a-vaere-mer-risikovillige/7619499'
+    link2 = 'https://www.kode24.no/artikkel/ny-utviklingsavdeling-skal-rev'\
+        'olusjonere-mattilsynet-vi-ma-torre-a-vaere-mer-risikovillige/76194994'
     link3 = False
-    assert file_io.check_similarity(link1, link2) is True
+    assert file_io.check_similarity(link1, link2) is link2
     assert file_io.check_similarity(link1, link3) is None
-
-
-def test_process_feeds_correct():
-    '''
-    `test_urls` should consist of several rss feeds that are set up
-    differently to really test the function `get_feed_links`
-    '''
-    test_urls = [
-        'https://rss.kode24.no/',
-        'http://lovdata.no/feed?data=newArticles&type=RSS',
-        'https://wp.blgr.app/feed',
-        'https://www.vif-fotball.no/rss-nyheter'
-    ]
-    for url in test_urls:
-        test_feed = feeds_core.get_feed_links(url)
-        assert type(test_feed) is list
diff --git a/sausage_bot/test/net_io_test.py b/sausage_bot/test/net_io_test.py
index 9b4de09..62bb444 100755
--- a/sausage_bot/test/net_io_test.py
+++ b/sausage_bot/test/net_io_test.py
@@ -1,45 +1,9 @@
 #!/usr/bin/env python3
 # -*- coding: UTF-8 -*-
 import pytest
-import requests
-import bs4
 from ..util import net_io
 
 
-def test_get_link():
-    url_ok_full = 'https://www.digi.no'
-    url_ok_short = 'www.vg.no'
-    url_ok_shorter = 'vg.no'
-    url_fail_scheme_error = 'htts://www.vg.no'
-    url_fail_no_tld = 'www.vgno'
-    link_not_string = 123
-
-    assert type(net_io.get_link(url_ok_full)) is requests.models.Response
-    assert type(net_io.get_link(url_ok_short)) == requests.models.Response
-    assert type(net_io.get_link(url_ok_shorter)) == requests.models.Response
-    assert net_io.get_link(url_fail_scheme_error) is None
-    assert net_io.get_link(url_fail_no_tld) is None
-    assert net_io.net_io.get_link(link_not_string) is None
-
-
-def test_scrape_page():
-    url_ok1 = 'www.digi.no'
-    url_ok2 = 'www.vg.no'
-    url_ok3 = 'vg.no'
-    url_fail1 = 'htts://www.vg.no'
-    url_fail2 = 'www.vgno'
-    url_fail3 = 123
-
-    scrape_page = net_io.parse()
-
-    assert type(scrape_page(url_ok1)) == bs4.BeautifulSoup
-    assert type(scrape_page(url_ok2)) == bs4.BeautifulSoup
-    assert type(scrape_page(url_ok3)) == bs4.BeautifulSoup
-    assert scrape_page(url_fail1) is None
-    assert scrape_page(url_fail2) is None
-    assert scrape_page(url_fail3) is None
-
-
 def test_make_event_start_stop():
     date_yes, time_yes = ('17.05.2022', '21:00')
     date_yes, time_no = ('17.05.2022', '671:00')
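
Note on the reworked test_change_dt assertions above: they encode a simple contract for change_dt(date, action, count, unit) — shift the datetime by count units when action is 'add' or 'remove', and return None for a non-numeric count (the trailing "# Fails" context line). The sketch below only illustrates that contract using dateutil; it is not the project's actual implementation, and the function name change_dt_sketch is hypothetical.

# Illustration only (not sausage_bot's implementation): an approximation of
# the add/remove behaviour asserted by test_change_dt, using dateutil.
from dateutil.relativedelta import relativedelta

def change_dt_sketch(date_in, change, count, unit):
    # A non-numeric count is rejected, matching
    # `assert dt.change_dt(orig_date, 'add', 'two', 'days') is None`.
    if isinstance(count, bool) or not isinstance(count, (int, float)):
        return None
    if change not in ('add', 'remove'):
        return None
    # unit is a plural relativedelta keyword ('years', 'months', 'days',
    # 'hours', 'minutes'), exactly as passed in the test calls.
    delta = relativedelta(**{unit: count})
    return date_in + delta if change == 'add' else date_in - delta

With a timezone-aware datetime for 17.05.2022 13:22, change_dt_sketch(d, 'add', 19, 'years') lands on 17.05.2041 13:22 and change_dt_sketch(d, 'remove', 3, 'hours') on 17.05.2022 10:22, mirroring the expected values built with make_dt in the hunk.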