From 70fc84f66afd0419c8cbf79ff47087dd57997f9d Mon Sep 17 00:00:00 2001 From: Edward Powell Date: Sun, 14 Dec 2014 08:55:31 -0500 Subject: [PATCH] Remove use of deprecated debug function --- github.py | 14 +++++++------- ip.py | 11 ++++++++--- movie.py | 8 ++++++-- radio.py | 11 ++++++++--- rss.py | 35 +++++++++++++++++++---------------- safety.py | 5 ++++- url.py | 2 +- 7 files changed, 53 insertions(+), 33 deletions(-) diff --git a/github.py b/github.py index ce37470d99..5f3e46c6ca 100644 --- a/github.py +++ b/github.py @@ -19,6 +19,9 @@ from willie.module import commands, rule, NOLIMIT import os import re +from willie.logger import get_logger + +LOGGER = get_logger(__name__) issueURL = (r'https?://(?:www\.)?github.com/' '([A-z0-9\-]+/[A-z0-9\-]+)/' @@ -83,7 +86,7 @@ def issue(bot, trigger): data = json.loads(raw) bot.say('Issue #%s posted. %s' % (data['number'], data['html_url'])) - bot.debug(__file__, 'Issue #%s created in %s' % (data['number'], trigger.sender), 'warning') + LOGGER.warning('Issue #%s created in %s', data['number'], trigger.sender) @commands('addtrace', 'addtraceback') @@ -147,7 +150,7 @@ def add_traceback(bot, trigger): data = json.loads(raw) bot.say('Added traceback to issue #%s. %s' % (number, data['html_url'])) - bot.debug(__file__, 'Traceback added to #%s in %s.' 
% (number, trigger.sender), 'warning') + LOGGER.warning('Traceback added to #%s in %s.', number, trigger.sender) @commands('findissue', 'findbug') @@ -190,11 +193,8 @@ def findIssue(bot, trigger): else: body = data['body'].split('\n')[0] except (KeyError): - bot.debug( - 'GitHub KeyErr', - ('API returned an invalid result on query request ' + - trigger.group(2)), - 'always') + LOGGER.exception('API returned an invalid result on query request %s', + trigger.group(2)) bot.say('Invalid result, please try again later.') return NOLIMIT bot.reply('[#%s]\x02title:\x02 %s \x02|\x02 %s' % (data['number'], data['title'], body)) diff --git a/ip.py b/ip.py index cc3524b76b..284025b035 100644 --- a/ip.py +++ b/ip.py @@ -28,6 +28,9 @@ pass from willie.module import commands, example +from willie.logger import get_logger + +LOGGER = get_logger(__name__) def configure(config): @@ -62,7 +65,9 @@ def _find_geoip_db(bot): if os.path.isfile(cities_db) and os.path.isfile(ipasnum_db): return config.ip.GeoIP_db_path else: - bot.debug(__file__, 'GeoIP path configured but DB not found in configured path', 'warning') + LOGGER.warning( + 'GeoIP path configured but DB not found in configured path' + ) if (os.path.isfile(os.path.join(bot.config.homedir, 'GeoLiteCity.dat')) and os.path.isfile(os.path.join(bot.config.homedir, 'GeoIPASNum.dat'))): return bot.config.homedir @@ -70,7 +75,7 @@ def _find_geoip_db(bot): os.path.isfile(os.path.join('/usr/share/GeoIP', 'GeoIPASNum.dat'))): return '/usr/share/GeoIP' elif urlretrieve: - bot.debug(__file__, 'Downloading GeoIP database', 'always') + LOGGER.warning('Downloading GeoIP database') bot.say('Downloading GeoIP database, please wait...') geolite_city_url = 'http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz' geolite_ASN_url = 'http://download.maxmind.com/download/geoip/database/asnum/GeoIPASNum.dat.gz' @@ -97,7 +102,7 @@ def ip(bot, trigger): query = trigger.group(2) db_path = _find_geoip_db(bot) if db_path is False: - 
bot.debug(__file__, 'Can\'t find (or download) usable GeoIP database', 'always') + LOGGER.error('Can\'t find (or download) usable GeoIP database') bot.say('Sorry, I don\'t have a GeoIP database to use for this lookup') return False geolite_city_filepath = os.path.join(_find_geoip_db(bot), 'GeoLiteCity.dat') diff --git a/movie.py b/movie.py index ff74cb836a..b522d416a2 100644 --- a/movie.py +++ b/movie.py @@ -10,6 +10,9 @@ import json import willie.web as web import willie.module +from willie.logger import get_logger + +LOGGER = get_logger(__name__) @willie.module.commands('movie', 'imdb') @@ -29,8 +32,9 @@ def movie(bot, trigger): if 'Error' in data: message = '[MOVIE] %s' % data['Error'] else: - bot.debug(__file__, 'Got an error from the imdb api, search phrase was %s' % word, 'warning') - bot.debug(__file__, str(data), 'warning') + LOGGER.warning( + 'Got an error from the imdb api, search phrase was %s; data was %s', + word, str(data)) message = '[MOVIE] Got an error from imdbapi' else: message = '[MOVIE] Title: ' + data['Title'] + \ diff --git a/radio.py b/radio.py index 6e59659a35..0071449415 100644 --- a/radio.py +++ b/radio.py @@ -12,7 +12,9 @@ from xml.dom.minidom import parseString import willie.web as web from willie.module import commands, OP +from willie.logger import get_logger +LOGGER = get_logger(__name__) def configure(config): """ @@ -65,7 +67,8 @@ def currentSong(bot, trigger): song = web.get(radioURL % 'currentsong') except Exception as e: bot.say('The radio is not responding to the song request.') - bot.debug(__file__, 'Exception while trying to get current song: %s' % e, 'warning') + LOGGER.warning('Exception while trying to get current song.', + exc_info=True) if song: bot.say('Now playing: ' + song) else: @@ -78,7 +81,7 @@ def nextSong(bot, trigger): song = web.get(radioURL % 'nextsong') except Exception as e: bot.say('The radio is not responding to the song request.') - bot.debug(__file__, 'Exception while trying to get next song: %s' % e, 
'warning') + LOGGER.exception('Exception while trying to get next song.') if song: bot.say('Next up: ' + song) else: bot.say('The radio is currently offline.') @@ -117,7 +120,9 @@ def radio(bot, trigger): except Exception as e: checkSongs -= 1 if checkSongs == 0: - bot.debug(__file__, 'Exception while trying to get periodic radio data: %s' % e, 'warning') + LOGGER.exception( + 'Exception while trying to get periodic radio data.' + ) bot.say('The radio is not responding to the song request.') bot.say('Turning off radio data checking.') break diff --git a/rss.py b/rss.py index 008ce3b1fb..fda54d2ce0 100644 --- a/rss.py +++ b/rss.py @@ -17,7 +17,9 @@ from willie.module import commands, interval from willie.config import ConfigurationError +from willie.logger import get_logger +LOGGER = get_logger(__name__) socket.setdefaulttimeout(10) @@ -112,14 +114,14 @@ def _rss_start(self, bot, trigger, c): """Start fetching feeds. Usage: .rss start""" bot.reply("Okay, I'll start fetching RSS feeds..." if not self.running else "Continuing to fetch RSS feeds.") - bot.debug(__file__, "RSS started.", 'verbose') + LOGGER.debug("RSS started.") self.running = True def _rss_stop(self, bot, trigger, c): """Stop fetching feeds. Usage: .rss stop""" bot.reply("Okay, I'll stop fetching RSS feeds..."
if self.running else "Not currently fetching RSS feeds.") - bot.debug(__file__, "RSS stopped.", 'verbose') + LOGGER.debug("RSS stopped.") self.running = False def _rss_add(self, bot, trigger, c): @@ -340,23 +342,23 @@ def disable_feed(): try: fp = feedparser.parse(feed.url, etag=feed.etag, modified=feed.modified) except IOError as e: - bot.debug(__file__, "Can't parse feed on {0}, disabling ({1})".format( - feed.name, str(e)), 'warning') + LOGGER.exception("Can't parse feed on %s, disabling.", + feed.name) disable_feed() continue # fp.status will only exist if pulling from an online feed status = getattr(fp, 'status', None) - bot.debug(feed.channel, "{0}: status = {1}, version = '{2}', items = {3}".format( - feed.name, status, fp.version, len(fp.entries)), 'verbose') + LOGGER.debug("%s: status = %s, version = '%s', items = %s", + feed.name, status, fp.version, len(fp.entries)) # check HTTP status if status == 301: # MOVED_PERMANENTLY - bot.debug( - __file__, - "Got HTTP 301 (Moved Permanently) on {0}, updating URI to {1}".format( - feed.name, fp.href), 'warning') + LOGGER.warning( + "Got HTTP 301 (Moved Permanently) on %s, updating URI to %s", + feed.name, fp.href + ) c.execute(''' UPDATE rss_feeds SET feed_url = ? WHERE channel = ? AND feed_name = ?
@@ -364,8 +366,8 @@ def disable_feed(): conn.commit() elif status == 410: # GONE - bot.debug(__file__, "Got HTTP 410 (Gone) on {0}, disabling".format( - feed.name), 'warning') + LOGGER.warning("Got HTTP 410 (Gone) on %s, disabling", + feed.name) disable_feed() if not fp.entries: @@ -384,8 +386,8 @@ def disable_feed(): # check if article is new, and skip otherwise if (feed.title == entry.title and feed.link == entry.link and feed.etag == feed_etag and feed.modified == feed_modified): - bot.debug(__file__, u"Skipping previously read entry: [{0}] {1}".format( - feed.name, entry.title), 'verbose') + LOGGER.info(u"Skipping previously read entry: [%s] %s", + feed.name, entry.title) continue # save article title, url, and modified date @@ -404,8 +406,9 @@ def disable_feed(): # implemented. Once that happens, deleting or modifying the # latest item would result in the whole feed getting re-msg'd. # This will prevent that from happening. - bot.debug(__file__, u"Skipping older entry: [{0}] {1}, because {2} >= {3}".format( - feed.name, entry.title, published_dt, entry_dt), 'verbose') + LOGGER.info( + "Skipping older entry: [%s] %s, because %s >= %s", + feed.name, entry.title, published_dt, entry_dt) continue # create message for new entry diff --git a/safety.py b/safety.py index eb69596db9..8ef8fa5f3f 100644 --- a/safety.py +++ b/safety.py @@ -11,6 +11,7 @@ import willie.web as web from willie.config import ConfigurationError from willie.formatting import color, bold +from willie.logger import get_logger import willie.tools import willie.module import sys @@ -27,6 +28,8 @@ from urllib import urlretrieve from urlparse import urlparse +LOGGER = get_logger(__name__) + vt_base_api_url = 'https://www.virustotal.com/vtapi/v2/url/' malware_domains = [] known_good = [] @@ -131,7 +134,7 @@ def url_handler(bot, trigger): positives = result['positives'] total = result['total'] except Exception as e: - bot.debug('[safety]', e, 'debug') + LOGGER.debug('Error from checking URL with
VT.', exc_info=True) pass # Ignoring exceptions with VT so MalwareDomains will always work if unicode(netloc).lower() in malware_domains: diff --git a/url.py b/url.py index d0df443b3a..ea866ee4c8 100644 --- a/url.py +++ b/url.py @@ -156,7 +156,7 @@ def process_urls(bot, trigger, urls): if not url.startswith(exclusion_char): # Magic stuff to account for international domain names try: - url = willie.web.iri_to_uri(url) + url = web.iri_to_uri(url) except: pass # First, check that the URL we got doesn't match