From: Jan-Pascal van Best
Date: Thu, 18 Feb 2016 21:46:51 +0000 (+0100)
Subject: Logging fixups
X-Git-Tag: v1.2~6
X-Git-Url: http://www.vanbest.org/gitweb/?a=commitdiff_plain;h=3dd202d7c2e417557abeef050aaa413cf558949b;p=tweet_django

Logging fixups
---

diff --git a/tweet/jobrunner.py b/tweet/jobrunner.py
index 5a52fc4..e4dee94 100644
--- a/tweet/jobrunner.py
+++ b/tweet/jobrunner.py
@@ -28,9 +28,17 @@ class JobHandler:
             )
             for filename in self.config.excel_names()
         ]
-    def log(self, s):
+    def warning(self, s):
+        logger.warning(s)
+        self.job.add_log("WARNING {}: {}".format(datetime.datetime.now().strftime("%c"), s))
+
+    def info(self, s):
         logger.info(s)
-        self.job.add_log(s)
+        self.job.add_log("INFO {}: {}".format(datetime.datetime.now().strftime("%c"), s))
+
+    def debug(self, s):
+        logger.debug(s)
+        self.job.add_log("DEBUG {}: {}".format(datetime.datetime.now().strftime("%c"), s))
 
     def tweet(self, query_name, status):
         self.job.num_tweets += 1
@@ -74,7 +82,6 @@ def _get_rate_status(twitter):
 
 
 def _fetch(query_name, terms, max_pages, page_size, handler):
-    handler.log("_fetch()")
     # TODO use global twitter object?
     twitter = Twython(settings.TWEET_OAUTH_CONSUMER_KEY,
                       settings.TWEET_OAUTH_CONSUMER_SECRET,
@@ -87,13 +94,12 @@
     max_id = -1
     previous_last_id = float('inf')
-    handler.log("max_pages: {}".format(max_pages))
     for k in range(max_pages):
         while remaining<10:
             seconds = int(rate_status["reset"] - time.time())
-            handler.log("Twitter rate limiting, need to wait {} seconds...".format(seconds))
+            handler.info("Twitter rate limiting, need to wait {} seconds...".format(seconds))
             handler.status(Job.STATUS_WAITING, seconds)
-            for i in range(min(60, seconds)):
+            for i in range(min(120, seconds)):
                 handler.status(Job.STATUS_WAITING, seconds-i)
                 time.sleep(1)
             rate_status = _get_rate_status(twitter)
@@ -108,33 +114,33 @@
         try:
             results = twitter.search(**query)
         except TwythonError as e:
-            handler.log("search exception, error code = {}: {}".format(e.error_code, e))
+            handler.warning("search exception, error code = {}: {}".format(e.error_code, e))
             handler.status(Job.STATUS_FAILED)
             # FIXME
             break
 
         remaining = int(twitter.get_lastfunction_header('x-rate-limit-remaining'))
-        handler.log("remaining: {}".format(remaining))
+        handler.debug("remaining: {}".format(remaining))
         max_id = results["search_metadata"]["max_id"]
-        handler.log("Number of results: {}".format(results["search_metadata"]["count"]))
+        handler.debug("Number of results: {}".format(results["search_metadata"]["count"]))
 
         last_id = float('inf')
         for status in results["statuses"]:
             handler.tweet(query_name, status)
             if status["id"]<last_id:
                 last_id=status["id"]
             if max_id>=previous_last_id:
-                handler.log("max_id not descending, quitting")
+                handler.info("max_id not descending, quitting")
                 break
         previous_last_id = max_id
         if k >= max_pages-1:
-            handler.log("Warning, more than {} pages of results, ignoring the rest!!!".format(max_pages));
+            handler.warning("Warning, more than {} pages of results, ignoring the rest!!!".format(max_pages));
 
     #results = twitter.search(q='nlalert', result_type='recent') # since_id=, max_id=,count=,
     #for result in results:
@@ -142,13 +148,12 @@
 
 # Runs in a thread pool thread
 def _execute_job(job, handler):
-    logger.debug("_execute_job")
     try:
         config = job.get_config()
         handler.status(Job.STATUS_RUNNING)
         print(str(config.query_names()))
         for query_name in config.query_names():
-            handler.log("Running query {}".format(query_name));
+            handler.info("Running query {}".format(query_name));
             _fetch(query_name, config.query(query_name), config.max_pages, config.page_size, handler)
         handler.status(Job.STATUS_DONE)
     except Exception as e:
diff --git a/tweet/streamrunner.py b/tweet/streamrunner.py
index 8063a87..f87de6c 100644
--- a/tweet/streamrunner.py
+++ b/tweet/streamrunner.py
@@ -23,7 +23,7 @@ class Streamer(TwythonStreamer):
     count = 0
     def on_success(self, data):
         if 'text' in data:
-            #logger.info("Stream text: "+data['text'])
+            logger.info("Stream text: "+data['text'])
             #logger.info("Stream tweet: {}".format(data))
             tweet = Tweet.from_status(data)
             tweet.conforms_to_terms = check_match(data)
diff --git a/tweet/utils.py b/tweet/utils.py
index 7d7fb7b..33eb1d7 100644
--- a/tweet/utils.py
+++ b/tweet/utils.py
@@ -45,7 +45,7 @@ class ExcelExporter:
 
     def add_tweet(self, tweet):
         if tweet.id in self.ids:
-            logger.info("Skipping tweet {}, already in".format(tweet.id))
+            logger.debug("Skipping tweet {}, already in".format(tweet.id))
             return
         self.ids.add(tweet.id)
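For context, a minimal, self-contained sketch of the logging pattern this commit introduces in JobHandler: each level-specific helper (warning/info/debug) writes to the standard Python logging module and also appends a timestamped, level-prefixed line to the job's own log. The InMemoryJob class and helper names below are illustrative stand-ins, not the project's actual Job model, and strftime("%c") is assumed to be the intended way to render the timestamp.

import datetime
import logging

logger = logging.getLogger(__name__)


class InMemoryJob:
    """Stand-in for the project's Job model: collects log lines in memory."""
    def __init__(self):
        self.log_lines = []

    def add_log(self, line):
        self.log_lines.append(line)


class JobHandlerSketch:
    """Level-specific helpers mirroring the warning/info/debug methods in the diff."""
    def __init__(self, job):
        self.job = job

    def _add(self, level, logfunc, s):
        # Send the message to the normal logger...
        logfunc(s)
        # ...and keep a timestamped copy on the job itself ("%c" = locale date/time).
        stamp = datetime.datetime.now().strftime("%c")
        self.job.add_log("{} {}: {}".format(level, stamp, s))

    def warning(self, s):
        self._add("WARNING", logger.warning, s)

    def info(self, s):
        self._add("INFO", logger.info, s)

    def debug(self, s):
        self._add("DEBUG", logger.debug, s)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    handler = JobHandlerSketch(InMemoryJob())
    handler.info("Running query example")
    handler.warning("Twitter rate limiting, need to wait 30 seconds...")
    print(handler.job.log_lines)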