)
for filename in self.config.excel_names() ]
-    def log(self, s):
+    def warning(self, s):
+        logger.warning(s)
+        self.job.add_log("WARNING {}: {}".format(datetime.datetime.now().strftime("%c"), s))
+
+    def info(self, s):
         logger.info(s)
-        self.job.add_log(s)
+        self.job.add_log("INFO {}: {}".format(datetime.datetime.now().strftime("%c"), s))
+
+    def debug(self, s):
+        logger.debug(s)
+        self.job.add_log("DEBUG {}: {}".format(datetime.datetime.now().strftime("%c"), s))
    def tweet(self, query_name, status):
        self.job.num_tweets += 1
def _fetch(query_name, terms, max_pages, page_size, handler):
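    # Page through the search results newest-to-oldest by repeatedly lowering
    # max_id, stopping early once the status ids no longer descend.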
- handler.log("_fetch()")
# TODO use global twitter object?
twitter = Twython(settings.TWEET_OAUTH_CONSUMER_KEY,
settings.TWEET_OAUTH_CONSUMER_SECRET,
max_id = -1
previous_last_id = float('inf')
- handler.log("max_pages: {}".format(max_pages))
for k in range(max_pages):
while remaining<10:
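            # Fewer than 10 calls left in the current rate window: sleep until
            # the window resets, ticking the job status down once per second.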
            seconds = int(rate_status["reset"] - time.time())
-            handler.log("Twitter rate limiting, need to wait {} seconds...".format(seconds))
+            handler.info("Twitter rate limiting, need to wait {} seconds...".format(seconds))
            handler.status(Job.STATUS_WAITING, seconds)
-            for i in range(min(60, seconds)):
+            for i in range(min(120, seconds)):
                handler.status(Job.STATUS_WAITING, seconds - i)
                time.sleep(1)
            rate_status = _get_rate_status(twitter)
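        # Enough budget again; issue the next search request.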
        try:
            results = twitter.search(**query)
        except TwythonError as e:
-            handler.log("search exception, error code = {}: {}".format(e.error_code, e))
+            handler.warning("search exception, error code = {}: {}".format(e.error_code, e))
            handler.status(Job.STATUS_FAILED)
            # FIXME
            break
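        # The response headers report how many search calls remain in this window.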
        remaining = int(twitter.get_lastfunction_header('x-rate-limit-remaining'))
-        handler.log("remaining: {}".format(remaining))
+        handler.debug("remaining: {}".format(remaining))
        max_id = results["search_metadata"]["max_id"]
-        handler.log("Number of results: {}".format(results["search_metadata"]["count"]))
+        handler.debug("Number of results: {}".format(results["search_metadata"]["count"]))
        last_id = float('inf')
        for status in results["statuses"]:
            handler.tweet(query_name, status)
            if status["id"] < last_id:
                last_id = status["id"]
-        handler.log("last_id: {}; max_id: {}".format(last_id, max_id))
+        handler.debug("last_id: {}; max_id: {}".format(last_id, max_id))
        max_id = last_id
        time.sleep(1)
        if max_id >= previous_last_id:
-            handler.log("max_id not descending, quitting")
+            handler.info("max_id not descending, quitting")
            break
        previous_last_id = max_id
    if k >= max_pages - 1:
-        handler.log("Warning, more than {} pages of results, ignoring the rest!!!".format(max_pages));
+        handler.warning("More than {} pages of results, ignoring the rest".format(max_pages))
    #results = twitter.search(q='nlalert', result_type='recent') # since_id=, max_id=,count=,
    #for result in results:
# Runs in a thread pool thread
def _execute_job(job, handler):
-    logger.debug("_execute_job")
    try:
        config = job.get_config()
        handler.status(Job.STATUS_RUNNING)
        print(str(config.query_names()))
        for query_name in config.query_names():
-            handler.log("Running query {}".format(query_name));
+            handler.info("Running query {}".format(query_name))
            _fetch(query_name, config.query(query_name), config.max_pages, config.page_size, handler)
        handler.status(Job.STATUS_DONE)
    except Exception as e: