Commit fd641fe8 authored by Sacha, committed by SXibolet@2PITAU

working well

parent 723c9800
@@ -21,7 +21,7 @@ from sentiment import textblob, is_positive, is_negative, sentistrength
 from functools import partial
 from operator import is_not
 from nltk.corpus import stopwords
-from twython.exceptions import TwythonRateLimitError
+from twython.exceptions import TwythonRateLimitError, TwythonError
 MAX_ATTEMPTS = 6
@@ -240,13 +240,13 @@ def twitter_search(keyword, training=False):
         try:
            response = twitter.search(**kwargs)
-        except TwythonRateLimitError:
-            # exceeded rate limits
+        except:
+            # bad form but a variety of errors could be thrown
+            # from exceeded rate limits
             curr_comb += 1
             print(curr_comb)
             twitter = get_auth(curr_comb)
-            i -= 1
-            break
+            continue
         tweets += map(lambda x: Tweet(x, training=training), response['statuses'])
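Note: TwythonError is imported above but the new handler falls back to a bare except (as its own comment admits). A minimal sketch of a narrower handler, assuming the retry loop, get_auth() credential rotation, and MAX_ATTEMPTS from this diff; TwythonRateLimitError already subclasses TwythonError, so the tuple is only for readability:

from twython.exceptions import TwythonError, TwythonRateLimitError

def search_with_rotation(get_auth, kwargs, max_attempts=6):
    # Sketch only: retry twitter.search, rotating credential sets on
    # Twython errors instead of swallowing every exception type.
    curr_comb = 0
    twitter = get_auth(curr_comb)
    for _ in range(max_attempts):
        try:
            return twitter.search(**kwargs)
        except (TwythonRateLimitError, TwythonError):
            # rate limit (or other API error): switch credentials and retry
            curr_comb += 1
            twitter = get_auth(curr_comb)
    return None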
@@ -202,7 +202,7 @@ def new_doc(doc):
 def get_next_doc():
-    col = get_collection()
+    col = get_articles_collection()
     poss = col.find({
         'n_reads' : {
             '$lt': 3
@@ -220,3 +220,15 @@ def get_next_doc():
         return None
     toggle_being_read(col, to_be_read['url'], True)
     return to_be_read
+
+
+def keyword_exists(kw):
+    col = get_tweets_collection()
+    poss = col.find({
+        'keyword': {
+            '$exists': kw
+        }
+    })
+    print(poss)
+    return poss
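Note on the added query: MongoDB's $exists operator tests field presence and expects a boolean, so passing the keyword string only checks that a keyword field exists (and the function returns a cursor rather than a truth value). If the intent is to test whether tweets were stored for a given keyword, an equality match is the closer fit; a sketch, assuming get_tweets_collection() returns a pymongo collection as in the diff:

def keyword_exists(kw):
    # Sketch: match the keyword value directly and return a boolean;
    # find_one() yields None when no tweet document carries this keyword.
    col = get_tweets_collection()
    return col.find_one({'keyword': kw}) is not None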
@@ -19,7 +19,7 @@ TT_DIR = 'training_terms'
 def get_file(name):
     with open(name, 'r') as f:
         data = f.readlines()
-    return data
+    return map(lambda x: x.strip(), data)
 def update_progress(n_done, n_tasks):
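Note: under Python 3, map() returns a single-pass iterator, so a caller that needs len() or several passes over the terms would exhaust it. If that applies here, returning a list is the safer variant; a sketch of that alternative:

def get_file(name):
    # Sketch: strip each line and return a reusable list rather than a
    # one-shot map iterator (relevant only under Python 3).
    with open(name, 'r') as f:
        return [line.strip() for line in f]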
@@ -48,17 +48,16 @@ if __name__ == '__main__':
     n_done = 0
     n_docs = 0
     for term in terms:
-        update_progress(n_done, n_tasks)
         try:
             social = querier.perform(term, training=True)
         except UsageError:
             # no articles
             continue
+        n_done += 1
         n_docs += mturk.new_doc(social)
-        n_done += 1
-    update_progress(n_done, n_tasks)
+        update_progress(n_done, n_tasks)
     print('\t... done')
     summary = 'summary: %s doc(s) ready for training from %s terms' % (n_docs, n_tasks)