Commit f71e87c5 authored by Sacha, committed by SXibolet@2PITAU

mongo is now linking correctly

parent 79dc17ab
loading-bar.min.js linguist-vendored
*.html linguist-vendored
controversy/static/lib linguist-vendored
*.css linguist-vendored
controversy/fetcher/mongo-c-driver-1.3.0 linguist-vendored
controversy/fetcher/mongo-c-driver linguist-vendored
controversy/fetcher/libbson linguist-vendored
@@ -19,10 +19,9 @@ Joint mining of news text and social media to discover controversial points in n
Pending
--------
We're preparing a pre-mturk method for downloading the training data dynamically.
We're currently improving upon our IEEE paper.
1. prepare a pre-mturk method for downloading the training data dynamically (both annotated and not).
2. improve upon the method described in our IEEE paper.
3. move docs to ``docs.controversy.2pitau.org`` and rebuild with Sphinx.
Running for development
-----------------------
@@ -6,7 +6,7 @@ EXEC=fetcher_test queue_test
all: $(EXEC)
fetcher_test: fetcher_test.c fetcher.o queue.o
$(CC) $(shell pkg-config --cflags --libs libmongoc-1.0) $(INC) $(LOPTS) -ggdb $^ -o $@ -lcurl
fetcher.o: fetcher.c fetcher.h queue.o
$(CC) $(shell pkg-config --cflags --libs libmongoc-1.0) $(INC) $(LOPTS) -lcurl -ggdb -c $<
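Since this commit's point is that libmongoc now links through pkg-config, here is a minimal sketch of a standalone link check; the file name and build line are assumptions rather than part of the repository, while mongoc_init, mongoc_get_version, and mongoc_cleanup are real driver calls.

/* link_check.c -- hedged sketch; build with the same flags the Makefile uses:
 *   cc link_check.c $(pkg-config --cflags --libs libmongoc-1.0) -o link_check */
#include <mongoc.h>
#include <stdio.h>

int main(void) {
    mongoc_init();                                    /* must run before any other driver call */
    printf("linked against libmongoc %s\n", mongoc_get_version());
    mongoc_cleanup();                                 /* release driver-global resources */
    return 0;
}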
0. fetch article URLs and tweets by keyword from mturk_loader.py
1. fetch each article result if not already saved (matching URL)
2. store each article with its URL and keyword
3. store the tweets for each keyword in the tweets collection (a sketch of both documents follows this list)
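A minimal sketch of the two documents steps 2-3 imply, built with libbson; the field names url/keyword/html/json and the overall layout are assumptions, not fixed by this commit.

#include <bson.h>

/* hedged sketch: builders for the article and tweet documents described above */
static bson_t *make_article_doc(const char *url, const char *keyword, const char *html) {
    bson_t *doc = bson_new();
    BSON_APPEND_UTF8(doc, "url", url);            /* step 1 checks this field before re-fetching */
    BSON_APPEND_UTF8(doc, "keyword", keyword);
    BSON_APPEND_UTF8(doc, "html", html);
    return doc;                                   /* caller inserts it and calls bson_destroy() */
}

static bson_t *make_tweet_doc(const char *keyword, const char *response_json) {
    bson_t *doc = bson_new();
    BSON_APPEND_UTF8(doc, "keyword", keyword);
    BSON_APPEND_UTF8(doc, "json", response_json); /* whole JSON response, per step 3 */
    return doc;
}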
@@ -11,6 +11,8 @@
#define THREAD_MAX 8
#define MIN(x, y) (((x) < (y)) ? (x) : (y))
Queue *tasks;
int n_fetches, n_todo;
pthread_mutex_t n_fetches_lock = PTHREAD_MUTEX_INITIALIZER;
@@ -19,9 +21,14 @@ pthread_mutex_t n_fetches_lock = PTHREAD_MUTEX_INITIALIZER;
typedef struct page {
char *string;
size_t len;
char *keyword;
} page_t;
// passed to worker threads
typedef struct thread_task {
char *keyword;
mongoc_client_pool_t *pool;
} thread_task_t;
size_t w_callback(void *ptr, size_t size, size_t nmemb, page_t *pg) {
size_t pckt_size = size * nmemb,
nl = pg->len + pckt_size;
@@ -33,6 +40,11 @@ size_t w_callback(void *ptr, size_t size, size_t nmemb, page_t *pg) {
return pckt_size;
}
int page_exists(char *url, mongoc_client_pool_t *pool) {
// TODO
return 0;
}
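One hedged way to fill the TODO above, assuming articles live in a controversy.articles collection keyed by url (database, collection, and function names here are assumptions); mongoc_collection_count is the counting call available in driver 1.3.

static int page_exists_sketch(char *url, mongoc_client_pool_t *pool) {
    mongoc_client_t *client = mongoc_client_pool_pop(pool);
    mongoc_collection_t *coll = mongoc_client_get_collection(client, "controversy", "articles");
    bson_t *query = bson_new();
    bson_error_t error;
    BSON_APPEND_UTF8(query, "url", url);
    /* count documents whose "url" matches; returns -1 on error */
    int64_t n = mongoc_collection_count(coll, MONGOC_QUERY_NONE, query, 0, 0, NULL, &error);
    bson_destroy(query);
    mongoc_collection_destroy(coll);
    mongoc_client_pool_push(pool, client);
    return n > 0;
}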
// write ``page`` to db
void write_page(page_t *page, mongoc_client_pool_t *pool, char *url, char *keyword) {
mongoc_client_t *client = mongoc_client_pool_pop(pool);
@@ -44,7 +56,8 @@ void write_page(page_t *page, mongoc_client_pool_t *pool, char *url, char *keywo
BSON_APPEND_OID(doc, "_id", &oid);
BSON_APPEND_UTF8(doc, "url", url);
BSON_APPEND_UTF8(doc, "url", url);
BSON_APPEND_UTF8(doc, "keyword", keyword);
BSON_APPEND_UTF8(doc, "html", keyword);
// TODO: write to article collection with HTML, keyword, and url
// TODO: write to tweets collection entire JSON response
@@ -59,7 +72,8 @@ void write_page(page_t *page, mongoc_client_pool_t *pool, char *url, char *keywo
}
void *perform(void *arg) {
mongoc_client_pool_t *pool = arg;
thread_task_t *ttt = (thread_task_t *)arg;
mongoc_client_pool_t *pool = ttt->pool;
while (1) {
pthread_mutex_lock(&n_fetches_lock);
@@ -106,7 +120,7 @@ void *perform(void *arg) {
}
if (page->len > 0) {
write_page(page, pool, url, keyword);
write_page(page, pool, url, ttt->keyword);
} else {
fprintf(stderr, "(!) ===> page %s is NULL\n", url);
}
@@ -141,9 +155,13 @@ void Fetcher_fetch(char **sources, char *keyword) {
mongoc_uri_t *uri = mongoc_uri_new("mongodb://localhost:27017/");
mongoc_client_pool_t *pool = mongoc_client_pool_new(uri);
thread_task_t *ttt = malloc(sizeof(thread_task_t));
ttt->keyword = keyword;
ttt->pool = pool;
int i, j;
for (i = 0; i < n_threads; i++) {
pthread_create(&pids[i], NULL, perform, pool);
pthread_create(&pids[i], NULL, perform, ttt);
}
mongoc_uri_destroy(uri);
@@ -152,6 +170,10 @@ void Fetcher_fetch(char **sources, char *keyword) {
pthread_join(pids[i], NULL);
}
mongoc_client_pool_destroy(ttt->pool);
free(ttt);
mongoc_cleanup();
curl_global_cleanup();
Queue_free(tasks);
}
#include "fetcher.h"
int main(void) {
// TODO
char *urls[] = { "http://www.nytimes.com/2016/05/22/arts/television/roots-remade-for-a-new-era.html?hp&action=click&pgtype=Homepage&clickSource=image&module=photo-spot-region&region=top-news&WT.nav=top-news&_r=0", "http://www.nytimes.com/2016/05/18/us/politics/bernie-sanders-oregon-results.html?action=click&contentCollection=Television&module=Trending&version=Full&region=Marginalia&pgtype=article" };
Fetcher_fetch(urls);
Fetcher_fetch(urls, "_testing");
// TODO: verify collection contains such an object
return 0;
}
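The remaining TODO (verify the collection contains such an object) could be checked with a find on the url field; a hedged sketch follows, with the database, collection, and helper names assumed rather than taken from this commit.

/* hedged sketch of the verification step; names are assumptions */
static int was_stored(mongoc_client_t *client, const char *url) {
    mongoc_collection_t *coll = mongoc_client_get_collection(client, "controversy", "articles");
    bson_t *query = bson_new();
    const bson_t *doc;
    BSON_APPEND_UTF8(query, "url", url);
    mongoc_cursor_t *cursor = mongoc_collection_find(coll, MONGOC_QUERY_NONE, 0, 1, 0, query, NULL, NULL);
    int found = mongoc_cursor_next(cursor, &doc);   /* true iff at least one match */
    mongoc_cursor_destroy(cursor);
    bson_destroy(query);
    mongoc_collection_destroy(coll);
    return found;
}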
libbson @ 89edcc7a
Subproject commit 89edcc7ad7a65552139905ad4818716a2b1a8ae7
mongo-c-driver @ 7bd6891a
Subproject commit 7bd6891aa14a6f0e8811b0caf2de3ca0cbf843ef