Crawl all pages and insert them into the database

This commit is contained in:
Arne Schlüter 2014-12-08 18:25:26 +01:00
commit 36df116ed0
3 changed files with 60 additions and 21 deletions

View file

@@ -4,21 +4,36 @@
import sqlite3
import hashlib
import time
from scraper.scraper import Scraper
encoding = 'utf-8'
encoding = 'UTF-8'
# helper function for benchmarking
current_milli_time = lambda: int(round(time.time() * 1000))
#
# this is where the logic starts:
#
print('Start crawling…')
start_time = current_milli_time()
scraper = Scraper()
articles = scraper.scrape()
time_taken = current_milli_time() - start_time
print('Found {} articles in {} ms'.format(len(articles), time_taken))
conn = sqlite3.connect('violence.db')
c = conn.cursor()
# setup database schema
c.execute('PRAGMA encoding = "{}"'.format(encoding))
c.execute('''
CREATE TABLE IF NOT EXISTS incidents (
incident_id INTEGER PRIMARY KEY,
date TEXT,
month_only INTEGER,
place TEXT,
additional_place TEXT,
description TEXT,
@@ -40,26 +55,33 @@ c.execute('''
select_query = 'SELECT * FROM incidents WHERE hash=?'
insert_query = '''
INSERT INTO incidents (
date, place, additional_place, description, hash
) VALUES (?)
date, month_only, place, additional_place, description, hash
) VALUES (?,?,?,?,?,?)
'''
print('Starting database work')
for article in articles:
# build a hash so we can more easily find out if we have an article already
h = h.sha256()
h.update(article.date.encode(encoding))
h.update(article.place.encode(encoding))
h.update(article.additional_place.encode(encoding))
h.update(article.description.encode(encoding))
h = hashlib.sha256()
h.update(str(article['date']).encode(encoding))
h.update(article['place'].encode(encoding))
h.update((article['additional_place'] or '').encode(encoding))
h.update(article['description'].encode(encoding))
digest = h.digest()
c.execute(select_query)
c.execute(select_query, (digest,))
# now if it's not in the database insert it
if (not c.fetchone()):
article_tuple = (
article.date,
article.place,
article.additional_place,
article.description,
article['date'],
article['month_only'],
article['place'],
article['additional_place'],
article['description'],
digest
)
c.execute(insert_query, article_tuple)
final_time = current_milli_time() - start_time
print('All done in {} ms'.format(final_time))