aboutsummaryrefslogtreecommitdiff
path: root/backend/hist_data
diff options
context:
space:
mode:
Diffstat (limited to 'backend/hist_data')
-rw-r--r--backend/hist_data/README.md14
-rwxr-xr-xbackend/hist_data/gen_desc_data.py60
-rwxr-xr-xbackend/hist_data/gen_imgs.py1
3 files changed, 69 insertions, 6 deletions
diff --git a/backend/hist_data/README.md b/backend/hist_data/README.md
index c5cf66f..32836e2 100644
--- a/backend/hist_data/README.md
+++ b/backend/hist_data/README.md
@@ -5,7 +5,7 @@ This directory holds files used to generate the history database data.db.
Format:
`id INT PRIMARY KEY, title TEXT UNIQUE, start INT, start_upper INT, end INT, end_upper INT, fmt INT, ctg TEXT`
<br>
- Each row has a Wikidata ID, Wikipedia title, start and end dates, and an event category.
+ Each row has an ID, Wikipedia title, start and end dates, and an event category.
- `start*` and `end*` specify start and end dates.
`start_upper`, `end`, and `end_upper`, are optional.
If `start_upper` is present, it and `start` denote an uncertain range of start times.
@@ -27,15 +27,18 @@ This directory holds files used to generate the history database data.db.
- `event_imgs`: <br>
Format: `id INT PRIMARY KEY, img_id INT` <br>
 Associates events with images
+- `descs`: <br>
+  Format: `id INT PRIMARY KEY, wiki_id INT, desc TEXT` <br>
+  Associates an event with its enwiki page ID and a short description.
# Generating the Database
## Environment
Some of the scripts use third-party packages:
- `jdcal`: For date conversion
-- `indexed_bzip2`: For parallelised bzip2 processing.
-- `mwxml`, `mwparserfromhell`: For parsing Wikipedia dumps.
-- `requests`: For downloading data.
+- `indexed_bzip2`: For parallelised bzip2 processing
+- `mwxml`, `mwparserfromhell`: For parsing Wikipedia dumps
+- `requests`: For downloading data
## Generate Event Data
1. Obtain a Wikidata JSON dump in wikidata/, as specified in its README.
@@ -59,4 +62,5 @@ Some of the scripts use third-party packages:
1. Obtain an enwiki dump in enwiki/, as specified in the README.
1. In enwiki/, run `gen_dump_index.db.py`, which generates a database for indexing the dump.
1. In enwiki/, run `gen_desc_data.py`, which extracts page descriptions into a database.
-1. Run
+1. Run `gen_desc_data.py`, which adds the `descs` table, using data in enwiki/,
+ and the `events` and `images` tables (only adds descriptions for events with images).
diff --git a/backend/hist_data/gen_desc_data.py b/backend/hist_data/gen_desc_data.py
new file mode 100755
index 0000000..68f9e56
--- /dev/null
+++ b/backend/hist_data/gen_desc_data.py
@@ -0,0 +1,60 @@
+#!/usr/bin/python3
+
+"""
+Maps events to short descriptions from Wikipedia,
+and stores them in the database.
+"""
+
+import os, sqlite3
+
+ENWIKI_DB = os.path.join('enwiki', 'desc_data.db')
+DB_FILE = 'data.db'
+
def genData(enwikiDb: str, dbFile: str) -> None:
	"""Add a 'descs' table to the history database at dbFile.

	Maps each event that has an associated image (via 'event_imgs') to a
	short description from the enwiki description database at enwikiDb,
	resolving enwiki redirects along the way.

	Args:
		enwikiDb: Path to an sqlite db with 'pages', 'redirects', and 'descs' tables.
		dbFile: Path to the history database (must have 'events' and 'event_imgs').
	"""
	print('Creating table')
	dbCon = sqlite3.connect(dbFile)
	dbCur = dbCon.cursor()
	dbCur.execute('CREATE TABLE descs (id INT PRIMARY KEY, wiki_id INT, desc TEXT)')
	#
	print('Getting events with images')
	# Only events that have an image association get a description
	titleToId: dict[str, int] = {}
	query = 'SELECT events.id, events.title FROM events INNER JOIN event_imgs ON events.id = event_imgs.id'
	for eventId, title in dbCur.execute(query):
		titleToId[title] = eventId
	#
	print('Getting Wikipedia descriptions')
	enwikiCon = sqlite3.connect(enwikiDb)
	enwikiCur = enwikiCon.cursor()
	for iterNum, (title, eventId) in enumerate(titleToId.items(), 1):
		if iterNum % 10_000 == 0:
			print(f'At iteration {iterNum}')
		# Get the page's wiki ID (skip events with no matching enwiki page)
		row = enwikiCur.execute('SELECT id FROM pages WHERE title = ?', (title,)).fetchone()
		if row is None:
			continue
		wikiId = row[0]
		# If the page is a redirect, the description lives on the target page
		wikiIdToGet = wikiId
		query = \
			'SELECT pages.id FROM redirects INNER JOIN pages ON redirects.target = pages.title WHERE redirects.id = ?'
		row = enwikiCur.execute(query, (wikiId,)).fetchone()
		if row is not None:
			wikiIdToGet = row[0]
		# Get the description (skip pages that have none)
		row = enwikiCur.execute('SELECT desc FROM descs where id = ?', (wikiIdToGet,)).fetchone()
		if row is None:
			continue
		# Note: stores the original (pre-redirect) wiki ID alongside the description
		dbCur.execute('INSERT INTO descs VALUES (?, ?, ?)', (eventId, wikiId, row[0]))
	#
	print('Closing databases')
	enwikiCon.close()  # was previously left open
	dbCon.commit()
	dbCon.close()
+
if __name__ == '__main__':
	import argparse
	# No arguments are accepted; the parser exists only for --help output.
	argParser = argparse.ArgumentParser(
		description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
	argParser.parse_args()
	#
	genData(ENWIKI_DB, DB_FILE)
diff --git a/backend/hist_data/gen_imgs.py b/backend/hist_data/gen_imgs.py
index 526da1b..0b2f480 100755
--- a/backend/hist_data/gen_imgs.py
+++ b/backend/hist_data/gen_imgs.py
@@ -1,7 +1,6 @@
#!/usr/bin/python3
"""
-
Looks at images described by a database, and generates resized/cropped versions
into an output directory, with names of the form 'eventId1.jpg'.
Adds the image associations and metadata to the history database.