aboutsummaryrefslogtreecommitdiff
path: root/backend/tol_data/enwiki/gen_dump_index_db.py
diff options
context:
space:
mode:
authorTerry Truong <terry06890@gmail.com>2022-09-11 14:55:42 +1000
committerTerry Truong <terry06890@gmail.com>2022-09-11 15:04:14 +1000
commit5de5fb93e50fe9006221b30ac4a66f1be0db82e7 (patch)
tree2567c25c902dbb40d44419805cebb38171df47fa /backend/tol_data/enwiki/gen_dump_index_db.py
parentdaccbbd9c73a5292ea9d6746560d7009e5aa666d (diff)
Add backend unit tests
- Add unit testing code in backend/tests/
- Change to snake-case for script/file/directory names
- Use os.path.join() instead of '/'
- Refactor script code into function defs and a main-guard
- Make global vars all-caps

Some fixes:
- For getting descriptions, some wiki redirects weren't properly resolved
- Linked images were sub-optimally propagated
- Generation of reduced trees assumed a wiki-id association implied a description
- Tilo.py had potential null dereferences by not always using a reduced node set
- EOL image downloading didn't properly wait for all threads to end when finishing
Diffstat (limited to 'backend/tol_data/enwiki/gen_dump_index_db.py')
-rwxr-xr-xbackend/tol_data/enwiki/gen_dump_index_db.py60
1 files changed, 60 insertions, 0 deletions
diff --git a/backend/tol_data/enwiki/gen_dump_index_db.py b/backend/tol_data/enwiki/gen_dump_index_db.py
new file mode 100755
index 0000000..5f21c9b
--- /dev/null
+++ b/backend/tol_data/enwiki/gen_dump_index_db.py
@@ -0,0 +1,60 @@
+#!/usr/bin/python3
+
+"""
+Adds data from the wiki dump index-file into a database
+"""
+import sys, os, re
+import bz2
+import sqlite3
+
+INDEX_FILE = 'enwiki-20220501-pages-articles-multistream-index.txt.bz2' # Had about 22e6 lines
+DB_FILE = 'dumpIndex.db'
+
def genData(indexFile: str, dbFile: str) -> None:
    """Read the wiki-dump index file and write its entries into a new sqlite db.

    Each index line has the form 'offset:pageId:title'. Titles sharing the same
    byte offset are buffered until the offset changes, then inserted with
    (offset, next_offset); the final batch has no following block, so its
    next_offset is recorded as -1.

    Raises FileExistsError if dbFile already exists, and ValueError on a
    malformed index line.
    """
    if os.path.exists(dbFile):
        raise FileExistsError(f'ERROR: Existing {dbFile}')
    print('Creating database')
    dbCon = sqlite3.connect(dbFile)
    dbCur = dbCon.cursor()
    dbCur.execute('CREATE TABLE offsets (title TEXT PRIMARY KEY, id INT UNIQUE, offset INT, next_offset INT)')
    print('Iterating through index file')
    lineRegex = re.compile(r'([^:]+):([^:]+):(.*)')
    lastOffset = 0
    lineNum = 0
    entriesToAdd: list[tuple[str, str]] = []
    with bz2.open(indexFile, mode='rt') as file:
        for line in file:
            lineNum += 1
            if lineNum % 100_000 == 0:  # was 1e5: int modulo avoids float arithmetic per line
                print(f'At line {lineNum}')
            match = lineRegex.fullmatch(line.rstrip())
            if match is None:
                # Explicit raise instead of assert: assertions are stripped under -O
                raise ValueError(f'Malformed index line {lineNum}: {line!r}')
            offsetStr, pageId, title = match.group(1, 2, 3)
            offset = int(offsetStr)
            if offset > lastOffset:
                # Offset changed: flush the buffered entries of the previous block,
                # whose next_offset is now known to be the current offset
                for t, p in entriesToAdd:
                    try:
                        dbCur.execute('INSERT INTO offsets VALUES (?, ?, ?, ?)', (t, int(p), lastOffset, offset))
                    except sqlite3.IntegrityError as e:
                        # Accounts for certain entries in the file that have the same title
                        print(f'Failed on title "{t}": {e}', file=sys.stderr)
                entriesToAdd = []
                lastOffset = offset
            entriesToAdd.append((title, pageId))
    # Flush the final block; there is no following offset, so use -1
    for title, pageId in entriesToAdd:
        try:
            dbCur.execute('INSERT INTO offsets VALUES (?, ?, ?, ?)', (title, int(pageId), lastOffset, -1))
        except sqlite3.IntegrityError as e:
            # BUG FIX: original printed stale variable 't' here (NameError if the
            # in-loop flush never ran); report the title actually being inserted
            print(f'Failed on title "{title}": {e}', file=sys.stderr)
    print('Closing database')
    dbCon.commit()
    dbCon.close()
+
if __name__ == '__main__':
    import argparse
    # No options are accepted; the parser only provides --help, which prints
    # the module docstring verbatim (RawDescriptionHelpFormatter keeps layout)
    arg_parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    arg_parser.parse_args()
    genData(INDEX_FILE, DB_FILE)