aboutsummaryrefslogtreecommitdiff
path: root/backend/data/enwiki/genDumpIndexDb.py
blob: 39558851e25dd02443690f315ca9bc81ed76d910 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
#!/usr/bin/python3

import sys, os, re
import bz2
import sqlite3

# Help text shown when the script is invoked with any arguments.
usageInfo = f"""
Usage: {sys.argv[0]}

Adds data from the wiki dump index-file into a database.
"""
# This script takes no arguments; treat any argument as a request for usage info.
if len(sys.argv) != 1:
	sys.stderr.write(usageInfo + "\n")
	sys.exit(1)

# Output database and input dump-index filenames.
indexDb = "dumpIndex.db"
indexFile = "enwiki-20220501-pages-articles-multistream-index.txt.bz2" # Had about 22e6 lines

# Refuse to clobber an existing database rather than appending to stale data.
if os.path.exists(indexDb):
	raise Exception(f"ERROR: Existing {indexDb}")
print("Creating database")
dbCon = sqlite3.connect(indexDb)
dbCur = dbCon.cursor()
# One row per page: title, page id, and the byte range of its multistream block.
dbCur.execute(
	"CREATE TABLE offsets (title TEXT PRIMARY KEY, id INT UNIQUE, offset INT, next_offset INT)")

print("Iterating through index file")
# Each index line is "offset:pageId:title"; the title may itself contain colons,
# so the first two fields are colon-free and the rest of the line is the title.
lineRegex = re.compile(r"([^:]+):([^:]+):(.*)")
lastOffset = 0     # offset of the multistream block whose entries are currently buffered
lineNum = 0
entriesToAdd = []  # (title, pageId) pairs seen at lastOffset, awaiting their end offset
with bz2.open(indexFile, mode='rt') as file:
	for line in file:
		lineNum += 1
		if lineNum % 100000 == 0:  # progress indicator (integer modulo, not 1e5 float)
			print(f"At line {lineNum}")
		#
		match = lineRegex.fullmatch(line.rstrip())
		if match is None:
			# Don't crash on a malformed line; report it and move on.
			print(f"Skipping malformed line {lineNum}", file=sys.stderr)
			continue
		(offset, pageId, title) = match.group(1, 2, 3)
		offset = int(offset)
		# A larger offset means a new multistream block has started; we now know
		# where the previous block ends, so flush its buffered entries.
		if offset > lastOffset:
			for (t, p) in entriesToAdd:
				try:
					dbCur.execute("INSERT INTO offsets VALUES (?, ?, ?, ?)", (t, p, lastOffset, offset))
				except sqlite3.IntegrityError as e:
					# Accounts for certain entries in the file that have the same title
					print(f"Failed on title \"{t}\": {e}", file=sys.stderr)
			entriesToAdd = []
			lastOffset = offset
		entriesToAdd.append([title, pageId])
# Flush the final block; its end offset is unknown, recorded as -1.
for (title, pageId) in entriesToAdd:
	try:
		dbCur.execute("INSERT INTO offsets VALUES (?, ?, ?, ?)", (title, pageId, lastOffset, -1))
	except sqlite3.IntegrityError as e:
		# BUG FIX: original printed stale loop variable `t` here; this loop binds `title`.
		print(f"Failed on title \"{title}\": {e}", file=sys.stderr)

print("Closing database")
dbCon.commit()
dbCon.close()