# PSHomeCacheDepot / ODC / scripts / createdb.py
# (page-scrape residue preserved as a comment: author pebxcvi, commit e8d5ad1 "mini update")
import sqlite3, os
# Source files for the "current" table (highest priority).
current_files = [
    r"R:\ONLINE\ODC\CURRENT\ODCInfo.txt",  # optional extra "live2" (highest priority)
]
live_file = "ODCInfoLIVE.txt"
live2_file = "ODCInfoLIVE2.txt"
db_file = "localisation.db"

# The four fields kept in the database.
COLUMNS = ["file_name", "lang", "name", "description"]

# Full column layout of one pipe-delimited TXT line; used only to locate
# the positions of the fields we keep.
ALL_COLUMNS = [
    "file_name", "hdk_version", "version", "uuid", "timestamp", "lang",
    "default", "territory", "entitlement_ids", "category_ids", "product_ids",
    "maker", "maker_image", "small_image", "large_image",
    "scene_entitlement", "header", "world_map", "clans", "reward", "heat",
    "name", "description",
]

# Index of each kept field within a raw line, in COLUMNS order.
COL_IDX = [ALL_COLUMNS.index(column) for column in COLUMNS]

# Rows in any other language are discarded on import.
ALLOWED_LANGS = {"en-GB", "en-US", "ja-JP", "en-SG"}
def clean_filename(raw_name: str) -> str:
    """Normalize a raw ODC file name.

    Strips any directory part, a trailing ".odc" extension, and one
    leading "live2$" / "live$" / "live" prefix (matched case-insensitively,
    longest first, but removed from the original-case name).
    """
    name = os.path.basename(raw_name)
    if name.lower().endswith(".odc"):
        name = name[:-len(".odc")]
    folded = name.lower()
    for prefix in ("live2$", "live$", "live"):
        if folded.startswith(prefix):
            name = name[len(prefix):]
            break
    return name
def clean_description(desc: str) -> str:
    """Remove the "[#Legal]" marker from a description and trim whitespace."""
    without_marker = desc.replace("[#Legal]", "")
    return without_marker.strip()
def create_table(conn, table_name):
    """Drop and recreate *table_name* with one TEXT column per COLUMNS entry."""
    schema = ", ".join('"%s" TEXT' % column for column in COLUMNS)
    cursor = conn.cursor()
    cursor.execute(f"DROP TABLE IF EXISTS {table_name}")
    cursor.execute(f"CREATE TABLE {table_name} ({schema})")
    conn.commit()
def insert_file(conn, table_name, filepath, batch_size=5000):
    """Load a pipe-delimited TXT export into *table_name*.

    Keeps only the COL_IDX fields of each line, cleans file_name and
    description, drops rows whose name and description are both blank or
    whose language is not in ALLOWED_LANGS, and inserts the rest in
    batches of *batch_size*. Missing or empty files are skipped silently
    (with a console note).

    Fixes vs. the original: the summary previously printed the enumerate
    counter `i` (total lines read, including skipped ones) as the inserted
    count, and `i` was unbound — a NameError — when the file held only a
    header line. An explicit `inserted` counter handles both.
    """
    if not os.path.exists(filepath) or os.path.getsize(filepath) == 0:
        print(f"Skipped missing/empty file: {filepath}")
        return
    cur = conn.cursor()
    placeholders = ",".join("?" * len(COLUMNS))
    sql = f"INSERT INTO {table_name} VALUES ({placeholders})"
    inserted = 0
    skipped = 0
    batch = []
    with open(filepath, encoding="utf-8", errors="ignore") as f:
        next(f, None)  # skip header line if present
        for line in f:
            parts = line.rstrip("\n").split("|")
            # Pad short lines with "" so a truncated row never raises IndexError.
            selected = [parts[idx] if idx < len(parts) else "" for idx in COL_IDX]
            # Clean file_name
            if selected[0]:
                selected[0] = clean_filename(selected[0])
            # Clean description
            if selected[3]:
                selected[3] = clean_description(selected[3])
            # Skip rows if both name and description are empty/whitespace
            if not selected[2].strip() and not selected[3].strip():
                skipped += 1
                continue
            # Apply language filter
            if selected[1] not in ALLOWED_LANGS:
                skipped += 1
                continue
            batch.append(selected)
            if len(batch) >= batch_size:
                cur.executemany(sql, batch)
                inserted += len(batch)
                batch.clear()
        if batch:
            cur.executemany(sql, batch)
            inserted += len(batch)
    conn.commit()
    print(f"Inserted {inserted:,} rows into {table_name} (skipped {skipped:,} rows)")
def remove_duplicates(conn):
    """Enforce table priority current > live2 > live.

    Any file_name already present in a higher-priority table is deleted
    from the lower-priority one, so each name survives in exactly one
    table after this call.
    """
    cursor = conn.cursor()
    # live2 loses anything current already provides.
    cursor.execute(
        "DELETE FROM live2 "
        "WHERE file_name IN (SELECT file_name FROM current)"
    )
    removed_from_live2 = cursor.rowcount
    # live loses anything current or (deduplicated) live2 provides.
    cursor.execute(
        "DELETE FROM live "
        "WHERE file_name IN (SELECT file_name FROM current) "
        "OR file_name IN (SELECT file_name FROM live2)"
    )
    removed_from_live = cursor.rowcount
    conn.commit()
    print(f"Removed {removed_from_live2:,} duplicates from live2 (kept current)")
    print(f"Removed {removed_from_live:,} duplicates from live (kept current/live2)")
def merge_tables(conn):
    """Combine current, live2 and live into one Objects table, sorted by file_name."""
    cursor = conn.cursor()
    create_table(conn, "Objects")
    # ORDER BY applies to the whole compound SELECT, so the merged rows
    # land in file_name order.
    cursor.execute(
        "INSERT INTO Objects "
        "SELECT * FROM current "
        "UNION ALL SELECT * FROM live2 "
        "UNION ALL SELECT * FROM live "
        "ORDER BY file_name"
    )
    conn.commit()
    print("Created merged table Objects")
def main():
    """Rebuild the localisation database from the TXT exports, from scratch."""
    # Always start from a fresh database file.
    if os.path.exists(db_file):
        os.remove(db_file)
    conn = sqlite3.connect(db_file)
    # Speed over durability: the DB is fully regenerated on every run,
    # so journaling and sync guarantees buy nothing here.
    for pragma in (
        "PRAGMA journal_mode=OFF",
        "PRAGMA synchronous=OFF",
        "PRAGMA temp_store=MEMORY",
        "PRAGMA page_size=16384",
    ):
        conn.execute(pragma)
    for table in ("current", "live2", "live"):
        create_table(conn, table)
    # Load in priority order: "current" first (highest priority),
    # then live2, then live.
    for cur_file in current_files:
        insert_file(conn, "current", cur_file)
    insert_file(conn, "live2", live2_file)
    insert_file(conn, "live", live_file)
    # Deduplicate across tables, then merge into the final Objects table.
    remove_duplicates(conn)
    merge_tables(conn)
    conn.execute("VACUUM")
    conn.close()
    print("Done:", db_file)


if __name__ == "__main__":
    main()