austindavis committed on
Commit
5861145
1 Parent(s): 58921bf

Upload prepare.py

Browse files
Files changed (1) hide show
  1. prepare.py +145 -0
prepare.py ADDED
@@ -0,0 +1,145 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import re
3
+ import io
4
+ import requests
5
+ from zipfile import ZipFile
6
+ from tqdm import tqdm
7
+ import chess.pgn as pgn
8
+ import pandas as pd
9
+ from datasets import Dataset, DatasetInfo
10
# Register tqdm with pandas so DataFrame.progress_apply (used below on the
# "transcript" column) displays a progress bar.
tqdm.pandas()
# Monthly Lichess Elite database archives covering all twelve months of 2023.
ZIP_URLS = [
    f"https://database.nikonoel.fr/lichess_elite_2023-{month:02d}.zip"
    for month in range(1, 13)
]
def download_and_unzip(url, save_directory, force_download=False):
    """Download a zip archive from `url` into `save_directory` and extract it.

    Parameters
    ----------
    url : str
        Direct link to a .zip file; the local filename is taken from the
        last path segment of the URL.
    save_directory : str
        Directory where the archive is saved and extracted.
    force_download : bool, optional
        Re-download even if the archive already exists locally.
    """
    # Derive the local filename from the last URL path segment.
    filename = url.split("/")[-1]
    file_path = os.path.join(save_directory, filename)

    # Skip the download when a copy already exists, unless forced.
    if force_download or not os.path.exists(file_path):
        print(f"Downloading {filename}...")
        response = requests.get(url)
        # Fail loudly on HTTP errors instead of silently writing an error
        # page to disk (which would then break the unzip step).
        response.raise_for_status()
        with open(file_path, "wb") as file:
            file.write(response.content)
        print(f"Downloaded {filename}.")
    else:
        print(f"{filename} already exists. Skipping download.")

    # Extract the archive into the same directory as the zip file.
    with ZipFile(file_path, "r") as zip_ref:
        print(f"Unzipping {filename}...")
        zip_ref.extractall(save_directory)
        print(f"Unzipped {filename}.")
def parse_pgn_dataset_to_dataframe(pgn_file_path):
    """Parse a multi-game PGN file into a pandas DataFrame.

    Each game becomes one row, with one column per PGN header tag (Event,
    Result, WhiteElo, ...) plus a "transcript" column holding the raw
    movetext joined into a single line.

    Parameters
    ----------
    pgn_file_path : str
        Path to a PGN file containing one or more games.

    Returns
    -------
    pd.DataFrame
        One row per game; empty DataFrame if the file contains no games.
    """
    # Matches PGN header lines such as: [Event "Rated Rapid game"]
    header_pattern = re.compile(r"\[([A-Za-z0-9]+) \"(.+?)\"\]")

    games_list = []
    current_game = {}
    transcript = []

    # Open as UTF-8 explicitly so parsing does not depend on the locale's
    # default encoding; undecodable bytes are dropped rather than raising.
    with open(pgn_file_path, "r", encoding="utf-8", errors="ignore") as file:
        for line in file:
            # Strip any remaining non-ASCII characters (e.g. player names).
            line = line.encode("ascii", "ignore").decode("ascii")
            header_match = header_pattern.match(line)
            if header_match:
                # An "Event" header starts a new game: flush the previous one.
                if header_match.group(1) == "Event" and current_game:
                    current_game["transcript"] = " ".join(transcript).strip()
                    games_list.append(current_game)
                    current_game = {}
                    transcript = []
                current_game[header_match.group(1)] = header_match.group(2)
            else:
                # Accumulate movetext, skipping blank lines and lines that
                # begin with a game result token.
                clean_line = line.strip()
                if (
                    clean_line
                    and not clean_line.startswith("1-0")
                    and not clean_line.startswith("1/2-1/2")
                    and not clean_line.startswith("0-1")
                ):
                    transcript.append(clean_line)

    # Flush the final game (no trailing "Event" header follows it).
    if current_game:
        current_game["transcript"] = " ".join(transcript).strip()
        games_list.append(current_game)

    return pd.DataFrame(games_list)
def pgn_to_uci_transcript(pgn_transcript):
    """Convert a PGN movetext string to a space-separated UCI move string.

    Returns None when python-chess cannot parse a game from the input,
    which lets callers (e.g. progress_apply) map unparseable rows to NaN.
    """
    parsed_game = pgn.read_game(io.StringIO(pgn_transcript))
    if parsed_game is None:
        return

    # Replay the mainline on a board: board.uci() needs the position at
    # the time of each move, so the push must follow the conversion.
    position = parsed_game.board()
    uci_moves = []
    for move in parsed_game.mainline_moves():
        uci_moves.append(position.uci(move))
        position.push(move)

    return " ".join(uci_moves)
if __name__ == "__main__":
    save_directory = "."

    # exist_ok covers both the missing and already-present cases.
    os.makedirs(save_directory, exist_ok=True)

    # Fetch and extract every monthly archive.
    for url in ZIP_URLS:
        download_and_unzip(url, save_directory)

    pgn_files = [file for file in os.listdir(
        save_directory) if file.endswith(".pgn")]

    file_dfs = []

    for pgn_file in pgn_files:
        print(f"Parsing PGN from: {pgn_file}")
        # Join with save_directory so this also works when it is not ".".
        df = parse_pgn_dataset_to_dataframe(
            os.path.join(save_directory, pgn_file))
        # Keep only rapid games.
        df = df[df["EventType"] == "rapid"]

        file_dfs.append(df)

    # ignore_index avoids the duplicated per-file indices that concat
    # would otherwise produce.
    df = pd.concat(file_dfs, ignore_index=True)

    # Cast columns whose values are all numeric strings (e.g. Elo ratings)
    # to int.
    for column in df.columns:
        if df[column].str.isnumeric().all():
            df[column] = df[column].astype(int)

    # Convert PGN movetext to UCI move strings (with a tqdm progress bar).
    df["transcript"] = df["transcript"].progress_apply(pgn_to_uci_transcript)

    # Filter unresolved games.
    df = df[df["Result"] != "*"]

    # Feather requires a default RangeIndex; reset it after filtering,
    # otherwise to_feather raises ValueError.
    df = df.reset_index(drop=True)

    df.to_feather("elite_dataset.feather")
    ds_info = DatasetInfo(
        description="The Lichess Elite Dataset includes all (rapid) games from Lichess by players rated 2500+ against players rated 2300+ played during the year 2023. Only games with an outcome of 1/2-1/2, 1-0, or 0-1 are included."
    )
    ds = Dataset.from_pandas(df, info=ds_info)
    ds.push_to_hub("austindavis/chess_world_lichess_elite")