Fizzarolli committed
Commit 00f84bd · verified · 1 Parent(s): 6b82a6c

Upload 3 files

Files changed (2)
  1. output.json +0 -0
  2. output.py +53 -39
output.json CHANGED
The diff for this file is too large to render. See raw diff
 
output.py CHANGED
@@ -1,10 +1,8 @@
 import sqlite3
 import random
 import json
-import hashlib
-from pybloom_live import ScalableBloomFilter
-
 from tqdm import tqdm
+from multiprocessing import Pool, Value, Lock
 
 DATABASE_FILE = "discobase3-29-2021-9-32-09-PM.db" # Replace with your database file
 
@@ -117,37 +115,59 @@ def format_conversation_output(conversation):
 
     return output.strip()
 
-def main():
-    """Main function to generate and output conversation paths."""
-    conn = sqlite3.connect(DATABASE_FILE)
+def worker_process(args):
+    """Worker function for parallel processing"""
+    db_file, starting_dialogues = args
+    conn = sqlite3.connect(db_file)
     cursor = conn.cursor()
-
-    conversations = []
-    num_paths_to_generate = 1000
-    paths_generated = 0
 
-    # Track seen conversation patterns
-    seen_windows = set()
-    min_window_size = 5 # Minimum consecutive dialogues to consider for duplication
+    # Generate a single path
+    start_convo_id, start_dialogue_id = random.choice(starting_dialogues)
+    path = generate_conversation_path(cursor, start_convo_id, start_dialogue_id)
 
-    # Either pick a random starting point:
-    starting_dialogues = get_starting_dialogues(cursor)
+    conn.close()
+    return path
+
+def main():
+    """Main function using multiprocessing"""
+    starting_dialogues = get_starting_dialogues(sqlite3.connect(DATABASE_FILE).cursor())
     if not starting_dialogues:
         print("Error: No starting dialogues found in the database.")
-        conn.close()
         return
 
-    with tqdm(total=num_paths_to_generate, desc="Generating paths") as pbar:
-        while paths_generated < num_paths_to_generate:
-            start_convo_id, start_dialogue_id = random.choice(starting_dialogues)
-            path = generate_conversation_path(cursor, start_convo_id, start_dialogue_id)
-
-            # Skip paths that are too short
-            if len(path) < 5:
+    conversations = []
+    seen_windows = set()
+    min_window_size = 5
+    num_paths_to_generate = 5000
+
+    # Create worker arguments (same for all workers)
+    worker_args = (DATABASE_FILE, starting_dialogues)
+
+    # Create process pool
+    counter = Value('i', 0)
+    lock = Lock()
+
+    def update_progress(_):
+        with lock:
+            counter.value += 1
+            if counter.value % 10 == 0:
+                print(f"\rGenerated {counter.value}/{num_paths_to_generate} paths", end="")
+
+    with Pool(processes=8) as pool: # Adjust number of processes based on CPU cores
+        results = []
+        # Submit initial batch of tasks
+        for _ in range(num_paths_to_generate * 2): # Generate extra to account for rejected paths
+            results.append(pool.apply_async(worker_process, (worker_args,), callback=update_progress))
+
+        # Collect valid results
+        while len(conversations) < num_paths_to_generate and results:
+            result = results.pop(0).get()
+            if len(result) < 5:
                 continue
-
-            # Create semantic key for conversation
-            path_text = " ".join(entry["dialogue"].strip().lower() for entry in path if not entry["dialogue"].startswith("["))
+
+            # Duplicate checking (now in main process)
+            path_text = " ".join(entry["dialogue"].strip().lower()
                                  for entry in result if not entry["dialogue"].startswith("["))
 
             def is_duplicate(new_text):
                 """Check if new conversation contains or is contained within existing conversations"""
@@ -178,23 +198,17 @@ def main():
                     seen_windows.add(" ".join(path_dialogues[i:i+min_window_size]))
                 return False
 
-            # Check for duplicates
-            try:
-                if (not is_duplicate(path_text) and
-                    not is_added(path) and
-                    any(entry["dialogue"][0] != "[" for entry in path)):
-                    conversations.append(path)
-                    paths_generated += 1
-                    pbar.update(1)
-            except Exception as e:
-                print(f"Error: {e}")
-                continue
+            if (not is_duplicate(path_text) and
+                not is_added(result) and
+                any(entry["dialogue"][0] != "[" for entry in result)):
+                conversations.append(result)
+
+        pool.close()
+        pool.join()
 
     output = {"conversations": conversations}
     with open("output.json", "w") as f:
         json.dump(output, f, indent=4)
 
-    conn.close()
-
 if __name__ == "__main__":
     main()
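
For reference, a minimal sketch of the coordination pattern the new main() relies on: each worker opens its own sqlite3 connection (connection objects cannot be pickled or shared across processes), tasks are submitted with Pool.apply_async, and a callback running in the parent process updates a shared counter for progress reporting. fake_worker and the numeric constants below are hypothetical stand-ins, not functions from this repository.

# Minimal, self-contained sketch of the apply_async + callback pattern,
# assuming a stand-in worker instead of the repository's worker_process.
import random
from multiprocessing import Pool, Value, Lock

counter = Value('i', 0)  # shared counter; only the parent's callback thread writes to it
lock = Lock()

def fake_worker(seed):
    """Hypothetical stand-in worker: returns a 'path' of random length."""
    rng = random.Random(seed)
    return [rng.random() for _ in range(rng.randint(1, 10))]

def update_progress(_result):
    # apply_async callbacks run in a thread of the parent process,
    # so parent-side state can be updated here safely.
    with lock:
        counter.value += 1

def main():
    kept = []
    with Pool(processes=4) as pool:
        results = [pool.apply_async(fake_worker, (seed,), callback=update_progress)
                   for seed in range(100)]
        for r in results:
            path = r.get()
            if len(path) >= 5:  # mirror the diff's minimum-length filter
                kept.append(path)
    print(f"{counter.value} tasks completed, {len(kept)} paths kept")

if __name__ == "__main__":
    main()

As in the committed version, the counter tracks completed tasks rather than accepted paths, so the reported progress can run ahead of the number of conversations actually kept.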