Spaces:
Paused
Commit
•
4c8bae0
1 Parent(s): 1ab9a2a
uncomment inserts
Browse files
- algo.py +2 -2
- rq_settings.py +2 -2
- run.py +2 -6
algo.py
CHANGED
@@ -341,11 +341,11 @@ class Algo:
|
|
341 |
results.append(mapping)
|
342 |
|
343 |
if len(result_batch) >= 500:
|
344 |
- # store_batch_results_to_db(self.db_conn, self.db_cursor, self.run_key, result_batch)
|
345 |
result_batch = []
|
346 |
|
347 |
if len(result_batch) > 0:
|
348 |
- # store_batch_results_to_db(self.db_conn, self.db_cursor, self.run_key, result_batch)
|
349 |
result_batch = []
|
350 |
|
351 |
|
|
|
341 |
results.append(mapping)
|
342 |
|
343 |
if len(result_batch) >= 500:
|
344 |
+
store_batch_results_to_db(self.db_conn, self.db_cursor, self.run_key, result_batch)
|
345 |
result_batch = []
|
346 |
|
347 |
if len(result_batch) > 0:
|
348 |
+
store_batch_results_to_db(self.db_conn, self.db_cursor, self.run_key, result_batch)
|
349 |
result_batch = []
|
350 |
|
351 |
|
rq_settings.py
CHANGED
@@ -20,14 +20,14 @@ LOGGING = {
|
|
20 |
},
|
21 |
'handlers': {
|
22 |
'console': {
|
23 |
- 'level': '…',
|
24 |
'class': 'logging.StreamHandler',
|
25 |
'formatter': 'verbose'
|
26 |
},
|
27 |
},
|
28 |
'root': {
|
29 |
'handlers': ['console'],
|
30 |
- 'level': '…',
|
31 |
},
|
32 |
'rq.worker': {
|
33 |
'handlers': ['console'],
|
|
|
20 |
},
|
21 |
'handlers': {
|
22 |
'console': {
|
23 |
+
'level': 'WARNING',
|
24 |
'class': 'logging.StreamHandler',
|
25 |
'formatter': 'verbose'
|
26 |
},
|
27 |
},
|
28 |
'root': {
|
29 |
'handlers': ['console'],
|
30 |
+
'level': 'WARNING',
|
31 |
},
|
32 |
'rq.worker': {
|
33 |
'handlers': ['console'],
|
run.py
CHANGED
@@ -37,11 +37,7 @@ if __name__ == "__main__":
|
|
37 |
run_key = raw_file_name.split('.')[0]
|
38 |
csv_complete, _ = file_is_complete(raw_file_name, run_key, db_cursor)
|
39 |
if not csv_complete:
|
40 |
- job = q.enqueue(process_file, raw_file_name, …)
|
41 |
print(f"Task enqueued with job ID: {job.id}")
|
42 |
|
43 |
-
|
44 |
-
|
45 |
-
# algo.match_words([['bananas']])
|
46 |
-
|
47 |
-
# db_conn.close()
|
|
|
37 |
run_key = raw_file_name.split('.')[0]
|
38 |
csv_complete, _ = file_is_complete(raw_file_name, run_key, db_cursor)
|
39 |
if not csv_complete:
|
40 |
+
job = q.enqueue(process_file, raw_file_name, job_timeout=WORKER_TIMEOUT)
|
41 |
print(f"Task enqueued with job ID: {job.id}")
|
42 |
|
43 |
+
db_conn.close()
|
|
|
|
|
|
|
|