for876543 committed
Commit 24f4091
1 Parent(s): 0965a5f

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -55,7 +55,7 @@ def scrape_page(url):
     result = "reference url: " + url + "\n"
     content = "\n".join([paragraph.text for paragraph in paragraphs])
     result += content
-    with open("/home/user/app/data1/test_"+str(counter)+".txt", "w") as file:
+    with open("/home/user/app/data1/base_url_1_"+str(counter)+".txt", "w") as file:
         file.write(result)
 
     visited_urls.append(url)
@@ -71,7 +71,7 @@ def scrape_page(url):
         print(f"Ignoring invalid URL: {url}")
         return ""
 
-result = scrape_page(base_url)
+result = scrape_page(base_url_1)
 
 documents = SimpleDirectoryReader("/home/user/app/data1/").load_data()
 index = GPTVectorStoreIndex.from_documents(documents)
@@ -89,7 +89,7 @@ def generate_text(input_text):
 
     with open(DATA_FILE, mode='a', newline='') as file:
        writer = csv.writer(file)
-       writer.writerow([input_text, output_text, datetime.now().strftime("%Y-%m-%d %H:%M:%S")])
+       writer.writerow([" ".join(words[1:]), output_text, datetime.now().strftime("%Y-%m-%d %H:%M:%S")])
 
     commit_url = repo.push_to_hub()
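For context, a minimal sketch of how the renamed output files and the new base_url_1 entry point might fit together. Only the lines shown in the diff come from the commit; the requests/BeautifulSoup usage, the counter and visited_urls handling, and the value of base_url_1 are assumptions used to make the sketch self-contained.

import requests
from bs4 import BeautifulSoup

visited_urls = []            # assumed module-level state implied by the diff
counter = 0                  # assumed counter used in the output filename
base_url_1 = "https://example.com/docs"  # hypothetical starting URL

def scrape_page(url):
    global counter
    if not url.startswith("http"):
        print(f"Ignoring invalid URL: {url}")
        return ""
    if url in visited_urls:
        return ""
    soup = BeautifulSoup(requests.get(url, timeout=10).text, "html.parser")
    paragraphs = soup.find_all("p")
    result = "reference url: " + url + "\n"
    content = "\n".join([paragraph.text for paragraph in paragraphs])
    result += content
    counter += 1
    # The commit switches the filename prefix from test_ to base_url_1_.
    with open("/home/user/app/data1/base_url_1_" + str(counter) + ".txt", "w") as file:
        file.write(result)
    visited_urls.append(url)
    return result

# The crawl now starts from base_url_1 rather than base_url.
result = scrape_page(base_url_1)

The written files are then picked up unchanged by SimpleDirectoryReader("/home/user/app/data1/").load_data() and indexed with GPTVectorStoreIndex, as the surrounding context lines show.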
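Similarly, a hedged sketch of the logging change in generate_text: the commit writes " ".join(words[1:]), the prompt minus its first token, instead of the raw input_text. Here words, DATA_FILE, and the example strings are assumptions not shown in this hunk; only the writerow call itself mirrors the diff.

import csv
from datetime import datetime

DATA_FILE = "log.csv"                              # assumed path, not shown in the hunk
input_text = "question What does the page say?"    # hypothetical prompt
output_text = "...model response..."               # placeholder
words = input_text.split()                         # assumed source of `words`

with open(DATA_FILE, mode='a', newline='') as file:
    writer = csv.writer(file)
    # Log the prompt without its first word, the response, and a timestamp.
    writer.writerow([" ".join(words[1:]), output_text,
                     datetime.now().strftime("%Y-%m-%d %H:%M:%S")])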