sadidul012 committed on
Commit
f1b3791
1 Parent(s): 15cb8f8
Files changed (2)
  1. app.py +13 -13
  2. test.py +2 -2
app.py CHANGED
@@ -34,21 +34,21 @@ graph_config = {
 
 
 def scrape_and_summarize(prompt, source):
-    with open("file.txt", "w") as file:
+    with open("file.html", "w") as file:
         file.write(html.unescape(source))
 
-    with open("file.txt", "r") as file:
-        text = file.read()
-    return {"prompt": prompt}, {"source": text}
-    # smart_scraper_graph = SmartScraperGraph(
-    #     prompt=prompt,
-    #     source="file.txt",
-    #     # source=source,
-    #     config=graph_config
-    # )
-    # result = smart_scraper_graph.run()
-    # exec_info = smart_scraper_graph.get_execution_info()
-    # return result, prettify_exec_info(exec_info)
+    # with open("file.html", "r") as file:
+    #     text = file.read()
+    # return {"prompt": prompt}, {"source": text}
+    smart_scraper_graph = SmartScraperGraph(
+        prompt=prompt,
+        source="file.html",
+        # source=source,
+        config=graph_config
+    )
+    result = smart_scraper_graph.run()
+    exec_info = smart_scraper_graph.get_execution_info()
+    return result, prettify_exec_info(exec_info)
 
 
 # Gradio interface
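
For context: the updated scrape_and_summarize writes the unescaped HTML to file.html and then runs ScrapeGraphAI's SmartScraperGraph over it instead of echoing the input back. The graph_config referenced in the hunk header and the "# Gradio interface" section are not shown in this commit; the sketch below fills them in with assumed values (an OpenAI-backed model, an API key taken from the environment, a two-input/two-output gr.Interface) purely for illustration.

# Minimal sketch of the surrounding app.py, assuming an OpenAI-backed
# graph_config and a simple Gradio interface; neither is shown in the diff.
import html
import os

import gradio as gr
from scrapegraphai.graphs import SmartScraperGraph
from scrapegraphai.utils import prettify_exec_info

# Hypothetical config; the real graph_config sits above the hunk shown in the diff.
graph_config = {
    "llm": {
        "api_key": os.environ.get("OPENAI_API_KEY"),  # assumed; not shown in the commit
        "model": "gpt-3.5-turbo",                     # assumed model choice
    },
    "verbose": True,
}


def scrape_and_summarize(prompt, source):
    # Persist the unescaped HTML so SmartScraperGraph can read it from disk.
    with open("file.html", "w") as file:
        file.write(html.unescape(source))

    smart_scraper_graph = SmartScraperGraph(
        prompt=prompt,
        source="file.html",
        config=graph_config,
    )
    result = smart_scraper_graph.run()
    exec_info = smart_scraper_graph.get_execution_info()
    return result, prettify_exec_info(exec_info)


# Gradio interface (assumed wiring; the diff only shows the comment marker).
demo = gr.Interface(
    fn=scrape_and_summarize,
    inputs=[gr.Textbox(label="Prompt"), gr.Textbox(label="Source HTML")],
    outputs=[gr.JSON(label="Result"), gr.Textbox(label="Execution info")],
)

if __name__ == "__main__":
    demo.launch()
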
test.py CHANGED
@@ -1,12 +1,12 @@
 from gradio_client import Client
 import html
 
-with open("file.txt", "r") as file:
+with open("simple.html", "r") as file:
     text = file.read()
 
 client = Client("sadidul012/test-space")
 result = client.predict(
-    prompt="List me all the press releases with their headlines and urls.",
+    prompt="List me all the press releases with their headlines and description.",
     source=html.escape(text),
     api_name="/scrape_and_summarize"
 )
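
The client call above hits the Space's /scrape_and_summarize endpoint, which returns the two values produced by scrape_and_summarize (the scrape result and the prettified execution info), so result should arrive as a pair. A minimal, assumed continuation of test.py that just prints both values:

# Assumed continuation: unpack the two outputs returned by the endpoint.
scrape_result, exec_info = result
print(scrape_result)  # structured data extracted from the HTML
print(exec_info)      # prettified execution info from ScrapeGraphAI
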