sc_ma committed on
Commit
8aec19e
1 Parent(s): caa5017

Add webui (testing).

Browse files
Files changed (4) hide show
  1. app.py +37 -0
  2. auto_backgrounds.py +15 -2
  3. auto_draft.py +13 -1
  4. utils/references.py +19 -1
app.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import openai
3
+ from auto_backgrounds import generate_backgrounds
4
+
5
+ # todo: 1. remove repeated entry in bibfile (go to references.py)
6
+ # 2. (maybe) multiple commas error (see Overleaf)
7
+ # 3. create a huggingface space. test it using multiple devices!
8
+ # 4. further polish auto_backgrounds.py. Make backgrounds have multiple subsection.
9
+ # 5. Design a good layout of huggingface space.
10
def generate_backgrounds(t1, t2):
    """Temporary stand-in for the real pipeline while the web UI is tested.

    NOTE(review): this definition shadows the `generate_backgrounds`
    imported from auto_backgrounds at the top of the file, so that import
    is effectively unused — remove this stub once the UI wiring is verified.

    Returns the path of an existing file so the Gradio File output has
    something to serve.
    """
    return "README.md"
12
+
13
def clear_inputs(text1, text2):
    """Reset both textboxes; wired to the Clear button.

    Gradio passes the current contents as *text1*/*text2*, but they are
    ignored — the handler unconditionally blanks both fields.
    """
    return "", ""
15
+
16
# Build the Gradio UI: title/description inputs and buttons on the left,
# the generated-archive download slot on the right.
with gr.Blocks() as demo:
    gr.Markdown('''
# Auto-Draft: 论文结构辅助工具

用法: 输入任意论文标题, 点击Submit, 等待大概十分钟, 下载output.zip.
''')
    with gr.Row():
        with gr.Column():
            title = gr.Textbox(value="Playing Atari Game with Deep Reinforcement Learning", lines=1, max_lines=1, label="Title")
            description = gr.Textbox(lines=5, label="Description (Optional)")

            with gr.Row():
                clear_button = gr.Button("Clear")
                submit_button = gr.Button("Submit")
        with gr.Column():
            # `gr.outputs.File()` is the deprecated pre-Blocks output API;
            # inside Blocks the component class is used directly.
            file_output = gr.File()

    clear_button.click(fn=clear_inputs, inputs=[title, description], outputs=[title, description])
    submit_button.click(fn=generate_backgrounds, inputs=[title, description], outputs=file_output)

# One generation takes ~10 minutes, so serialize requests and cap the queue;
# api_open=False hides the raw API from the public Space.
demo.queue(concurrency_count=1, max_size=5, api_open=False)
demo.launch()
auto_backgrounds.py CHANGED
@@ -6,6 +6,7 @@ import datetime
6
  import shutil
7
  import time
8
  import logging
 
9
 
10
  TOTAL_TOKENS = 0
11
  TOTAL_PROMPTS_TOKENS = 0
@@ -31,6 +32,16 @@ def log_usage(usage, generating_target, print_out=True):
31
  print(message)
32
  logging.info(message)
33
 
 
 
 
 
 
 
 
 
 
 
34
  def pipeline(paper, section, save_to_path, model):
35
  """
36
  The main pipeline of generating a section.
@@ -55,10 +66,10 @@ def pipeline(paper, section, save_to_path, model):
55
  f.write(r"\end{abstract}")
56
  else:
57
  with open(tex_file, "w") as f:
58
- f.write(f"\section{{{section}}}\n")
59
  with open(tex_file, "a") as f:
60
  f.write(output)
61
- time.sleep(20)
62
  print(f"{section} has been generated. Saved to {tex_file}.")
63
  return usage
64
 
@@ -106,6 +117,8 @@ def generate_backgrounds(title, description="", template="ICLR2022", model="gpt-
106
  except Exception as e:
107
  print(f"Failed to generate {section} due to the error: {e}")
108
  print(f"The paper {title} has been generated. Saved to {save_to_path}.")
 
 
109
 
110
  if __name__ == "__main__":
111
  title = "Reinforcement Learning"
 
6
  import shutil
7
  import time
8
  import logging
9
+ import os
10
 
11
  TOTAL_TOKENS = 0
12
  TOTAL_PROMPTS_TOKENS = 0
 
32
  print(message)
33
  logging.info(message)
34
 
35
def make_archive(source, destination):
    """Pack the directory *source* into the archive file *destination*.

    `shutil.make_archive` always writes "<name>.<format>" into the current
    working directory, so the archive is built there first and then moved
    to *destination*.

    Args:
        source: path of the directory to archive; a trailing path
            separator is tolerated.
        destination: target archive path; its extension selects the
            format, e.g. "outputs/output.zip" -> "zip".

    Returns:
        The *destination* path (handy for chaining into a Gradio output).
    """
    base = os.path.basename(destination)
    # splitext is robust when the base name contains additional dots,
    # unlike base.split('.')[0] / [1].
    name, ext = os.path.splitext(base)
    archive_format = ext.lstrip('.')
    # Drop only a *trailing* separator before splitting: str.strip(os.sep)
    # also removed a leading separator, and dirname() on an un-stripped
    # "dir/" produced a wrong root_dir/base_dir pair.
    source = source.rstrip(os.sep)
    archive_from = os.path.dirname(source)
    archive_to = os.path.basename(source)
    shutil.make_archive(name, archive_format, archive_from, archive_to)
    shutil.move(f"{name}.{archive_format}", destination)
    return destination
44
+
45
  def pipeline(paper, section, save_to_path, model):
46
  """
47
  The main pipeline of generating a section.
 
66
  f.write(r"\end{abstract}")
67
  else:
68
  with open(tex_file, "w") as f:
69
+ f.write(f"\section{{{section.upper()}}}\n")
70
  with open(tex_file, "a") as f:
71
  f.write(output)
72
+ time.sleep(5)
73
  print(f"{section} has been generated. Saved to {tex_file}.")
74
  return usage
75
 
 
117
  except Exception as e:
118
  print(f"Failed to generate {section} due to the error: {e}")
119
  print(f"The paper {title} has been generated. Saved to {save_to_path}.")
120
+ # shutil.make_archive("output.zip", 'zip', save_to_path)
121
+ return make_archive(save_to_path, save_to_path+"output.zip")
122
 
123
  if __name__ == "__main__":
124
  title = "Reinforcement Learning"
auto_draft.py CHANGED
@@ -7,11 +7,22 @@ import datetime
7
  import shutil
8
  import time
9
  import logging
 
10
 
11
  TOTAL_TOKENS = 0
12
  TOTAL_PROMPTS_TOKENS = 0
13
  TOTAL_COMPLETION_TOKENS = 0
14
 
 
 
 
 
 
 
 
 
 
 
15
 
16
  def log_usage(usage, generating_target, print_out=True):
17
  global TOTAL_TOKENS
@@ -59,7 +70,7 @@ def pipeline(paper, section, save_to_path, model):
59
  f.write(f"\section{{{section}}}\n")
60
  with open(tex_file, "a") as f:
61
  f.write(output)
62
- time.sleep(20)
63
  print(f"{section} has been generated. Saved to {tex_file}.")
64
  return usage
65
 
@@ -121,6 +132,7 @@ def generate_draft(title, description="", template="ICLR2022", model="gpt-4"):
121
  except Exception as e:
122
  print(f"Failed to generate {section} due to the error: {e}")
123
  print(f"The paper {title} has been generated. Saved to {save_to_path}.")
 
124
 
125
  if __name__ == "__main__":
126
  # title = "Training Adversarial Generative Neural Network with Adaptive Dropout Rate"
 
7
  import shutil
8
  import time
9
  import logging
10
+ import os
11
 
12
  TOTAL_TOKENS = 0
13
  TOTAL_PROMPTS_TOKENS = 0
14
  TOTAL_COMPLETION_TOKENS = 0
15
 
16
def make_archive(source, destination):
    """Zip up the directory *source* as the archive file *destination*.

    The archive is first created in the current working directory (that is
    where `shutil.make_archive` writes "<name>.<format>") and then moved
    into place.

    Args:
        source: directory to archive; may end with a path separator.
        destination: target archive path whose extension selects the
            format (e.g. ".zip").

    Returns:
        The *destination* path.
    """
    base = os.path.basename(destination)
    # Use splitext instead of base.split('.') so names with extra dots
    # (e.g. "draft.v2.zip") still split into name + format correctly.
    name, ext = os.path.splitext(base)
    archive_format = ext.lstrip('.')
    # Remove only a trailing separator: the previous str.strip(os.sep)
    # stripped both ends, and dirname() was applied to the un-stripped
    # path, so "dir/" gave a wrong root_dir/base_dir pair.
    source = source.rstrip(os.sep)
    archive_from = os.path.dirname(source)
    archive_to = os.path.basename(source)
    shutil.make_archive(name, archive_format, archive_from, archive_to)
    shutil.move(f"{name}.{archive_format}", destination)
    return destination
25
+
26
 
27
  def log_usage(usage, generating_target, print_out=True):
28
  global TOTAL_TOKENS
 
70
  f.write(f"\section{{{section}}}\n")
71
  with open(tex_file, "a") as f:
72
  f.write(output)
73
+ time.sleep(5)
74
  print(f"{section} has been generated. Saved to {tex_file}.")
75
  return usage
76
 
 
132
  except Exception as e:
133
  print(f"Failed to generate {section} due to the error: {e}")
134
  print(f"The paper {title} has been generated. Saved to {save_to_path}.")
135
+ return make_archive(save_to_path, save_to_path+"output.zip")
136
 
137
  if __name__ == "__main__":
138
  # title = "Training Adversarial Generative Neural Network with Adaptive Dropout Rate"
utils/references.py CHANGED
@@ -49,7 +49,14 @@ def _collect_papers_arxiv(keyword, counts=3):
49
  # Extract the year
50
  published = entry.find(f"{namespace}published").text
51
  year = published.split("-")[0]
52
- arxiv_id = re.search(r'\d+\.\d+', link).group(0)
 
 
 
 
 
 
 
53
  journal = f"arXiv preprint arXiv:{arxiv_id}"
54
  result = {
55
  "paper_id": arxiv_id,
@@ -93,6 +100,17 @@ class References:
93
  for key, counts in keywords_dict.items():
94
  self.papers = self.papers + process(key, counts)
95
 
 
 
 
 
 
 
 
 
 
 
 
96
  def to_bibtex(self, path_to_bibtex="ref.bib"):
97
  """
98
  Turn the saved paper list into bibtex file "ref.bib". Return a list of all `paper_id`.
 
49
  # Extract the year
50
  published = entry.find(f"{namespace}published").text
51
  year = published.split("-")[0]
52
+
53
+ founds = re.search(r'\d+\.\d+', link)
54
+ if founds is None:
55
+ # some links are not standard; such as "https://arxiv.org/abs/cs/0603127v1".
56
+ # will be solved in the future.
57
+ continue
58
+ else:
59
+ arxiv_id = founds.group(0)
60
  journal = f"arXiv preprint arXiv:{arxiv_id}"
61
  result = {
62
  "paper_id": arxiv_id,
 
100
  for key, counts in keywords_dict.items():
101
  self.papers = self.papers + process(key, counts)
102
 
103
+ # TODO: remove repeated entries
104
+ # test this
105
+ seen = set()
106
+ papers = []
107
+ for paper in self.papers:
108
+ paper_id = paper["paper_id"]
109
+ if paper_id not in seen:
110
+ seen.add(paper_id)
111
+ papers.append(paper)
112
+ self.papers = papers
113
+
114
  def to_bibtex(self, path_to_bibtex="ref.bib"):
115
  """
116
  Turn the saved paper list into bibtex file "ref.bib". Return a list of all `paper_id`.