feat: config sync cheapAI to StrongAI; reformats; debug mode UIs

Files changed:
- .gitignore +1 -0
- _data_test.py +5 -6
- _test.py +10 -3
- app.py +32 -5
- config.py +1 -0
- setup.sh +1 -3
- taskAI.py +41 -11
- taskNonAI.py +14 -10
- typst/letter.typ +4 -4
- util.py +9 -7

.gitignore  CHANGED
@@ -1,6 +1,7 @@
 *_secret.py
 *_secret.py*
 **test_result.pdf
+**letter.typ
 
 .local/
 .ruff_cache/

_data_test.py  CHANGED
@@ -71,10 +71,9 @@ Languages:
 Fluent in English and French
 """
 pdf_context = {
-
-
-
-
-
+    "companyFullName": "Queen of Hearts' Garden",
+    "jobTitle": "Card Guards",
+    "applicantFullName": "Sherlock Holmes",
+    "applicantContactInformation": "221B Baker Street, London, +44 (020) 1234-5678, sherlock.holmes@sherlockHolmes.com",
+    "letter_body": "text,\n\ntest test",
 }
-

_test.py  CHANGED
@@ -1,6 +1,6 @@
 from taskAI import TaskAI
 from taskNonAI import compile_pdf
-from _data_test import
+from _data_test import mock_cv, pdf_context
 from _secret import api_test
 
 from llama_index.llms.openai_like import OpenAILike
@@ -22,17 +22,24 @@ def test_ai_integration():
     ).chat(messages)
     print(response)
 
+
 def test_taskAI():
     taskAI = TaskAI(api_test)
     gen = taskAI.cv_preprocess(mock_cv)
     for chunk in gen:
         print(chunk)
 
+
 def test_typst_pdf():
-    compile_pdf(
+    compile_pdf(
+        tmpl_path="typst/template_letter.tmpl",
+        context=pdf_context,
+        output_path="test_result.pdf",
+    )
     # os
 
+
 if __name__ == "__main__":
     # test_taskAI()
     # test_ai_integration()
-    test_typst_pdf()
+    test_typst_pdf()

app.py  CHANGED
@@ -1,4 +1,4 @@
-from config import DEMO_TITLE, IS_SHARE, CV_EXT, EXT_TXT
+from config import DEMO_TITLE, IS_SHARE, IS_DEBUG, CV_EXT, EXT_TXT
 from config import CHEAP_API_BASE, CHEAP_API_KEY, CHEAP_MODEL
 from config import STRONG_API_BASE, STRONG_API_KEY, STRONG_MODEL
 from util import is_valid_url
@@ -23,6 +23,27 @@ def init():
     os.system("shot-scraper install -b firefox")
     download_pandoc()
 
+
+## Config Functions
+
+def set_same_cheap_strong(set_same:bool, cheap_base, cheap_key, cheap_model):
+    setup_zone = gr.Accordion("AI setup (OpenAI-compatible LLM API)", open=True)
+    if set_same:
+        return (gr.Textbox(value=cheap_base, label="API Base", interactive=False),
+                gr.Textbox(value=cheap_key, label="API key", type="password", interactive=False),
+                gr.Textbox(value=cheap_model, label="Model ID", interactive=False),
+                setup_zone,
+                )
+    else:
+        return (gr.Textbox(value=cheap_base, label="API Base", interactive=True),
+                gr.Textbox(value=cheap_key, label="API key", type="password", interactive=True),
+                gr.Textbox(value=cheap_model, label="Model ID", interactive=True),
+                setup_zone,
+                )
+
+
+## Main Functions
+
 def prepare_input(jd_info, cv_file: str, cv_text):
     if jd_info:
         if is_valid_url(jd_info):
@@ -84,7 +105,7 @@ def finalize_letter_pdf(api_base, api_key, api_model, jd, cv, cover_letter_text)
 
 with gr.Blocks(
     title=DEMO_TITLE,
-    theme=gr.themes.
+    theme=gr.themes.Soft(primary_hue="blue", secondary_hue="sky", neutral_hue="slate"),
 ) as app:
     intro = f"""# {DEMO_TITLE}
 > You provide job description and résumé. I write Cover letter for you!
@@ -94,22 +115,25 @@ with gr.Blocks(
 
     with gr.Row():
         with gr.Column(scale=1):
-            with gr.Accordion("AI setup (OpenAI-compatible LLM API)", open=False):
+            with gr.Accordion("AI setup (OpenAI-compatible LLM API)", open=False) as setup_zone:
+                is_debug = gr.Checkbox( label="Debug Mode", value=IS_DEBUG)
+
                 gr.Markdown(
                     "**Cheap AI**, an honest format converter and refiner, extracts essential info from job description and résumé, to reduce subsequent cost on Strong AI."
                 )
                 with gr.Group():
                     cheap_base = gr.Textbox(
-                        value=CHEAP_API_BASE, label="API
+                        value=CHEAP_API_BASE, label="API Base"
                     )
                     cheap_key = gr.Textbox(value=CHEAP_API_KEY, label="API key", type="password")
                     cheap_model = gr.Textbox(value=CHEAP_MODEL, label="Model ID")
                 gr.Markdown(
                     "---\n**Strong AI**, a thoughtful wordsmith, generates perfect cover letters to make both you and recruiters happy."
                 )
+                is_same_cheap_strong = gr.Checkbox(label="the same as Cheap AI", value=False, container=False)
                 with gr.Group():
                     strong_base = gr.Textbox(
-                        value=STRONG_API_BASE, label="API
+                        value=STRONG_API_BASE, label="API Base"
                     )
                     strong_key = gr.Textbox(
                         value=STRONG_API_KEY, label="API key", type="password"
@@ -155,6 +179,9 @@
             )
             infer_btn = gr.Button("Go!", variant="primary")
 
+    is_same_cheap_strong.change(fn= set_same_cheap_strong,
+                                inputs=[is_same_cheap_strong, cheap_base, cheap_key, cheap_model],
+                                outputs=[strong_base, strong_key, strong_model, setup_zone])
 
     infer_btn.click(
         fn=prepare_input,
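
Note on the UI wiring above: the new set_same_cheap_strong callback uses the usual Gradio pattern of returning replacement components from a .change handler to copy the Cheap AI settings into the Strong AI fields and toggle their interactive flag. A minimal standalone sketch of that pattern (illustrative names only, assuming Gradio 4.x; not code from this commit):

    import gradio as gr  # assumes Gradio 4.x, where returning a component from a callback updates the output

    def sync(enabled: bool, source_value: str):
        # Mirror the source value into the target and lock it while the checkbox is on.
        return gr.Textbox(value=source_value if enabled else "", interactive=not enabled)

    with gr.Blocks() as demo:
        source = gr.Textbox(label="Cheap AI API Base")
        same = gr.Checkbox(label="the same as Cheap AI", value=False)
        target = gr.Textbox(label="Strong AI API Base")
        same.change(fn=sync, inputs=[same, source], outputs=[target])

    if __name__ == "__main__":
        demo.launch()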

config.py  CHANGED
@@ -12,6 +12,7 @@ STRONG_API_KEY = os.getenv("STRONG_API_KEY") or OPENAI_API_KEY
 STRONG_MODEL = os.getenv("STRONG_MODEL") or "gpt-4"
 
 IS_SHARE = bool(os.getenv("IS_SHARE")) or False
+IS_DEBUG = bool(os.getenv("IS_DEBUG")) or False
 
 DEMO_TITLE = "Cover Letter Generator"
 DEMO_DESCRIPTION = "This is a demo of the OpenAI API for generating cover letters. The model is trained on a dataset of cover letters and job descriptions, and generates a cover letter based on the job description and the applicant's CV. The model is fine-tuned on the OpenAI API, and is able to generate cover letters that are tailored to the job description and the applicant's CV. The model is able to generate cover letters for a wide range of jobs, and is able to generate cover letters that are tailored to the job description and the applicant's CV. The model is able to generate cover letters for a wide range of jobs, and is able to generate cover letters that are tailored to the job description and the applicant's CV. The model is able to generate cover letters for a wide range of jobs, and is able to generate cover letters that are tailored to the job description and the applicant's CV."
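
One caveat on the bool(os.getenv(...)) or False pattern used for IS_SHARE and the new IS_DEBUG: any non-empty value of the environment variable turns the flag on, including "0" or "false". A quick sketch (the os.environ assignment is only for illustration):

    import os

    os.environ["IS_DEBUG"] = "false"  # illustrative value; normally set outside the process
    IS_DEBUG = bool(os.getenv("IS_DEBUG")) or False
    print(IS_DEBUG)  # True: bool() of any non-empty string is True, so "false" still enables the flag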

setup.sh  CHANGED
@@ -1,3 +1 @@
-pip install -r requirements.txt
-ruff check
-ruff format
+pip install -r requirements.txt

taskAI.py  CHANGED
@@ -5,7 +5,8 @@ from llama_index.core.llms import ChatMessage  # , MessageRole
 from llama_index.core import ChatPromptTemplate
 
 from util import mylogger
-
+
+logger = mylogger(__name__, "%(asctime)s:%(filename)s:%(levelname)s:%(message)s")
 ## define templates
 
 ### topic,input
@@ -40,7 +41,7 @@ JSON_API = ChatPromptTemplate(
         ChatMessage(role="user", content="{content}"),
     ]
 )
-keys_to_template = lambda keys
+keys_to_template = lambda keys: json.dumps(dict().fromkeys(keys, ""))
 
 ### resume, jd
 LETTER_COMPOSE = ChatPromptTemplate(
@@ -51,16 +52,19 @@ LETTER_COMPOSE = ChatPromptTemplate(
 
 Before officially write the letter, think step by step. First, list what makes a perfect cover letter in general, and in order to write a perfect cover letter, what key points do you have to learn from the RESUME and JOB_DESCRIPTION. Then, carefully analyze the given RESUME and JOB_DESCRIPTION, take a deep breath and propose 3 best tactics to convince recruiter believe the applicant fit for the role. Ensure your thoughts are express clearly and then write the complete cover letter.""",
         ),
-        ChatMessage(
+        ChatMessage(
+            role="user",
+            content="<RESUME>\n{resume}\n</RESUME>\n\n<JOB_DESCRIPTION>\n{jd}</JOB_DESCRIPTION>\n<ANALYSIS_REPORT>",
+        ),
     ]
 )
 
 ## basic func
 
+
 ## tasks
 class TaskAI(OpenAILike):
     def __init__(self, api: dict[str, str], **kwargs):
-
         log = logger.info
 
         def guess_window_size(model=api["model"]):
@@ -78,11 +82,20 @@ class TaskAI(OpenAILike):
             return window_size
 
         super().__init__(
-            api_base=api["base"],
+            api_base=api["base"],
+            api_key=api["key"],
+            model=api["model"],
+            is_chat_model=True,
+            context_window=guess_window_size(),
+            **kwargs,
         )
 
     def jd_preprocess(self, input: str):
-        return self.stream_chat(
+        return self.stream_chat(
+            EXTRACT_INFO.format_messages(
+                to_extract="the job description part`", input=input
+            )
+        )
 
     def cv_preprocess(self, input: str):
         return self.stream_chat(SIMPLIFY_MD.format_messages(input=input))
@@ -91,20 +104,37 @@ class TaskAI(OpenAILike):
         return self.stream_chat(LETTER_COMPOSE.format_messages(resume=resume, jd=jd))
 
     def get_jobapp_meta(self, JD, CV):
-        meta_JD = self.chat(
+        meta_JD = self.chat(
+            JSON_API.format_messages(
+                template=keys_to_template(["companyFullName", "jobTitle"]), content=JD
+            )
+        ).message.content
         # yield meta_JD
-        meta_CV = self.chat(
+        meta_CV = self.chat(
+            JSON_API.format_messages(
+                template=keys_to_template(
+                    ["applicantFullNname", "applicantContactInformation"]
+                ),
+                content=CV,
+            )
+        ).message.content
         # yield meta_JD+'\n'+meta_CV
         try:
             meta_JD = json.loads(meta_JD.strip())
             meta_CV = json.loads(meta_CV.strip())
         except:
-            raise ValueError(
+            raise ValueError(
+                f"AI didn't return a valid JSON string. Try again or consider a better model for CheapAI. \n{meta_JD}\n{meta_CV}"
+            )
         meta = dict()
         meta.update(meta_JD)
         meta.update(meta_CV)
         yield json.dumps(meta, indent=2)
 
     def purify_letter(self, full_text):
-        return self.stream_chat(
-
+        return self.stream_chat(
+            EXTRACT_INFO.format_messages(
+                to_extract="the cover letter section starting from 'Dear Hiring Manager' or similar to 'Sincerely,' or similar ",
+                input=full_text,
+            )
+        )
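
For orientation, a hedged usage sketch of the reworked TaskAI (the endpoint, key, and model below are placeholders; it assumes a reachable OpenAI-compatible API):

    from taskAI import TaskAI
    from util import zip_api

    # Placeholder credentials/model; zip_api packs them into the dict TaskAI.__init__ expects.
    api = zip_api("https://api.openai.com/v1", "sk-placeholder", "gpt-3.5-turbo")
    ai = TaskAI(api)

    # cv_preprocess/jd_preprocess return llama-index streaming responses; each chunk carries a .delta.
    for chunk in ai.cv_preprocess("# Sherlock Holmes\nConsulting detective, 221B Baker Street, London"):
        print(chunk.delta, end="", flush=True)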

taskNonAI.py  CHANGED
@@ -40,22 +40,26 @@ def extract_url(url: str) -> Optional[str]:
         f"Please try copy-paste as input. Failed to extract content from: {url}. Didn't find content from given URL!"
     )
 
-def
+def _date()->str:
     current_date = datetime.now()
     return current_date.strftime(
         f"%B %d{'th' if 4 <= current_date.day <= 20 or 24 <= current_date.day <= 30 else ['st', 'nd', 'rd'][current_date.day % 10 - 1]} , %Y")
 
-def
+def _typst_escape(s)->str:
     return str(s).replace('@','\@').replace('#','\#')
-
-def compile_pdf(context: dict, tmpl_path: str, output_path="/tmp/cover_letter.pdf"):
+
+def compile_pdf(context: dict, tmpl_path: str, output_path="/tmp/cover_letter.pdf", is_debug=False)->list[str]:
+    letter_src_filepath = 'typst/letter.typ'
     with open(tmpl_path, "r", encoding='utf8') as f:
         tmpl = Template(f.read())
-    context = {k:
-    context.update({'date_string':
+    context = {k: _typst_escape(v) for k, v in context.items()}
+    context.update({'date_string': _date()})
     letter_typ = tmpl.safe_substitute(context)
-    with open(
+    with open(letter_src_filepath, 'w', encoding='utf8') as f:
         f.write(letter_typ)
-    typst.compile(
-    os.remove(
-
+    typst.compile(letter_src_filepath, output_path, root=Path('./typst/'), font_paths=[Path('./fonts/')])
+    # os.remove(letter_src_filepath)
+    if is_debug:
+        return [letter_src_filepath, output_path]
+    else:
+        return [output_path]
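
For context, a usage sketch of the new compile_pdf signature, assembled from the fixtures this commit adds to _data_test.py and _test.py (it assumes typst/template_letter.tmpl and the fonts/ directory exist in the repo):

    from taskNonAI import compile_pdf

    context = {
        "companyFullName": "Queen of Hearts' Garden",
        "jobTitle": "Card Guards",
        "applicantFullName": "Sherlock Holmes",
        "applicantContactInformation": "221B Baker Street, London",
        "letter_body": "text,\n\ntest test",
    }

    # With is_debug=True the intermediate typst source is returned alongside the PDF,
    # which is presumably why **letter.typ was added to .gitignore in this commit.
    files = compile_pdf(
        context=context,
        tmpl_path="typst/template_letter.tmpl",
        output_path="test_result.pdf",
        is_debug=True,
    )
    print(files)  # ['typst/letter.typ', 'test_result.pdf']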

typst/letter.typ  CHANGED
@@ -1,15 +1,15 @@
 #import "template_base.typ": *
 #show: letter.with(
   sender: [
-
+    221B Baker Street, London, +44 (020) 1234-5678, sherlock.holmes\@sherlockHolmes.com
   ],
   recipient: [
     Hiring Manager \
-
+    Queen of Hearts' Garden \
   ],
   date: [March 30th , 2024],
-  subject: [Cover Letter for
-  name: [
+  subject: [Cover Letter for Card Guards],
+  name: [Sherlock Holmes],
 )
 
 text,

util.py  CHANGED
@@ -6,6 +6,7 @@ import logging
 
 from typing import Generator
 
+
 def mylogger(name, format, level=logging.INFO):
     # Create a custom logger
     logger = logging.getLogger(name)
@@ -33,7 +34,7 @@ def is_valid_url(url: str) -> bool:
     return False
 
 
-def is_valid_openai_api_key(api_base:str, api_key: str)->bool:
+def is_valid_openai_api_key(api_base: str, api_key: str) -> bool:
     headers = {"Authorization": f"Bearer {api_key}"}
 
     response = requests.get(api_base, headers=headers)
@@ -41,19 +42,20 @@ def is_valid_openai_api_key(api_base:str, api_key: str)->bool:
     return response.status_code == 200
 
 
-def zip_api(api_base:str, api_key:str, model:str)->dict[str, str]:
+def zip_api(api_base: str, api_key: str, model: str) -> dict[str, str]:
     return {"base": api_base, "key": api_key, "model": model}
 
+
 def stream_together(*gens: Generator):
-    ln=len(gens)
-    result = [""] * ln
+    ln = len(gens)
+    result = [""] * ln  # Mind type here
     while 1:
         stop: bool = True
         for i in range(ln):
             try:
-                n=next(gens[i])
+                n = next(gens[i])
                 if "delta" in dir(n):
-                    n=n.delta
+                    n = n.delta
                 result[i] += n
                 stop = False
             except StopIteration:
@@ -61,4 +63,4 @@ def stream_together(*gens: Generator):
                 pass
         yield result
         if stop:
-            break
+            break
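
Finally, a quick sketch of what stream_together yields, using plain string generators instead of LLM streams (strings have no .delta attribute, so they are concatenated directly):

    from util import stream_together

    def words(*ws):
        for w in ws:
            yield w

    merged = stream_together(words("Hello ", "world"), words("Dear ", "Hiring ", "Manager"))
    for snapshot in merged:
        print(snapshot)
    # Each snapshot is the accumulated text of every stream so far;
    # the last one is ['Hello world', 'Dear Hiring Manager'].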