Tuana committed on
Commit
7018b53
1 Parent(s): b09d26d

updated requirements and prompts

Browse files
Files changed (2) hide show
  1. requirements.txt +1 -1
  2. utils/haystack.py +6 -6
requirements.txt CHANGED
@@ -1,4 +1,4 @@
1
- farm-haystack==1.13.0
2
  streamlit==1.18.0
3
  markdown
4
  st-annotated-text
 
1
+ git+https://github.com/deepset-ai/haystack.git@openai/token_limit#egg=farm-haystack
2
  streamlit==1.18.0
3
  markdown
4
  st-annotated-text
utils/haystack.py CHANGED
@@ -10,11 +10,11 @@ def start_haystack():
10
  #Use this function to construct a pipeline
11
  prompt_node = PromptNode(model_name_or_path="text-davinci-003", api_key=OEPN_AI_KEY)
12
 
13
- twitter_template = PromptTemplate(name="twitter-voice", prompt_text="""You will be given a twitter stream belonging to a specific profile. Tell us what they've lately been tweeting about and in what languages.
14
  You may go into some detail about what topics they tend to like tweeting about. Please also mention their overall tone, for example: positive,
15
  negative, political, sarcastic or something else.
16
-
17
- Use the following format:
18
 
19
  Twitter stream: Many people in our community asked how to utilize LLMs in their NLP pipelines and how to modify prompts for their tasks.…
20
  RT @deepset_ai: We use parts of news articles from The Guardian as documents and create custom prompt templates to categorize these article
@@ -30,7 +30,7 @@ def start_haystack():
30
 
31
  Twitter stream: $tweets
32
 
33
- Summary:
34
  """)
35
  return prompt_node, twitter_template
36
 
@@ -43,13 +43,13 @@ def query(username):
43
 
44
  headers = {"Authorization": "Bearer {}".format(bearer_token)}
45
 
46
- url = f"https://api.twitter.com/1.1/statuses/user_timeline.json?screen_name={username}&count={100}"
47
  try:
48
  response = requests.request("GET", url, headers = headers)
49
  twitter_stream = ""
50
  for tweet in response.json():
51
  twitter_stream += tweet["text"]
52
- result = prompter.prompt(prompt_template=template, tweets=twitter_stream[0:10000])
53
  except Exception as e:
54
  print(e)
55
  result = ["Please make sure you are providing a correct, public twitter account"]
 
10
  #Use this function to construct a pipeline
11
  prompt_node = PromptNode(model_name_or_path="text-davinci-003", api_key=OEPN_AI_KEY)
12
 
13
+ twitter_template = PromptTemplate(name="twitter-voice", prompt_text="""You will be given a twitter stream belonging to a specific profile. Answer with a summary of what they've lately been tweeting about and in what languages.
14
  You may go into some detail about what topics they tend to like tweeting about. Please also mention their overall tone, for example: positive,
15
  negative, political, sarcastic or something else.
16
+
17
+ Examples:
18
 
19
  Twitter stream: Many people in our community asked how to utilize LLMs in their NLP pipelines and how to modify prompts for their tasks.…
20
  RT @deepset_ai: We use parts of news articles from The Guardian as documents and create custom prompt templates to categorize these article
 
30
 
31
  Twitter stream: $tweets
32
 
33
+ Summary:
34
  """)
35
  return prompt_node, twitter_template
36
 
 
43
 
44
  headers = {"Authorization": "Bearer {}".format(bearer_token)}
45
 
46
+ url = f"https://api.twitter.com/1.1/statuses/user_timeline.json?screen_name={username}&count={60}"
47
  try:
48
  response = requests.request("GET", url, headers = headers)
49
  twitter_stream = ""
50
  for tweet in response.json():
51
  twitter_stream += tweet["text"]
52
+ result = prompter.prompt(prompt_template=template, tweets=twitter_stream)
53
  except Exception as e:
54
  print(e)
55
  result = ["Please make sure you are providing a correct, public twitter account"]