Commit f1b84d3, committed by mishig (HF staff)
1 Parent: 635c529

Support bash snippets (#21)

src/lib/components/CodeSnippets.svelte CHANGED
@@ -147,45 +147,36 @@ print(output.choices[0].message)`
   }
 
   function getBashSnippets() {
+    const messagesStr = getMessages();
     const snippets: Snippet[] = [];
-    snippets.push({
-      label: 'Install',
-      code: `import { HfInference } from '@huggingface/inference'
-
-const hf = new HfInference("your access token")`
-    });
+
     if (conversation.config.streaming) {
       snippets.push({
         label: 'Streaming API',
-        code: `let out = "";
-
-for await (const chunk of hf.chatCompletionStream({
-  model: "${conversation.model}",
-  messages: [
-    { role: "user", content: "Complete the equation 1+1= ,just the answer" },
-  ],
-  max_tokens: ${conversation.config.maxTokens},
-  temperature: ${conversation.config.temperature},
-  seed: 0,
-})) {
-  if (chunk.choices && chunk.choices.length > 0) {
-    out += chunk.choices[0].delta.content;
-  }
-}`
+        code: `curl 'https://api-inference.huggingface.co/models/${conversation.model}/v1/chat/completions' \
+--header "Authorization: Bearer {YOUR_HF_TOKEN}" \
+--header 'Content-Type: application/json' \
+--data '{
+  "model": "meta-llama/Meta-Llama-3-8B-Instruct",
+  "messages": ${messagesStr},
+  "temperature": ${conversation.config.temperature},
+  "max_tokens": ${conversation.config.maxTokens},
+  "stream": true
+}'`
       });
     } else {
       // non-streaming
       snippets.push({
         label: 'Non-Streaming API',
-        code: `await hf.chatCompletion({
-  model: "${conversation.model}",
-  messages: [
-    { role: "user", content: "Complete the this sentence with words one plus one is equal " }
-  ],
-  max_tokens: ${conversation.config.maxTokens},
-  temperature: ${conversation.config.temperature},
-  seed: 0,
-});`
+        code: `curl 'https://api-inference.huggingface.co/models/${conversation.model}/v1/chat/completions' \
+--header "Authorization: Bearer {YOUR_HF_TOKEN}" \
+--header 'Content-Type: application/json' \
+--data '{
+  "model": "meta-llama/Meta-Llama-3-8B-Instruct",
+  "messages": ${messagesStr},
+  "temperature": ${conversation.config.temperature},
+  "max_tokens": ${conversation.config.maxTokens}
+}'`
       });
     }
 
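
For reference, here is what the new streaming template above could render to once its placeholders are interpolated. This is an illustrative sketch, not part of the commit: it assumes conversation.model is "meta-llama/Meta-Llama-3-8B-Instruct", that getMessages() serializes the chat history into a JSON array, and example values for temperature and max_tokens; {YOUR_HF_TOKEN} stands in for a real Hugging Face access token.

# Example rendered bash snippet (streaming); substitute a real token for {YOUR_HF_TOKEN}
curl 'https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct/v1/chat/completions' \
  --header "Authorization: Bearer {YOUR_HF_TOKEN}" \
  --header 'Content-Type: application/json' \
  --data '{
    "model": "meta-llama/Meta-Llama-3-8B-Instruct",
    "messages": [{ "role": "user", "content": "What is the capital of France?" }],
    "temperature": 0.5,
    "max_tokens": 500,
    "stream": true
  }'

With "stream": true the endpoint streams the completion back in chunks; the non-streaming template omits that field and returns a single JSON response.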