CineAI committed on
Commit
ed8b9f6
·
verified ·
1 Parent(s): d647db3

Update llm/apimodels/hf_model.py

Browse files
Files changed (1) hide show
  1. llm/apimodels/hf_model.py +366 -7
llm/apimodels/hf_model.py CHANGED
@@ -104,8 +104,6 @@ class HF_Mistaril(HFInterface, ABC):
104
  llm_info = f"llm={self.llm}" if hasattr(self, 'llm') else 'llm=not initialized'
105
  return f"{self.__class__.__name__}({llm_info})"
106
 
107
-
108
-
109
  class HF_TinyLlama(HFInterface, ABC):
110
  """
111
  This class represents an interface for the TinyLlama large language model from Hugging Face.
@@ -118,7 +116,7 @@ class HF_TinyLlama(HFInterface, ABC):
118
  """
119
  Initializer for the `HF_TinyLlama` class.
120
 
121
- - Retrieves configuration values for the Mistaril model from a `config` dictionary:
122
  - `repo_id`: The ID of the repository containing the TinyLlama model on Hugging Face.
123
  - `max_length`: Maximum length of the generated text.
124
  - `temperature`: Controls randomness in the generation process.
@@ -143,7 +141,6 @@ class HF_TinyLlama(HFInterface, ABC):
143
  def execution(self) -> Any:
144
  """
145
  This method attempts to return the underlying `llm` (likely a language model object).
146
-
147
  It wraps the retrieval in a `try-except` block to catch potential exceptions.
148
  On success, it returns the `llm` object.
149
  On failure, it logs an error message with the exception details using a logger
@@ -158,7 +155,6 @@ class HF_TinyLlama(HFInterface, ABC):
158
  def model_name(self):
159
  """
160
  Simple method that returns the TinyLlama model name from the configuration.
161
-
162
  This can be useful for identifying the specific model being used.
163
  """
164
  return config["HF_TinyLlama"]["model"]
@@ -166,7 +162,6 @@ class HF_TinyLlama(HFInterface, ABC):
166
  def __str__(self):
167
  """
168
  Defines the string representation of the `HF_TinyLlama` object for human readability.
169
-
170
  It combines the class name and the model name retrieved from the `model_name` method
171
  with an underscore separator.
172
  """
@@ -175,7 +170,6 @@ class HF_TinyLlama(HFInterface, ABC):
175
  def __repr__(self):
176
  """
177
  Defines the representation of the `HF_TinyLlama` object for debugging purposes.
178
-
179
  It uses `hasattr` to check if the `llm` attribute is set.
180
  - If `llm` exists, it returns a string like `HF_TinyLlama(llm=HuggingFaceEndpoint(...))`,
181
  showing the class name and the `llm` object information.
@@ -184,3 +178,368 @@ class HF_TinyLlama(HFInterface, ABC):
184
  """
185
  llm_info = f"llm={self.llm}" if hasattr(self, 'llm') else 'llm=not initialized'
186
  return f"{self.__class__.__name__}({llm_info})"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
104
  llm_info = f"llm={self.llm}" if hasattr(self, 'llm') else 'llm=not initialized'
105
  return f"{self.__class__.__name__}({llm_info})"
106
 
 
 
107
  class HF_TinyLlama(HFInterface, ABC):
108
  """
109
  This class represents an interface for the TinyLlama large language model from Hugging Face.
 
116
  """
117
  Initializer for the `HF_TinyLlama` class.
118
 
119
+ - Retrieves configuration values for the TinyLlama model from a `config` dictionary:
120
  - `repo_id`: The ID of the repository containing the TinyLlama model on Hugging Face.
121
  - `max_length`: Maximum length of the generated text.
122
  - `temperature`: Controls randomness in the generation process.
 
141
  def execution(self) -> Any:
142
  """
143
  This method attempts to return the underlying `llm` (likely a language model object).
 
144
  It wraps the retrieval in a `try-except` block to catch potential exceptions.
145
  On success, it returns the `llm` object.
146
  On failure, it logs an error message with the exception details using a logger
 
155
  def model_name(self):
156
  """
157
  Simple method that returns the TinyLlama model name from the configuration.
 
158
  This can be useful for identifying the specific model being used.
159
  """
160
  return config["HF_TinyLlama"]["model"]
 
162
  def __str__(self):
163
  """
164
  Defines the string representation of the `HF_TinyLlama` object for human readability.
 
165
  It combines the class name and the model name retrieved from the `model_name` method
166
  with an underscore separator.
167
  """
 
170
  def __repr__(self):
171
  """
172
  Defines the representation of the `HF_TinyLlama` object for debugging purposes.
 
173
  It uses `hasattr` to check if the `llm` attribute is set.
174
  - If `llm` exists, it returns a string like `HF_TinyLlama(llm=HuggingFaceEndpoint(...))`,
175
  showing the class name and the `llm` object information.
 
178
  """
179
  llm_info = f"llm={self.llm}" if hasattr(self, 'llm') else 'llm=not initialized'
180
  return f"{self.__class__.__name__}({llm_info})"
181
+
182
+ class HF_SmolLM135(HFInterface, ABC):
183
+ """
184
+ This class represents an interface for the SmolLm tiny language model from Hugging Face.
185
+ It inherits from `HFInterface` (likely an interface from a Hugging Face library)
186
+ and `ABC` (for abstract base class) to enforce specific functionalities.
187
+ """
188
+
189
+ def __init__(self):
190
+ """
191
+ Initializer for the `HF_SmolLM135` class.
192
+ - Retrieves configuration values for the SmolLM135 model from a `config` dictionary:
193
+ - `repo_id`: The ID of the repository containing the SmolLM135 model on Hugging Face.
194
+ - `max_length`: Maximum length of the generated text.
195
+ - `temperature`: Controls randomness in the generation process.
196
+ - `top_k`: Restricts the vocabulary used for generation.
197
+ - Raises a `ValueError` if the `api` key (presumably stored elsewhere) is missing.
198
+ - Creates an instance of `HuggingFaceEndpoint` using the retrieved configuration
199
+ and the `api` key.
200
+ """
201
+
202
+ repo_id = config["HF_SmolLM135"]["model"]
203
+ max_length = config["HF_SmolLM135"]["max_new_tokens"]
204
+ temperature = config["HF_SmolLM135"]["temperature"]
205
+ top_k = config["HF_SmolLM135"]["top_k"]
206
+
207
+ if not _api:
208
+ raise ValueError(f"API key not provided {_api}")
209
+
210
+ self.llm = HuggingFaceEndpoint(
211
+ repo_id=repo_id, max_length=max_length, temperature=temperature, top_k=top_k, token=_api
212
+ )
213
+
214
+ def execution(self) -> Any:
215
+ """
216
+ This method attempts to return the underlying `llm` (likely a language model object).
217
+ It wraps the retrieval in a `try-except` block to catch potential exceptions.
218
+ On success, it returns the `llm` object.
219
+ On failure, it logs an error message with the exception details using a logger
220
+ (assumed to be available elsewhere).
221
+ """
222
+ try:
223
+ return self.llm # `invoke()`
224
+ except Exception as e:
225
+ logger.error("Something wrong with API or HuggingFaceEndpoint", exc_info=e)
226
+ print(f"Something wrong with API or HuggingFaceEndpoint: {e}")
227
+
228
+ def model_name(self):
229
+ """
230
+ Simple method that returns the SmolLM135 model name from the configuration.
231
+ This can be useful for identifying the specific model being used.
232
+ """
233
+ return config["HF_SmolLM135"]["model"]
234
+
235
+ def __str__(self):
236
+ """
237
+ Defines the string representation of the `HF_SmolLM135` object for human readability.
238
+ It combines the class name and the model name retrieved from the `model_name` method
239
+ with an underscore separator.
240
+ """
241
+ return f"{self.__class__.__name__}_{self.model_name()}"
242
+
243
+ def __repr__(self):
244
+ """
245
+ Defines the representation of the `HF_SmolLM135` object for debugging purposes.
246
+ It uses `hasattr` to check if the `llm` attribute is set.
247
+ - If `llm` exists, it returns a string like `HF_SmolLM135(llm=HuggingFaceEndpoint(...))`,
248
+ showing the class name and the `llm` object information.
249
+ - If `llm` is not yet set (during initialization), it returns
250
+ `HF_SmolLM135(llm=not initialized)`, indicating the state.
251
+ """
252
+ llm_info = f"llm={self.llm}" if hasattr(self, 'llm') else 'llm=not initialized'
253
+ return f"{self.__class__.__name__}({llm_info})"
254
+
255
class HF_SmolLM360(HFInterface, ABC):
    """
    Interface wrapper for the SmolLM-360M tiny language model hosted on Hugging Face.

    Inherits from `HFInterface` (the project-level Hugging Face interface contract)
    and `ABC` to enforce the expected functionality.
    """

    def __init__(self):
        """
        Build a `HuggingFaceEndpoint` from the `HF_SmolLM360` section of `config`.

        Reads `model`, `max_new_tokens`, `temperature` and `top_k` from the
        module-level `config` mapping and authenticates with the module-level
        `_api` token.

        Raises:
            ValueError: if the `_api` token is missing/falsy.
        """
        section = config["HF_SmolLM360"]
        endpoint_kwargs = {
            "repo_id": section["model"],
            "max_length": section["max_new_tokens"],
            "temperature": section["temperature"],
            "top_k": section["top_k"],
        }

        if not _api:
            raise ValueError(f"API key not provided {_api}")

        self.llm = HuggingFaceEndpoint(token=_api, **endpoint_kwargs)

    def execution(self) -> Any:
        """
        Return the underlying `llm` endpoint object.

        Wrapped in `try`/`except`: on failure the exception is logged through the
        module-level `logger`, echoed to stdout, and `None` is implicitly returned.
        """
        try:
            return self.llm  # callers typically `invoke()` the returned endpoint
        except Exception as e:
            logger.error("Something wrong with API or HuggingFaceEndpoint", exc_info=e)
            print(f"Something wrong with API or HuggingFaceEndpoint: {e}")

    def model_name(self):
        """Return the configured SmolLM360 model identifier from `config`."""
        return config["HF_SmolLM360"]["model"]

    def __str__(self):
        """Human-readable form: `<ClassName>_<model name>`."""
        return "_".join((type(self).__name__, self.model_name()))

    def __repr__(self):
        """
        Debugging form: `<ClassName>(llm=...)` when `llm` is set, otherwise
        `<ClassName>(llm=not initialized)` (e.g. mid-initialization).
        """
        state = f"llm={self.llm}" if hasattr(self, "llm") else "llm=not initialized"
        return f"{type(self).__name__}({state})"
327
+
328
class HF_SmolLM(HFInterface, ABC):
    """
    Interface wrapper for the SmolLM small language model hosted on Hugging Face.

    Inherits from `HFInterface` (the project-level Hugging Face interface contract)
    and `ABC` to enforce the expected functionality.
    """

    def __init__(self):
        """
        Build a `HuggingFaceEndpoint` from the `HF_SmolLM` section of `config`.

        Reads `model`, `max_new_tokens`, `temperature` and `top_k` from the
        module-level `config` mapping and authenticates with the module-level
        `_api` token.

        Raises:
            ValueError: if the `_api` token is missing/falsy.
        """
        section = config["HF_SmolLM"]
        endpoint_kwargs = {
            "repo_id": section["model"],
            "max_length": section["max_new_tokens"],
            "temperature": section["temperature"],
            "top_k": section["top_k"],
        }

        if not _api:
            raise ValueError(f"API key not provided {_api}")

        self.llm = HuggingFaceEndpoint(token=_api, **endpoint_kwargs)

    def execution(self) -> Any:
        """
        Return the underlying `llm` endpoint object.

        Wrapped in `try`/`except`: on failure the exception is logged through the
        module-level `logger`, echoed to stdout, and `None` is implicitly returned.
        """
        try:
            return self.llm  # callers typically `invoke()` the returned endpoint
        except Exception as e:
            logger.error("Something wrong with API or HuggingFaceEndpoint", exc_info=e)
            print(f"Something wrong with API or HuggingFaceEndpoint: {e}")

    def model_name(self):
        """Return the configured SmolLM model identifier from `config`."""
        return config["HF_SmolLM"]["model"]

    def __str__(self):
        """Human-readable form: `<ClassName>_<model name>`."""
        return "_".join((type(self).__name__, self.model_name()))

    def __repr__(self):
        """
        Debugging form: `<ClassName>(llm=...)` when `llm` is set, otherwise
        `<ClassName>(llm=not initialized)` (e.g. mid-initialization).
        """
        state = f"llm={self.llm}" if hasattr(self, "llm") else "llm=not initialized"
        return f"{type(self).__name__}({state})"
400
+
401
class HF_Gemma2(HFInterface, ABC):
    """
    Interface wrapper for the Gemma2 small language model hosted on Hugging Face.

    Inherits from `HFInterface` (the project-level Hugging Face interface contract)
    and `ABC` to enforce the expected functionality.
    """

    def __init__(self):
        """
        Build a `HuggingFaceEndpoint` from the `HF_Gemma2` section of `config`.

        Reads `model`, `max_new_tokens`, `temperature` and `top_k` from the
        module-level `config` mapping and authenticates with the module-level
        `_api` token.

        Raises:
            ValueError: if the `_api` token is missing/falsy.
        """
        section = config["HF_Gemma2"]
        endpoint_kwargs = {
            "repo_id": section["model"],
            "max_length": section["max_new_tokens"],
            "temperature": section["temperature"],
            "top_k": section["top_k"],
        }

        if not _api:
            raise ValueError(f"API key not provided {_api}")

        self.llm = HuggingFaceEndpoint(token=_api, **endpoint_kwargs)

    def execution(self) -> Any:
        """
        Return the underlying `llm` endpoint object.

        Wrapped in `try`/`except`: on failure the exception is logged through the
        module-level `logger`, echoed to stdout, and `None` is implicitly returned.
        """
        try:
            return self.llm  # callers typically `invoke()` the returned endpoint
        except Exception as e:
            logger.error("Something wrong with API or HuggingFaceEndpoint", exc_info=e)
            print(f"Something wrong with API or HuggingFaceEndpoint: {e}")

    def model_name(self):
        """Return the configured Gemma2 model identifier from `config`."""
        return config["HF_Gemma2"]["model"]

    def __str__(self):
        """Human-readable form: `<ClassName>_<model name>`."""
        return "_".join((type(self).__name__, self.model_name()))

    def __repr__(self):
        """
        Debugging form: `<ClassName>(llm=...)` when `llm` is set, otherwise
        `<ClassName>(llm=not initialized)` (e.g. mid-initialization).
        """
        state = f"llm={self.llm}" if hasattr(self, "llm") else "llm=not initialized"
        return f"{type(self).__name__}({state})"
473
+
474
class HF_Qwen2(HFInterface, ABC):
    """
    Interface wrapper for the Qwen2 small language model hosted on Hugging Face.

    Inherits from `HFInterface` (the project-level Hugging Face interface contract)
    and `ABC` to enforce the expected functionality.
    """

    def __init__(self):
        """
        Build a `HuggingFaceEndpoint` from the `HF_Qwen2` section of `config`.

        Reads `model`, `max_new_tokens`, `temperature` and `top_k` from the
        module-level `config` mapping and authenticates with the module-level
        `_api` token.

        Raises:
            ValueError: if the `_api` token is missing/falsy.
        """
        section = config["HF_Qwen2"]
        endpoint_kwargs = {
            "repo_id": section["model"],
            "max_length": section["max_new_tokens"],
            "temperature": section["temperature"],
            "top_k": section["top_k"],
        }

        if not _api:
            raise ValueError(f"API key not provided {_api}")

        self.llm = HuggingFaceEndpoint(token=_api, **endpoint_kwargs)

    def execution(self) -> Any:
        """
        Return the underlying `llm` endpoint object.

        Wrapped in `try`/`except`: on failure the exception is logged through the
        module-level `logger`, echoed to stdout, and `None` is implicitly returned.
        """
        try:
            return self.llm  # callers typically `invoke()` the returned endpoint
        except Exception as e:
            logger.error("Something wrong with API or HuggingFaceEndpoint", exc_info=e)
            print(f"Something wrong with API or HuggingFaceEndpoint: {e}")

    def model_name(self):
        """Return the configured Qwen2 model identifier from `config`."""
        return config["HF_Qwen2"]["model"]

    def __str__(self):
        """Human-readable form: `<ClassName>_<model name>`."""
        return "_".join((type(self).__name__, self.model_name()))

    def __repr__(self):
        """
        Debugging form: `<ClassName>(llm=...)` when `llm` is set, otherwise
        `<ClassName>(llm=not initialized)` (e.g. mid-initialization).
        """
        state = f"llm={self.llm}" if hasattr(self, "llm") else "llm=not initialized"
        return f"{type(self).__name__}({state})"