Update app.py
app.py CHANGED
@@ -87,26 +87,27 @@ EMBEDDER_NAME = "paraphrase-multilingual-MiniLM-L12-v2"
 DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
 FEEDBACK_FILE = "chatbot_feedback.jsonl"
 QA_PATH = "qa_dataset.jsonl"
+# Variables are defined in global scope
 BASE_MODEL = "ytu-ce-cosmos/turkish-gpt2-large"
 MODEL_PATH = "lordzukoiroh/montaggppt2lora"
-
-model = PeftModel.from_pretrained(BASE_MODEL, MODEL_PATH).to(DEVICE)
-
+DEVICE = "cuda" if torch.cuda.is_available() else "cpu"

-# === Model and tokenizer loading ===
 def load_model_and_tokenizer():
-    print("Model yükleniyor...")
     tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)
     base_model = AutoModelForCausalLM.from_pretrained(BASE_MODEL).to(DEVICE)
-    model = PeftModel.from_pretrained(base_model,
+    model = PeftModel.from_pretrained(base_model, MODEL_PATH).to(DEVICE)

     if tokenizer.pad_token is None:
         tokenizer.pad_token = tokenizer.eos_token

     model.eval()
-    print("Model yüklendi.")
     return model, tokenizer

+model, tokenizer = load_model_and_tokenizer()
+
+
+#
+

 # === Fahrenheit 451 text (sample paragraphs) ===
 FAHRENHEIT_451_TEXT = """