Updatefe3
- app.py +68 -149
- frontend/login.html +8 -0
- frontend/register.html +8 -0
app.py
CHANGED
@@ -32,6 +32,12 @@ app.permanent_session_lifetime = timedelta(hours=8)
# ---------- Feature Flags ----------
PRELOAD_MODEL = os.getenv("PRELOAD_MODEL", "true").lower() in ("1","true","yes")
FALLBACK_TRANSLATE = os.getenv("FALLBACK_TRANSLATE", "false").lower() in ("1","true","yes")
+PUBLIC_APP_URL = os.getenv("PUBLIC_APP_URL", "").strip()  # ← URL fullscreen (hf.space)
+
+# bikin variabel template global
+@app.context_processor
+def inject_globals():
+    return {"public_app_url": PUBLIC_APP_URL}

# ---------- Database ----------
from sqlalchemy import create_engine, Column, Integer, Text, DateTime, ForeignKey, func
@@ -93,7 +99,7 @@ def login_required(fn):
        return fn(*args, **kwargs)
    return _wrap

+# ---------- Prenorm (Papua) ----------
PRENORM_LEVEL = os.getenv("PRENORM_LEVEL", "basic").lower()
PRENORM_DEBUG = os.getenv("PRENORM_DEBUG", "0") == "1"
@@ -125,57 +131,37 @@ PRON_MAP = {
    "kitong": "kita", "kitorang": "kita", "kita": "kita", "torang": "kita",
}

+def _normalize_unicode(text: str) -> str: return unicodedata.normalize("NFKC", text)
def _strip_emoji_and_noise(text: str) -> str:
+    text = PUNC_RE.sub(" ", text); text = MULTI_PUNC.sub(r"\1", text)
+    return DASH_SPACES.sub(r" \1 ", text)
+def _normalize_ws(text: str) -> str: return WS_RE.sub(" ", text).strip()
def _reduce_elongation(token: str) -> str:
    base = token.lower()
    if base in WHITELIST_KEEP_ELONG: return token
    return ELONG_RE.sub(r"\1\1", token)
+def _apply_papua_map(token: str) -> str: return PAPUA_MAP.get(token.lower(), token)
def _handle_pu_constructs(text: str) -> str:
    def repl(m):
+        pron = m.group(1).lower(); rest = m.group(2).strip()
+        pron_std = PRON_MAP.get(pron, pron); return f"punya {pron_std} {rest}"
+    return re.sub(r"\b(sa|saya|ko|kamu|dia|dong|kam|kalian|kitong|kitorang|kita|torang)\s*pu\s+([^.,;:!?]+)", repl, text, flags=re.IGNORECASE)
def _token_level_ops(text: str, aggressive: bool) -> str:
+    toks = text.split(); out = []
+    for t in toks:
        t2 = _reduce_elongation(t) if aggressive else t
+        out.append(_apply_papua_map(t2))
    return " ".join(out)

def papua_prenorm(inp: str, level: str = "basic", return_trace: bool = False):
+    if level == "off": return (inp, {}) if return_trace else inp
+    s1 = _normalize_unicode(inp)
    s2 = _strip_emoji_and_noise(s1) if level == "aggressive" else s1
    s3 = _normalize_ws(s2)
    s4 = _handle_pu_constructs(s3)
    s5 = _token_level_ops(s4, aggressive=(level == "aggressive"))
    s6 = _normalize_ws(s5)
+    return (s6, {"final": s6}) if return_trace else s6

def prenorm(text: str) -> str:
    if PRENORM_DEBUG:
@@ -189,20 +175,15 @@ BASE_MODEL_ID = os.getenv("BASE_MODEL_ID", "amosnbn/cendol-mt5-base-inst")
ADAPTER_ID = os.getenv("ADAPTER_ID", "amosnbn/papua-lora-ckpt-168")
DEVICE = "cuda" if os.getenv("DEVICE", "cpu") == "cuda" else "cpu"

+TOK = None; MODEL = None
+_MODEL_LOCK = threading.Lock(); _MODEL_READY = False; _MODEL_ERROR = None

def _strip_bom_in_dir(root_dir: str):
    root = pathlib.Path(root_dir)
    for p in root.rglob("*.json"):
        try:
+            with codecs.open(p, "r", encoding="utf-8-sig") as f: data = json.load(f)
+            with open(p, "w", encoding="utf-8") as f: json.dump(data, f, ensure_ascii=False, indent=2)
            log.info(f"[BOM] stripped: {p}")
        except Exception as e:
            log.warning(f"[BOM] skip {p}: {e}")
@@ -210,39 +191,19 @@ def _strip_bom_in_dir(root_dir: str):
def _sanitize_adapter_config(adapter_dir: str):
    try:
        from peft import LoraConfig
+        sig = inspect.signature(LoraConfig.__init__); allowed = set(p.name for p in sig.parameters.values())
+        cfg_path = pathlib.Path(adapter_dir) / "adapter_config.json"
+        if not cfg_path.exists():
+            for alt in ("adapter_config.json","adapter_config_0.json","config.json"):
+                c = pathlib.Path(adapter_dir)/alt
+                if c.exists(): cfg_path = c; break
+        if not cfg_path.exists(): return
+        with codecs.open(cfg_path, "r", encoding="utf-8-sig") as f: cfg = json.load(f)
+        cleaned = {k:v for k,v in cfg.items() if k in allowed}
+        if set(cleaned.keys()) != set(cfg.keys()):
+            with open(cfg_path, "w", encoding="utf-8") as f: json.dump(cleaned, f, ensure_ascii=False, indent=2)
    except Exception as e:
+        log.warning(f"[SAN] skip: {e}")

def _load_model():
    global TOK, MODEL, _MODEL_READY, _MODEL_ERROR
@@ -250,12 +211,10 @@ def _load_model():
        log.info("[MODEL] downloading base=%s adapter=%s", BASE_MODEL_ID, ADAPTER_ID or "-")
        base_dir = snapshot_download(repo_id=BASE_MODEL_ID, local_dir="/tmp/hf_base", local_dir_use_symlinks=False)
        _strip_bom_in_dir(base_dir)
        adapter_dir = None
        if ADAPTER_ID:
            adapter_dir = snapshot_download(repo_id=ADAPTER_ID, local_dir="/tmp/hf_adapter", local_dir_use_symlinks=False)
+            _strip_bom_in_dir(adapter_dir); _sanitize_adapter_config(adapter_dir)

        import torch
        from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
@@ -266,20 +225,17 @@ def _load_model():
        MODEL = PeftModel.from_pretrained(base, adapter_dir) if adapter_dir else base
        MODEL.eval().to(DEVICE)

+        _MODEL_READY = True; _MODEL_ERROR = None
        log.info("[MODEL] ready on %s", DEVICE)
    except Exception as e:
+        _MODEL_READY = False; _MODEL_ERROR = f"{type(e).__name__}: {e}"
        log.exception("[MODEL] load error")

def get_model():
    global MODEL
    if MODEL is None:
        with _MODEL_LOCK:
+            if MODEL is None: _load_model()
    return TOK, MODEL

def translate_with_model(text: str, max_new_tokens: int = 48) -> str:
@@ -289,14 +245,8 @@ def translate_with_model(text: str, max_new_tokens: int = 48) -> str:
        raise RuntimeError(f"Model not ready: {_MODEL_ERROR or 'unknown error'}")
    enc = tok([text], return_tensors="pt", truncation=True, max_length=256)
    enc = {k: v.to(DEVICE) for k, v in enc.items()}
+    out = m.generate(**enc, max_new_tokens=int(max_new_tokens), num_beams=4,
+                     length_penalty=0.9, no_repeat_ngram_size=3, early_stopping=True)
    return tok.decode(out[0], skip_special_tokens=True)

def _preload_thread():
@@ -323,34 +273,25 @@ def diag():
    import sys
    try:
        import torch, transformers, peft
+        torch_v = torch.__version__; tf_v = transformers.__version__; peft_v = peft.__version__
    except Exception as e:
        torch_v = tf_v = peft_v = f"import error: {e}"
    return jsonify({
+        "ok": True, "time": datetime.now(timezone.utc).isoformat(), "device": DEVICE,
+        "base_model": BASE_MODEL_ID, "adapter": ADAPTER_ID or None,
+        "model_ready": _MODEL_READY, "model_error": _MODEL_ERROR,
        "versions": {"python": sys.version, "torch": torch_v, "transformers": tf_v, "peft": peft_v},
+        "preload": PRELOAD_MODEL, "prenorm": {"level": PRENORM_LEVEL, "debug": PRENORM_DEBUG},
+        "public_app_url": PUBLIC_APP_URL or None
    })

# ---------- Auth & Pages ----------
+@app.get("/health"); @app.get("/ping")
+def health(): return jsonify({"ok": True, "time": datetime.now(timezone.utc).isoformat()})

@app.get("/login")
def login_get():
+    if session.get("uid"): return redirect(url_for("index"))
    return render_template("login.html")

@app.post("/login")
@@ -358,24 +299,17 @@ def login_post():
    email = (request.form.get("email") or "").strip().lower()
    pwd = request.form.get("password") or ""
    if not email or not pwd:
+        flash("Isi email dan password", "error"); return redirect(url_for("login_get"))
    with SessionLocal() as s:
        u = s.query(User).filter_by(email=email).first()
        if not u or not verify_password(u, pwd):
+            flash("Email atau password salah", "error"); return redirect(url_for("login_get"))
+        session.permanent = True; session["uid"], session["email"] = u.id, u.email
    return redirect(url_for("index"))

@app.get("/register")
def register_get():
+    if session.get("uid"): return redirect(url_for("index"))
    return render_template("register.html")

@app.post("/register")
@@ -383,24 +317,17 @@ def register_post():
    email = (request.form.get("email") or "").strip().lower()
    pwd = (request.form.get("password") or "")
    if not email or not pwd:
+        flash("Isi email dan password", "error"); return redirect(url_for("register_get"))
    with SessionLocal() as s:
        if s.query(User).filter_by(email=email).first():
+            flash("Email sudah terdaftar", "error"); return redirect(url_for("register_get"))
+        u = User(email=email); set_password(u, pwd); s.add(u); s.commit()
    flash("Registrasi berhasil. Silakan login.", "success")
    return redirect(url_for("login_get"))

@app.get("/logout")
def logout():
+    session.clear(); return redirect(url_for("login_get"))

@app.get("/")
@login_required
@@ -409,48 +336,40 @@ def index():
        uid = session["uid"]
        items = (s.query(Translation)
                 .filter(Translation.user_id == uid)
+                 .order_by(Translation.id.desc()).limit(10).all())
        recent = [{"src": it.src, "mt": it.mt, "created_at": it.created_at.strftime("%Y-%m-%d %H:%M")} for it in items]
        return render_template("index.html", logged_in=True, device=DEVICE, recent=recent)

@app.get("/about")
+def about_page(): return render_template("about.html")

# ---------- API ----------
@app.get("/history")
def api_history():
+    if not session.get("uid"): return jsonify({"items": []})
    with SessionLocal() as s:
        uid = session["uid"]
        items = (s.query(Translation)
                 .filter(Translation.user_id == uid)
+                 .order_by(Translation.id.desc()).limit(10).all())
        out = [{"src": it.src, "mt": it.mt, "created_at": it.created_at.strftime("%Y-%m-%d %H:%M")} for it in items]
        return jsonify({"items": out})

@app.post("/translate")
def api_translate():
+    if not session.get("uid"): return jsonify({"ok": False, "error": "Unauthorized"}), 401
    payload = request.get_json(silent=True) or {}
    text = (payload.get("text") or "").strip()
    max_new = int(payload.get("max_new_tokens", 48))
+    if not text: return jsonify({"ok": False, "error": "Empty text"}), 400
    try:
        clean = prenorm(text)
        mt = f"[FAKE] {clean}" if FALLBACK_TRANSLATE else translate_with_model(clean, max_new_tokens=max_new)
        with SessionLocal() as s:
+            s.add(Translation(user_id=session["uid"], src=text, mt=mt)); s.commit()
        return jsonify({"ok": True, "mt": mt})
    except Exception as e:
+        log.error("[API] translate error: %s", e); log.error(traceback.format_exc())
        return jsonify({"ok": False, "error": f"{type(e).__name__}: {e}"}), 500

# ---------- Run ----------
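For context: the PUBLIC_APP_URL env var and the @app.context_processor hook added above expose public_app_url to every Jinja template, which is what the new hint blocks in login.html and register.html below read. A minimal standalone sketch of that pattern, assuming only Flask; the /demo route and inline template are illustrative and not part of app.py:

import os
from flask import Flask, render_template_string

app = Flask(__name__)
PUBLIC_APP_URL = os.getenv("PUBLIC_APP_URL", "").strip()

@app.context_processor
def inject_globals():
    # Keys of the returned dict become variables in every rendered template.
    return {"public_app_url": PUBLIC_APP_URL}

@app.get("/demo")  # illustrative route, not in app.py
def demo():
    # {{ public_app_url }} resolves without being passed explicitly to the template.
    return render_template_string(
        '{% if public_app_url %}<a href="{{ public_app_url }}" target="_top">Buka Full App</a>{% endif %}'
    )

if __name__ == "__main__":
    app.run(debug=True)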
frontend/login.html
CHANGED
@@ -18,6 +18,8 @@
button{width:100%;margin-top:14px;padding:12px;background:#000;color:#fff;border:0;border-radius:8px;cursor:pointer}
.note{margin-top:10px;font-size:14px;color:#666;text-align:center}
.flash{margin-top:10px;padding:10px;border-radius:6px;font-size:14px;background:#fdecec;color:#a02222;border:1px solid #f5b5b5}
+.hint{margin-top:14px;text-align:center;font-size:14px;color:#555}
+.hint a{color:#000;text-decoration:underline}
</style>
</head>
<body>
@@ -47,6 +49,12 @@
<button type="submit">Login</button>
</form>
<p class="note">Belum punya akun? <a href="/register">Daftar</a></p>
+
+{% if public_app_url %}
+<div class="hint">
+  Masalah login di HP? <a href="{{ public_app_url }}" target="_top">Buka Full App</a>
+</div>
+{% endif %}
</div>
</div>
</body>
frontend/register.html
CHANGED
@@ -18,6 +18,8 @@
button{width:100%;margin-top:14px;padding:12px;background:#000;color:#fff;border:0;border-radius:8px;cursor:pointer}
.note{margin-top:10px;font-size:14px;color:#666;text-align:center}
.flash{margin-top:10px;padding:10px;border-radius:6px;font-size:14px;background:#fdecec;color:#a02222;border:1px solid #f5b5b5}
+.hint{margin-top:14px;text-align:center;font-size:14px;color:#555}
+.hint a{color:#000;text-decoration:underline}
</style>
</head>
<body>
@@ -47,6 +49,12 @@
<button type="submit">Daftar</button>
</form>
<p class="note">Sudah punya akun? <a href="/login">Login</a></p>
+
+{% if public_app_url %}
+<div class="hint">
+  Login di HP kadang terblokir iframe. Setelah daftar, silakan <a href="{{ public_app_url }}" target="_top">Buka Full App</a> lalu login.
+</div>
+{% endif %}
</div>
</div>
</body>
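To exercise the endpoints touched by this commit from outside the UI, a small client sketch; the base URL/port and credentials are placeholders, and the requests library is assumed to be installed:

import requests

BASE = "http://localhost:7860"  # placeholder; use the Space's public URL in practice
s = requests.Session()

# /translate returns 401 without the session cookie set by /login;
# /history simply returns an empty list when not logged in.
s.post(f"{BASE}/login", data={"email": "user@example.com", "password": "secret"})

r = s.post(f"{BASE}/translate", json={"text": "sa pu rumah", "max_new_tokens": 48})
print(r.status_code, r.json())   # expect {"ok": true, "mt": "..."} on success

print(s.get(f"{BASE}/history").json())  # last 10 translations for this user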