# NOTE(review): removed non-code scrape residue from the top of the file
# ("Spaces: / Sleeping / Sleeping" — Hugging Face Spaces page status text
# captured by the extraction tool). It was not Python and broke parsing.
#!/usr/bin/env python3
"""Set up a Hugging Face cloud GPU environment.

Interactive helper script: logs in to Hugging Face, assembles
Inference Endpoint / Space configurations, and prints model-upload
instructions. (The original module docstring was Korean and arrived
mojibaked; rewritten in English.)
"""
import os
import requests
import json  # NOTE(review): imported but unused in this file — confirm before removing
import logging

from huggingface_hub import login, HfApi
# NOTE(review): AutoTokenizer/AutoModelForCausalLM are imported but never
# used in this file — confirm no downstream reliance before removing.
from transformers import AutoTokenizer, AutoModelForCausalLM

# Module-wide logging configuration and logger.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class HuggingFaceCloudSetup:
    """Helpers for preparing a Hugging Face cloud GPU deployment.

    NOTE(review): the original Korean log/user-facing strings were
    mojibake (mis-encoded) in the source and could not be reconstructed
    reliably, so they have been rewritten in English. Verify against the
    upstream file if the Korean messages must be preserved.
    """

    def __init__(self):
        """Initialize with no API handle and the default model/Space names."""
        self.api = None  # HfApi instance; set by setup_huggingface_login()
        self.model_name = "heegyu/polyglot-ko-5.8b-chat"
        self.space_name = "lily-math-rag"

    def setup_huggingface_login(self):
        """Prompt for a Hugging Face token and log in.

        Returns:
            bool: True if login succeeded and ``self.api`` was created,
            False on empty token or any login error.
        """
        logger.info("Setting up Hugging Face login")
        try:
            token = input("Enter your Hugging Face token: ").strip()
            if not token:
                logger.error("A token is required")
                return False
            login(token)
            self.api = HfApi(token=token)
            logger.info("Hugging Face login succeeded")
            return True
        except Exception as e:
            # Broad catch is deliberate: a failed login is reported, not fatal.
            logger.error("Hugging Face login failed: %s", e)
            return False

    def create_inference_endpoint(self):
        """Assemble the configuration dict for an Inference Endpoint.

        This only builds the configuration; the endpoint itself must be
        created via the Hugging Face UI/CLI (see the next-steps output).

        Returns:
            dict: endpoint configuration (account, name, repository,
            framework, accelerator, instance_type, region, vendor).
        """
        logger.info("Building inference endpoint configuration...")
        # A literal dict cannot raise; the original's try/except around it
        # was a no-op and has been removed.
        endpoint_config = {
            "account": "your-username",  # replace with your Hugging Face username
            "name": "lily-math-rag-endpoint",
            "repository": self.model_name,
            "framework": "pytorch",
            "accelerator": "gpu",
            "instance_type": "gpu.t4.medium",  # GPU instance type
            "region": "us-east-1",
            "vendor": "aws",
        }
        logger.info("Endpoint configuration complete")
        logger.info("  model: %s", self.model_name)
        logger.info("  GPU: %s", endpoint_config["instance_type"])
        logger.info("  region: %s", endpoint_config["region"])
        return endpoint_config

    def create_huggingface_space(self):
        """Assemble the configuration dict for a Hugging Face Space.

        Returns:
            dict: Space configuration (name, type, sdk, title,
            description, license, python_version).
        """
        logger.info("Building Hugging Face Space configuration...")
        # As above: dict construction cannot fail, so no try/except.
        space_config = {
            "name": self.space_name,
            "type": "gradio",
            "sdk": "gradio",
            "title": "Lily Math RAG System",
            # Original description was mojibaked Korean ("RAG system for
            # solving math problems") — rewritten in English.
            "description": "RAG system for solving math problems",
            "license": "mit",
            "python_version": "3.9",
        }
        logger.info("Space configuration complete")
        logger.info("  Space name: %s", space_config["name"])
        logger.info("  type: %s", space_config["type"])
        return space_config

    def upload_model_to_hub(self):
        """Check for the local model and print CLI upload instructions.

        Does not actually upload; it only verifies the local path exists
        and tells the user which ``huggingface-cli`` command to run.

        Returns:
            bool: True if the local model directory exists, False if it
            is missing or any error occurred.
        """
        logger.info("Preparing model upload...")
        try:
            local_model_path = "hearth_llm_model"
            if not os.path.exists(local_model_path):
                logger.warning("Local model not found: %s", local_model_path)
                return False
            logger.info("Local model found: %s", local_model_path)
            # The actual upload is done out-of-band with the HF CLI.
            logger.info("Upload the model with:")
            logger.info(
                "  huggingface-cli upload your-username/lily-math-model %s",
                local_model_path,
            )
            return True
        except Exception as e:
            logger.error("Model upload preparation failed: %s", e)
            return False

    def test_cloud_inference(self, endpoint_url):
        """POST a test prompt to a deployed inference endpoint.

        Args:
            endpoint_url: base URL of the endpoint; ``/predict`` is appended.

        Returns:
            bool: True if the endpoint answered HTTP 200, False otherwise.
        """
        logger.info("Testing cloud inference")
        try:
            test_data = {
                # Original prompt was mojibaked Korean ("Hello! Please help
                # with a math problem.") — rewritten in English.
                "inputs": "Hello! Please help me with a math problem.",
                "parameters": {
                    "max_length": 100,
                    "temperature": 0.7,
                },
            }
            response = requests.post(
                f"{endpoint_url}/predict",
                json=test_data,
                headers={"Authorization": f"Bearer {os.getenv('HF_TOKEN')}"},
                timeout=30,  # fix: original request had no timeout and could hang forever
            )
            if response.status_code == 200:
                logger.info("Inference test succeeded: %s", response.json())
                return True
            logger.error("Inference test failed: %s", response.status_code)
            return False
        except Exception as e:
            logger.error("Inference test failed: %s", e)
            return False
def main():
    """Interactive entry point: walk through the Hugging Face cloud setup.

    Logs in, builds the endpoint and Space configurations, checks the
    local model, then prints the manual next steps. NOTE(review): the
    original Korean status messages arrived mojibaked and have been
    rewritten in English.
    """
    print("Hugging Face cloud GPU environment setup")
    print("=" * 50)

    # 1. Initialize the setup helper.
    setup = HuggingFaceCloudSetup()

    # 2. Log in to Hugging Face; abort on failure.
    if not setup.setup_huggingface_login():
        print("Login failed")
        return

    # 3. Build the inference endpoint configuration (the original bound the
    #    result to an unused local; the side effect is the logged summary).
    setup.create_inference_endpoint()

    # 4. Build the Space configuration (result likewise unused).
    setup.create_huggingface_space()

    # 5. Check the local model and print upload instructions.
    setup.upload_model_to_hub()

    print("\nHugging Face cloud setup complete!")
    print("Next steps:")
    print("1. Create the endpoint in Hugging Face Inference Endpoints")
    print("2. Upload the model to the Hugging Face Hub")
    print("3. Configure the Railway Hearth Chat integration")


if __name__ == "__main__":
    main()