Spaces:
Running
Running
Upload 2 files
Browse files- main.py +3 -1
- whisper.py +47 -0
main.py
CHANGED
@@ -83,6 +83,7 @@ from gpytranslate import SyncTranslator
|
|
83 |
import logging
|
84 |
import functions as code
|
85 |
from fluxai import router as fluxai_router
|
|
|
86 |
|
87 |
logging.basicConfig(level=logging.ERROR)
|
88 |
logging.basicConfig(level=logging.INFO)
|
@@ -127,6 +128,7 @@ trans = SyncTranslator()
|
|
127 |
|
128 |
app = FastAPI(docs_url=None, redoc_url="/")
|
129 |
app.include_router(fluxai_router, prefix="/api/v1")
|
|
|
130 |
|
131 |
timeout = 100
|
132 |
|
@@ -1700,7 +1702,7 @@ def custom_openapi():
|
|
1700 |
return app.openapi_schema
|
1701 |
openapi_schema = get_openapi(
|
1702 |
title="RyuzakiLib API",
|
1703 |
-
version="3.3.
|
1704 |
summary="Use It Only For Personal Project Else I Need To Delete The Api",
|
1705 |
description=description,
|
1706 |
routes=app.routes,
|
|
|
83 |
import logging
|
84 |
import functions as code
|
85 |
from fluxai import router as fluxai_router
|
86 |
+
from whisper import router as whisper_router
|
87 |
|
88 |
logging.basicConfig(level=logging.ERROR)
|
89 |
logging.basicConfig(level=logging.INFO)
|
|
|
128 |
|
129 |
app = FastAPI(docs_url=None, redoc_url="/")
|
130 |
app.include_router(fluxai_router, prefix="/api/v1")
|
131 |
+
app.include_router(whisper_router, prefix="/api/v1")
|
132 |
|
133 |
timeout = 100
|
134 |
|
|
|
1702 |
return app.openapi_schema
|
1703 |
openapi_schema = get_openapi(
|
1704 |
title="RyuzakiLib API",
|
1705 |
+
version="3.3.3",
|
1706 |
summary="Use It Only For Personal Project Else I Need To Delete The Api",
|
1707 |
description=description,
|
1708 |
routes=app.routes,
|
whisper.py
ADDED
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import asyncio
import io
import os

import requests
from dotenv import load_dotenv
from fastapi import APIRouter, Depends
from fastapi.responses import StreamingResponse
from pydantic import BaseModel

from models import *
class WhisperX(BaseModel):
    """Request body for the whisper endpoint: path of the audio file to send."""

    filename: str


router = APIRouter()

# Load .env (if present) before reading the Hugging Face token; the KeyError
# on a missing HUGGING_TOKEN is deliberate — the service cannot run without it.
load_dotenv()
HUGGING_TOKEN = os.environ["HUGGING_TOKEN"]
async def whsiper_to_text(filename):
    """Transcribe an audio file via the HF Inference API (openai/whisper-large-v3).

    Reads *filename* from disk, POSTs the raw bytes to the hosted model, and
    returns the parsed JSON response (a dict carrying a "text" key on success),
    or None when the API answers with a non-200 status.
    """
    API_URL = "https://api-inference.huggingface.co/models/openai/whisper-large-v3"
    headers = {"Authorization": f"Bearer {HUGGING_TOKEN}"}
    with open(filename, "rb") as f:
        data = f.read()
    # requests.post is blocking; running it directly in this coroutine would
    # stall the whole event loop for the duration of the upload. Offload it to
    # a worker thread, and bound it with a timeout so a dead API endpoint
    # cannot hang the request forever.
    response = await asyncio.to_thread(
        requests.post, API_URL, headers=headers, data=data, timeout=100
    )
    if response.status_code != 200:
        print(f"Error status {response.status_code}")
        return None
    return response.json()
@router.post("/akeno/whsiper", response_model=SuccessResponse, responses={422: {"model": SuccessResponse}})
async def whsiper_new(payload: WhisperX):
    """Transcribe the file named in *payload* and wrap the result in a
    SuccessResponse envelope (status is the string "True" or "False")."""
    # SECURITY NOTE(review): payload.filename comes straight from the client
    # and is opened from disk with no path validation — confirm callers
    # cannot use this to read arbitrary server files.
    try:
        transcription = await whsiper_to_text(payload.filename)
        if transcription is None:
            return SuccessResponse(
                status="False",
                randydev={"error": "Failed to whsiper an filename"},
            )
        return SuccessResponse(
            status="True",
            randydev={"message": transcription.get("text")},
        )
    except Exception as exc:
        # Boundary handler: surface any failure as a structured error payload
        # rather than a 500.
        return SuccessResponse(
            status="False",
            randydev={"error": f"An error occurred: {str(exc)}"},
        )