import time
import math
import logging
import mimetypes
import traceback
from aiohttp import web
from aiohttp.http_exceptions import BadStatusLine
from FileStream.bot import multi_clients, work_loads, FileStream
from FileStream.config import Telegram, Server
from FileStream.server.exceptions import FIleNotFound, InvalidHash
from FileStream import utils, StartTime, __version__
from FileStream.utils.render_template import render_page
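
# Route table for the aiohttp server: a health check at "/", a load/status
# report at "/status", an HTML player page at "/watch/{path}" and the raw
# download/stream endpoint at "/dl/{path}".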
routes = web.RouteTableDef()
@routes.get("/", allow_head=True)
async def root_route_handler(_):
return web.json_response(
{
"message": "HELLO WORLD!",
"uptime": utils.get_readable_time(time.time() - StartTime),
"telegram_bot": "@" + FileStream.username,
"connected_bots": len(multi_clients),
"version": __version__,
}
)
@routes.get("/status", allow_head=True)
async def root_route_handler(_):
return web.json_response(
{
"server_status": "running",
"uptime": utils.get_readable_time(time.time() - StartTime),
"telegram_bot": "@" + FileStream.username,
"connected_bots": len(multi_clients),
"loads": dict(
("bot" + str(c + 1), l)
for c, (_, l) in enumerate(
sorted(work_loads.items(), key=lambda x: x[1], reverse=True)
)
),
"version": __version__,
}
)
@routes.get("/watch/{path}", allow_head=True)
async def stream_handler(request: web.Request):
try:
path = request.match_info["path"]
return web.Response(text=await render_page(path), content_type='text/html')
except InvalidHash as e:
raise web.HTTPForbidden(text=e.message)
except FIleNotFound as e:
raise web.HTTPNotFound(text=e.message)
except (AttributeError, BadStatusLine, ConnectionResetError):
pass
@routes.get("/dl/{path}", allow_head=True)
async def stream_handler(request: web.Request):
try:
path = request.match_info["path"]
return await media_streamer(request, path)
except InvalidHash as e:
raise web.HTTPForbidden(text=e.message)
except FIleNotFound as e:
raise web.HTTPNotFound(text=e.message)
except (AttributeError, BadStatusLine, ConnectionResetError):
pass
except Exception as e:
traceback.print_exc()
logging.critical(e.with_traceback(None))
logging.debug(traceback.format_exc())
raise web.HTTPInternalServerError(text=str(e))
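

# Cache of ByteStreamer helpers, keyed by the underlying Telegram client,
# so repeated requests reuse the same connection wrapper.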
class_cache = {}


async def media_streamer(request: web.Request, db_id: str):
    range_header = request.headers.get("Range", 0)

    # Pick the least-loaded Telegram client to serve this request.
    index = min(work_loads, key=work_loads.get)
    faster_client = multi_clients[index]

    if Telegram.MULTI_CLIENT:
        logging.info(f"Client {index} is now serving {request.headers.get('X-FORWARDED-FOR', request.remote)}")

    if faster_client in class_cache:
        tg_connect = class_cache[faster_client]
        logging.debug(f"Using cached ByteStreamer object for client {index}")
    else:
        logging.debug(f"Creating new ByteStreamer object for client {index}")
        tg_connect = utils.ByteStreamer(faster_client)
        class_cache[faster_client] = tg_connect

    logging.debug("before calling get_file_properties")
    file_id = await tg_connect.get_file_properties(db_id, multi_clients)
    logging.debug("after calling get_file_properties")

    file_size = file_id.file_size

    # Resolve the requested byte range (inclusive bounds).
    if range_header:
        from_bytes, until_bytes = range_header.replace("bytes=", "").split("-")
        from_bytes = int(from_bytes)
        until_bytes = int(until_bytes) if until_bytes else file_size - 1
    else:
        from_bytes = request.http_range.start or 0
        until_bytes = (request.http_range.stop or file_size) - 1

    if (until_bytes > file_size) or (from_bytes < 0) or (until_bytes < from_bytes):
        return web.Response(
            status=416,
            body="416: Range not satisfiable",
            headers={"Content-Range": f"bytes */{file_size}"},
        )

    # Align the Telegram download to 1 MiB chunk boundaries, then trim the
    # first and last chunks so only the requested bytes are emitted.
    chunk_size = 1024 * 1024
    until_bytes = min(until_bytes, file_size - 1)

    offset = from_bytes - (from_bytes % chunk_size)
    first_part_cut = from_bytes - offset
    last_part_cut = until_bytes % chunk_size + 1

    req_length = until_bytes - from_bytes + 1
    part_count = math.ceil(until_bytes / chunk_size) - math.floor(offset / chunk_size)
    body = tg_connect.yield_file(
        file_id, index, offset, first_part_cut, last_part_cut, part_count, chunk_size
    )

    mime_type = file_id.mime_type
    file_name = utils.get_name(file_id)
    disposition = "attachment"

    if not mime_type:
        mime_type = mimetypes.guess_type(file_name)[0] or "application/octet-stream"

    # if "video/" in mime_type or "audio/" in mime_type:
    #     disposition = "inline"

    return web.Response(
        status=206 if range_header else 200,
        body=body,
        headers={
            "Content-Type": f"{mime_type}",
            "Content-Range": f"bytes {from_bytes}-{until_bytes}/{file_size}",
            "Content-Length": str(req_length),
            "Content-Disposition": f'{disposition}; filename="{file_name}"',
            "Accept-Ranges": "bytes",
        },
    )
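

# A minimal sketch of how this route table could be mounted, assuming the
# web.Application is created elsewhere in FileStream.server (the host and
# port values below are illustrative, not the project's actual config):
#
#     app = web.Application()
#     app.add_routes(routes)
#     web.run_app(app, host="0.0.0.0", port=8080)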