import gradio as gr
import requests, re
from api_usage import (
    get_subscription, check_key_availability, check_key_ant_availability,
    check_ant_rate_limit, check_key_gemini_availability, check_key_azure_availability,
    get_azure_status, get_azure_deploy, check_key_mistral_availability,
    check_mistral_quota, check_key_replicate_availability, check_key_aws_availability,
    check_key_or_availability, check_key_or_limits,
)
async def sort_key(key, rate_limit, claude_opus):
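    # Dispatch the key to the matching checker based on its prefix/format
    # (OpenRouter, Anthropic, OpenAI, Gemini, Replicate, Azure, AWS, Mistral).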
_key = key.strip()
if _key.startswith("sk-or-v1-") and re.match(re.compile("sk-or-v1-[a-z0-9]{64}"), _key):
return get_key_openrouter_info(_key)
if _key.startswith("sk-ant-") and re.match(re.compile("sk-ant-api03-[a-zA-Z0-9\-_]{93}AA"), _key):
return await get_key_ant_info(_key, rate_limit, claude_opus)
if _key.startswith("sk-"):
return get_key_oai_info(_key)
if _key.startswith("AIzaSy"):
return get_key_gemini_info(_key)
if (_key.startswith("r8_") and len(_key) == 40) or (_key.islower() and len(_key) == 40):
return get_key_replicate_info(_key)
if len(_key.split(':')) == 2 and _key.split(':')[1].islower() and len(_key.split(':')[1]) == 32 and "openai.azure.com" not in _key.split(':')[1]:
endpoint = f"{_key.split(':')[0]}.openai.azure.com"
api_key = _key.split(':')[1]
return get_key_azure_info(endpoint, api_key)
if "openai.azure.com" in _key.split(';')[0]:
endpoint = _key.split(';')[0]
api_key = _key.split(';')[1]
return get_key_azure_info(endpoint, api_key)
if _key.startswith("AKIA") and len(_key.split(':')[0]) == 20 and _key.split(':')[0].isupper():
return await get_key_aws_info(_key)
if len(_key) == 32:
return get_key_mistral_info(_key)
return not_supported(_key)
def get_key_oai_info(key):
# Return a dictionary containing key information
session = requests.Session()
key_avai = check_key_availability(session, key)
info_dict = {#"account_name": "",
"key_type": "OpenAI",
"key_availability": True if key_avai[0] else False,
"gpt4_availability": "",
"gpt4_32k_availability": "",
"default_org": "",
"org_description": "",
"organization": "",
"models": "",
"requests_per_minute": "",
"tokens_per_minute": "",
#"tokens_per_minute_left": "",
"quota": ""}
if key_avai[0]:
if key_avai[0] == 200:
info = get_subscription(key, session, key_avai[1])
info_dict["default_org"] = info["default_org"]
info_dict["org_description"] = info["org_description"]
info_dict["organization"] = info["organization"]
else:
empty_org = { 'id': '' }
info = get_subscription(key, session, [empty_org])
info_dict["organization"] = key_avai[1]
info_dict["gpt4_availability"] = info["has_gpt4"]
info_dict["gpt4_32k_availability"] = info["has_gpt4_32k"]
info_dict["models"] = info["models"]
info_dict["requests_per_minute"] = info["rpm"]
info_dict["tokens_per_minute"] = info["tpm"]
info_dict["quota"] = info["quota"]
return info_dict
async def get_key_ant_info(key, rate_limit, claude_opus):
# Return a dictionary containing key information
key_avai = check_key_ant_availability(key, claude_opus)
info_dict = {#"account_name": "",
"key_type": "Anthropic Claude",
"key_availability": key_avai[0],
"status": "",
"filter_response": "",
"requests_per_minute": "",
"tokens_per_minute": "",
"tier": "",
"concurrent_rate_limit": ""}
info_dict["status"] = key_avai[1]
info_dict["filter_response"] = key_avai[2]
info_dict["requests_per_minute"] = key_avai[3] + ("" if key_avai[3] == "" else f" ({key_avai[4]} left)")
info_dict["tokens_per_minute"] = key_avai[5] + ("" if key_avai[5] == "" else f" ({key_avai[6]} left)")
info_dict["tier"] = key_avai[7]
if rate_limit:
rate = await check_ant_rate_limit(key)
info_dict["concurrent_rate_limit"] = rate
return info_dict
def get_key_gemini_info(key):
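    # Return a dictionary with Gemini key availability and accessible models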
key_avai = check_key_gemini_availability(key)
info_dict = {#"account_name": "",
"key_type": "Google Gemini",
"key_availability": key_avai[0],
"models": key_avai[1]}
return info_dict
def get_key_azure_info(endpoint, api_key):
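    # Return a dictionary with Azure OpenAI key status, deployments and model availability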
key_avai = check_key_azure_availability(endpoint, api_key)
info_dict = {#"account_name": "",
"key_type": "Microsoft Azure OpenAI",
"key_availability": key_avai[0],
"gpt35_availability": "",
"gpt4_availability": "",
"gpt4_32k_availability": "",
"moderation_status": "",
"models": "",
"deployments": ""}
if key_avai[0]:
azure_deploy = get_azure_deploy(endpoint, api_key)
status = get_azure_status(endpoint, api_key, azure_deploy)
info_dict["gpt35_availability"] = status[1]
info_dict["gpt4_availability"] = status[2]
info_dict["gpt4_32k_availability"] = status[3]
info_dict["moderation_status"] = status[0]
info_dict["models"] = key_avai[1]
info_dict["deployments"] = azure_deploy
return info_dict
def get_key_mistral_info(key):
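    # Return a dictionary with Mistral key availability, quota status and models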
key_avai = check_key_mistral_availability(key)
info_dict = {#"account_name": "",
"key_type": "Mistral AI",
"key_availability": True if key_avai else False,
"has_quota": "",
"models": ""}
if key_avai:
info_dict['has_quota'] = check_mistral_quota(key)
info_dict['models'] = key_avai
return info_dict
def get_key_replicate_info(key):
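    # Return a dictionary with Replicate key status, account info, quota and available hardware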
key_avai = check_key_replicate_availability(key)
info_dict = {#"account_name": "",
"key_type": "Replicate",
"key_availability": key_avai[0],
"account_name": "",
"type": "",
"has_quota": "",
"hardware_available": ""}
if key_avai[0]:
info_dict['account_name'] = key_avai[1]['username']
info_dict['type'] = key_avai[1]['type']
info_dict['has_quota'] = key_avai[2]
info_dict['hardware_available'] = key_avai[3]
return info_dict
async def get_key_aws_info(key):
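    # Return a dictionary with AWS credential status, IAM permissions, Bedrock access and usage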
key_avai = await check_key_aws_availability(key)
info_dict = {#"account_name": "",
"key_type": "Amazon AWS Claude",
"key_availability": key_avai[0],
"username": "",
"root": "",
"admin": "",
"quarantine": "",
"iam_full_access": "",
"iam_user_change_password": "",
"aws_bedrock_full_access": "",
"enabled_region": "",
"models_usage": "",
"cost_and_usage": key_avai[1]}
if key_avai[0]:
info_dict['username'] = key_avai[1]
info_dict['root'] = key_avai[2]
info_dict['admin'] = key_avai[3]
info_dict['quarantine'] = key_avai[4]
info_dict['iam_full_access'] = key_avai[5]
info_dict['iam_user_change_password'] = key_avai[6]
info_dict['aws_bedrock_full_access'] = key_avai[7]
info_dict['enabled_region'] = key_avai[8]
info_dict['models_usage'] = key_avai[9]
info_dict['cost_and_usage'] = key_avai[10]
return info_dict
def get_key_openrouter_info(key):
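    # Return a dictionary with OpenRouter key tier, usage, balance, rate limit and per-model token limits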
key_avai = check_key_or_availability(key)
info_dict = {#"account_name": "",
"key_type": "OpenRouter",
"key_availability": key_avai[0],
"is_free_tier": "",
"usage": "",
"balance": "",
"limit": "",
"limit_remaining": "",
"rate_limit_per_minite": "",
"4_turbo_per_request_tokens_limit": "",
"sonnet_per_request_tokens_limit": "",
"opus_per_request_tokens_limit": ""}
if key_avai[0]:
models_info = check_key_or_limits(key)
info_dict['is_free_tier'] = key_avai[1]['is_free_tier']
info_dict['limit'] = key_avai[1]['limit']
info_dict['limit_remaining'] = key_avai[1]['limit_remaining']
info_dict['usage'] = f"${format(key_avai[1]['usage'], '.4f')}"
info_dict['balance'] = f"${format(models_info[0], '.4f')}"
info_dict['rate_limit_per_minite'] = key_avai[2]
info_dict['4_turbo_per_request_tokens_limit'] = models_info[1]['openai/gpt-4-turbo-preview']
info_dict['sonnet_per_request_tokens_limit'] = models_info[1]['anthropic/claude-3-sonnet:beta']
info_dict['opus_per_request_tokens_limit'] = models_info[1]['anthropic/claude-3-opus:beta']
else:
info_dict['usage'] = key_avai[1]
return info_dict
def not_supported(key):
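    # Fallback for keys that do not match any supported format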
info_dict = {#"account_name": "",
"key_type": "Not supported",
"status": ""}
return info_dict
def clear_inputs(text):
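    # Reset the API key textbox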
return ""
with gr.Blocks() as demo:
gr.Markdown('''
# OpenAI/Anthropic/Gemini/Azure/Mistral/Replicate/AWS Claude/OpenRouter API Key Status Checker
*(Based on shaocongma, CncAnon1, su, Drago, kingbased key checkers)*
    AWS credential format: AWS_ACCESS_KEY_ID:AWS_SECRET_ACCESS_KEY (the root flag may not be accurate)
    Azure endpoint format: YOUR_RESOURCE_NAME:YOUR_API_KEY or (https://)YOUR_RESOURCE_NAME.openai.azure.com;YOUR_API_KEY
''')
with gr.Row():
with gr.Column():
key = gr.Textbox(lines=1, max_lines=2, label="API Key")
            rate_limit = gr.Checkbox(label="Check concurrent rate limit (Anthropic Claude API, experimental)")
claude_opus = gr.Checkbox(label="Use claude-3-opus for filter_response check? (default: claude-3-haiku)")
with gr.Row():
clear_button = gr.Button("Clear")
submit_button = gr.Button("Submit", variant="primary")
with gr.Column():
info = gr.JSON(label="API Key Information")
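    # Wire the buttons: Clear empties the textbox, Submit runs sort_key and shows the result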
clear_button.click(fn=clear_inputs, inputs=[key], outputs=[key])
submit_button.click(fn=sort_key, inputs=[key, rate_limit, claude_opus], outputs=[info], api_name="sort_key")
demo.launch()