|
import gradio as gr |
|
def one(text):
    """Echo the submitted text back unchanged.

    Serves as the ``fn`` for the Gradio Interface below — presumably a
    placeholder for real AMP-detection logic (TODO confirm with authors).
    """
    result = text
    return result
|
if __name__ == "__main__":
    # Assemble the demo page — header, banner image, project links, a
    # duplicate-space badge, and the AMP sequence detector interface —
    # inside a single Blocks layout, then serve it.
    with gr.Blocks() as demo:
        gr.Markdown("<h1><center>Diff-AMP</center></h1>")

        # BUGFIX: the original used the Drive "file/d/<id>/view" page URL,
        # which returns an HTML page rather than image bytes, so the
        # Markdown image never rendered. Use the direct-content endpoint
        # (uc?export=view) with the same file id.
        gr.Markdown("""
![image](https://drive.google.com/uc?export=view&id=1j99AXrLVPgIkcGEZG8pOaS3P1Ld3293D)
""")

        gr.Markdown(
            "<p align='center' style='font-size: 20px;'>A system to connect LLMs with ML community. See our <a href='https://github.com/wrab12/diff-amp'>Project</a></p>")
        gr.HTML(
            '''<center><a href="https://huggingface.co/spaces/jackrui/diff-amp-AMP_Sequence_Detector?duplicate=true"><img src="https://bit.ly/3gLdBN6" alt="Duplicate Space"></a></center>''')
        # NOTE(review): this notice mentions "HuggingGPT" — it looks
        # copy-pasted from another project's Space; confirm the intended
        # wording with the authors before changing the user-facing text.
        gr.HTML(
            '''<center>Note: Only a few models are deployed in the local inference endpoint due to hardware limitations. In addition, online HuggingFace inference endpoints may sometimes not be available. Thus the capability of HuggingGPT is limited.</center>''')

        # Example peptide sequences shown as clickable samples under the
        # input box (one single-field example per row).
        examples = [
            ["QGLFFLGAKLFYLLTLFL"],
            ["FLGLLFHGVHHVGKWIHGLIHGHH"],
            ["GLMSTLKGAATNAAVTLLNKLQCKLTGTC"],
        ]

        # Instantiating an Interface inside the active Blocks context
        # renders it into `demo`; `one` currently just echoes the input.
        iface = gr.Interface(
            fn=one,
            inputs="text",
            outputs="text",
            title="AMP Sequence Detector",
            examples=examples,
        )

    demo.launch()