File size: 1,549 Bytes
e063e29
 
 
 
 
 
cbfb9df
e063e29
 
 
cbfb9df
 
 
 
 
 
 
 
 
 
 
 
 
 
e063e29
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# -*- coding: utf-8 -*-
"""gradio_app_0

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/lalalalalal_HAHAHHAH!@#$%^&
"""

from EMO_AI.all import *

"""
# should add kwargs in get_model(), time to update our lib
from pathlib import Path
# return a dict of options
def load_weight(f="emo_0.pt"):
  p = Path(f)
  if p.is_file():
    return {"PATH": f, "inference_only": False}
  return {"pretrained":False}
"""
# Load the emotion model from local weights "emo_0.pt".
# NOTE(review): inference_only=False despite this app only running inference —
# presumably required for the weight file to load correctly; confirm against EMO_AI docs.
model = get_model("emo_0.pt", inference_only=False)

# stable: model = get_model(pretrained=False)

import gradio as gr

"""# TODO

## model

### put our model (and pretrained weight) to huggingface space


## App

### store users result there (and pass to firebase?)

### enable to send request from the mobile to space, and send result back from space to mobile

### use socket?
"""

# For creating filenames that won't collide as much (hash-based naming).
# (These were bare string-literal statements; converted to real comments —
# string expressions are no-op statements, not comments.)
import hashlib
from pathlib import Path

# add write data function

def get_filename(text):
  """Return a low-collision filename for *text*: the hex MD5 digest of its UTF-8 bytes."""
  digest = hashlib.md5(text.encode("utf-8"))
  return digest.hexdigest()

def write_result(text, content):
  """Persist *content* to a file named by the MD5 hex digest of *text*.

  If a file with that name already exists it is left untouched (first
  result wins). Returns the Path so callers can locate the file later
  (e.g. to hand it to a socket-based transfer).
  """
  # Filename = md5 hex of the input text, so repeated inputs map to one file.
  target = Path(hashlib.md5(text.encode("utf-8")).hexdigest())
  if not target.is_file():
    target.write_text(content)
  return target


def fn2(text, model=model):
  """Gradio handler: run the model on *text* and return its output.

  Args:
    text: input string from the Gradio textbox.
    model: defaults to the module-level model loaded at import time.

  Returns:
    The model output from get_output(), which is also what Gradio displays.
  """
  out = get_output(text, model)
  # Side effect only: cache the result to disk under a hash-derived name.
  # (Previously bound to an unused local `filename` — dead binding removed.)
  write_result(text, out)
  return out

# Wire the handler into a minimal Gradio UI: one text input, one text output.
interface = gr.Interface(
    fn = fn2,
    inputs="text",
    outputs="text"
)

# Start the web server hosting the interface (blocking side effect at import).
interface.launch()