Spaces: Runtime error
Commit: 環境ファイルの追加 (add environment files)
Browse files
- app.py +18 -20
- requirements.txt +2 -0
app.py
CHANGED
@@ -7,7 +7,13 @@ from bs4 import BeautifulSoup
|
|
7 |
|
8 |
|
9 |
|
10 |
-
def generate_cloud(
|
|
|
|
|
|
|
|
|
|
|
|
|
11 |
t = Tokenizer()
|
12 |
|
13 |
tokens = t.tokenize(text)
|
@@ -16,7 +22,8 @@ def generate_cloud(text, width, height):
|
|
16 |
width=int(width),
|
17 |
height=int(height),
|
18 |
regexp="[\w']+",
|
19 |
-
font_path="./ipaexm00401/ipaexm.ttf"
|
|
|
20 |
|
21 |
nouns = [token.base_form for token in tokens if token.part_of_speech.startswith('名詞')]
|
22 |
nouns = (" ").join(nouns)
|
@@ -24,16 +31,8 @@ def generate_cloud(text, width, height):
|
|
24 |
return wc.to_array()
|
25 |
|
26 |
|
27 |
-
def
|
28 |
-
|
29 |
-
# default setting
|
30 |
-
width = width if width is not None else 1024
|
31 |
-
height = height if height is not None else 768
|
32 |
-
|
33 |
-
return generate_cloud(text, width, height)
|
34 |
-
|
35 |
-
|
36 |
-
def generate_cloud_from_url(url, width, height):
|
37 |
|
38 |
# URLからHTMLを取得
|
39 |
html = urlopen(url)
|
@@ -41,19 +40,17 @@ def generate_cloud_from_url(url, width, height):
|
|
41 |
# HTMLからbodyのテキストを抽出
|
42 |
soup = BeautifulSoup(html, "html.parser")
|
43 |
body_text = soup.body.get_text()
|
44 |
-
# default setting
|
45 |
-
width = width if width is not None else 1024
|
46 |
-
height = height if height is not None else 768
|
47 |
|
48 |
-
return generate_cloud(body_text, width, height)
|
49 |
|
50 |
|
51 |
from_text = gr.Interface(
|
52 |
-
fn=
|
53 |
inputs=[
|
54 |
gr.Textbox(label="入力テキスト"),
|
55 |
gr.Number(value=1024, label="横幅(デフォルト値:1024)"),
|
56 |
-
gr.Number(value=768, label="高さ(デフォルト値:768)")
|
|
|
57 |
],
|
58 |
outputs=gr.Image(type="pil"),
|
59 |
title="☁️にほんご わーどくらうど☁"
|
@@ -61,11 +58,12 @@ from_text = gr.Interface(
|
|
61 |
|
62 |
|
63 |
from_url = gr.Interface(
|
64 |
-
fn=
|
65 |
inputs=[
|
66 |
gr.Textbox(label="URL"),
|
67 |
gr.Number(value=1024, label="横幅(デフォルト値:1024)"),
|
68 |
-
gr.Number(value=768, label="高さ(デフォルト値:768)")
|
|
|
69 |
],
|
70 |
outputs=gr.Image(type="pil"),
|
71 |
title="☁️にほんご わーどくらうど☁"
|
|
|
7 |
|
8 |
|
9 |
|
10 |
+
def generate_cloud(*args):
|
11 |
+
text, width, height, background_color = args
|
12 |
+
|
13 |
+
# default setting
|
14 |
+
width = width if width is not None else 1024
|
15 |
+
height = height if height is not None else 768
|
16 |
+
|
17 |
t = Tokenizer()
|
18 |
|
19 |
tokens = t.tokenize(text)
|
|
|
22 |
width=int(width),
|
23 |
height=int(height),
|
24 |
regexp="[\w']+",
|
25 |
+
font_path="./ipaexm00401/ipaexm.ttf",
|
26 |
+
background_color=background_color)
|
27 |
|
28 |
nouns = [token.base_form for token in tokens if token.part_of_speech.startswith('名詞')]
|
29 |
nouns = (" ").join(nouns)
|
|
|
31 |
return wc.to_array()
|
32 |
|
33 |
|
34 |
+
def get_text_from_url(*args):
|
35 |
+
url, width, height, background_color = args
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
36 |
|
37 |
# URLからHTMLを取得
|
38 |
html = urlopen(url)
|
|
|
40 |
# HTMLからbodyのテキストを抽出
|
41 |
soup = BeautifulSoup(html, "html.parser")
|
42 |
body_text = soup.body.get_text()
|
|
|
|
|
|
|
43 |
|
44 |
+
return generate_cloud(body_text, width, height, background_color)
|
45 |
|
46 |
|
47 |
from_text = gr.Interface(
|
48 |
+
fn=generate_cloud,
|
49 |
inputs=[
|
50 |
gr.Textbox(label="入力テキスト"),
|
51 |
gr.Number(value=1024, label="横幅(デフォルト値:1024)"),
|
52 |
+
gr.Number(value=768, label="高さ(デフォルト値:768)"),
|
53 |
+
gr.Radio(choices=["black", "white"], value="black", label="背景色")
|
54 |
],
|
55 |
outputs=gr.Image(type="pil"),
|
56 |
title="☁️にほんご わーどくらうど☁"
|
|
|
58 |
|
59 |
|
60 |
from_url = gr.Interface(
|
61 |
+
fn=get_text_from_url,
|
62 |
inputs=[
|
63 |
gr.Textbox(label="URL"),
|
64 |
gr.Number(value=1024, label="横幅(デフォルト値:1024)"),
|
65 |
+
gr.Number(value=768, label="高さ(デフォルト値:768)"),
|
66 |
+
gr.Radio(choices=["black", "white"], value="black", label="背景色")
|
67 |
],
|
68 |
outputs=gr.Image(type="pil"),
|
69 |
title="☁️にほんご わーどくらうど☁"
|
requirements.txt
CHANGED
@@ -5,6 +5,7 @@ altair==4.2.0
|
|
5 |
anyio==3.6.2
|
6 |
async-timeout==4.0.2
|
7 |
attrs==22.2.0
|
|
|
8 |
certifi==2022.12.7
|
9 |
charset-normalizer==2.1.1
|
10 |
click==8.1.3
|
@@ -52,6 +53,7 @@ requests==2.28.2
|
|
52 |
rfc3986==1.5.0
|
53 |
six==1.16.0
|
54 |
sniffio==1.3.0
|
|
|
55 |
starlette==0.22.0
|
56 |
toolz==0.12.0
|
57 |
typing_extensions==4.4.0
|
|
|
5 |
anyio==3.6.2
|
6 |
async-timeout==4.0.2
|
7 |
attrs==22.2.0
|
8 |
+
beautifulsoup4==4.11.1
|
9 |
certifi==2022.12.7
|
10 |
charset-normalizer==2.1.1
|
11 |
click==8.1.3
|
|
|
53 |
rfc3986==1.5.0
|
54 |
six==1.16.0
|
55 |
sniffio==1.3.0
|
56 |
+
soupsieve==2.3.2.post1
|
57 |
starlette==0.22.0
|
58 |
toolz==0.12.0
|
59 |
typing_extensions==4.4.0
|