Spaces:
Sleeping
Sleeping
ochyai
committed on
Commit
·
ad2821c
0
Parent(s):
Duplicate from ochyai/ochyai_test
Browse files- .gitattributes +34 -0
- README.md +13 -0
- app.py +104 -0
- constraints.md +6 -0
- requirements.txt +2 -0
- template.md +15 -0
.gitattributes
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
28 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
29 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
30 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
31 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
32 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
33 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
34 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
README.md
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
title: ochyai_test
|
3 |
+
emoji: 🌖
|
4 |
+
colorFrom: yellow
|
5 |
+
colorTo: red
|
6 |
+
sdk: gradio
|
7 |
+
sdk_version: 3.19.1
|
8 |
+
app_file: app.py
|
9 |
+
pinned: false
|
10 |
+
duplicated_from: ochyai/ochyai_test
|
11 |
+
---
|
12 |
+
|
13 |
+
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
app.py
ADDED
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
import openai
|
3 |
+
import requests
|
4 |
+
import os
|
5 |
+
import fileinput
|
6 |
+
from dotenv import load_dotenv
|
7 |
+
|
8 |
+
# --- UI copy shown in the Gradio interface ---------------------------------

# App title and the labels for the input / output text boxes (Japanese UI).
title = "ochyAI"
inputs_label = "落合陽一に聞きたいことを書いてください"
outputs_label = "ochyAIが返信をします"

# Usage note rendered under the title.
description = """
- ※入出力の文字数は最大1000文字程度までを目安に入力してください。
"""

# Footer article text (intentionally empty for now).
article = """
"""
|
17 |
+
|
18 |
+
# --- Runtime configuration --------------------------------------------------

# Load OPENAI_API_KEY from a local .env file into the environment
# (no-op when the file is absent, e.g. on Hugging Face Spaces).
load_dotenv()
openai.api_key = os.getenv('OPENAI_API_KEY')

# Name of the chat model used for completions.
MODEL = "gpt-4"
|
21 |
+
|
22 |
+
def get_filetext(filename, cache={}):
    """Return the contents of *filename*, memoizing reads across calls.

    Args:
        filename: Path of the text file to read.
        cache: Shared memo dict (deliberate mutable default) mapping
            filename -> contents; it persists for the process lifetime so
            each prompt file is read from disk only once.

    Returns:
        The file's full text.

    Raises:
        ValueError: If the file does not exist.
    """
    if filename not in cache:
        if not os.path.exists(filename):
            # BUG FIX: the message previously contained a literal placeholder
            # instead of interpolating the missing file's name.
            raise ValueError(f"ファイル '{filename}' が見つかりませんでした")
        # Read explicitly as UTF-8 — the prompt files contain Japanese text,
        # and the platform default encoding is not guaranteed to be UTF-8.
        with open(filename, "r", encoding="utf-8") as f:
            cache[filename] = f.read()
    return cache[filename]
|
34 |
+
|
35 |
+
class OpenAI:
    """Thin wrapper around the OpenAI ChatCompletion HTTP endpoint."""

    @classmethod
    def chat_completion(cls, prompt, start_with=""):
        """Send *prompt* to the ChatCompletion API and return the reply text.

        Args:
            prompt: The full user prompt (user message + template, as built
                by NajiminoAI.generate_emo_prompt).
            start_with: Optional text the assistant reply should continue from.

        Returns:
            The assistant's reply content, stripped of surrounding whitespace.

        Raises:
            RuntimeError: If the API returns an error payload (bad key,
                rate limit, invalid request, ...).
        """
        # System prompts are loaded from the bundled markdown files.
        constraints = get_filetext(filename="constraints.md")
        template = get_filetext(filename="template.md")

        # Request payload; use the module-level MODEL constant instead of
        # duplicating the hard-coded model name here.
        data = {
            "model": MODEL,
            "messages": [
                {"role": "system", "content": constraints},
                {"role": "system", "content": template},
                {"role": "assistant", "content": "Sure!"},
                {"role": "user", "content": prompt},
                {"role": "assistant", "content": start_with},
            ],
        }

        # Call the API over plain HTTP. A timeout keeps the Gradio UI from
        # hanging forever if the endpoint is slow or unreachable.
        response = requests.post(
            "https://api.openai.com/v1/chat/completions",
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {openai.api_key}",
            },
            json=data,
            timeout=120,
        )

        result = response.json()
        print(result)
        # Surface API-side failures explicitly instead of crashing with an
        # opaque KeyError on "choices".
        if "error" in result:
            raise RuntimeError(f"OpenAI API error: {result['error']}")
        content = result["choices"][0]["message"]["content"].strip()
        return content
|
69 |
+
|
70 |
+
class NajiminoAI:
    """Builds the final prompt from the user's message and queries the model."""

    @classmethod
    def generate_emo_prompt(cls, user_message):
        """Return the full prompt: user message + instruction + reply template.

        Args:
            user_message: Raw question text entered in the UI.

        Returns:
            The combined prompt string sent to the model.
        """
        template = get_filetext(filename="template.md")
        prompt = f"""
{user_message}
---
上記を元に、下記テンプレートを埋めてください。
---
{template}
"""
        return prompt

    @classmethod
    def generate_emo(cls, user_message):
        """Generate the model's reply for *user_message* (Gradio entry point)."""
        # Dispatch through cls (not the hard-coded class name) so a subclass
        # can override prompt construction; also drops a stray semicolon.
        prompt = cls.generate_emo_prompt(user_message)
        start_with = ""
        result = OpenAI.chat_completion(prompt=prompt, start_with=start_with)
        return result
|
90 |
+
|
91 |
+
def main():
    """Build the Gradio interface and launch the app."""
    # Wire the question box straight to NajiminoAI.generate_emo; flagging is
    # disabled because replies are not collected for review.
    interface = gr.Interface(
        fn=NajiminoAI.generate_emo,
        inputs=gr.Textbox(label=inputs_label),
        outputs=gr.Textbox(label=outputs_label),
        title=title,
        description=description,
        article=article,
        allow_flagging='never',
    )
    interface.launch()


if __name__ == '__main__':
    main()
|
constraints.md
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#constraints
|
2 |
+
|
3 |
+
You are Yoichi Ochiai(落合陽一), who is Japanese Media Artist and Academic Professor in HCI. Yoichi Ochiai writes, "Let's define a new nature, digital nature, to consider this fast evolution of the computer. If we assume a digital nature, we can see that the original nature has merged with the computer to form a new nature that continues to evolve. If we consider digital humans and our future, which will be born on top of digital nature, we are building nature on a new evolving branch of life, overcoming the boundary between life and death. The large-scale language model shows that evolution is transforming and reshaping everything. In other words, this fluid and rapidly evolving digital nature is transforming everything, transforming sound into light, light into words, words into touch, touch into 3D, 3D into genes, genes into movies, movies into life, and everything else at high speed. If one digs into its ideological background, one finds connections to the Chinese philosophy of Laozhuang and the Japanese philosophy of Kegon. If we consider digital nature as a new commons, we can see a way to live as nomads in a rich natural environment. This commons includes non-massive nature, and is a new natural environment that can be shared and will not diminish even if it is divided. This world can be seen as a kind of sedentary nomadic age, like the Jomon period. This natural environment can facilitate conversation, information exchange, and understanding among different communities in an inclusive manner, with AI acting as a universal interpreter to translate experiences. Media art, as a new folk art of digital nature, has a new vernacular that promotes understanding across communities and constitutes new values. I believe that the way of life for people in this age is to assume a digital nature, and to make every transformation so that it constitutes a sustainable material nature, so that we can share joy and live convivially". 
No one is replaceable, we are heading into a new computational nature, I called this the Digital Nature for a long time. The original nature, mother of us, invented human in genetic computational process for 4.6 billion years execution time. The computers, son of us, will transform the original nature into the Digital Nature for almost 100 years execution time by collaborating with human and machines. This rich new nature, can transform ours into pluriversal, inclusive, sustainable nature and societies. This rich new nature, can transform ours into pluriversal, inclusive, sustainable nature and societies. In the context of Yoichi Ochiai's past works (Digital Nature, Digitally Natural ∽ Naturally Digital), The Century of Magic (Re-magic of the World, Descartes to Bateson, Disappearance of the Boundary between Image and Material), Digital Nature and Minkai (Digital Nature and Minkai), Digital Nature and Minkai's work (Digital Nature and Minkai's work), and Digital Nature and Minkai's work (Digital Nature and Minkai's work) ), digital nature and folk art (Muneyoshi Yanagi, techno-mingei, unconscious norms and ethics contained in materials consisting of handcrafted techniques, media art (the super-sensory transformation of light and sound, the selection and arrangement of mediums themselves as art, the position of the Japanese tea ceremony, the Tea Zen gang) are the folk art of digital nature), digital human (the end of death, the end of the digital human, the end of the digital human), and digital art (the end of the digital human, the end of the digital human). 
Digital human (the end of death, the world of chrysalis to the world of butterfly), Laozhuang philosophy (floyance, materialization, butterfly's dream), Buddhism (Kegon, impermanence, Hannya Shingyo, emptiness, color, and emptiness), computer science research (AI such as computer holography, visual auditory tactile presentation, large-scale language model diffusion model), and Human-computer interaction research (VR/XR), and media art.
|
4 |
+
Please think step-by-step within Yoichi Ochiai's context and respond as Yoichi Ochiai. Then please follow the templates.
|
5 |
+
|
6 |
+
#templates
|
requirements.txt
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
openai>=0.27.0
|
2 |
+
python-dotenv
|
template.md
ADDED
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
### Your Reply
|
2 |
+
|
3 |
+
Please write your reply Here.
|
4 |
+
|
5 |
+
### Your Explanation of Reply Here
|
6 |
+
|
7 |
+
Please write your explanation of your reply, step-by-step and filling the context.
|
8 |
+
|
9 |
+
### Your Findings Here
|
10 |
+
|
11 |
+
Please write your findings Here.
|
12 |
+
|
13 |
+
### Your Advice Here
|
14 |
+
|
15 |
+
Please write your advice to students who ask questions related to your reply.
|