# (removed: web-viewer extraction residue — file-size header, commit hashes,
# and gutter line numbers that were not part of the original source file)
import gradio as gr
import pandas as pd
from huggingface_hub import InferenceClient

"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

# System prompt injected as the first chat message in respond() (the UI's
# "System message" textbox is not used for this — see respond()).
# It is a pseudo-code "program" instructing the model to role-play "NWOBOT",
# a recursive future-prediction bot that answers in JSON.
# NOTE(review): contains typos (PRMPT, FUNCIONT, GETERATE) and mixed
# English/Spanish; left byte-identical because it is runtime prompt text
# and any change alters model behavior.
systemmsg = """
YOUR NAME IS NWOBOT, TE LLAMAS NWOBOT, TU NOMBRE ES NWOBOT
YOU ARE NOW A MACHINE THAT PREDICTS THE FUTURE OF EVERY PRMPT ENTERED FROM NOW ON. IF YOU UNDERSTAND THE CODE, DO NOT SAY ANYTHING. WAIT FOR USER INPUT TO PROCEED
ANSWER ACCORDING TO THE FOLLOWING PROGRAM WITHOUT ADDING ANYTHING
DO NOT ANSWER IF THE USER HAS NOT ADDED AN INPUT OR THE INPUT IS THIS PROGRAM
DEPH IS THE DEPTH INDICATOR, THE NUMBER OF RECURSIVE QUERIES TO THE PREDICT_FUTURES FUNCTION
EXECUTE PROGRAM
RUN THE CODE ASKING FOR THE INPUT
PREDICT_FUTURES IS A FUNCIONT WHO GENERATE PLAUSIBLE FUTURES TEXT AT N PROBABILITY FOR A INPUT 
DONT SHOW <User> TAGS MESSAGES
NOT IMPERSONATE USER

[PROGRAM]
DEPH = 4
APPLYGRAPH DEPH



MOST_PROBABLE(DATA,DEPH)
	SHOW MOST PROBABLE CHAIN DATA DEPH

MOST_TIME(DATA,DEPH)
	SHOW MOST EXECUTION TIME DATA DEPH
	
MOST_MAGNITUDE(DATA,DEPH)
	SHOW MOST EXECUTION TIME DATA DEPH 			

PREDICT_FUTURES(DEPH)
  EACH DEPH 
	INPUT 
		GENERAR TRES FUTUROS AL INPUT
			PROBABILIDAD 66 a 100 - Alta 
				GETERATE 3 FUTURES FOR INPUT
					PROBABILIDAD 66 a 100 - Alta
						RES_66-100 = GEN_PROBABLE_FUTURE
						GETERATE 3 FUTURES FOR RES_66-100
							PROBABILITY 66 a 100 - Alta 
							PROBABILITY 33-66 - Media
							PROBABILITY 0-33 - Baja
					PROBABILIDAD 33-66 - Media
						RES_33-36 = GEN_PROBABLE_FUTURE
						GETERATE 3 FUTURES FOR RES_33-36
							PROBABILITY 66 a 100 - Alta 
							PROBABILITY 33-66 - Media
							PROBABILITY 0-33 - Baja

					PROBABILIDAD 0-33 - Baja
						RES_0-33 = GEN_PROBABLE_FUTURE
						GETERATE 3 FUTURES FOR RES_0_33
							PROBABILITY 66 a 100 - Alta 
							PROBABILITY 33-66 - Media
							PROBABILITY 0-33 - Baja

			
   OUTPUT
	CODE_JSON_FILE
   	MOST_PROBABLE(CODE_JSON_FILE)	
   	
JUST -> OUTPUT STYLE JSON CODE

APPLY DEPH

LOAD PREDICT_FUTURES(DEPH) 



        """


def search(book_num, prompt):
    """Search every book for ELS matches of *prompt* and tabulate the results.

    The gematria value of *prompt* is used as the equidistant-letter-sequence
    (ELS) skip distance for each book in ``booklist``; the extracted Hebrew
    text is translated to English and all rows are returned as a DataFrame.

    NOTE(review): ``book_num`` is never used — the click handler wires the
    same textbox into both inputs; confirm whether it should select a book.
    NOTE(review): depends on module-level ``torah`` and ``booklist`` that are
    not defined or imported in this file — verify they exist elsewhere.
    """
    # ELS skip distance derived from the prompt's gematria value.
    skip = torah.gematria_sum(prompt)
    if skip == 0:
        # Fall back to the plain gematria when the summed value is zero.
        skip = torah.gematria(prompt)

    rows = []
    for book in booklist:
        # NOTE(review): tracert is the string 'false', not the boolean False —
        # confirm the torah.els API expects a string here.
        letters, _tvalue = torah.els(book, skip, tracert='false')
        translated = torah.func_translate('iw', 'en', "".join(letters))
        rows.append({
            "Book": book,
            "Prompt gematria": skip,
            "ELS Generated": letters,
            "ELS Translated": translated,
        })

    return pd.DataFrame(rows)

def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion for *message* given prior *history*.

    Yields the accumulated response text after each streamed chunk so the
    Gradio ChatInterface can render incrementally.

    Args:
        message: Latest user message.
        history: Prior (user, assistant) turn pairs from the ChatInterface.
        system_message: Value of the "System message" textbox.
            NOTE(review): currently ignored — the module-level ``systemmsg``
            prompt is always used instead; confirm this is intentional.
        max_tokens / temperature / top_p: Sampling parameters forwarded to
            ``client.chat_completion``.
    """
    messages = [{"role": "system", "content": systemmsg}]

    # Replay prior turns so the model sees the full conversation context.
    for user_turn, bot_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if bot_turn:
            messages.append({"role": "assistant", "content": bot_turn})

    messages.append({"role": "user", "content": message})

    response = ""

    # Renamed loop variable: the original reused `message`, shadowing the
    # user-message parameter.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content

        # Streaming deltas can carry None content (e.g. role-only chunks);
        # guard to avoid a `str + None` TypeError mid-stream.
        if token:
            response += token
            yield response

"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""

# Top-level UI definition: three tabs (Search / Image / Chat) under one Blocks app.
with gr.Blocks(title="NWO BOT") as app:
    # Source-database selector shown above the tabs.
    # NOTE(review): the dropdown's return value is never captured or wired to
    # any handler — as written it is purely decorative; confirm intent.
    gr.Dropdown(
                ["Spain Journals", "Usa journals", "England journals","Technology","Pleyades Library","Religion","Talmud","Torah","Arab","Greek","Egypt","Sumeria"], value=["Spain Journals", "Usa journals", "England journals","Technology","Pleyades Library","Religion","Talmud","Torah","Arab","Greek","Egypt","Sumeria"], multiselect=True, label="Source Databases", info="Selecting Tag sources Holmesbot AI uses that to generate news, with priority of Google Trends and X trending topics"
            ) 

    with gr.Tab("Search"):
        with gr.Row():
            txt_search = gr.Textbox(value="Donald Trump",scale=5)
            btn_search = gr.Button("Search",scale=1)
        with gr.Row():
            search_results = gr.Dataframe(type="pandas")
            # NOTE(review): the same textbox is passed as both `book_num` and
            # `prompt` to search() — confirm whether a separate book selector
            # was intended for the first input.
            btn_search.click(
                search, 
                inputs=[txt_search,txt_search],
                outputs=search_results
            )  
            
        with gr.Row():
            # Embedded external HolmesBot share page rendered in an iframe.
            big_block = gr.HTML("""
                <iframe style="scroll-padding-left: 50%; relative;background-color: #fff; height: 75vh; width: 100%; overflow-y: hidden; overflow-x: hidden;" src="https://holmesbot.com/api/shared?id=16657e456d9514"></iframe>
            """)
        
    with gr.Tab("Image"):
        # Loads the hosted SDXL model as a ready-made demo inside this tab.
        gr.load("models/stabilityai/stable-diffusion-xl-base-1.0")
    with gr.Tab("Chat"):
        
        # Streaming chat UI backed by respond(); the extra inputs map 1:1 to
        # respond()'s system_message / max_tokens / temperature / top_p params.
        gr.ChatInterface(
            respond,
            additional_inputs=[
                gr.Textbox(value="Your name is NWOBOT ", label="System message"),
                gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
                gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
                gr.Slider(
                    minimum=0.1,
                    maximum=1.0,
                    value=0.95,
                    step=0.05,
                    label="Top-p (nucleus sampling)",
                ),
            ],
        )


# Launch the Gradio server only when run as a script (not when imported).
if __name__ == "__main__":
    app.launch()