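"""Streamlit app: renders two markdown knowledge pages, adds per-term search links,
runs an ArXiv RAG lookup through the Hugging Face Space
"awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern" (via gradio_client), and offers
simple markdown file management in the sidebar."""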
import glob
import json
import os
import re
from datetime import datetime
from urllib.parse import quote

import streamlit as st
from gradio_client import Client


# Session-state defaults: the currently selected file, the view/edit mode, and the cached file list.
if 'selected_file' not in st.session_state:
    st.session_state.selected_file = None
if 'view_mode' not in st.session_state:
    st.session_state.view_mode = 'view'
if 'files' not in st.session_state:
    st.session_state.files = []


Boxing_and_MMA_Commentary_and_Knowledge = """ |
|
# Boxing and UFC Study of 1971 - 2024 The Greatest Fights History |
|
|
|
1. In Boxing, the most heart breaking fight in Boxing was the Boom Boom Mancini fight with Duku Kim. |
|
2. After changes to Boxing made it more safe due to the heart break. |
|
3. Rehydration of the brain after weight ins loss preparation for a match is life saving change. |
|
4. Fighting went from 15 rounds to 12. |
|
|
|
# UFC By Contrast.. |
|
1. 5 Rounds of 5 Minutes each. |
|
2. Greatest UFC Fighters: |
|
- Jon Jones could be the greatest of all time (GOAT) since he never lost. |
|
- George St. Pierre |
|
- BJ Penn |
|
- Anderson Silva |
|
- Mighty Mouse MMA's heart at 125 pounds |
|
- Kabib retired 29 and 0 |
|
- Fedor Milliano |
|
- Alex Pereira |
|
- James Tony |
|
- Randy Couture |
|
3. You have to Judge them in their Championship Peak |
|
4. Chris Weidman |
|
5. Connor McGregor |
|
6. Leg Breaking - Shin calcification and breaking baseball bats |
|
|
|
# References: |
|
1. Joe Rogan - Interview #2219 |
|
2. Donald J Trump |
|
""" |
|
|
|
Multiplayer_Custom_Hosting_Game_Servers_For_Simulated_Worlds = """ |
|
# Multiplayer Simulated Worlds |
|
|
|
1. 7 Days To Die PC |
|
2. ARK: Survival Evolved PC |
|
3. Arma 3 PC |
|
4. Atlas PC |
|
5. Conan Exiles PC |
|
6. Craftopia PC |
|
7. DayZ PC |
|
8. Eco - Global Survival PC |
|
9. Empyrion - Galactic Survival PC |
|
10. Factorio PC |
|
11. Farming Simulator 19 PC |
|
12. Crossplay |
|
13. Farming Simulator 22 |
|
14. Last Oasis PC |
|
15. Last Oasis Classic PC |
|
16. Minecraft (Vanilla) PC |
|
17. Crossplay |
|
18. Path of Titans |
|
19. Rust PC |
|
20. SCP: Secret Laboratory PC |
|
21. SCUM PC |
|
22. Satisfactory PC |
|
23. Satisfactory (Experimental) PC |
|
24. Crossplay |
|
25. Space Engineers |
|
26. Terraria (tShock & Vanilla) PC |
|
27. The Forest PC |
|
28. Crossplay |
|
29. Valheim |
|
""" |
|
|
|
|
|
def extract_terms(markdown_text):
    """Strip markdown heading/list markers from each line and return the remaining terms."""
    lines = markdown_text.strip().split('\n')
    terms = []
    for line in lines:
        line = re.sub(r'^[#*\->\d\.\s]+', '', line).strip()
        if line:
            terms.append(line)
    return terms


def display_terms_with_links(terms):
    """Render each term as a bullet with emoji links to an in-app ArXiv lookup, Wikipedia, Google, YouTube, Bing, and Twitter."""
    search_urls = {
        "🚀🌌ArXiv": lambda k: f"/?q={quote(k)}",
        "📖": lambda k: f"https://en.wikipedia.org/wiki/{quote(k)}",
        "🔍": lambda k: f"https://www.google.com/search?q={quote(k)}",
        "▶️": lambda k: f"https://www.youtube.com/results?search_query={quote(k)}",
        "🔎": lambda k: f"https://www.bing.com/search?q={quote(k)}",
        "🐦": lambda k: f"https://twitter.com/search?q={quote(k)}",
    }
    for term in terms:
        links_md = ' '.join([f"[{emoji}]({url(term)})" for emoji, url in search_urls.items()])
        st.markdown(f"- **{term}** {links_md}", unsafe_allow_html=True)


def perform_ai_lookup(query):
    """Send the query to the Arxiv-Paper-Search-And-QA-RAG-Pattern Space with two LLMs and return the combined markdown result."""
    st.write("Performing AI Lookup...")

    client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")

    result1 = client.predict(
        prompt=query,
        llm_model_picked="mistralai/Mixtral-8x7B-Instruct-v0.1",
        stream_outputs=True,
        api_name="/ask_llm"
    )
    st.markdown("### Mixtral-8x7B-Instruct-v0.1 Result")
    st.markdown(result1)

    result2 = client.predict(
        prompt=query,
        llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
        stream_outputs=True,
        api_name="/ask_llm"
    )
    st.markdown("### Mistral-7B-Instruct-v0.2 Result")
    st.markdown(result2)

    combined_result = f"{result1}\n\n{result2}"
    return combined_result


def extract_urls(text):
    """Pull dates, titles, and arXiv abstract/PDF links out of the lookup result and rebuild them as markdown."""
    try:
        date_pattern = re.compile(r'### (\d{2} \w{3} \d{4})')
        abs_link_pattern = re.compile(r'\[(.*?)\]\((https://arxiv\.org/abs/\d+\.\d+)\)')
        pdf_link_pattern = re.compile(r'\[⬇️\]\((https://arxiv\.org/pdf/\d+\.\d+)\)')
        title_pattern = re.compile(r'### \d{2} \w{3} \d{4} \| \[(.*?)\]')
        date_matches = date_pattern.findall(text)
        abs_link_matches = abs_link_pattern.findall(text)
        pdf_link_matches = pdf_link_pattern.findall(text)
        title_matches = title_pattern.findall(text)

        markdown_text = ""
        # Iterate only over complete records so a missing title or link cannot raise an IndexError.
        num_entries = min(len(date_matches), len(title_matches), len(abs_link_matches), len(pdf_link_matches))
        for i in range(num_entries):
            date = date_matches[i]
            title = title_matches[i]
            abs_link = abs_link_matches[i][1]
            pdf_link = pdf_link_matches[i]
            markdown_text += f"**Date:** {date}\n\n"
            markdown_text += f"**Title:** {title}\n\n"
            markdown_text += f"**Abstract Link:** [{abs_link}]({abs_link})\n\n"
            markdown_text += f"**PDF Link:** [{pdf_link}]({pdf_link})\n\n"
            markdown_text += "---\n\n"
        return markdown_text

    except Exception as e:
        st.write(f"An error occurred in extract_urls: {e}")
        return ''


def generate_filename(prefix, content):
    """Build a filesystem-safe markdown filename from a prefix, a timestamp, and the first 50 characters of content."""
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    safe_content = re.sub(r'\W+', '_', content[:50])
    filename = f"{prefix}_{timestamp}_{safe_content}.md"
    return filename


def file_management_sidebar():
    """Sidebar: list markdown files with view/edit buttons and allow creating a new file."""
    st.sidebar.title("📁 File Management")

    md_files = [file for file in glob.glob("*.md") if os.path.basename(file).lower() != 'readme.md']
    md_files.sort()
    st.session_state.files = md_files

    if md_files:
        st.sidebar.markdown("### Markdown Files")
        for idx, file in enumerate(md_files):
            key_base = f"file_{idx}_{file}"
            col1, col2, col3 = st.sidebar.columns([6, 1, 1])
            with col1:
                st.write(file)
            with col2:
                # Use st.button so the widget lands inside the column instead of being appended to the sidebar.
                if st.button("📄", key=f"view_{key_base}"):
                    st.session_state.selected_file = file
                    st.session_state.view_mode = 'view'
            with col3:
                if st.button("✏️", key=f"edit_{key_base}"):
                    st.session_state.selected_file = file
                    st.session_state.view_mode = 'edit'

        if st.sidebar.button("Create New Markdown File"):
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            new_filename = f"note_{timestamp}.md"
            with open(new_filename, 'w', encoding='utf-8') as f:
                f.write("# New Markdown File\n")
            st.sidebar.success(f"Created new file: {new_filename}")
            st.session_state.selected_file = new_filename
            st.session_state.view_mode = 'edit'
    else:
        st.sidebar.write("No markdown files found.")
        if st.sidebar.button("Create New Markdown File"):
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            new_filename = f"note_{timestamp}.md"
            with open(new_filename, 'w', encoding='utf-8') as f:
                f.write("# New Markdown File\n")
            st.sidebar.success(f"Created new file: {new_filename}")
            st.session_state.selected_file = new_filename
            st.session_state.view_mode = 'edit'


def main():
    """Entry point: render the markdown pages, term links, query-parameter handlers, sidebar, and file viewer/editor."""
    st.title("Markdown Content with AI Lookup and File Management")

    st.markdown("## Original Markdown Content")
    st.markdown(Boxing_and_MMA_Commentary_and_Knowledge)
    st.markdown(Multiplayer_Custom_Hosting_Game_Servers_For_Simulated_Worlds)

    st.markdown("## Terms with Links")
    terms1 = extract_terms(Boxing_and_MMA_Commentary_and_Knowledge)
    terms2 = extract_terms(Multiplayer_Custom_Hosting_Game_Servers_For_Simulated_Worlds)
    all_terms = terms1 + terms2
    display_terms_with_links(all_terms)

    # If the page was opened with ?q= or ?query=, run the AI lookup and save the result to a markdown file.
    try:
        query_params = st.experimental_get_query_params()
        query_list = query_params.get('q') or query_params.get('query') or []
        if query_list:
            search_query = query_list[0]
            if len(search_query) > 1:
                st.write(f"### Search query received: {search_query}")

                ai_result = perform_ai_lookup(search_query)

                markdown_text = extract_urls(ai_result)
                st.markdown("## Extracted URLs")
                st.markdown(markdown_text)

                filename = generate_filename("AI_Result", search_query)
                with open(filename, 'w', encoding='utf-8') as f:
                    f.write(markdown_text)
                st.write(f"Generated file **{filename}** with AI lookup results.")

                # Clear the query string so a rerun does not repeat the lookup.
                st.experimental_set_query_params()

                st.session_state.selected_file = filename
                st.session_state.view_mode = 'view'
    except Exception as e:
        st.write(f"An error occurred while processing query parameters: {e}")

    # Additional query-parameter handling: ?action=show_message, ?action=clear, and ?query=...
    params = st.experimental_get_query_params()
    if 'action' in params:
        action_list = params['action']
        if action_list:
            action = action_list[0]
            if action == 'show_message':
                st.success("Showing a message because 'action=show_message' was found in the URL.")
            elif action == 'clear':
                st.experimental_set_query_params()
    if 'query' in params:
        query_list = params['query']
        if query_list:
            query = query_list[0]
            st.write(f"Displaying content for query: {query}")

    file_management_sidebar()

    # Show the selected file in view or edit mode.
    selected_file = st.session_state.get('selected_file')
    if selected_file:
        view_mode = st.session_state.get('view_mode', 'view')
        if os.path.exists(selected_file):
            if view_mode == 'view':
                st.markdown(f"### Viewing {selected_file}")
                with open(selected_file, 'r', encoding='utf-8') as f:
                    file_content = f.read()
                st.markdown(file_content)
            elif view_mode == 'edit':
                st.markdown(f"### Editing {selected_file}")
                with open(selected_file, 'r', encoding='utf-8') as f:
                    file_content = f.read()
                edited_content = st.text_area("Edit the markdown content", file_content, height=400)
                if st.button("Save Changes"):
                    with open(selected_file, 'w', encoding='utf-8') as f:
                        f.write(edited_content)
                    st.success(f"Changes saved to {selected_file}")
                    # Refresh the cached file list after saving.
                    st.session_state.files = [file for file in glob.glob("*.md") if os.path.basename(file).lower() != 'readme.md']
        else:
            st.error("Selected file does not exist.")
    else:
        st.info("No file selected.")


if __name__ == "__main__": |
|
main() |
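# Usage note (a sketch, not part of the original script): run locally with
#   streamlit run <this_file>.py
# assuming the `streamlit` and `gradio_client` packages are installed; the AI lookup
# depends on the public Space "awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern" being reachable.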