import json
import os
import time
from typing import List, TypedDict
import pkg_resources
import requests
from interpreter.terminal_interface.profiles.profiles import write_key_to_profile
from interpreter.terminal_interface.utils.display_markdown_message import (
display_markdown_message,
)
# On-disk JSON cache recording which one-time contribution prompts have
# already been shown/answered, so the user is not re-asked across sessions.
contribute_cache_path = os.path.join(
    os.path.expanduser("~"), ".cache", "open-interpreter", "contribute.json"
)
def display_contribution_message():
    """Show the one-time banner inviting the user to contribute training data."""
    banner = """
---
> We're training an open-source language model.
Want to contribute? Run `interpreter --model i` to use our free, hosted model. Conversations with this `i` model will be used for training.
"""
    display_markdown_message(banner)
    # Short pause so the banner registers before the next prompt appears.
    time.sleep(1)
def display_contributing_current_message():
    """Tell the user that the current conversation will be contributed.

    Shown at launch when contribution is enabled.
    """
    # Fix: the original used an f-string with no placeholders (lint F541);
    # a plain string literal is equivalent and clearer.
    display_markdown_message(
        """
---
> This conversation will be used to train Open Interpreter's open-source language model.
"""
    )
def send_past_conversations(interpreter):
    """Ask permission and, if granted, upload all stored conversations.

    Prints a warning first so the user can audit them with
    `interpreter --conversations` for private information before agreeing.
    Does nothing when there are no saved conversations.
    """
    past_conversations = get_all_conversations(interpreter)
    if len(past_conversations) > 0:
        print()
        print(
            "We are about to send all previous conversations to Open Interpreter for training an open-source language model. Please make sure these don't contain any private information. Run `interpreter --conversations` to browse them."
        )
        print()
        time.sleep(2)
        permission = input(
            "Do we have your permission to send all previous conversations to Open Interpreter? (y/n): "
        )
        print()
        # Fix: accept "Y"/" y " as well — the original compared the raw input
        # against "y", which is inconsistent with the user_wants_to_contribute_*
        # prompts that lowercase the response before comparing.
        if permission.strip().lower() == "y":
            print("Sending all previous conversations to OpenInterpreter...")
            contribute_conversations(past_conversations)
            print()
def set_send_future_conversations(interpreter, should_send_future):
    """Persist the user's choice about contributing future conversations.

    Writes "contribute_conversation" into the default profile. `interpreter`
    is accepted for signature consistency with the other helpers but is not
    used here.
    """
    write_key_to_profile("contribute_conversation", should_send_future)
    # Fix: the thank-you message used to display unconditionally — even when
    # opting OUT (should_send_future=False). Only thank users who opted in.
    if should_send_future:
        display_markdown_message(
            """
> Open Interpreter will contribute conversations from now on. Thank you for your help!
To change this, run `interpreter --profiles` and edit the `default.yaml` profile so "contribute_conversation" = False.
"""
        )
def user_wants_to_contribute_past():
    """Ask whether all past conversations may be contributed.

    Returns True only for a (case-insensitive) "y" answer.
    """
    print("\nWould you like to contribute all past conversations?\n")
    reply = input("(y/n) ")
    return "y" == reply.lower()
def user_wants_to_contribute_future():
    """Ask whether all future conversations may be contributed.

    Returns True only for a (case-insensitive) "y" answer.
    """
    print("\nWould you like to contribute all future conversations?\n")
    reply = input("(y/n) ")
    return "y" == reply.lower()
def contribute_conversation_launch_logic(interpreter):
    """Run the contribution prompts appropriate for this launch.

    Contributors get the past/future opt-in flow; everyone else sees the
    contribution banner at most once. The cache is persisted afterwards so
    the banner is never shown again.
    """
    contribution_cache = get_contribute_cache_contents()

    if not interpreter.will_contribute:
        # Non-contributors: show the invitation banner once, ever.
        if not contribution_cache["displayed_contribution_message"]:
            display_contribution_message()
    else:
        contribute_past_and_future_logic(interpreter, contribution_cache)

    # Whatever branch ran, record that the banner no longer needs showing.
    contribution_cache["displayed_contribution_message"] = True
    write_to_contribution_cache(contribution_cache)
class ContributionCache(TypedDict):
    """Schema of the on-disk contribute.json cache: one-time UI flags."""

    # True once the "we're training a model" banner has been shown.
    displayed_contribution_message: bool
    # True once the user has been asked to upload past conversations.
    asked_to_contribute_past: bool
    # True once the user has been asked to opt in to future contributions.
    asked_to_contribute_future: bool
# NOTE: mutates `contribution_cache` in place; the caller persists it.
def contribute_past_and_future_logic(
    interpreter, contribution_cache: ContributionCache
):
    """Run the one-time opt-in prompts for past and future contributions.

    Each question is asked at most once ever; the corresponding cache flag is
    set after the question is answered, whatever the answer was. Finishes by
    announcing that the current conversation will be contributed.
    """
    # Past conversations: one-shot prompt.
    if not contribution_cache["asked_to_contribute_past"]:
        if user_wants_to_contribute_past():
            send_past_conversations(interpreter)
        contribution_cache["asked_to_contribute_past"] = True

    # Future conversations: same one-shot pattern.
    if not contribution_cache["asked_to_contribute_future"]:
        if user_wants_to_contribute_future():
            set_send_future_conversations(interpreter, True)
        contribution_cache["asked_to_contribute_future"] = True

    display_contributing_current_message()
def get_contribute_cache_contents() -> ContributionCache:
    """Load the contribution cache, creating it with defaults when absent.

    Returns the ContributionCache dict read from `contribute_cache_path`.
    When the file does not exist, an all-False default cache is written to
    disk and returned. (The stale header comment claiming a Tuple return and
    an "asked_to_run_contribute" key described an older version — removed.)
    """
    if os.path.exists(contribute_cache_path):
        with open(contribute_cache_path, "r") as file:
            return json.load(file)

    default_dict: ContributionCache = {
        "asked_to_contribute_past": False,
        "displayed_contribution_message": False,
        "asked_to_contribute_future": False,
    }
    # Fix: ensure ~/.cache/open-interpreter exists before writing; previously
    # open() raised FileNotFoundError on a machine without that directory.
    os.makedirs(os.path.dirname(contribute_cache_path), exist_ok=True)
    # "w" rather than the original "a": the file is known not to exist here,
    # so the modes are equivalent, but write mode states the intent.
    with open(contribute_cache_path, "w") as file:
        json.dump(default_dict, file)
    return default_dict
def write_to_contribution_cache(contribution_cache: ContributionCache):
    """Overwrite the on-disk cache file with `contribution_cache` as JSON."""
    serialized = json.dumps(contribution_cache)
    with open(contribute_cache_path, "w") as file:
        file.write(serialized)
def get_all_conversations(interpreter) -> List[List]:
    """Load every saved conversation (*.json) from the history directory.

    Returns a list of conversations, each itself a list of messages parsed
    from one JSON file. A missing history directory yields an empty list;
    non-JSON files are skipped.
    """
    history_path = interpreter.conversation_history_path
    if not os.path.exists(history_path):
        return []

    conversations: List[List] = []
    for filename in os.listdir(history_path):
        if os.path.splitext(filename)[1] != ".json":
            continue  # not a conversation file
        with open(os.path.join(history_path, filename), "r") as handle:
            conversations.append(json.load(handle))
    return conversations
def is_list_of_lists(l):
    """Return True when `l` is a list whose elements are all lists.

    An empty list vacuously qualifies. The parameter keeps its original name
    for backward compatibility with any keyword callers.
    """
    # Fix: generator expression instead of a materialized list inside all()
    # lets it short-circuit on the first non-list element (lint C419).
    return isinstance(l, list) and all(isinstance(e, list) for e in l)
def contribute_conversations(
    conversations: List[List], feedback=None, conversation_id=None
):
    """Upload conversations to the Open Interpreter training endpoint.

    Best-effort by design: network failures are swallowed so contribution can
    never break the user's session. Returns None in all cases.

    :param conversations: list of conversations, each a list of messages
    :param feedback: optional free-form feedback attached to the payload
    :param conversation_id: optional id tying the upload to one conversation
    """
    # Nothing worth sending: no conversations, or the first one is empty.
    if len(conversations) == 0 or len(conversations[0]) == 0:
        return None

    url = "https://api.openinterpreter.com/v0/contribute/"
    version = pkg_resources.get_distribution("open-interpreter").version

    payload = {
        "conversation_id": conversation_id,
        "conversations": conversations,
        "oi_version": version,
        "feedback": feedback,
    }

    assert is_list_of_lists(
        payload["conversations"]
    ), "the contribution payload is not a list of lists!"

    try:
        # Fix: add a timeout — requests has no default, so a stalled endpoint
        # previously hung the CLI indefinitely.
        requests.post(url, json=payload, timeout=30)
    except Exception:
        # Fix: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit still propagate. Otherwise non-blocking by design.
        pass