Dataset columns:
  code — string, lengths 419 to 138k characters
  apis — sequence, 1 to 8 entries
  extract_api — string, lengths 67 to 7.3k characters
package example; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.utils.TikTokensUtil; import java.util.ArrayList; import java.util.List; class TikTokensExample { public static void main(String... args) { List<ChatMessage> messages = new ArrayList<>(); messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "Hello OpenAI 1.")); messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "Hello OpenAI 2. ")); int tokens_1 = TikTokensUtil.tokens(TikTokensUtil.ModelEnum.GPT_3_5_TURBO.getName(), messages); int tokens_2 = TikTokensUtil.tokens(TikTokensUtil.ModelEnum.GPT_3_5_TURBO.getName(), "Hello OpenAI 1."); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.utils.TikTokensUtil.ModelEnum.GPT_3_5_TURBO.getName" ]
[((409, 439), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((499, 529), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((600, 647), 'com.theokanning.openai.utils.TikTokensUtil.ModelEnum.GPT_3_5_TURBO.getName'), ((704, 751), 'com.theokanning.openai.utils.TikTokensUtil.ModelEnum.GPT_3_5_TURBO.getName')]
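The extract_api column pairs character spans in the flattened code string with the fully qualified API each span resolves to. A minimal sketch of how such a span can be recovered, assuming the offsets are 0-based and end-exclusive indexes into the code string (the string and the offsets below are stand-ins, not values from a real row):

public class ExtractApiSpan {
    public static void main(String[] args) {
        // Stand-in for the flattened source stored in the `code` column.
        String code = "messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), \"Hello\"));";

        // Hypothetical span; real rows store pairs such as (409, 439).
        int start = 29;
        int end = 59;

        // Assuming 0-based, end-exclusive offsets, the span is just a substring.
        String span = code.substring(start, end);
        System.out.println(span); // prints: ChatMessageRole.SYSTEM.value()
    }
}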
package com.ashin.util; import com.ashin.client.GptClient; import com.ashin.config.GptConfig; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import lombok.Getter; import org.springframework.stereotype.Component; import javax.annotation.PostConstruct; import javax.annotation.Resource; import java.util.*; /** * bot工具类 * * @author ashinnotfound * @date 2023/2/1 */ @Component public class BotUtil { @Resource private GptConfig gptConfig; @Resource private GptClient gptClient; @Resource private Tokenizer tokenizer; private final Map<String, List<ChatMessage>> PROMPT_MAP = new HashMap<>(); private final Map<OpenAiService, Integer> COUNT_FOR_OPEN_AI_SERVICE = new HashMap<>(); @Getter private ChatCompletionRequest.ChatCompletionRequestBuilder completionRequestBuilder; private final List<ChatMessage> BASIC_PROMPT_LIST = new ArrayList<>(); @PostConstruct public void init() { completionRequestBuilder = ChatCompletionRequest.builder().model(gptConfig.getModel()).temperature(gptConfig.getTemperature()).maxTokens(gptConfig.getMaxToken()); for (OpenAiService openAiService : gptClient.getOpenAiServiceList()) { COUNT_FOR_OPEN_AI_SERVICE.put(openAiService, 0); } for (String prompt : gptConfig.getBasicPrompt()){ BASIC_PROMPT_LIST.add(new ChatMessage("system", prompt)); } } public OpenAiService getOpenAiService() { //获取使用次数最小的openAiService 否则获取map中的第一个 Optional<OpenAiService> openAiServiceToUse = COUNT_FOR_OPEN_AI_SERVICE.entrySet().stream() .min(Map.Entry.comparingByValue()) .map(Map.Entry::getKey); if (openAiServiceToUse.isPresent()) { COUNT_FOR_OPEN_AI_SERVICE.put(openAiServiceToUse.get(), COUNT_FOR_OPEN_AI_SERVICE.get(openAiServiceToUse.get()) + 1); return openAiServiceToUse.get(); } else { COUNT_FOR_OPEN_AI_SERVICE.put(COUNT_FOR_OPEN_AI_SERVICE.keySet().iterator().next(), COUNT_FOR_OPEN_AI_SERVICE.get(COUNT_FOR_OPEN_AI_SERVICE.keySet().iterator().next()) + 1); return COUNT_FOR_OPEN_AI_SERVICE.keySet().iterator().next(); } } public List<ChatMessage> buildPrompt(String sessionId, String newPrompt) { if (!PROMPT_MAP.containsKey(sessionId)) { if (!BASIC_PROMPT_LIST.isEmpty()){ List<ChatMessage> promptList = new ArrayList<>(BASIC_PROMPT_LIST); PROMPT_MAP.put(sessionId, promptList); } } List<ChatMessage> promptList = PROMPT_MAP.getOrDefault(sessionId, new ArrayList<>()); promptList.add(new ChatMessage("user", newPrompt)); if (tokenizer.countMessageTokens(gptConfig.getModel(), promptList) > gptConfig.getMaxToken()){ List<ChatMessage> tempChatMessage = deleteFirstPrompt(sessionId); if (tempChatMessage != null){ return buildPrompt(sessionId, newPrompt); } return null; } return promptList; } public boolean isPromptEmpty(String sessionId){ if (!PROMPT_MAP.containsKey(sessionId)){ return true; } return PROMPT_MAP.get(sessionId).size() == BASIC_PROMPT_LIST.size(); } public List<ChatMessage> deleteFirstPrompt(String sessionId) { if (!isPromptEmpty(sessionId)){ int index = BASIC_PROMPT_LIST.size(); List<ChatMessage> promptList = PROMPT_MAP.get(sessionId); //问 promptList.remove(index); //答 if (index < promptList.size()){ promptList.remove(index); return promptList; }else { // 已经是初始聊天记录 return null; } } // 已经是初始聊天记录 return null; } public void resetPrompt(String sessionId) { PROMPT_MAP.remove(sessionId); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1115, 1249), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1115, 1214), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1115, 1174), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
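BotUtil above only assembles the ChatCompletionRequest builder; it never executes it. A minimal, self-contained sketch of the full round trip against OpenAiService, using only calls that appear in the rows above (the model name, temperature, and the OPENAI_API_KEY environment variable are placeholder assumptions):

import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import java.time.Duration;
import java.util.List;

class ChatRoundTripSketch {
    public static void main(String[] args) {
        // Assumed: the API key is supplied via an environment variable.
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"), Duration.ofSeconds(30));

        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model("gpt-3.5-turbo")            // placeholder model name
                .temperature(0.7)
                .maxTokens(256)
                .messages(List.of(
                        new ChatMessage("system", "You are a helpful assistant."),
                        new ChatMessage("user", "Say hello.")))
                .build();

        // Each choice carries one generated ChatMessage.
        service.createChatCompletion(request)
                .getChoices()
                .forEach(c -> System.out.println(c.getMessage().getContent()));

        service.shutdownExecutor();
    }
}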
package com.odde.doughnut.services.openAiApis; import static com.odde.doughnut.services.openAiApis.ApiExecutor.blockGet; import static java.lang.Thread.sleep; import com.fasterxml.jackson.databind.JsonNode; import com.odde.doughnut.controllers.dto.AiCompletionAnswerClarifyingQuestionParams; import com.odde.doughnut.exceptions.OpenAIServiceErrorException; import com.odde.doughnut.services.ai.OpenAIChatGPTFineTuningExample; import com.theokanning.openai.assistants.Assistant; import com.theokanning.openai.assistants.AssistantRequest; import com.theokanning.openai.client.OpenAiApi; import com.theokanning.openai.completion.chat.*; import com.theokanning.openai.fine_tuning.FineTuningJob; import com.theokanning.openai.fine_tuning.FineTuningJobRequest; import com.theokanning.openai.fine_tuning.Hyperparameters; import com.theokanning.openai.image.CreateImageRequest; import com.theokanning.openai.image.ImageResult; import com.theokanning.openai.messages.Message; import com.theokanning.openai.messages.MessageRequest; import com.theokanning.openai.model.Model; import com.theokanning.openai.runs.Run; import com.theokanning.openai.runs.RunCreateRequest; import com.theokanning.openai.runs.SubmitToolOutputRequestItem; import com.theokanning.openai.runs.SubmitToolOutputsRequest; import com.theokanning.openai.threads.Thread; import com.theokanning.openai.threads.ThreadRequest; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Optional; import okhttp3.MediaType; import okhttp3.RequestBody; import org.springframework.http.HttpStatus; public class OpenAiApiHandler { private final OpenAiApi openAiApi; public OpenAiApiHandler(OpenAiApi openAiApi) { this.openAiApi = openAiApi; } public String getOpenAiImage(String prompt) { CreateImageRequest completionRequest = CreateImageRequest.builder().prompt(prompt).responseFormat("b64_json").build(); ImageResult choices = blockGet(openAiApi.createImage(completionRequest)); return choices.getData().get(0).getB64Json(); } public Optional<JsonNode> getFunctionCallArguments(ChatCompletionRequest chatRequest) { return getFunctionCall(chatRequest).map(ChatFunctionCall::getArguments); } public Optional<ChatFunctionCall> getFunctionCall(ChatCompletionRequest chatRequest) { return chatCompletion(chatRequest) // .map(x->{ // System.out.println(chatRequest); // System.out.println(x); // return x; // }) .map(ChatCompletionChoice::getMessage) .map(ChatMessage::getFunctionCall); } public Optional<ChatCompletionChoice> chatCompletion(ChatCompletionRequest request) { return blockGet(openAiApi.createChatCompletion(request)).getChoices().stream().findFirst(); } public List<Model> getModels() { return blockGet(openAiApi.listModels()).data; } public String uploadFineTuningExamples( List<OpenAIChatGPTFineTuningExample> examples, String subFileName) throws IOException { FineTuningFileWrapper uploader = new FineTuningFileWrapper(examples, subFileName); return uploader.withFileToBeUploaded( (file) -> { RequestBody purpose = RequestBody.create("fine-tune", MediaType.parse("text/plain")); try { return blockGet(openAiApi.uploadFile(purpose, file)).getId(); } catch (Exception e) { throw new OpenAIServiceErrorException( "Upload failed.", HttpStatus.INTERNAL_SERVER_ERROR); } }); } public FineTuningJob triggerFineTuning(String fileId) { FineTuningJobRequest fineTuningJobRequest = new FineTuningJobRequest(); fineTuningJobRequest.setTrainingFile(fileId); fineTuningJobRequest.setModel("gpt-3.5-turbo-1106"); fineTuningJobRequest.setHyperparameters( new 
Hyperparameters(3)); // not sure what should be the nEpochs value FineTuningJob fineTuningJob = blockGet(openAiApi.createFineTuningJob(fineTuningJobRequest)); if (List.of("failed", "cancelled").contains(fineTuningJob.getStatus())) { throw new OpenAIServiceErrorException( "Trigger Fine-Tuning Failed: " + fineTuningJob, HttpStatus.BAD_REQUEST); } return fineTuningJob; } public Assistant createAssistant(AssistantRequest assistantRequest) { return blockGet(openAiApi.createAssistant(assistantRequest)); } public Thread createThread(ThreadRequest threadRequest) { return blockGet(openAiApi.createThread(threadRequest)); } public void createMessage(String threadId, MessageRequest messageRequest) { blockGet(openAiApi.createMessage(threadId, messageRequest)); } private Run retrieveRun(String threadId, String runId) { return blockGet(openAiApi.retrieveRun(threadId, runId)); } public Run createRun(String threadId, String assistantId) { RunCreateRequest runCreateRequest = RunCreateRequest.builder().assistantId(assistantId).build(); return blockGet(openAiApi.createRun(threadId, runCreateRequest)); } public Run retrieveUntilCompletedOrRequiresAction(String threadId, Run currentRun) { Run retrievedRun = currentRun; int count = 0; while (!(retrievedRun.getStatus().equals("completed")) && !(retrievedRun.getStatus().equals("failed")) && !(retrievedRun.getStatus().equals("requires_action"))) { count++; if (count > 15) { break; } wait(count - 1); retrievedRun = retrieveRun(threadId, currentRun.getId()); } if (retrievedRun.getStatus().equals("requires_action") || retrievedRun.getStatus().equals("completed")) { return retrievedRun; } throw new RuntimeException("OpenAI run status: " + retrievedRun.getStatus()); } private static void wait(int hundredMilliSeconds) { try { sleep(hundredMilliSeconds * 200L); } catch (InterruptedException e) { throw new RuntimeException(e); } } public Run submitToolOutputs( AiCompletionAnswerClarifyingQuestionParams answerClarifyingQuestionParams) { SubmitToolOutputRequestItem toolOutputRequestItem = SubmitToolOutputRequestItem.builder() .toolCallId(answerClarifyingQuestionParams.getToolCallId()) .output(answerClarifyingQuestionParams.getAnswer()) .build(); List<SubmitToolOutputRequestItem> toolOutputRequestItems = new ArrayList<>(); toolOutputRequestItems.add(toolOutputRequestItem); SubmitToolOutputsRequest submitToolOutputsRequest = SubmitToolOutputsRequest.builder().toolOutputs(toolOutputRequestItems).build(); return blockGet( openAiApi.submitToolOutputs( answerClarifyingQuestionParams.getThreadId(), answerClarifyingQuestionParams.getRunId(), submitToolOutputsRequest)); } public Message getThreadLastMessage(String threadId) { return blockGet(openAiApi.listMessages(threadId)).getData().getLast(); } }
[ "com.theokanning.openai.runs.SubmitToolOutputsRequest.builder", "com.theokanning.openai.image.CreateImageRequest.builder", "com.theokanning.openai.runs.RunCreateRequest.builder", "com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder" ]
[((1844, 1922), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((1844, 1914), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((1844, 1887), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((4032, 4098), 'java.util.List.of'), ((4910, 4969), 'com.theokanning.openai.runs.RunCreateRequest.builder'), ((4910, 4961), 'com.theokanning.openai.runs.RunCreateRequest.builder'), ((6134, 6328), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((6134, 6307), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((6134, 6243), 'com.theokanning.openai.runs.SubmitToolOutputRequestItem.builder'), ((6531, 6609), 'com.theokanning.openai.runs.SubmitToolOutputsRequest.builder'), ((6531, 6601), 'com.theokanning.openai.runs.SubmitToolOutputsRequest.builder')]
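retrieveUntilCompletedOrRequiresAction above is essentially a bounded polling loop with a growing sleep between retries. A sketch of that pattern in isolation, decoupled from the OpenAI run API (the Supplier stands in for retrieveRun; the terminal statuses and the 200 ms backoff step are taken from the handler above):

import java.util.Set;
import java.util.function.Supplier;

class PollUntilTerminalSketch {
    // Polls statusSupplier up to maxAttempts times, sleeping a little longer each round.
    static String pollUntilTerminal(Supplier<String> statusSupplier, int maxAttempts) throws InterruptedException {
        Set<String> terminal = Set.of("completed", "failed", "requires_action");
        String status = statusSupplier.get();
        int attempt = 0;
        while (!terminal.contains(status) && attempt < maxAttempts) {
            attempt++;
            Thread.sleep((attempt - 1) * 200L); // same backoff step as the handler above
            status = statusSupplier.get();
        }
        return status;
    }

    public static void main(String[] args) throws InterruptedException {
        // Toy supplier that becomes "completed" after a few calls.
        int[] calls = {0};
        String finalStatus = pollUntilTerminal(
                () -> ++calls[0] < 3 ? "in_progress" : "completed", 15);
        System.out.println(finalStatus); // completed
    }
}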
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidrachatgpt; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import docking.Tool; import ghidra.app.CorePluginPackage; import ghidra.app.ExamplesPluginPackage; import ghidra.app.decompiler.flatapi.FlatDecompilerAPI; import ghidra.app.plugin.PluginCategoryNames; import ghidra.app.plugin.ProgramPlugin; import ghidra.app.services.CodeViewerService; import ghidra.app.services.ConsoleService; import ghidra.framework.plugintool.*; import ghidra.framework.plugintool.util.PluginStatus; import ghidra.program.flatapi.FlatProgramAPI; import ghidra.program.model.address.Address; import ghidra.program.model.listing.Function; import ghidra.program.model.listing.Program; import ghidra.program.model.listing.Variable; import ghidra.program.model.symbol.SourceType; import ghidra.program.util.ProgramLocation; import ghidra.util.HelpLocation; import java.lang.Integer; import java.time.Duration; import java.util.List; import org.json.JSONObject; //@formatter:off @PluginInfo(status = PluginStatus.RELEASED, packageName = CorePluginPackage.NAME, category = PluginCategoryNames.ANALYSIS, shortDescription = "ChatGPT Plugin for Ghidra", description = "Brings the power of ChatGPT to Ghidra!", servicesRequired = {ConsoleService.class, CodeViewerService.class}) //@formatter:on public class GhidraChatGPTPlugin extends ProgramPlugin { ConsoleService cs; CodeViewerService cvs; private GhidraChatGPTComponent uiComponent; private String apiToken; private String openAiModel = "gpt-3.5-turbo"; private int OPENAI_TIMEOUT = 120; private static final String GCG_IDENTIFY_STRING = "Describe the function with as much detail as possible and include a link to an open source version if there is one\n %s"; private static final String GCG_VULNERABILITY_STRING = "Describe all vulnerabilities in this function with as much detail as possible\n %s"; private static final String GCG_BEAUTIFY_STRING = "Analyze the function and suggest function and variable names in a json format where the key is the previous name and the value is the suggested name\n %s"; /** * Plugin constructor. * * @param tool The plugin tool that this plugin is added to. 
*/ public GhidraChatGPTPlugin(PluginTool tool) { super(tool); String pluginName = getName(); uiComponent = new GhidraChatGPTComponent(this, pluginName); String topicName = this.getClass().getPackage().getName(); String anchorName = "HelpAnchor"; uiComponent.setHelpLocation(new HelpLocation(topicName, anchorName)); } @Override public void init() { super.init(); cs = tool.getService(ConsoleService.class); cvs = tool.getService(CodeViewerService.class); apiToken = System.getenv("OPENAI_TOKEN"); if (apiToken != null) ok(String.format("Loaded OpenAI Token: %s", censorToken(apiToken))); ok(String.format("Default model is: %s", openAiModel)); } public Boolean setToken(String token) { if (token == null) return false; apiToken = token; return true; } private static String censorToken(String token) { StringBuilder censoredBuilder = new StringBuilder(token.length()); censoredBuilder.append(token.substring(0, 2)); for (int i = 2; i < token.length(); i++) { censoredBuilder.append('*'); } return censoredBuilder.toString(); } public String getToken() { return apiToken; } public void setModel(String model) { openAiModel = model; } public void identifyFunction() { String result; DecompilerResults decResult = decompileCurrentFunc(); if (decResult == null) return; log(String.format("Identifying the current function: %s", decResult.func.getName())); result = askChatGPT( String.format(GCG_IDENTIFY_STRING, decResult.decompiledFunc)); if (result == null) return; addComment(decResult.prog, decResult.func, result, "[GhidraChatGPT] - Identify Function"); } public void findVulnerabilities() { String result; DecompilerResults decResult = decompileCurrentFunc(); if (decResult == null) return; log(String.format("Finding vulnerabilities in the current function: %s", decResult.func.getName())); result = askChatGPT( String.format(GCG_VULNERABILITY_STRING, decResult.decompiledFunc)); if (result == null) return; addComment(decResult.prog, decResult.func, result, "[GhidraChatGPT] - Find Vulnerabilities"); } public void beautifyFunction() { String result; DecompilerResults decResult = decompileCurrentFunc(); if (decResult == null) return; log(String.format("Beautifying the function: %s", decResult.func.getName())); result = askChatGPT( String.format(GCG_BEAUTIFY_STRING, decResult.decompiledFunc)); if (result == null) return; updateVariables(decResult.prog, decResult, result); ok(String.format("Beautified the function: %s", decResult.func.getName())); } private Boolean checkOpenAIToken() { if (apiToken != null) return true; if (!setToken(uiComponent.askForOpenAIToken())) { error("Failed to update the OpenAI API token"); return false; } return true; } private class DecompilerResults { public Program prog; public Function func; public String decompiledFunc; public DecompilerResults(Program prog, Function func, String decompiledFunc) { this.prog = prog; this.func = func; this.decompiledFunc = decompiledFunc; } } private DecompilerResults decompileCurrentFunc() { String decompiledFunc; ProgramLocation progLoc = cvs.getCurrentLocation(); Program prog = progLoc.getProgram(); FlatProgramAPI programApi = new FlatProgramAPI(prog); FlatDecompilerAPI decompiler = new FlatDecompilerAPI(programApi); Function func = programApi.getFunctionContaining(progLoc.getAddress()); if (func == null) { error("Failed to find the current function"); return null; } try { decompiledFunc = decompiler.decompile(func); } catch (Exception e) { error(String.format( "Failed to decompile the function: %s with the error %s", func.getName(), e)); return null; } 
return new DecompilerResults(prog, func, decompiledFunc); } private void updateVariables(Program prog, DecompilerResults decResult, String result) { JSONObject jsonObj; try { jsonObj = new JSONObject(result); } catch (Exception e) { error("Failed to parse beautify JSON"); return; } Variable[] vars = decResult.func.getAllVariables(); if (vars == null) { log("Nothing to beautify"); return; } var id = prog.startTransaction("GhidraChatGPT"); for (Variable var : vars) { if (jsonObj.has(var.getName())) { String val = jsonObj.getString(var.getName()); try { var.setName(val, SourceType.USER_DEFINED); ok(String.format("Beautified %s => %s", var.getName(), val)); } catch (Exception e) { error( String.format("Failed to beautify %s => %s", var.getName(), val)); } } }; if (jsonObj.has(decResult.func.getName())) { String val = jsonObj.getString(decResult.func.getName()); try { decResult.func.setName(val, SourceType.USER_DEFINED); ok(String.format("Beautified %s => %s", decResult.func.getName(), val)); } catch (Exception e) { error(String.format("Failed to beautify %s => %s", decResult.func.getName(), val)); } } prog.endTransaction(id, true); } private void addComment(Program prog, Function func, String comment, String commentHeader) { var id = prog.startTransaction("GhidraChatGPT"); String currentComment = func.getComment(); if (currentComment != null) { currentComment = String.format("%s\n%s\n\n%s", commentHeader, comment, currentComment); } else { currentComment = String.format("%s\n%s", commentHeader, comment); } func.setComment(currentComment); prog.endTransaction(id, true); ok(String.format( "Added the ChatGPT response as a comment to the function: %s", func.getName())); } private String askChatGPT(String prompt) { String response = sendOpenAIRequest(prompt); if (response == null) { error("The ChatGPT response was empty, try again!"); return null; } return response; } private String sendOpenAIRequest(String prompt) { StringBuilder response = new StringBuilder(); if (!checkOpenAIToken()) return null; OpenAiService openAIService = new OpenAiService(apiToken, Duration.ofSeconds(OPENAI_TIMEOUT)); if (openAIService == null) { error("Faild to start the OpenAI service, try again!"); return null; } ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model(openAiModel) .temperature(0.8) .messages(List.of( new ChatMessage( ChatMessageRole.SYSTEM.value(), "You are an assistant helping out with reverse engineering and vulnerability research"), new ChatMessage(ChatMessageRole.USER.value(), prompt))) .build(); try { StringBuilder builder = new StringBuilder(); openAIService.createChatCompletion(chatCompletionRequest) .getChoices() .forEach( choice -> { builder.append(choice.getMessage().getContent()); }); return builder.toString(); } catch (Exception e) { error(String.format("Asking ChatGPT failed with the error %s", e)); return null; } } public void log(String message) { cs.println(String.format("%s [>] %s", getName(), message)); } public void error(String message) { cs.println(String.format("%s [-] %s", getName(), message)); } public void ok(String message) { cs.println(String.format("%s [+] %s", getName(), message)); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((9946, 10357), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((9946, 10336), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((9946, 10039), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((9946, 10009), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10124, 10154), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((10297, 10325), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
package com.frizzcode.gpt3.services; import com.frizzcode.gpt3.models.Gpt3Request; import com.frizzcode.gpt3.models.Gpt3Response; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.edit.EditRequest; import com.theokanning.openai.model.Model; import com.theokanning.openai.service.OpenAiService; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.modelmapper.ModelMapper; import org.springframework.beans.factory.annotation.Value; import org.springframework.scheduling.annotation.Async; import org.springframework.stereotype.Service; import java.time.Duration; import java.util.List; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; @RequiredArgsConstructor @Slf4j @Service public class Gpt3ServiceImpl implements Gpt3Service { @Value("${gpt3.token}") public String API_TOKEN; @Override @Async("asyncExecution") public CompletableFuture<List<Gpt3Response>> getResponses(Gpt3Request request) { OpenAiService service = new OpenAiService(API_TOKEN, Duration.ofMinutes(5)); ModelMapper mapper = new ModelMapper(); String[] promptSplit = request.getPrompt().split("\\."); String instruction = promptSplit[promptSplit.length-1]; StringBuilder input = new StringBuilder(); log.info("Instruction: {}", instruction); for (int i=0; i<promptSplit.length -1; i++){ String message = promptSplit[i] + ". "; input.append(message); } log.info("Input: {}", input); if (request.getModel().contains("gpt")) return CompletableFuture.completedFuture(service.createChatCompletion(ChatCompletionRequest.builder() .model(request.getModel()) .frequencyPenalty(0.5) .topP(1.0) .user(UUID.randomUUID().toString()) .temperature(0.3) .maxTokens(3500) .messages(List.of(new ChatMessage("user", request.getPrompt()))) .build()).getChoices().stream().map( chatCompletionChoice -> mapper.map(chatCompletionChoice, Gpt3Response.class) ).toList()).orTimeout(5, TimeUnit.MINUTES); else if (request.getModel().contains("edit")) return CompletableFuture.completedFuture(service.createEdit(EditRequest.builder() .model(request.getModel()) .temperature(0.7) .input(input.toString()) .instruction(instruction) .build()).getChoices().stream().map( editChoice -> mapper.map(editChoice, Gpt3Response.class) ).toList()).orTimeout(5, TimeUnit.MINUTES); else return CompletableFuture.completedFuture(service.createCompletion(CompletionRequest.builder() .model(request.getModel()) .frequencyPenalty(0.5) .topP(1.0) .user(UUID.randomUUID().toString()) .temperature(0.3) .maxTokens(3500) .prompt(request.getPrompt()) .build()).getChoices().stream().map( completionChoice -> mapper.map(completionChoice, Gpt3Response.class) ).toList()).orTimeout(5, TimeUnit.MINUTES); } @Override public List<Model> getEngineList() { OpenAiService service = new OpenAiService(API_TOKEN, Duration.ofMinutes(5)); return service.listModels(); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder", "com.theokanning.openai.edit.EditRequest.builder", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1848, 2498), 'java.util.concurrent.CompletableFuture.completedFuture'), ((1911, 2316), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1911, 2286), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1911, 2200), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1911, 2162), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1911, 2123), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1911, 2066), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1911, 2034), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1911, 1990), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2094, 2122), 'java.util.UUID.randomUUID'), ((2575, 3021), 'java.util.concurrent.CompletableFuture.completedFuture'), ((2628, 2859), 'com.theokanning.openai.edit.EditRequest.builder'), ((2628, 2829), 'com.theokanning.openai.edit.EditRequest.builder'), ((2628, 2782), 'com.theokanning.openai.edit.EditRequest.builder'), ((2628, 2736), 'com.theokanning.openai.edit.EditRequest.builder'), ((2628, 2697), 'com.theokanning.openai.edit.EditRequest.builder'), ((3057, 3655), 'java.util.concurrent.CompletableFuture.completedFuture'), ((3116, 3481), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3116, 3451), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3116, 3401), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3116, 3363), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3116, 3324), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3116, 3267), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3116, 3235), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3116, 3191), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3295, 3323), 'java.util.UUID.randomUUID')]
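Gpt3ServiceImpl above dispatches to three different endpoints based on the model name. The edit branch in isolation, as a minimal sketch reusing the same EditRequest builder calls (model name, input, and instruction are placeholders; OpenAI has since deprecated the edits endpoint, so this mirrors the code above rather than a current recommendation):

import com.theokanning.openai.edit.EditRequest;
import com.theokanning.openai.service.OpenAiService;
import java.time.Duration;

class EditRequestSketch {
    public static void main(String[] args) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"), Duration.ofMinutes(1));

        // Same builder calls as Gpt3ServiceImpl above; values are placeholders.
        EditRequest request = EditRequest.builder()
                .model("text-davinci-edit-001")
                .temperature(0.7)
                .input("The quik brown fox.")
                .instruction("Fix the spelling mistakes.")
                .build();

        service.createEdit(request)
                .getChoices()
                .forEach(choice -> System.out.println(choice.getText()));

        service.shutdownExecutor();
    }
}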
package com.zs.project.service.impl; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.theokanning.openai.OpenAiApi; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import com.zs.project.exception.ErrorCode; import com.zs.project.exception.ServiceException; import com.zs.project.model.dto.gpt.OpenAIRequestBuilder; import com.zs.project.service.GptService; import com.zs.project.service.ResultCallback; import lombok.Synchronized; import lombok.extern.slf4j.Slf4j; import okhttp3.OkHttpClient; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.http.*; import org.springframework.scheduling.annotation.Async; import org.springframework.stereotype.Service; import org.springframework.web.client.RestTemplate; import org.springframework.web.servlet.mvc.method.annotation.SseEmitter; import retrofit2.Retrofit; import java.net.InetSocketAddress; import java.net.Proxy; import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.*; import static com.theokanning.openai.service.OpenAiService.*; /** * @author ShuaiZhang * 需要用流式处理(已支持) */ @Service @Slf4j public class GptServiceImpl implements GptService { /** * openai的token */ @Value("${openai.token}") String token; /** * 代理服务器 */ @Value("${proxy.host}") private String proxyHost; /** * 代理端口 */ @Value("${proxy.port}") private int proxyPort; @Value("${openai.token}") private String OPENAI_KEYS; RestTemplate restTemplate; @Autowired public GptServiceImpl(@Qualifier("restTemplateWithProxy") RestTemplate restTemplate) { this.restTemplate = restTemplate; } /** * 阻塞式调用gpt3.5 * @param query 你的promote * @return 结果 */ @Override public String GptResponse(String query) { //建造者模式创建实体 HttpEntity<Map<String, Object>> entity = new OpenAIRequestBuilder(OPENAI_KEYS).withContent(query).buildRequestEntity(); ResponseEntity<String> response = restTemplate.exchange(OpenAIRequestBuilder.OPENAI_ENDPOINT_URL, HttpMethod.POST, entity, String.class); //我个人倾向这里手动进行解析而不是封装成DTO //封装成DTO要处理的字段很多 ObjectMapper objectMapper = new ObjectMapper(); JsonNode rootNode; try{ rootNode = objectMapper.readTree(response.getBody()); } catch (JsonProcessingException e){ throw new ServiceException(ErrorCode.SYSTEM_ERROR,"Json解析失败"); } JsonNode choicesNode = rootNode.path("choices"); JsonNode firstChoiceNode = choicesNode.get(0); JsonNode messageNode = firstChoiceNode.path("message"); String gptResponse = messageNode.path("content").asText(); return gptResponse; } /** * 流式传输 * @param prompt 用户问题 * @param sseEmitter 实时推送 */ @Override @Async public void streamChatCompletion(String prompt, SseEmitter sseEmitter) { streamChatCompletion(prompt, sseEmitter, result -> {}); } /** * 关闭链接 * @param sseEmitter */ @Override public void sendStopEvent(SseEmitter sseEmitter) { try { sseEmitter.send(SseEmitter.event().name("stop").data("")); } catch (Exception e){ throw new ServiceException(ErrorCode.SYSTEM_ERROR,"事件停止接口出错"); } } /** * 创建一个OpenAI服务 * @param token API key * @param proxyHost 代理服务器地址 * @param proxyPort 代理端口地址 
* @return */ @Override public OpenAiService buildOpenAiService(String token, String proxyHost, int proxyPort) { //构建HTTP代理 Proxy proxy = null; if (StringUtils.isNotBlank(proxyHost)) { proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(proxyHost, proxyPort)); } //构建HTTP客户端 OkHttpClient client = defaultClient(token, Duration.of(60, ChronoUnit.SECONDS)) .newBuilder() .proxy(proxy) .build(); ObjectMapper mapper = defaultObjectMapper(); Retrofit retrofit = defaultRetrofit(client, mapper); OpenAiApi api = retrofit.create(OpenAiApi.class); OpenAiService service = new OpenAiService(api, client.dispatcher().executorService()); return service; } /** * 支持回调 * @param prompt 用户问题 * @param sseEmitter SSE对象 * @param resultCallback 回调接口 */ @Override @Async public void streamChatCompletion(String prompt, SseEmitter sseEmitter, ResultCallback resultCallback) { log.info("发送消息:{}", prompt); final List<ChatMessage> messages = new ArrayList<>(); final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), prompt); messages.add(systemMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) // .maxTokens(500) .logitBias(new HashMap<>()) .build(); //流式对话(逐Token返回) StringBuilder receiveMsgBuilder = new StringBuilder(); OpenAiService service = buildOpenAiService(token, proxyHost, proxyPort); service.streamChatCompletion(chatCompletionRequest) //正常结束 .doOnComplete(() -> { log.info("连接结束"); //发送连接关闭事件,让客户端主动断开连接避免重连 sendStopEvent(sseEmitter); //完成请求处理 sseEmitter.complete(); }) //异常结束 .doOnError(throwable -> { log.error("连接异常", throwable); //发送连接关闭事件,让客户端主动断开连接避免重连 sendStopEvent(sseEmitter); //完成请求处理携带异常 sseEmitter.completeWithError(throwable); }) //收到消息后转发到浏览器 .blockingForEach(x -> { ChatCompletionChoice choice = x.getChoices().get(0); log.info("收到消息:" + choice); if (StringUtils.isEmpty(choice.getFinishReason())) { //未结束时才可以发送消息(结束后,先调用doOnComplete然后还会收到一条结束消息,因连接关闭导致发送消息失败:ResponseBodyEmitter has already completed) sseEmitter.send(choice.getMessage()); } String content = choice.getMessage().getContent(); content = StringUtils.defaultString(content); receiveMsgBuilder.append(content); }); log.info("收到的完整消息:" + receiveMsgBuilder); try { resultCallback.onCompletion(receiveMsgBuilder.toString()); } catch (IllegalStateException e){ log.info("事件已经回调完毕"); } } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value" ]
[((3905, 3945), 'org.springframework.web.servlet.mvc.method.annotation.SseEmitter.event'), ((3905, 3936), 'org.springframework.web.servlet.mvc.method.annotation.SseEmitter.event'), ((5531, 5561), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
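GptServiceImpl consumes streamChatCompletion with RxJava operators before forwarding each chunk over SSE. A stripped-down sketch of the same streaming call without the SSE plumbing (model name and prompt are placeholders; blockingForEach blocks the calling thread until the stream ends, as in the code above):

import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import java.util.List;

class StreamingChatSketch {
    public static void main(String[] args) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"));

        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model("gpt-3.5-turbo")
                .messages(List.of(new ChatMessage("user", "Stream a short haiku.")))
                .n(1)
                .build();

        StringBuilder full = new StringBuilder();
        // Each emitted chunk carries a delta of the message content; the first may be null.
        service.streamChatCompletion(request)
                .doOnError(Throwable::printStackTrace)
                .blockingForEach(chunk -> {
                    String delta = chunk.getChoices().get(0).getMessage().getContent();
                    if (delta != null) {
                        full.append(delta);
                    }
                });

        System.out.println(full);
        service.shutdownExecutor();
    }
}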
package oracleai; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; import com.oracle.bmc.aivision.AIServiceVisionClient; import com.oracle.bmc.aivision.model.*; import com.oracle.bmc.aivision.requests.AnalyzeImageRequest; import com.oracle.bmc.aivision.responses.AnalyzeImageResponse; import com.oracle.bmc.auth.AuthenticationDetailsProvider; import com.oracle.bmc.auth.ConfigFileAuthenticationDetailsProvider; import com.oracle.bmc.auth.InstancePrincipalsAuthenticationDetailsProvider; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import lombok.Data; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.bind.annotation.*; import org.springframework.web.multipart.MultipartFile; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.json.JSONArray; import org.json.JSONObject; @RestController @RequestMapping("/health") public class ExplainAndAdviseOnHealthTestResults { private static Logger log = LoggerFactory.getLogger(ExplainAndAdviseOnHealthTestResults.class); @GetMapping("/form") public String form(){ return " <html><form method=\"post\" action=\"/health/analyzedoc\" enctype=\"multipart/form-data\">\n" + " Select an image file to conduct object detection upon...\n" + " <input type=\"file\" name=\"file\" accept=\"image/*\">\n" + " <br>\n" + " <br>Hit submit and a raw JSON return of objects detected and other info will be returned...\n" + " <br><input type=\"submit\" value=\"Send Request to Vision AI\">\n" + " </form></html>"; } @PostMapping("/analyzedoc") public String analyzedoc(@RequestParam("file") MultipartFile file) throws Exception { log.info("analyzing image file:" + file); String objectDetectionResults = processImage(file.getBytes(), true); ImageAnalysis imageAnalysis = parseJsonToImageAnalysis(objectDetectionResults); List<Line> lines = imageAnalysis.getImageText().getLines(); String fullText = ""; for (Line line : lines) fullText += line.getText(); log.info("fullText = " + fullText); String explanationOfResults = chat("explain these test results in simple terms " + "and tell me what should I do to get better results: \"" + fullText + "\""); return "<html><br><br>explanationOfResults:" + explanationOfResults + "</html>"; } String chat(String textcontent) throws Exception { OpenAiService service = new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60)); System.out.println("Streaming chat completion... textcontent:" + textcontent); final List<ChatMessage> messages = new ArrayList<>(); final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), textcontent); messages.add(systemMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(300) //was 50 .logitBias(new HashMap<>()) .build(); String replyString = ""; String content; for (ChatCompletionChoice choice : service.createChatCompletion(chatCompletionRequest).getChoices()) { content = choice.getMessage().getContent(); replyString += (content == null?" 
": content); } service.shutdownExecutor(); return replyString; } String processImage(byte[] bytes, boolean isConfigFileAuth) throws Exception { AIServiceVisionClient aiServiceVisionClient; AuthenticationDetailsProvider provider; if (isConfigFileAuth) { provider = new ConfigFileAuthenticationDetailsProvider( System.getenv("OCICONFIG_FILE"),System.getenv("OCICONFIG_PROFILE")); aiServiceVisionClient = AIServiceVisionClient.builder().build(provider); } else { aiServiceVisionClient = new AIServiceVisionClient(InstancePrincipalsAuthenticationDetailsProvider.builder().build()); } List<ImageFeature> features = new ArrayList<>(); ImageFeature classifyFeature = ImageClassificationFeature.builder() .maxResults(10) .build(); ImageFeature detectImageFeature = ImageObjectDetectionFeature.builder() .maxResults(10) .build(); ImageFeature textDetectImageFeature = ImageTextDetectionFeature.builder().build(); features.add(classifyFeature); features.add(detectImageFeature); features.add(textDetectImageFeature); InlineImageDetails inlineImageDetails = InlineImageDetails.builder() .data(bytes) .build(); AnalyzeImageDetails analyzeImageDetails = AnalyzeImageDetails.builder() .image(inlineImageDetails) .features(features) .build(); AnalyzeImageRequest request = AnalyzeImageRequest.builder() .analyzeImageDetails(analyzeImageDetails) .build(); AnalyzeImageResponse response = aiServiceVisionClient.analyzeImage(request); ObjectMapper mapper = new ObjectMapper(); mapper.setFilterProvider(new SimpleFilterProvider().setFailOnUnknownId(false)); String json = mapper.writeValueAsString(response.getAnalyzeImageResult()); System.out.println("AnalyzeImage Result"); System.out.println(json); return json; } @Data class ImageObject { private String name; private double confidence; private BoundingPolygon boundingPolygon; } @Data class BoundingPolygon { private List<Point> normalizedVertices; } @Data class Point { private double x; private double y; public Point(double x, double y) { this.x = x; this.y = y; } } @Data class Label { private String name; private double confidence; } @Data class OntologyClass { private String name; private List<String> parentNames; private List<String> synonymNames; } @Data class ImageText { private List<Word> words; private List<Line> lines; } @Data class Word { private String text; private double confidence; private BoundingPolygon boundingPolygon; } @Data class Line { private String text; private double confidence; private BoundingPolygon boundingPolygon; private List<Integer> wordIndexes; } @Data class ImageAnalysis { private List<ImageObject> imageObjects; private List<Label> labels; private List<OntologyClass> ontologyClasses; private ImageText imageText; private String imageClassificationModelVersion; private String objectDetectionModelVersion; private String textDetectionModelVersion; private List<String> errors; } private ImageAnalysis parseJsonToImageAnalysis(String jsonString) { JSONObject json = new JSONObject(jsonString); JSONArray imageObjectsArray = json.getJSONArray("imageObjects"); List<ImageObject> imageObjects = new ArrayList<>(); for (int i = 0; i < imageObjectsArray.length(); i++) { JSONObject imageObjectJson = imageObjectsArray.getJSONObject(i); ImageObject imageObject = new ImageObject(); imageObject.setName(imageObjectJson.getString("name")); imageObject.setConfidence(imageObjectJson.getDouble("confidence")); JSONObject boundingPolygonJson = imageObjectJson.getJSONObject("boundingPolygon"); JSONArray normalizedVerticesArray = 
boundingPolygonJson.getJSONArray("normalizedVertices"); List<Point> normalizedVertices = new ArrayList<>(); for (int j = 0; j < normalizedVerticesArray.length(); j++) { JSONObject vertexJson = normalizedVerticesArray.getJSONObject(j); Point vertex = new Point(vertexJson.getDouble("x"), vertexJson.getDouble("y")); normalizedVertices.add(vertex); } BoundingPolygon boundingPolygon = new BoundingPolygon(); boundingPolygon.setNormalizedVertices(normalizedVertices); imageObject.setBoundingPolygon(boundingPolygon); imageObjects.add(imageObject); } JSONArray labelsArray = json.getJSONArray("labels"); List<Label> labels = new ArrayList<>(); for (int i = 0; i < labelsArray.length(); i++) { JSONObject labelJson = labelsArray.getJSONObject(i); Label label = new Label(); label.setName(labelJson.getString("name")); label.setConfidence(labelJson.getDouble("confidence")); labels.add(label); } JSONArray ontologyClassesArray = json.getJSONArray("ontologyClasses"); List<OntologyClass> ontologyClasses = new ArrayList<>(); for (int i = 0; i < ontologyClassesArray.length(); i++) { JSONObject ontologyClassJson = ontologyClassesArray.getJSONObject(i); OntologyClass ontologyClass = new OntologyClass(); ontologyClass.setName(ontologyClassJson.getString("name")); JSONArray parentNamesArray = ontologyClassJson.getJSONArray("parentNames"); List<String> parentNames = new ArrayList<>(); for (int j = 0; j < parentNamesArray.length(); j++) { parentNames.add(parentNamesArray.getString(j)); } ontologyClass.setParentNames(parentNames); ontologyClasses.add(ontologyClass); } JSONObject imageTextJson = json.getJSONObject("imageText"); JSONArray wordsArray = imageTextJson.getJSONArray("words"); List<Word> words = new ArrayList<>(); for (int i = 0; i < wordsArray.length(); i++) { JSONObject wordJson = wordsArray.getJSONObject(i); Word word = new Word(); word.setText(wordJson.getString("text")); word.setConfidence(wordJson.getDouble("confidence")); JSONObject boundingPolygonJson = wordJson.getJSONObject("boundingPolygon"); JSONArray normalizedVerticesArray = boundingPolygonJson.getJSONArray("normalizedVertices"); List<Point> normalizedVertices = new ArrayList<>(); for (int j = 0; j < normalizedVerticesArray.length(); j++) { JSONObject vertexJson = normalizedVerticesArray.getJSONObject(j); Point vertex = new Point(vertexJson.getDouble("x"), vertexJson.getDouble("y")); normalizedVertices.add(vertex); } BoundingPolygon boundingPolygon = new BoundingPolygon(); boundingPolygon.setNormalizedVertices(normalizedVertices); word.setBoundingPolygon(boundingPolygon); words.add(word); } JSONArray linesArray = imageTextJson.getJSONArray("lines"); List<Line> lines = new ArrayList<>(); for (int i = 0; i < linesArray.length(); i++) { JSONObject lineJson = linesArray.getJSONObject(i); Line line = new Line(); line.setText(lineJson.getString("text")); line.setConfidence(lineJson.getDouble("confidence")); JSONObject boundingPolygonJson = lineJson.getJSONObject("boundingPolygon"); JSONArray normalizedVerticesArray = boundingPolygonJson.getJSONArray("normalizedVertices"); List<Point> normalizedVertices = new ArrayList<>(); for (int j = 0; j < normalizedVerticesArray.length(); j++) { JSONObject vertexJson = normalizedVerticesArray.getJSONObject(j); Point vertex = new Point(vertexJson.getDouble("x"), vertexJson.getDouble("y")); normalizedVertices.add(vertex); } BoundingPolygon boundingPolygon = new BoundingPolygon(); boundingPolygon.setNormalizedVertices(normalizedVertices); line.setBoundingPolygon(boundingPolygon); JSONArray wordIndexesArray = 
lineJson.getJSONArray("wordIndexes"); List<Integer> wordIndexes = new ArrayList<>(); for (int j = 0; j < wordIndexesArray.length(); j++) { wordIndexes.add(wordIndexesArray.getInt(j)); } line.setWordIndexes(wordIndexes); lines.add(line); } String imageClassificationModelVersion = json.getString("imageClassificationModelVersion"); String objectDetectionModelVersion = json.getString("objectDetectionModelVersion"); String textDetectionModelVersion = json.getString("textDetectionModelVersion"); List<String> errors = new ArrayList<>(); JSONArray errorsArray = json.getJSONArray("errors"); for (int i = 0; i < errorsArray.length(); i++) { errors.add(errorsArray.getString(i)); } ImageText imageText = new ImageText(); imageText.setWords(words); imageText.setLines(lines); ImageAnalysis imageAnalysis = new ImageAnalysis(); imageAnalysis.setImageObjects(imageObjects); imageAnalysis.setLabels(labels); imageAnalysis.setOntologyClasses(ontologyClasses); imageAnalysis.setImageText(imageText); imageAnalysis.setImageClassificationModelVersion(imageClassificationModelVersion); imageAnalysis.setObjectDetectionModelVersion(objectDetectionModelVersion); imageAnalysis.setTextDetectionModelVersion(textDetectionModelVersion); imageAnalysis.setErrors(errors); return imageAnalysis; } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value" ]
[((3353, 3383), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((4522, 4569), 'com.oracle.bmc.aivision.AIServiceVisionClient.builder'), ((4650, 4715), 'com.oracle.bmc.auth.InstancePrincipalsAuthenticationDetailsProvider.builder'), ((5630, 5742), 'com.oracle.bmc.aivision.requests.AnalyzeImageRequest.builder'), ((5630, 5717), 'com.oracle.bmc.aivision.requests.AnalyzeImageRequest.builder')]
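The OCI Vision half of the controller above is easy to lose inside the DTO parsing. A condensed sketch of just the analyze-image call, reusing the same builders as the controller (the file path and environment variable names are placeholders taken from the code above; only text detection is requested here):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
import com.oracle.bmc.aivision.AIServiceVisionClient;
import com.oracle.bmc.aivision.model.AnalyzeImageDetails;
import com.oracle.bmc.aivision.model.ImageFeature;
import com.oracle.bmc.aivision.model.ImageTextDetectionFeature;
import com.oracle.bmc.aivision.model.InlineImageDetails;
import com.oracle.bmc.aivision.requests.AnalyzeImageRequest;
import com.oracle.bmc.aivision.responses.AnalyzeImageResponse;
import com.oracle.bmc.auth.ConfigFileAuthenticationDetailsProvider;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

class VisionTextExtractionSketch {
    public static void main(String[] args) throws Exception {
        // Assumed: OCI credentials come from a config file and profile named in env vars,
        // exactly as in the controller above.
        ConfigFileAuthenticationDetailsProvider provider =
                new ConfigFileAuthenticationDetailsProvider(
                        System.getenv("OCICONFIG_FILE"), System.getenv("OCICONFIG_PROFILE"));
        AIServiceVisionClient client = AIServiceVisionClient.builder().build(provider);

        // "results.png" is a placeholder path for the document image to analyze.
        byte[] bytes = Files.readAllBytes(Paths.get("results.png"));

        List<ImageFeature> features = new ArrayList<>();
        features.add(ImageTextDetectionFeature.builder().build());

        AnalyzeImageRequest request = AnalyzeImageRequest.builder()
                .analyzeImageDetails(AnalyzeImageDetails.builder()
                        .image(InlineImageDetails.builder().data(bytes).build())
                        .features(features)
                        .build())
                .build();

        AnalyzeImageResponse response = client.analyzeImage(request);

        // As in the controller: serialize the raw result, disabling the unknown-filter
        // failure that the OCI models otherwise trigger in Jackson.
        ObjectMapper mapper = new ObjectMapper();
        mapper.setFilterProvider(new SimpleFilterProvider().setFailOnUnknownId(false));
        System.out.println(mapper.writeValueAsString(response.getAnalyzeImageResult()));
        client.close();
    }
}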
package org.ncgr.chatbot.openai; import java.util.Collections; import java.util.List; import com.theokanning.openai.embedding.Embedding; import com.theokanning.openai.embedding.EmbeddingRequest; import com.theokanning.openai.embedding.EmbeddingResult; import com.theokanning.openai.service.OpenAiService; /** * Class to retrieve embeddings from OpenAI. */ public class EmbeddingTest { // the OpenAI embedding model to use static String EMBED_MODEL = "text-embedding-ada-002"; public static void main(String[] args) { String token = System.getenv("OPENAI_API_KEY"); OpenAiService service = new OpenAiService(token); EmbeddingRequest embeddingRequest = EmbeddingRequest.builder() .model("text-embedding-ada-002") .input(Collections.singletonList("List photosynthesis genes.")) .build(); List<Embedding> embeddings = service.createEmbeddings(embeddingRequest).getData(); for (Embedding embedding : embeddings) { List<Double> vector = embedding.getEmbedding(); System.out.println("object: " + embedding.getObject()); System.out.println("index: " + embedding.getIndex()); System.out.println("vector: " + vector); } } }
[ "com.theokanning.openai.embedding.EmbeddingRequest.builder" ]
[((696, 864), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((696, 843), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((696, 767), 'com.theokanning.openai.embedding.EmbeddingRequest.builder')]
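EmbeddingTest above only prints the returned vectors; in practice they are usually compared. A small follow-up sketch computing cosine similarity between two vectors of the kind getEmbedding() returns (pure arithmetic, no extra library assumptions; the toy vectors stand in for the 1536-dimensional embeddings):

import java.util.List;

class CosineSimilaritySketch {
    // Cosine similarity = dot(a, b) / (||a|| * ||b||); vectors must have the same length.
    static double cosineSimilarity(List<Double> a, List<Double> b) {
        double dot = 0.0, normA = 0.0, normB = 0.0;
        for (int i = 0; i < a.size(); i++) {
            dot += a.get(i) * b.get(i);
            normA += a.get(i) * a.get(i);
            normB += b.get(i) * b.get(i);
        }
        return dot / (Math.sqrt(normA) * Math.sqrt(normB));
    }

    public static void main(String[] args) {
        System.out.println(cosineSimilarity(List.of(1.0, 2.0, 3.0), List.of(2.0, 4.0, 6.0))); // ~1.0 (parallel)
        System.out.println(cosineSimilarity(List.of(1.0, 0.0, 0.0), List.of(0.0, 1.0, 0.0))); // 0.0 (orthogonal)
    }
}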
package oracleai.services; import com.theokanning.openai.image.CreateImageRequest; import com.theokanning.openai.service.OpenAiService; import java.time.Duration; public class ImageGeneration { static public String imagegeneration(String imagedescription) throws Exception { OpenAiService service = new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60)); CreateImageRequest openairequest = CreateImageRequest.builder() .prompt(imagedescription) .build(); String imageLocation = service.createImage(openairequest).getData().get(0).getUrl(); System.out.println("Image is located at:" + imageLocation); service.shutdownExecutor(); return imageLocation; } }
[ "com.theokanning.openai.image.CreateImageRequest.builder" ]
[((446, 541), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((446, 516), 'com.theokanning.openai.image.CreateImageRequest.builder')]
package br.com.alura.ecommerce; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import java.time.Duration; import java.util.Arrays; import java.util.Scanner; public class CategorizadorDeProdutos { public static void main(String[] args) { var leitor = new Scanner(System.in); System.out.println("Digite as categorias válidas:"); var categorias = leitor.nextLine(); while(true) { System.out.println("\nDigite o nome do produto:"); var user = leitor.nextLine(); var system = """ Você é um categorizador de produtos e deve responder apenas o nome da categoria do produto informado Escolha uma categoria dentra a lista abaixo: %s ###### exemplo de uso: Pergunta: Bola de futebol Resposta: Esportes ###### regras a serem seguidas: Caso o usuario pergunte algo que nao seja de categorizacao de produtos, voce deve responder que nao pode ajudar pois o seu papel é apenas responder a categoria dos produtos """.formatted(categorias); dispararRequisicao(user, system); } } public static void dispararRequisicao(String user, String system) { var chave = System.getenv("OPENAI_API_KEY"); var service = new OpenAiService(chave, Duration.ofSeconds(30)); var completionRequest = ChatCompletionRequest .builder() .model("gpt-4") .messages(Arrays.asList( new ChatMessage(ChatMessageRole.USER.value(), user), new ChatMessage(ChatMessageRole.SYSTEM.value(), system) )) .build(); service .createChatCompletion(completionRequest) .getChoices() .forEach(c -> System.out.println(c.getMessage().getContent())); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((1993, 2021), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((2070, 2100), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
package it.ohalee.minecraftgpt; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import org.bukkit.configuration.ConfigurationSection; import retrofit2.HttpException; import java.time.Duration; import java.util.Arrays; import java.util.List; import java.util.concurrent.CompletableFuture; public class OpenAI { private static OpenAiService service; public static CompletableFuture<Void> init(String key) { return CompletableFuture.runAsync(() -> service = new OpenAiService(key, Duration.ofSeconds(5))); } public static CompletableFuture<String> getResponse(ConfigurationSection section, List<ChatMessage> chatMessages, String message) { chatMessages.add(new ChatMessage("user", message)); return CompletableFuture.supplyAsync(() -> { String model = section.getString("model", "text-davinci-003"); int maxTokens = section.getInt("max-tokens"); double frequencyPenalty = section.getDouble("frequency-penalty"); double presencePenalty = section.getDouble("presence-penalty"); double topP = section.getDouble("top-p"); double temperature = section.getDouble("temperature"); String reply = service.createChatCompletion(ChatCompletionRequest.builder() .messages(chatMessages) .model(model) .temperature(temperature) .maxTokens(maxTokens) .topP(topP) .frequencyPenalty(frequencyPenalty) .presencePenalty(presencePenalty) .stop(Arrays.asList("Human:", "AI:")) .build()) .getChoices().get(0).getMessage().getContent(); chatMessages.add(new ChatMessage("assistant", reply)); return reply; }).exceptionally(throwable -> { if (throwable.getCause() instanceof HttpException e) { String reason = switch (e.response().code()) { case 401 -> "Invalid API key! Please check your configuration."; case 429 -> "Too many requests! Please wait a few seconds and try again."; case 500 -> "OpenAI service is currently unavailable. Please try again later."; default -> "Unknown error! Please try again later. If this error persists, contact the plugin developer."; }; throw new RuntimeException(reason, throwable); } throw new RuntimeException(throwable); }); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((876, 2775), 'java.util.concurrent.CompletableFuture.supplyAsync'), ((1379, 1877), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1379, 1840), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1379, 1774), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1379, 1712), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1379, 1648), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1379, 1608), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1379, 1558), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1379, 1504), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1379, 1462), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.example.gpt3inhebrew; import android.os.Handler; import android.os.Message; import com.theokanning.openai.OpenAiService; import com.theokanning.openai.completion.CompletionRequest; public class GptThread extends Thread { String prompt,temperature, top_p, frequency_penalty, presence_penalty, maximum_length; Handler handler; public GptThread(String prompt, String temperature, String top_p, String frequency_penalty, String presence_penalty, String maximum_length, Handler handler) { this.prompt = prompt; this.temperature = temperature; this.top_p = top_p; this.frequency_penalty = frequency_penalty; this.presence_penalty = presence_penalty; this.maximum_length = maximum_length; this.handler = handler; } @Override public void run() { super.run(); Message message = new Message(); String token = Helper.open_ai_api_key; OpenAiService service = new OpenAiService(token); CompletionRequest completionRequest = CompletionRequest.builder() .prompt(prompt) .temperature(Double.valueOf(temperature)) .topP(Double.valueOf(top_p)) .frequencyPenalty(Double.valueOf(frequency_penalty)) .presencePenalty(Double.valueOf(presence_penalty)) .maxTokens(Integer.valueOf(maximum_length)) .echo(true) .build(); try { message.obj = service.createCompletion("text-davinci-002", completionRequest).getChoices(); message.what = 200; handler.sendMessage(message); } catch (Exception e) { message.obj = e.getMessage(); message.what = 400; handler.sendMessage(message); } } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((1056, 1467), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1056, 1442), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1056, 1414), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1056, 1354), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1056, 1287), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1056, 1218), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1056, 1173), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1056, 1115), 'com.theokanning.openai.completion.CompletionRequest.builder')]
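GptThread above targets an older release of the client, where the model name is passed to createCompletion directly as a first argument. In the newer API used elsewhere in this set (see Gpt3ServiceImpl above), the model goes into the builder instead; a minimal sketch of that form, with placeholder prompt and model values:

import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;

class CompletionSketch {
    public static void main(String[] args) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"));

        // The model name lives in the request builder in newer client versions.
        CompletionRequest request = CompletionRequest.builder()
                .model("text-davinci-003")   // placeholder legacy completion model
                .prompt("Write one sentence about token limits.")
                .maxTokens(64)
                .temperature(0.3)
                .echo(false)
                .build();

        service.createCompletion(request)
                .getChoices()
                .forEach(choice -> System.out.println(choice.getText()));

        service.shutdownExecutor();
    }
}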
import java.awt.Color; import java.awt.Desktop; import java.awt.Dimension; import java.awt.EventQueue; import javax.swing.JFrame; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JPopupMenu; import javax.swing.border.EmptyBorder; import javax.swing.event.HyperlinkEvent; import javax.swing.event.HyperlinkListener; import javax.swing.text.BadLocationException; import javax.swing.text.DefaultCaret; import javax.swing.text.Style; import javax.swing.text.StyleConstants; import javax.swing.text.StyleContext; import javax.swing.text.StyledDocument; import com.google.gson.Gson; import com.jtattoo.plaf.hifi.HiFiLookAndFeel; import javax.swing.JTextArea; import javax.swing.KeyStroke; //import javax.swing.JTextPane; import javax.swing.UIManager; import javax.swing.JButton; import javax.swing.JFileChooser; import java.awt.event.ActionListener; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.lang.reflect.Field; import java.net.URISyntaxException; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Paths; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.Properties; import java.util.Random; import java.awt.event.ActionEvent; import javax.swing.JScrollPane; import java.awt.Font; import java.awt.Toolkit; import java.awt.datatransfer.Clipboard; import java.awt.datatransfer.DataFlavor; import java.awt.datatransfer.StringSelection; import java.awt.datatransfer.Transferable; import java.awt.datatransfer.UnsupportedFlavorException; import javax.swing.ImageIcon; import javax.swing.JMenuBar; import javax.swing.JMenu; import javax.swing.JMenuItem; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import javax.swing.JEditorPane; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import org.commonmark.node.*; import org.commonmark.parser.Parser; import org.commonmark.renderer.html.HtmlRenderer; public class MainFrame extends JFrame { private static MainFrame frame; private JPanel contentPane; private OpenAiService service; private final static ArrayList<ChatMessage> messages = new ArrayList<>(); private static JTextArea ChatArea; private static JButton SubmitButton; private static JScrollPane scrollPane; private static JScrollPane scrollPane_1; private static JButton SaveButton; private static JButton ImportButton; private static JButton ResetButton; private static JEditorPane DisplayArea; private static JEditorPane HTMLArea; private static StyledDocument doc; private JMenuBar menuBar; private static String GPTConvo; private File FGPTConvo; public static Properties prop; public static String version = "1.3.2"; private Boolean first = true; private Boolean chathistory = true; private Boolean autotitle = true; private Boolean enter2submit = true; private Boolean cloaderopen = false; private Boolean aframeopen = false; private static Boolean isHTMLView = false; private static Parser parser; private static HtmlRenderer 
renderer; public static Boolean isAlpha = true; private Boolean isStreamRunning = false; private static int FormSize = 3; private static int FontSize = 12; public static int seltheme = 0; private ChatLoader cloader; private String chatDir; //Initializing Style objects for RTF text in DisplayArea private static Style YouStyle; private static Style InvisibleStyle; private static Style GPTStyle; private static Style ChatStyle; private static Style ErrorStyle; private static MainFrame INSTANCE = null; //This function is used to load a chat from a file specified by the full file path and filename. //It sets the title of the instance to include the filename and clears the display area. //It also resets the messages and reads them from the file. If the view is set to HTML, it resets the HTML area style and renders the document. //If there is an exception, it displays an error message and prints the stack trace. Finally, it sets the FGPTConvo file and sets the first flag to false. public static void loadchat(String fullfilepath, String filename) throws BadLocationException { INSTANCE.setTitle("JavaGPT - " + filename); try { DisplayArea.setText(""); messages.clear(); readMessagesFromFile(fullfilepath); if(isHTMLView) { resetHTMLAreaStyle(); Node document = parser.parse(DisplayArea.getDocument().getText(0, DisplayArea.getDocument().getLength())); //System.out.println(renderer.render(document)); HTMLArea.setText(renderer.render(document)); } } catch (Exception e) { JOptionPane.showMessageDialog(null, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); e.printStackTrace(); } INSTANCE.FGPTConvo = new File(fullfilepath); INSTANCE.first = false; } //Writes chat contents to .json format public void writeMessagesToFile(String filename) throws IOException { try (PrintWriter writer = new PrintWriter(filename)) { Gson gson = new Gson(); for (ChatMessage message : messages) { String json = gson.toJson(message); writer.println(json); } } } //Reads chat contents from provided .json, stores it in the messages ArrayList and outputs contents in DisplayArea public static void readMessagesFromFile(String filename) throws IOException { try (BufferedReader reader = new BufferedReader(new FileReader(filename))) { String line; Gson gson = new Gson(); while ((line = reader.readLine()) != null) { ChatMessage message = gson.fromJson(line, ChatMessage.class); if(message.getRole().equals("user")) { try { doc.insertString(doc.getLength(), "You", YouStyle); doc.insertString(doc.getLength(), ":\n", InvisibleStyle); doc.insertString(doc.getLength(), message.getContent() + "\n\n", ChatStyle); } catch (BadLocationException e) { e.printStackTrace(); } }else{ try { doc.insertString(doc.getLength(), "ChatGPT", GPTStyle); doc.insertString(doc.getLength(), ":\n", InvisibleStyle); doc.insertString(doc.getLength(), message.getContent() + "\n\n", ChatStyle); } catch (BadLocationException e) { e.printStackTrace(); } } messages.add(message); } } } //Refreshes DisplayArea contents with current messages ArrayList items public void refreshMessages() { DisplayArea.setText(""); for (ChatMessage message : messages) { if(message.getRole().equals("user")) { try { doc.insertString(doc.getLength(), "You", YouStyle); doc.insertString(doc.getLength(), ":\n", InvisibleStyle); doc.insertString(doc.getLength(), message.getContent() + "\n\n", ChatStyle); } catch (BadLocationException e) { e.printStackTrace(); } }else{ try { doc.insertString(doc.getLength(), "ChatGPT", GPTStyle); doc.insertString(doc.getLength(), ":\n", InvisibleStyle); 
doc.insertString(doc.getLength(), message.getContent() + "\n\n", ChatStyle); } catch (BadLocationException e) { e.printStackTrace(); } } } } //Used in newFile() to create a new file name (Ex: Chat_x0y, Chat_09k, Chat_rc7) public static String getRandomString() { String letters = "abcdefghijklmnopqrstuvwxyz1234567890"; Random rand = new Random(); StringBuilder sb = new StringBuilder(); for (int i = 0; i < 3; i++) { int index = rand.nextInt(letters.length()); sb.append(letters.charAt(index)); } return sb.toString(); } //Creates a new chat file by setting FGPTConvo File object to a new file name public void newFile() { String randfilename = getRandomString(); FGPTConvo = new File(chatDir + "\\Chat_" + randfilename + ".json"); while(FGPTConvo.exists()) { randfilename = getRandomString(); FGPTConvo = new File(chatDir + "\\Chat_" + randfilename + ".json"); } setTitle("JavaGPT - Chat_" + randfilename); } //Resets all objects used for chat. Is invoked when "New Chat" is pressed or a chat file is loaded public void Reset() { isStreamRunning = false; messages.clear(); FGPTConvo = null; GPTConvo = ""; DisplayArea.setText(""); HTMLArea.setText(""); resetHTMLAreaStyle(); ChatArea.setText(""); setTitle("JavaGPT"); first = true; } /** * Launch the application. */ public static void main(String[] args) { EventQueue.invokeLater(new Runnable() { public void run() { //Sets project to support Unicode try { System.setProperty("file.encoding","UTF-8"); Field charset = Charset.class.getDeclaredField("defaultCharset"); charset.setAccessible(true); charset.set(null,null); }catch(Exception e) {} //------------------------------- //Loads properties------------------------ prop = new Properties(); InputStream input = null; try { input = new FileInputStream("config.properties"); prop.load(input); } catch (FileNotFoundException e1) { int choice = JOptionPane.showConfirmDialog(null, "No config file found. 
Would you like to create one?", "Create Config File", JOptionPane.YES_NO_OPTION); if(choice == JOptionPane.YES_OPTION) { String apikey = JOptionPane.showInputDialog( null, "Please enter your API key:"); prop.setProperty("apikey", apikey); prop.setProperty("model", "gpt-3.5-turbo"); prop.setProperty("maxTokens", "1024"); prop.setProperty("timeout", "30"); prop.setProperty("proxyip", ""); // WIP Support will be added back prop.setProperty("proxyport", ""); // WIP Support will be added back prop.setProperty("proxytype", ""); prop.setProperty("autotitle", "true"); prop.setProperty("autoscroll", "true"); prop.setProperty("EnterToSubmit", "true"); prop.setProperty("chat_history", "true"); prop.setProperty("chat_location_override", ""); prop.setProperty("WindowSize", "medium"); prop.setProperty("FontSize", "12"); prop.setProperty("Theme", "dark"); try { FileOutputStream out = new FileOutputStream("config.properties"); prop.store(out, "Generated config file"); out.close(); JOptionPane.showMessageDialog(null, "Config file created successfully!"); } catch (IOException ex) { ex.printStackTrace(); } } e1.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } finally { if (input != null) { try { input.close(); } catch (IOException e) { e.printStackTrace(); JOptionPane.showMessageDialog(null, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } } } //---------------------------------------- //Sets proxy settings if(prop.getProperty("proxyip") != null && !prop.getProperty("proxyip").isEmpty() && prop.getProperty("proxyport") != null && !prop.getProperty("proxyport").isEmpty()) { if(prop.getProperty("proxytype").toLowerCase().equals("http")) { System.setProperty("http.proxyHost", prop.getProperty("proxyip")); System.setProperty("http.proxyPort", prop.getProperty("proxyport")); }else if(prop.getProperty("proxytype").toLowerCase().equals("https")){ System.setProperty("https.proxyHost", prop.getProperty("proxyip")); System.setProperty("https.proxyPort", prop.getProperty("proxyport")); }else { System.getProperties().put( "proxySet", "true" ); System.getProperties().put( "socksProxyHost", prop.getProperty("proxyip") ); System.getProperties().put( "socksProxyPort", prop.getProperty("proxyport") ); } } //------------------- //Sets selected JTattoo theme------------- try { if(!prop.getProperty("Theme").isEmpty()) { if(prop.getProperty("Theme").equals("dark")) { Properties p = new Properties(); p.put("windowTitleFont", "Ebrima PLAIN 15"); p.put("backgroundPattern", "off"); p.put("logoString", ""); HiFiLookAndFeel.setCurrentTheme(p); UIManager.setLookAndFeel("com.jtattoo.plaf.hifi.HiFiLookAndFeel"); seltheme = 1; } } } catch (Exception e) { e.printStackTrace(); } //---------------------------------------- frame = new MainFrame(); //Loads main JFrame //Scales JFrame based on "WindowSize" prop switch(prop.getProperty("WindowSize")){ case "small": FormSize=1; break; case "large": FormSize=2; break; default: FormSize=3; break; } setFormSize(); //---------------------------------------- //Sets app icon to JavaGPT logo frame.setIconImage(Toolkit.getDefaultToolkit().getImage(getClass().getResource("logo.png"))); if(prop.getProperty("FontSize") != null && !prop.getProperty("FontSize").isEmpty()) { try { FontSize = Integer.parseInt(prop.getProperty("FontSize")); } catch (NumberFormatException e) { } } //Makes JFrame visible frame.setVisible(true); } }); } /** * Create the frame. 
* @param GPTStyle * @param ChatStyle */ public MainFrame() { setResizable(false); INSTANCE = this; setTitle("JavaGPT"); setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); //Initializes OpenAI's ChatGPT API with provided API key service = new OpenAiService(prop.getProperty("apikey"),(prop.getProperty("timeout") == null && prop.getProperty("timeout").isEmpty()) ? Duration.ZERO : Duration.ofSeconds(Long.parseLong(prop.getProperty("timeout")))); menuBar = new JMenuBar(); setJMenuBar(menuBar); JMenu OptionMenu = new JMenu("Options"); menuBar.add(OptionMenu); //Renderer and Parser for HTMLView parser = Parser.builder().build(); renderer = HtmlRenderer.builder().build(); // //Code for HTML Viewer JMenu. If clicked, it will set isHTMLView to its counter value. //If true, it will switch scrollPane to show HTMLArea and display the plain text contents for DisplayArea in it //If false, it will switch scrollPane to show DisplayArea JMenuItem HTMLViewMenuItem = new JMenuItem("HTML View"); HTMLViewMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(isHTMLView) { try { scrollPane.setViewportView(DisplayArea); HTMLViewMenuItem.setText("HTML View"); isHTMLView=false; } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } }else { try { scrollPane.setViewportView(HTMLArea); resetHTMLAreaStyle(); Node document = parser.parse(DisplayArea.getDocument().getText(0, DisplayArea.getDocument().getLength())); HTMLArea.setText(renderer.render(document)); HTMLViewMenuItem.setText("Normal View"); isHTMLView=true; } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } } }); OptionMenu.add(HTMLViewMenuItem); //Will scale the JFrame based on preset dimensions for JMenu options Large, Medium, & Small JMenu FormSizeMenu = new JMenu("Form Size"); OptionMenu.add(FormSizeMenu); JMenuItem SmallMenuItem = new JMenuItem("Small"); FormSizeMenu.add(SmallMenuItem); SmallMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(FormSize != 1) { FormSize = 1; setFormSize(); } } }); JMenuItem MediumMenuItem = new JMenuItem("Medium"); FormSizeMenu.add(MediumMenuItem); MediumMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(FormSize != 3) { FormSize = 3; setFormSize(); } } }); JMenuItem LargeMenuItem = new JMenuItem("Large"); FormSizeMenu.add(LargeMenuItem); LargeMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(FormSize != 2) { FormSize = 2; setFormSize(); } } }); JMenu FontSizeMenu = new JMenu("Font Size"); OptionMenu.add(FontSizeMenu); JMenuItem DefaultFSMenuItem = new JMenuItem("Default (12)"); FontSizeMenu.add(DefaultFSMenuItem); DefaultFSMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(FontSize != 12) { FontSize = 12; setFontSize(); refreshMessages(); } } }); JMenuItem LargeFSMenuItem = new JMenuItem("Large (16)"); FontSizeMenu.add(LargeFSMenuItem); LargeFSMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(FontSize != 16) { FontSize = 16; setFontSize(); refreshMessages(); } } }); JMenuItem ExtraLargeFSMenuItem = new JMenuItem("Ex-Large (20)"); FontSizeMenu.add(ExtraLargeFSMenuItem); ExtraLargeFSMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(FontSize != 20) { FontSize = 20; setFontSize(); refreshMessages(); } } }); JMenuItem CustomFSMenuItem = new 
JMenuItem("Custom"); FontSizeMenu.add(CustomFSMenuItem); CustomFSMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { String input = JOptionPane.showInputDialog(null, "Enter font size:", "Font Size", JOptionPane.PLAIN_MESSAGE); try { FontSize = Integer.parseInt(input); setFontSize(); refreshMessages(); } catch (NumberFormatException e1) { JOptionPane.showMessageDialog(null, "Invalid font size", "Error", JOptionPane.ERROR_MESSAGE); } } }); //---------------------------------------------------------------------------------- JMenu RenameMenu = new JMenu("Rename"); OptionMenu.add(RenameMenu); //Rename option which when clicked has ChatGPT generate a title based on current chat context JMenuItem AutoMenuItem = new JMenuItem("Auto"); RenameMenu.add(AutoMenuItem); AutoMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(FGPTConvo != null) { AutoTitle(); }else { JOptionPane.showMessageDialog(null, "No chat file loaded", "Error", JOptionPane.ERROR_MESSAGE); } } }); //This code adds a manual menu item to a rename menu. //When the manual menu item is clicked, it prompts the user to enter a title for the file to be renamed. //If the file already exists with the inputted title, an error message is shown. //Otherwise, the file is renamed and a success message is shown along with the new title in the window title bar. //However, if no file is loaded, an error message is shown. JMenuItem ManualMenuItem = new JMenuItem("Manual"); RenameMenu.add(ManualMenuItem); ManualMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(FGPTConvo != null) { String title = JOptionPane.showInputDialog(null, "Please enter a title:", "Rename", JOptionPane.PLAIN_MESSAGE); if(title != null) { File file = new File(FGPTConvo.getParentFile(), title + ".json"); if(file.exists()) { JOptionPane.showMessageDialog(null, "File already exists", "Error", JOptionPane.ERROR_MESSAGE); }else { FGPTConvo.renameTo(file); FGPTConvo = file; JOptionPane.showMessageDialog(null, "File renamed successfully", "Success", JOptionPane.INFORMATION_MESSAGE); INSTANCE.setTitle("JavaGPT - " + title); } } }else { JOptionPane.showMessageDialog(null, "No chat file loaded", "Error", JOptionPane.ERROR_MESSAGE); } } }); //Deletes chat file if it exists JMenuItem DeleteMenuItem = new JMenuItem("Delete"); DeleteMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(FGPTConvo != null && FGPTConvo.exists()) { //checks if the file exists FGPTConvo.delete(); //deletes the file Reset(); } else { JOptionPane.showMessageDialog(null, "File not found", "Error", JOptionPane.ERROR_MESSAGE); } } }); //Reverts chat contents to previous state by removing the last prompt & response from messages ArrayList and reloads the DisplayArea JMenuItem RevertMenuItem = new JMenuItem("Revert"); RevertMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(messages.size() >= 4) { //checks if the file exists messages.remove(messages.size() - 1); messages.remove(messages.size() - 1); refreshMessages(); } else { if(messages.isEmpty()) { JOptionPane.showMessageDialog(null, "No chat loaded", "Error", JOptionPane.ERROR_MESSAGE); }else { JOptionPane.showMessageDialog(null, "Can't revert first prompt", "Error", JOptionPane.ERROR_MESSAGE); } } } }); OptionMenu.add(RevertMenuItem); OptionMenu.add(DeleteMenuItem); //Opens "About" JFrame JMenuItem AboutMenuItem = new JMenuItem("About"); 
AboutMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(aframeopen != true) { AboutFrame aframe = new AboutFrame(); aframe.setIconImage(Toolkit.getDefaultToolkit().getImage(getClass().getResource("logo.png"))); aframe.setVisible(true); aframeopen = true; aframe.addWindowListener(new java.awt.event.WindowAdapter() { @Override public void windowClosing(java.awt.event.WindowEvent windowEvent) { aframeopen = false; } }); } } }); OptionMenu.add(AboutMenuItem); //Opens "ChatLoader" (Chat History) JFrame JMenu LoadChatButton = new JMenu("Load Chat"); LoadChatButton.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { if(cloaderopen != true) { cloader = new ChatLoader(chatDir); cloader.setIconImage(Toolkit.getDefaultToolkit().getImage(getClass().getResource("logo.png"))); cloader.setVisible(true); cloaderopen = true; cloader.addWindowListener(new java.awt.event.WindowAdapter() { @Override public void windowClosing(java.awt.event.WindowEvent windowEvent) { cloaderopen = false; } }); } } }); menuBar.add(LoadChatButton); contentPane = new JPanel(); contentPane.setBorder(new EmptyBorder(5, 5, 5, 5)); setContentPane(contentPane); contentPane.setLayout(null); scrollPane = new JScrollPane(); contentPane.add(scrollPane); DisplayArea = new JEditorPane(); scrollPane.setViewportView(DisplayArea); DisplayArea.setEditable(false); DisplayArea.setContentType("text/rtf"); HTMLArea = new JEditorPane(); HTMLArea.setEditable(false); HTMLArea.setBackground(Color.white); HTMLArea.setContentType("text/html"); //Sets properties for Style objects StyleContext sc = StyleContext.getDefaultStyleContext(); YouStyle = sc.addStyle("bold", null); StyleConstants.setFontFamily(YouStyle, "Tahoma"); StyleConstants.setFontSize(YouStyle, FontSize); StyleConstants.setBold(YouStyle, true); GPTStyle = sc.addStyle("bold", null); StyleConstants.setFontFamily(GPTStyle, "Tahoma"); StyleConstants.setFontSize(GPTStyle, FontSize); StyleConstants.setBold(GPTStyle, true); StyleConstants.setForeground(GPTStyle, Color.RED); //getHSBColor(0, 0.8f, 0.8f) InvisibleStyle = sc.addStyle("bold", null); StyleConstants.setForeground(InvisibleStyle, DisplayArea.getBackground()); ChatStyle = sc.addStyle("black", null); StyleConstants.setFontFamily(ChatStyle, "Tahoma"); StyleConstants.setFontSize(ChatStyle, FontSize); ErrorStyle = sc.addStyle("ErrorStyle", null); StyleConstants.setItalic(ErrorStyle, true); StyleConstants.setFontFamily(ErrorStyle, "Tahoma"); StyleConstants.setFontSize(ErrorStyle, FontSize); if(seltheme == 1) { StyleConstants.setForeground(YouStyle, Color.ORANGE); //getHSBColor(30f/360, 0.8f, 1f) StyleConstants.setForeground(ChatStyle, Color.WHITE); //Color.getHSBColor(0f, 0f, 0.8f) StyleConstants.setForeground(ErrorStyle, Color.WHITE); //Color.getHSBColor(0f, 0f, 0.8f) }else { StyleConstants.setForeground(YouStyle, Color.BLUE); StyleConstants.setForeground(ChatStyle, Color.BLACK); StyleConstants.setForeground(ErrorStyle, Color.BLACK); } //------------------------------------ doc = (StyledDocument) DisplayArea.getDocument(); //"Submit" button SubmitButton = new JButton("Submit"); SubmitButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { submit(); } }); contentPane.add(SubmitButton); //"New Chat" button ResetButton = new JButton("New Chat"); ResetButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { Reset(); } }); contentPane.add(ResetButton); scrollPane_1 = new JScrollPane(); 
contentPane.add(scrollPane_1); ChatArea = new JTextArea(); ChatArea.setWrapStyleWord(true); scrollPane_1.setViewportView(ChatArea); ChatArea.setLineWrap(true); //Makes hotkeys for ChatArea ChatArea.addKeyListener(new KeyAdapter() { public void keyPressed(KeyEvent e) { if(enter2submit) { if (e.getKeyCode() == KeyEvent.VK_ENTER && e.isShiftDown()) { int caret = ChatArea.getCaretPosition(); ChatArea.insert("\n", caret); ChatArea.setCaretPosition(caret + 1); }else if(e.getKeyCode() == KeyEvent.VK_ENTER) { submit(); } }else { if (e.getKeyCode() == KeyEvent.VK_ENTER && e.isControlDown()) { submit(); } } } }); //Save Button code: takes contents of DisplayArea and saves it in plain text in user selected location with user provided filename SaveButton = new JButton(""); try { SaveButton.setIcon(new ImageIcon(MainFrame.class.getResource("FloppyDrive.gif"))); }catch(Exception e4) { JOptionPane.showMessageDialog(null, e4.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } SaveButton.setFont(new Font("Arial Black", Font.BOLD, 6)); SaveButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { File defaultDir = new File("."); JFileChooser fileChooser = new JFileChooser(defaultDir); fileChooser.setDialogTitle("Save chat"); int result = fileChooser.showSaveDialog(null); if (result == JFileChooser.APPROVE_OPTION) { File selectedFile = fileChooser.getSelectedFile(); try { FileWriter writer = new FileWriter(selectedFile); String plaintext = DisplayArea.getDocument().getText(0, DisplayArea.getDocument().getLength()); writer.write(plaintext); writer.close(); JOptionPane.showMessageDialog(null, "File saved successfully."); } catch (IOException e1) { e1.printStackTrace(); JOptionPane.showMessageDialog(null, e1.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } catch (BadLocationException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } } }); contentPane.add(SaveButton); //Imports user selected file and sets contents to ChatArea ImportButton = new JButton(""); ImportButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { JFileChooser fileChooser = new JFileChooser(); fileChooser.setDialogTitle("Import prompt"); int returnVal = fileChooser.showOpenDialog(null); if (returnVal == JFileChooser.APPROVE_OPTION) { String filename = fileChooser.getSelectedFile().getAbsolutePath(); try { ChatArea.setText(new String(Files.readAllBytes(Paths.get(filename)))); } catch (IOException e1) { // TODO Auto-generated catch block JOptionPane.showMessageDialog(null, e1.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } } } }); ImportButton.setIcon(new ImageIcon(MainFrame.class.getResource("upFolder.gif"))); contentPane.add(ImportButton); //Right-click menu MouseListners for various chat elements DisplayArea.addMouseListener(new MouseAdapter() { @Override public void mousePressed(MouseEvent e) { if (e.isPopupTrigger()) { showDisplayMenu(e.getX(), e.getY()); } } @Override public void mouseReleased(MouseEvent e) { if (e.isPopupTrigger()) { showDisplayMenu(e.getX(), e.getY()); } } }); HTMLArea.addMouseListener(new MouseAdapter() { @Override public void mousePressed(MouseEvent e) { if (e.isPopupTrigger()) { showHTMLMenu(e.getX(), e.getY()); } } @Override public void mouseReleased(MouseEvent e) { if (e.isPopupTrigger()) { showHTMLMenu(e.getX(), e.getY()); } } }); ChatArea.addMouseListener(new MouseAdapter() { @Override public void mousePressed(MouseEvent e) { if (e.isPopupTrigger()) { showChatMenu(e.getX(), e.getY()); } } @Override public void 
mouseReleased(MouseEvent e) { if (e.isPopupTrigger()) { showChatMenu(e.getX(), e.getY()); } } }); //-------------------------------------------------------------------- //Allows for HTMLArea to have HyperLinks HTMLArea.addHyperlinkListener(new HyperlinkListener() { public void hyperlinkUpdate(HyperlinkEvent e) { if(e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) { try { Desktop.getDesktop().browse(e.getURL().toURI()); } catch (IOException | URISyntaxException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } } }); //Default /*setBounds(100, 100, 702, 707); //Uncomment this when editing design SubmitButton.setBounds(10, 554, 89, 23); ResetButton.setBounds(10, 616, 89, 23); scrollPane.setBounds(10, 11, 667, 532); scrollPane_1.setBounds(109, 554, 568, 85); SaveButton.setBounds(10, 585, 43, 23); ImportButton.setBounds(56, 585, 43, 23);*/ //Bulk property setting------------------- try { if(prop.getProperty("autoscroll") != null && !prop.getProperty("autoscroll").isEmpty()) { if(prop.getProperty("autoscroll").equals("true")) { DefaultCaret caret = (DefaultCaret)DisplayArea.getCaret(); caret.setUpdatePolicy(DefaultCaret.ALWAYS_UPDATE); } } if(prop.getProperty("chat_history") != null && !prop.getProperty("chat_history").isEmpty()) { if(prop.getProperty("chat_history").equals("true")){ chathistory = true; }else{ chathistory = false; } } if(prop.getProperty("autotitle") != null && !prop.getProperty("autotitle").isEmpty()) { if(prop.getProperty("autotitle").equals("true")){ autotitle = true; }else{ autotitle = false; } } if(prop.getProperty("EnterToSubmit") != null && !prop.getProperty("EnterToSubmit").isEmpty()) { if(prop.getProperty("EnterToSubmit").equals("true")){ ChatArea.getInputMap().put(KeyStroke.getKeyStroke("ENTER"), "none"); }else{ enter2submit = false; } } if(prop.getProperty("chat_location_override") != null && !prop.getProperty("chat_location_override").isEmpty()){ chatDir = prop.getProperty("chat_location_override"); }else { try { chatDir = new File(getClass().getProtectionDomain().getCodeSource().getLocation().toURI().getPath()).getParent(); chatDir = chatDir + "\\chat_history"; File directory = new File(chatDir); if (!directory.exists()) { directory.mkdirs(); } } catch (URISyntaxException e1) { JOptionPane.showMessageDialog(null, e1.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } } //---------------------------------------- } catch (Exception ex) { ex.printStackTrace(); JOptionPane.showMessageDialog(null, ex.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } } //Processes ChatArea contents submitted by user to ChatGPT API and displays response private void submit() { if(isStreamRunning) { isStreamRunning = false; SubmitButton.setText("Submit"); return; } Thread myThread = new Thread(new Runnable() { public void run() { SubmitButton.setText("Cancel Req"); //Boolean success = false; try { doc.insertString(doc.getLength(), "You", YouStyle); doc.insertString(doc.getLength(), ":\n", InvisibleStyle); doc.insertString(doc.getLength(), ChatArea.getText() + "\n\n", ChatStyle); doc.insertString(doc.getLength(), "ChatGPT", GPTStyle); doc.insertString(doc.getLength(), ":\n", InvisibleStyle); } catch (BadLocationException e2) { e2.printStackTrace(); } try { StringBuilder GPTConvoBuilder = new StringBuilder(); final ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), ChatArea.getText()); messages.add(userMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model(prop.getProperty("model")) .messages(messages) 
.n(1) .maxTokens(Integer.parseInt(prop.getProperty("maxTokens"))) .logitBias(new HashMap<>()) .build(); isStreamRunning = true; service.streamChatCompletion(chatCompletionRequest) .doOnError(Throwable::printStackTrace) .takeWhile(resultsBatch -> isStreamRunning) .blockingForEach(chunk -> { for (ChatCompletionChoice choice : chunk.getChoices()) { if(choice.getMessage().getContent() != null) { GPTConvoBuilder.append(choice.getMessage().getContent()); } try { //String messageContent = new String(choice.getMessage().getContent().getBytes("UTF-8"), "UTF-8"); //doc.putProperty("console.encoding", "UTF-8"); doc.insertString(doc.getLength(), choice.getMessage().getContent(), ChatStyle); } catch (BadLocationException e2) { e2.printStackTrace(); } } }); //service.shutdownExecutor(); if(isStreamRunning) { try { doc.insertString(doc.getLength(), "\n\n", ChatStyle); if(isHTMLView) { resetHTMLAreaStyle(); Node document = parser.parse(DisplayArea.getDocument().getText(0, DisplayArea.getDocument().getLength())); HTMLArea.setText(renderer.render(document)); } } catch (BadLocationException e2) { e2.printStackTrace(); } GPTConvo = GPTConvoBuilder.toString(); final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), GPTConvo); messages.add(systemMessage); if(chathistory) { if(first) { newFile(); } try { writeMessagesToFile(FGPTConvo.getPath()); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } if(first && autotitle){ AutoTitle(); first = false; } } ChatArea.setText(""); }else { if(messages.size() != 0) { messages.remove(messages.size() - 1); doc.insertString(doc.getLength(), "\n\n" + "Note: The previous prompt and response did not save as it was canceled" + "\n\n", ErrorStyle); } } }catch(Exception e) { //JOptionPane.showMessageDialog(null, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); try { doc.insertString(doc.getLength(), "Error: " + e.getMessage() + "\n\n", ErrorStyle); } catch (BadLocationException e2) { e2.printStackTrace(); } } isStreamRunning = false; SubmitButton.setText("Submit"); } }); myThread.start(); // Start the thread } //Right-click functions for various JFrame objects private void showDisplayMenu(int x, int y) { JPopupMenu popupMenu = new JPopupMenu(); JMenuItem copyMenuItem = new JMenuItem("Copy"); copyMenuItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String selectedText = DisplayArea.getSelectedText(); if (selectedText != null) { StringSelection selection = new StringSelection(selectedText); Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard(); clipboard.setContents(selection, null); } } }); popupMenu.add(copyMenuItem); popupMenu.show(DisplayArea, x, y); } private void showHTMLMenu(int x, int y) { JPopupMenu popupMenu = new JPopupMenu(); JMenuItem copyMenuItem = new JMenuItem("Copy"); copyMenuItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String selectedText = HTMLArea.getSelectedText(); if (selectedText != null) { StringSelection selection = new StringSelection(selectedText); Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard(); clipboard.setContents(selection, null); } } }); popupMenu.add(copyMenuItem); popupMenu.show(HTMLArea, x, y); } private void showChatMenu(int x, int y) { JPopupMenu popupMenu = new JPopupMenu(); JMenuItem copyMenuItem = new JMenuItem("Copy"); copyMenuItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String 
selectedText = ChatArea.getSelectedText(); if (selectedText != null) { StringSelection selection = new StringSelection(selectedText); Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard(); clipboard.setContents(selection, null); } } }); popupMenu.add(copyMenuItem); JMenuItem pasteMenuItem = new JMenuItem("Paste"); pasteMenuItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String selectedText = ChatArea.getSelectedText(); if (selectedText != null && !selectedText.isEmpty()) { Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard(); Transferable contents = clipboard.getContents(null); if (contents != null && contents.isDataFlavorSupported(DataFlavor.stringFlavor)) { try { String clipboardText = (String) contents.getTransferData(DataFlavor.stringFlavor); ChatArea.replaceSelection(clipboardText); } catch (UnsupportedFlavorException | IOException ex) { ex.printStackTrace(); } } } else { Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard(); Transferable contents = clipboard.getContents(null); if (contents != null && contents.isDataFlavorSupported(DataFlavor.stringFlavor)) { try { String clipboardText = (String) contents.getTransferData(DataFlavor.stringFlavor); int caretPos = ChatArea.getCaretPosition(); ChatArea.insert(clipboardText, caretPos); } catch (UnsupportedFlavorException | IOException ex) { ex.printStackTrace(); } } } } }); popupMenu.add(pasteMenuItem); JMenuItem clearMenuItem = new JMenuItem("Clear"); clearMenuItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { ChatArea.setText(""); } }); popupMenu.add(clearMenuItem); popupMenu.show(ChatArea, x, y); } //-------------------------------------------------- //Function that auto generates title for current chat based off its context public void AutoTitle() { Thread myThread = new Thread(new Runnable() { public void run() { setTitle("JavaGPT *** ChatGPT is generating a title. Please wait..."); SubmitButton.setText("Loading..."); StringBuilder TitleBuilder = new StringBuilder(); try { final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "Create a short title that summarizes this conversation. 
Provide title only."); messages.add(systemMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model(prop.getProperty("model")) .messages(messages) .n(1) .maxTokens(25) .logitBias(new HashMap<>()) .build(); service.streamChatCompletion(chatCompletionRequest) .doOnError(Throwable::printStackTrace) .blockingForEach(chunk -> { for (ChatCompletionChoice choice : chunk.getChoices()) { if(choice.getMessage().getContent() != null) { TitleBuilder.append(choice.getMessage().getContent()); } } }); messages.remove(messages.size() - 1); String title = TitleBuilder.toString(); title = title.replaceAll("[\\\\/:*?\"<>|]", ""); if(title.substring(title.length() - 1).equals(".")) { title = title.substring(0, title.length() - 1); } SubmitButton.setText("Submit"); if(title != null) { File file = new File(FGPTConvo.getParentFile(), title + ".json"); if(file.exists()) { JOptionPane.showMessageDialog(null, "File already exists", "Error", JOptionPane.ERROR_MESSAGE); setTitle("JavaGPT - " + FGPTConvo.getName().substring(0, FGPTConvo.getName().length()-5)); }else { FGPTConvo.renameTo(file); FGPTConvo = file; INSTANCE.setTitle("JavaGPT - " + title); } } }catch(Exception e) { JOptionPane.showMessageDialog(null, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); SubmitButton.setText("Submit"); setTitle("JavaGPT - " + FGPTConvo.getName().substring(0, FGPTConvo.getName().length()-5)); } } }); myThread.start(); } //Resets HTMLArea to properly display new HTML content public static void resetHTMLAreaStyle() { HTMLArea.setContentType("text/plain"); HTMLArea.setContentType("text/html"); } //sets FormSize to presets defined public static void setFormSize(){ switch(FormSize){ case 1: frame.getContentPane().setPreferredSize(new Dimension(475, 532)); frame.pack(); scrollPane_1.setBounds(103, 454, 363, 69); scrollPane.setBounds(10, 11, 456, 432); SubmitButton.setBounds(10, 454, 89, 23); SaveButton.setBounds(10, 477, 43, 23); ImportButton.setBounds(56, 477, 43, 23); ResetButton.setBounds(10, 500, 89, 23); break; case 2: frame.getContentPane().setPreferredSize(new Dimension(1370, 960)); frame.pack(); SubmitButton.setBounds(13, 831, 148, 36); ResetButton.setBounds(13, 914, 148, 36); scrollPane.setBounds(13, 15, 1344, 802); scrollPane_1.setBounds(171, 831, 1186, 118); SaveButton.setBounds(13, 873, 73, 36); ImportButton.setBounds(88, 873, 73, 36); break; default: frame.getContentPane().setPreferredSize(new Dimension(686, 647)); frame.pack(); SubmitButton.setBounds(10, 554, 89, 23); ResetButton.setBounds(10, 616, 89, 23); scrollPane.setBounds(10, 11, 667, 532); scrollPane_1.setBounds(109, 554, 568, 85); SaveButton.setBounds(10, 585, 43, 23); ImportButton.setBounds(56, 585, 43, 23); break; } } public void setFontSize() { StyleConstants.setFontSize(YouStyle, FontSize); StyleConstants.setFontSize(GPTStyle, FontSize); StyleConstants.setFontSize(ChatStyle, FontSize); StyleConstants.setFontSize(ErrorStyle, FontSize); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((9623, 9671), 'java.nio.charset.Charset.class.getDeclaredField'), ((14539, 14611), 'java.awt.Toolkit.getDefaultToolkit'), ((15633, 15657), 'org.commonmark.parser.Parser.builder'), ((15672, 15702), 'org.commonmark.renderer.html.HtmlRenderer.builder'), ((23570, 23642), 'java.awt.Toolkit.getDefaultToolkit'), ((24311, 24383), 'java.awt.Toolkit.getDefaultToolkit'), ((32327, 32374), 'java.awt.Desktop.getDesktop'), ((36175, 36203), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((38513, 38543), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((40381, 40429), 'java.awt.Toolkit.getDefaultToolkit'), ((41081, 41129), 'java.awt.Toolkit.getDefaultToolkit'), ((41783, 41831), 'java.awt.Toolkit.getDefaultToolkit'), ((42314, 42362), 'java.awt.Toolkit.getDefaultToolkit'), ((42963, 43011), 'java.awt.Toolkit.getDefaultToolkit'), ((44481, 44511), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
package br.com.danilo.ecommerce; import com.knuddels.jtokkit.Encodings; import com.knuddels.jtokkit.api.Encoding; import com.knuddels.jtokkit.api.EncodingRegistry; import com.knuddels.jtokkit.api.ModelType; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; import java.util.Arrays; // --> Identificador de Perfil de Compra de Clientes public class ProfileIdentifier { public static void main(String[] args) { var promptSystem = """ Identifique o perfil de compra de cada cliente. A resposta deve ser: Cliente - descreva o perfil do cliente em três palavras """; var client = loadCustomersFromFile(); // --> Carrega os clientes do arquivo String modelOpenAI = "gpt-3.5-turbo"; // --> Modelo do OpenAI a ser utilizado var tokens = countTokens(promptSystem); // --> Contador de Tokens var expectedResponseSize = 2048; // --> Tamanho esperado da resposta //? ------------------------------------ TESTE MODEL OPENAI -------------------------------------------------- System.out.println("Quantidade de Tokens: | Number of Tokens:" + tokens); System.out.println("Modelo OpenAI: | Model OpenAI: " + modelOpenAI + "\n"); System.out.println("||- ---------------------------------------------------------------- -||" + "\n" + "\n"); if (tokens > 4096) { // verificador de quantidade de tokens para escolha do melhor modelo do OpenAI modelOpenAI = "gpt-3.5-turbo-16k"; } //? ------------------------------------------------------------------------------------------------------------ var request = ChatCompletionRequest .builder() .model(modelOpenAI) // --> Modelo do OpenAI a ser utilizado .maxTokens(expectedResponseSize) // --> Tamanho esperado da resposta .messages(Arrays.asList( new ChatMessage( ChatMessageRole.SYSTEM.value(), promptSystem), new ChatMessage( ChatMessageRole.SYSTEM.value(), client))) .build(); var keyToken = System.getenv("OPENAI_API_KEY"); var serviceOpenAI = new OpenAiService(keyToken, Duration.ofSeconds(30)); System.out.println( serviceOpenAI .createChatCompletion(request) .getChoices().get(0).getMessage().getContent()); } // --> Carrega os clientes do arquivo private static String loadCustomersFromFile() { try { var path = Path.of(ClassLoader .getSystemResource("src/main/resources/shoppingList/lista_de_compras_10_clientes.csv") .toURI()); return Files.readAllLines(path).toString(); } catch (Exception errorLoadFile) { throw new RuntimeException("Erro ao carregar o arquivo! | Error loading file!", errorLoadFile); } } // --> Contador de Tokens private static int countTokens(String prompt) { EncodingRegistry registry = Encodings.newDefaultEncodingRegistry(); // Registro de codificação Encoding enc = registry.getEncodingForModel(ModelType.GPT_3_5_TURBO); // Modelo utilizado para o cálculo return enc.countTokens(prompt); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value" ]
[((2571, 2601), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2723, 2753), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((3463, 3498), 'java.nio.file.Files.readAllLines')]
/* * Copyright (c) 2023-2024 Jean Schmitz. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.talkforgeai.backend.assistant.service; import com.talkforgeai.backend.assistant.domain.AssistantEntity; import com.talkforgeai.backend.assistant.domain.AssistantPropertyValue; import com.talkforgeai.backend.assistant.domain.MessageEntity; import com.talkforgeai.backend.assistant.domain.ThreadEntity; import com.talkforgeai.backend.assistant.dto.AssistantDto; import com.talkforgeai.backend.assistant.dto.GenerateImageResponse; import com.talkforgeai.backend.assistant.dto.MessageListParsedDto; import com.talkforgeai.backend.assistant.dto.ParsedMessageDto; import com.talkforgeai.backend.assistant.dto.ProfileImageUploadResponse; import com.talkforgeai.backend.assistant.dto.ThreadDto; import com.talkforgeai.backend.assistant.dto.ThreadTitleDto; import com.talkforgeai.backend.assistant.dto.ThreadTitleGenerationRequestDto; import com.talkforgeai.backend.assistant.dto.ThreadTitleUpdateRequestDto; import com.talkforgeai.backend.assistant.exception.AssistentException; import com.talkforgeai.backend.assistant.repository.AssistantRepository; import com.talkforgeai.backend.assistant.repository.MessageRepository; import com.talkforgeai.backend.assistant.repository.ThreadRepository; import com.talkforgeai.backend.storage.FileStorageService; import com.talkforgeai.backend.transformers.MessageProcessor; import com.theokanning.openai.ListSearchParameters; import com.theokanning.openai.OpenAiResponse; import com.theokanning.openai.assistants.Assistant; import com.theokanning.openai.assistants.AssistantRequest; import com.theokanning.openai.assistants.ModifyAssistantRequest; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.image.CreateImageRequest; import com.theokanning.openai.image.ImageResult; import com.theokanning.openai.messages.Message; import com.theokanning.openai.messages.MessageRequest; import com.theokanning.openai.model.Model; import com.theokanning.openai.runs.Run; import com.theokanning.openai.runs.RunCreateRequest; import com.theokanning.openai.service.OpenAiService; import com.theokanning.openai.threads.Thread; import com.theokanning.openai.threads.ThreadRequest; import jakarta.transaction.Transactional; import java.awt.image.BufferedImage; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; import javax.imageio.ImageIO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.core.io.FileSystemResource; import org.springframework.core.io.Resource; import org.springframework.data.domain.Sort; import 
org.springframework.stereotype.Service; import org.springframework.util.StreamUtils; import org.springframework.web.multipart.MultipartFile; @Service public class AssistantService { public static final Logger LOGGER = LoggerFactory.getLogger(AssistantService.class); private final OpenAiService openAiService; private final AssistantRepository assistantRepository; private final MessageRepository messageRepository; private final ThreadRepository threadRepository; private final FileStorageService fileStorageService; private final MessageProcessor messageProcessor; private final AssistantMapper assistantMapper; public AssistantService(OpenAiService openAiService, AssistantRepository assistantRepository, MessageRepository messageRepository, ThreadRepository threadRepository, FileStorageService fileStorageService, MessageProcessor messageProcessor, AssistantMapper assistantMapper) { this.openAiService = openAiService; this.assistantRepository = assistantRepository; this.messageRepository = messageRepository; this.threadRepository = threadRepository; this.fileStorageService = fileStorageService; this.messageProcessor = messageProcessor; this.assistantMapper = assistantMapper; } public AssistantDto retrieveAssistant(String assistantId) { Assistant assistant = this.openAiService.retrieveAssistant(assistantId); if (assistant != null) { Optional<AssistantEntity> assistantEntity = assistantRepository.findById(assistant.getId()); if (assistantEntity.isPresent()) { return assistantMapper.mapAssistantDto(assistant, assistantEntity.get()); } } return null; } public List<AssistantDto> listAssistants(ListSearchParameters listAssistantsRequest) { OpenAiResponse<Assistant> assistantOpenAiResponse = this.openAiService.listAssistants( listAssistantsRequest); List<AssistantDto> assistantDtoList = new ArrayList<>(); assistantOpenAiResponse.data.forEach(assistant -> { Optional<AssistantEntity> assistantEntity = assistantRepository.findById(assistant.getId()); assistantEntity.ifPresent(entity -> { AssistantDto assistantDto = assistantMapper.mapAssistantDto( assistant, entity ); assistantDtoList.add(assistantDto); }); }); return assistantDtoList; } @Transactional public void syncAssistants() { ListSearchParameters searchParameters = new ListSearchParameters(); OpenAiResponse<Assistant> assistantList = this.openAiService.listAssistants( searchParameters); List<AssistantEntity> assistantEntities = assistantRepository.findAll(); // Create assistantList.data.forEach(assistant -> { LOGGER.info("Syncing assistant: {}", assistant.getId()); Optional<AssistantEntity> assistantEntity = assistantEntities.stream() .filter(entity -> entity.getId().equals(assistant.getId())) .findFirst(); if (assistantEntity.isEmpty()) { LOGGER.info("New assistant detected. Creating entity: {}", assistant.getId()); AssistantEntity entity = new AssistantEntity(); entity.setId(assistant.getId()); // Map persona.properties() to Map<String, PersonaPropertyValue> Arrays.stream(AssistantProperties.values()).forEach(p -> { AssistantPropertyValue propertyValue = new AssistantPropertyValue(); String value = p.getDefaultValue(); propertyValue.setPropertyValue(value); entity.getProperties().put(p.getKey(), propertyValue); }); assistantRepository.save(entity); } }); // Delete assistantEntities.forEach(entity -> { Optional<Assistant> assistant = assistantList.data.stream() .filter(a -> a.getId().equals(entity.getId())) .findFirst(); if (assistant.isEmpty()) { LOGGER.info("Assistant not found. 
Deleting entity: {}", entity.getId()); assistantRepository.delete(entity); } }); } @Transactional public ThreadDto createThread() { ThreadRequest threadRequest = new ThreadRequest(); Thread thread = this.openAiService.createThread(threadRequest); ThreadEntity threadEntity = new ThreadEntity(); threadEntity.setId(thread.getId()); threadEntity.setTitle("<no title>"); threadEntity.setCreatedAt(new Date(thread.getCreatedAt())); threadRepository.save(threadEntity); return mapToDto(threadEntity); } public List<ThreadDto> retrieveThreads() { return this.threadRepository.findAll(Sort.by(Sort.Direction.DESC, "createdAt")).stream() .map(this::mapToDto) .toList(); } public Message postMessage(String threadId, MessageRequest messageRequest) { return this.openAiService.createMessage(threadId, messageRequest); } public Run runConversation(String threadId, RunCreateRequest runCreateRequest) { return this.openAiService.createRun(threadId, runCreateRequest); } public MessageListParsedDto listMessages(String threadId, ListSearchParameters listSearchParameters) { OpenAiResponse<Message> messageList = this.openAiService.listMessages(threadId, listSearchParameters); Map<String, String> parsedMessages = new HashMap<>(); messageList.data.forEach(message -> { Optional<MessageEntity> messageEntity = messageRepository.findById(message.getId()); messageEntity.ifPresent( entity -> parsedMessages.put(message.getId(), entity.getParsedContent())); }); return new MessageListParsedDto(messageList, parsedMessages); } public Run retrieveRun(String threadId, String runId) { return this.openAiService.retrieveRun(threadId, runId); } public Run cancelRun(String threadId, String runId) { return openAiService.cancelRun(threadId, runId); } private ThreadDto mapToDto(ThreadEntity threadEntity) { return new ThreadDto(threadEntity.getId(), threadEntity.getTitle(), threadEntity.getCreatedAt()); } @Transactional public ParsedMessageDto postProcessMessage(String threadId, String messageId) { LOGGER.info("Post processing message: {}", messageId); Message message = this.openAiService.retrieveMessage(threadId, messageId); Optional<MessageEntity> messageEntity = messageRepository.findById(messageId); MessageEntity newMessageEntity = null; if (messageEntity.isPresent()) { newMessageEntity = messageEntity.get(); } else { newMessageEntity = new MessageEntity(); newMessageEntity.setId(message.getId()); newMessageEntity.setParsedContent(""); } String transformed = messageProcessor.transform( message.getContent().get(0).getText().getValue(), threadId, messageId); newMessageEntity.setParsedContent(transformed); messageRepository.save(newMessageEntity); ParsedMessageDto parsedMessageDto = new ParsedMessageDto(); parsedMessageDto.setMessage(message); parsedMessageDto.setParsedContent(transformed); return parsedMessageDto; } public byte[] getImage(String threadId, String filename) throws IOException { Path imgFilePath = fileStorageService.getThreadDirectory().resolve(threadId).resolve(filename); Resource resource = new FileSystemResource(imgFilePath); return StreamUtils.copyToByteArray(resource.getInputStream()); } @Transactional public ThreadTitleDto generateThreadTitle(String threadId, ThreadTitleGenerationRequestDto request) { ThreadEntity threadEntity = threadRepository.findById(threadId) .orElseThrow(() -> new AssistentException("Thread not found")); String content = """ Generate a title in less than 6 words for the following message: %s """.formatted(request.userMessageContent()); ChatMessage chatMessage = new 
ChatMessage(ChatMessageRole.USER.value(), content); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo") .messages(List.of(chatMessage)) .maxTokens(256) .build(); ChatMessage responseMessage = openAiService.createChatCompletion(chatCompletionRequest) .getChoices().get(0).getMessage(); String generatedTitle = responseMessage.getContent(); String parsedTitle = generatedTitle.replaceAll("\"", ""); threadEntity.setTitle(parsedTitle); threadRepository.save(threadEntity); return new ThreadTitleDto(generatedTitle); } public ThreadDto retrieveThread(String threadId) { return threadRepository.findById(threadId) .map(this::mapToDto) .orElseThrow(() -> new AssistentException("Thread not found")); } @Transactional public void modifyAssistant(String assistantId, AssistantDto modifiedAssistant) { Assistant assistant = openAiService.retrieveAssistant(assistantId); if (assistant == null) { throw new AssistentException("Assistant not found"); } AssistantEntity assistantEntity = assistantRepository.findById(assistantId) .orElseThrow(() -> new AssistentException("Assistant entity not found")); assistantEntity.setImagePath(modifiedAssistant.imagePath()); assistantEntity.setProperties(assistantMapper.mapProperties(modifiedAssistant.properties())); assistantRepository.save(assistantEntity); ModifyAssistantRequest modifyAssistantRequest = new ModifyAssistantRequest(); modifyAssistantRequest.setName(modifiedAssistant.name()); modifyAssistantRequest.setDescription(modifiedAssistant.description()); modifyAssistantRequest.setModel(modifiedAssistant.model()); modifyAssistantRequest.setInstructions(modifiedAssistant.instructions()); modifyAssistantRequest.setTools(modifiedAssistant.tools()); modifyAssistantRequest.setFileIds(modifiedAssistant.fileIds()); modifyAssistantRequest.setMetadata(modifiedAssistant.metadata()); openAiService.modifyAssistant(assistantId, modifyAssistantRequest); } public GenerateImageResponse generateImage(String prompt) throws IOException { CreateImageRequest request = new CreateImageRequest(); request.setPrompt(prompt); request.setN(1); request.setSize("1024x1024"); request.setModel("dall-e-3"); request.setStyle("natural"); ImageResult image = openAiService.createImage(request); return new GenerateImageResponse(downloadImage(image.getData().get(0).getUrl())); } private String downloadImage(String imageUrl) throws IOException { String fileName = UUID.randomUUID() + "_image.png"; Path subDirectoryPath = fileStorageService.getAssistantsDirectory(); Path localFilePath = subDirectoryPath.resolve(fileName); // Ensure the directory exists and is writable if (!Files.exists(subDirectoryPath)) { Files.createDirectories(subDirectoryPath); } if (!Files.isWritable(subDirectoryPath)) { throw new IOException("Directory is not writable: " + subDirectoryPath); } LOGGER.info("Downloading image {}...", imageUrl); try { URI uri = URI.create(imageUrl); try (InputStream in = uri.toURL().openStream()) { Files.copy(in, localFilePath, StandardCopyOption.REPLACE_EXISTING); } } catch (Exception ex) { LOGGER.error("Failed to download image: {}", imageUrl); throw ex; } return fileName; } public ProfileImageUploadResponse uploadImage(MultipartFile file) { if (file.isEmpty()) { throw new IllegalArgumentException("File is empty"); } try { byte[] bytes = file.getBytes(); String fileEnding = file.getOriginalFilename() .substring(file.getOriginalFilename().lastIndexOf(".")); String filename = UUID.randomUUID() + fileEnding; Path path = 
fileStorageService.getAssistantsDirectory().resolve(filename); Files.write(path, bytes); if (!isImageFile(path)) { Files.delete(path); throw new AssistentException("File is not an image."); } return new ProfileImageUploadResponse(filename); } catch (IOException e) { throw new AssistentException("Failed to upload file", e); } } private boolean isImageFile(Path filePath) { try { BufferedImage image = ImageIO.read(filePath.toFile()); return image != null; } catch (IOException e) { return false; } } @Transactional public AssistantDto createAssistant(AssistantDto modifiedAssistant) { AssistantRequest assistantRequest = new AssistantRequest(); assistantRequest.setName(modifiedAssistant.name()); assistantRequest.setDescription(modifiedAssistant.description()); assistantRequest.setModel(modifiedAssistant.model()); assistantRequest.setInstructions(modifiedAssistant.instructions()); assistantRequest.setTools(modifiedAssistant.tools()); assistantRequest.setFileIds(modifiedAssistant.fileIds()); assistantRequest.setMetadata(modifiedAssistant.metadata()); Assistant newAssistant = openAiService.createAssistant(assistantRequest); AssistantEntity assistantEntity = new AssistantEntity(); assistantEntity.setId(newAssistant.getId()); assistantEntity.setImagePath(modifiedAssistant.imagePath()); assistantEntity.setProperties(assistantMapper.mapProperties(modifiedAssistant.properties())); assistantRepository.save(assistantEntity); return assistantMapper.mapAssistantDto(newAssistant, assistantEntity); } public List<String> retrieveModels() { return openAiService.listModels().stream() .map(Model::getId) .filter(id -> id.startsWith("gpt") && !id.contains("instruct")) .toList(); } @Transactional public void deleteAssistant(String assistantId) { openAiService.deleteAssistant(assistantId); assistantRepository.deleteById(assistantId); } @Transactional public void deleteThread(String threadId) { threadRepository.deleteById(threadId); } @Transactional public ThreadTitleDto updateThreadTitle(String threadId, ThreadTitleUpdateRequestDto request) { ThreadEntity threadEntity = threadRepository.findById(threadId) .orElseThrow(() -> new AssistentException("Thread not found")); threadEntity.setTitle(request.title()); threadRepository.save(threadEntity); return new ThreadTitleDto(request.title()); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((6874, 7182), 'java.util.Arrays.stream'), ((11433, 11461), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
package com.example.idear.src.chatGPT; import com.example.idear.common.BaseResponseStatus; import com.example.idear.common.Constant; import com.example.idear.exception.BaseException; import com.example.idear.src.content.ContentProvider; import com.example.idear.src.content.dto.response.GetContentRes; import com.example.idear.src.profile.ProfileRepository; import com.example.idear.src.profile.models.Profile; import com.example.idear.src.query.dto.request.QueryReq; import com.example.idear.src.query.dto.request.RequeryReq; import com.example.idear.src.query.model.MyQuery; import com.theokanning.openai.OpenAiService; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import lombok.RequiredArgsConstructor; import org.springframework.stereotype.Service; import java.time.Duration; import java.util.ArrayList; import java.util.List; @Service @RequiredArgsConstructor public class ChatGPTService { private OpenAiService openAiService = new OpenAiService(Constant.OPEN_API_KEY, Duration.ofSeconds(60)); private final ProfileRepository profileRepository; private final ContentProvider contentProvider; public ChatCompletionChoice query(String content){ List<ChatMessage> chatMessages = new ArrayList<>(); chatMessages.add(new ChatMessage("system", "You are the author. please apply the options below to compose your mail.")); chatMessages.add(new ChatMessage("user", content)); // content = "안녕 나의 사랑,\n" + // "이번 주는 어떤 하루를 보내고 있니? 이렇게 일어나서 바로 너에게 참 좋은 아침인 것 같아. 그래도 조금 더 너와 함께 기상할 수 있다면 정말 행복할 거야.\n" + // "오늘도 반가운 하루가 되길 바래. 매 순간, 내 마음은 당신만을 바라보며 달려요. 언제든지 연락해줘, 나는 항상 당신 곁에서 지켜볼게.\n" + // "너무 사랑스러운 하루 보내길!\n" + // "사랑해 ♥"; // // chatMessages.add(new ChatMessage("assistant", content)); // // content = "더 귀엽게 써줘"; // // chatMessages.add(new ChatMessage("user", content)); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(chatMessages) .temperature(0.8) .frequencyPenalty(0.8) .presencePenalty(0.8) .build(); ChatCompletionChoice chatCompletionChoice = openAiService.createChatCompletion(chatCompletionRequest).getChoices().get(0); return chatCompletionChoice; } public ChatCompletionChoice requery(MyQuery query, RequeryReq requeryReq){ // query id로 content list 가져오기 List<GetContentRes> getContentResList = contentProvider.getContentResList(query.getId()); List<ChatMessage> chatMessages = new ArrayList<>(); chatMessages.add(new ChatMessage("system", "You are the author. 
please apply the options below to compose your mail.")); for(int i=0; i<getContentResList.size(); i++){ if(getContentResList.size() == 1){ System.out.println(1); chatMessages.add(new ChatMessage("user", query.getQuestion())); chatMessages.add(new ChatMessage("system", getContentResList.get(i).getContent())); chatMessages.add(new ChatMessage("user", requeryReq.getFeedback())); break; } if(i == 0){ System.out.println(2); chatMessages.add(new ChatMessage("user", query.getQuestion())); } else if (i == getContentResList.size()-1) { System.out.println(3); chatMessages.add(new ChatMessage("system", getContentResList.get(i).getContent())); chatMessages.add(new ChatMessage("user", requeryReq.getFeedback())); } else { System.out.println(4); chatMessages.add(new ChatMessage("system", getContentResList.get(i).getContent())); chatMessages.add(new ChatMessage("user", getContentResList.get(i).getFeedback())); } } chatMessages.forEach(System.out::println); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(chatMessages) .temperature(0.8) .frequencyPenalty(0.8) .presencePenalty(0.8) .build(); ChatCompletionChoice chatCompletionChoice = openAiService.createChatCompletion(chatCompletionRequest).getChoices().get(0); return chatCompletionChoice; } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((2389, 2636), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2389, 2611), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2389, 2573), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2389, 2534), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2389, 2500), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2389, 2460), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4513, 4760), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4513, 4735), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4513, 4697), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4513, 4658), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4513, 4624), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4513, 4584), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.example.recipegenie; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.os.AsyncTask; import android.view.View; import android.widget.Button; import android.widget.ProgressBar; import android.widget.TextView; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import java.util.Arrays; public class ChatGptTask extends AsyncTask<String, Void, String> { private final TextView responseTextView; private final String apiKey; private final ProgressBar progressBar; private String breakfast = null; private String lunch = null; private String dinner = null; private final Button continueButton; public ChatGptTask(TextView responseTextView, String apiKey, ProgressBar progressBar, Button continueButton) { this.responseTextView = responseTextView; this.apiKey = apiKey; this.progressBar = progressBar; this.continueButton = continueButton; // Initialize button reference } @Override protected String doInBackground(String... strings) { // Initializse the OpenAI service with the API key OpenAiService service = new OpenAiService(apiKey); String modifiedPrompt = "You are a friendly and intelligent nutritional wellness coach communicating with a human. " + "Try to respond in a way that a health advisor would, using understandable language. " + "Generate three healthy meal ideas for the day, including breakfast, lunch, and dinner. Keep all the answers very short\n\n" + "User: " + strings[0]; //Take into consideration the user's dietary preferences, allergies, and nutritional needs: " + userInput + ". // Create a chat completion request with the appropriate model ChatCompletionRequest request = ChatCompletionRequest.builder() .messages(Arrays.asList(new ChatMessage("user", modifiedPrompt))) .model("gpt-3.5-turbo") // This model is optimized for conversations .build(); try { // Execute the request and get the response String response = service.createChatCompletion(request).getChoices().get(0).getMessage().getContent(); // Process the response to extract the generated meal plan String mealPlan = processResponse(response); return mealPlan; } catch (Exception e) { e.printStackTrace(); return "Error: " + e.getMessage(); // Return error message } } private String processResponse(String response) { //extract and format the meal plan from the response // Split the response into lines String[] lines = response.split("\n"); // Iterate through each line and categorize the meal for (String line : lines) { if (line.toLowerCase().contains("breakfast")) { breakfast = line.trim(); } else if (line.toLowerCase().contains("lunch")) { lunch = line.trim(); } else if (line.toLowerCase().contains("dinner")) { dinner = line.trim(); } } // Format the result StringBuilder result = new StringBuilder(); if (breakfast != null) { result.append(breakfast).append("\n").append("\n"); } if (lunch != null) { result.append(lunch).append("\n").append("\n"); } if (dinner != null) { result.append(dinner).append("\n").append("\n"); } return "Generated Meal Plan:\n" + "\n" + result.toString(); } @Override protected void onPostExecute(String result) { // Update UI with the result on the main thread if (result != null && !result.isEmpty()) { responseTextView.setText(result); progressBar.setVisibility(View.GONE); saveMealPlanToPrefs(result); // Enable the ContinueToWeeklyMenu button continueButton.setEnabled(true); continueButton.setAlpha(1f); } else 
{ responseTextView.setText("Error retrieving response, please try again."); progressBar.setVisibility(View.GONE); } } private void saveMealPlanToPrefs(String mealPlan) { // Use SharedPreferences to save the meal plan SharedPreferences preferences = responseTextView.getContext().getSharedPreferences("MealPlanPrefs", Context.MODE_PRIVATE); SharedPreferences.Editor editor = preferences.edit(); editor.putString("mealPlan", mealPlan); editor.apply(); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1991, 2214), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1991, 2144), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1991, 2104), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package net.jubbery.symai; import com.theokanning.openai.audio.CreateTranscriptionRequest; import com.theokanning.openai.audio.TranscriptionResult; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import lombok.Getter; import net.minecraft.client.Minecraft; import net.minecraft.world.entity.Entity; import net.minecraft.world.entity.LivingEntity; import net.minecraft.world.entity.npc.Villager; import net.minecraft.world.entity.player.Player; import net.minecraft.world.phys.EntityHitResult; import net.minecraft.world.phys.HitResult; import net.minecraftforge.api.distmarker.Dist; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.TickEvent; import net.minecraftforge.eventbus.api.SubscribeEvent; import net.minecraftforge.fml.common.Mod; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import okhttp3.*; import net.minecraftforge.client.event.InputEvent; import org.lwjgl.glfw.GLFW; import javax.sound.sampled.*; import java.io.*; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicBoolean; // The value here should match an entry in the META-INF/mods.toml file @Mod(SYMAI_Mod.MODID) public class SYMAI_Mod { @Getter private final AtomicBoolean isRecording = new AtomicBoolean(false); // Define mod id in a common place for everything to reference public static final String MODID = "symai"; public static final Logger LOGGER = LoggerFactory.getLogger(MODID); private final OpenAiService openAiService; private final OkHttpClient httpClient = new OkHttpClient(); public SYMAI_Mod() { LOGGER.info("Starting SYMAI"); MinecraftForge.EVENT_BUS.register(this); MinecraftForge.EVENT_BUS.register(new KeyInputHandler(this)); // Initialize OpenAiService with your OpenAI API key openAiService = new OpenAiService(""); // replace YOUR_OPENAI_API_KEY with your actual key } @Mod.EventBusSubscriber(modid = SYMAI_Mod.MODID, value = Dist.CLIENT) public static class KeyInputHandler { private final SYMAI_Mod modInstance; public KeyInputHandler(SYMAI_Mod mod) { this.modInstance = mod; } @SubscribeEvent public void onKeyInput(InputEvent.Key event) { Minecraft minecraft = Minecraft.getInstance(); Player player = minecraft.player; if (player == null) { return; } // Check for the Villager in crosshairs HitResult hitResult = minecraft.hitResult; assert hitResult != null; if (hitResult.getType() == HitResult.Type.ENTITY) { EntityHitResult entityHitResult = (EntityHitResult) hitResult; Entity entity = entityHitResult.getEntity(); // LOGGER.info(String.valueOf(event.getKey())); // LOGGER.info(String.valueOf(event.getAction())); if (entity instanceof Villager && player.distanceTo(entity) <= 5.0D) { // LOGGER.info("Detected Villager"); // LOGGER.info("Distance is valid"); if (event.getKey() == GLFW.GLFW_KEY_TAB && event.getAction() == GLFW.GLFW_REPEAT) { // LOGGER.info("TAB key detected"); // LOGGER.info("Repeat action detected"); boolean isCurrentlyRecording = modInstance.getIsRecording().get(); if (!isCurrentlyRecording) { LOGGER.info("Recording TRUE"); modInstance.getIsRecording().set(true); CompletableFuture<Void> recordingTask = CompletableFuture.runAsync(modInstance::captureAudioFromMicrophone); } } else if (event.getAction() == 
GLFW.GLFW_RELEASE) { LOGGER.info("Recording FALSE"); modInstance.getIsRecording().set(false); } } } } } @SubscribeEvent public static void onPlayerTick(TickEvent.PlayerTickEvent event) { if (event.phase != TickEvent.Phase.END) { return; } Player player = event.player; // Using the getEntitiesOfClass method to fetch nearby villagers List<LivingEntity> nearbyVillagers = player.level().getEntitiesOfClass( LivingEntity.class, player.getBoundingBox().inflate(5.0D), (entity) -> entity instanceof Villager ); if (nearbyVillagers.isEmpty()) { return; } // Here, you can consider adding additional logic to prevent // the code from running every tick a villager is nearby to avoid spamming. // The rest of your logic: // Step 3: Capture Voice, Step 4: Chat with GPT, etc. } public static void startService() { } private final ExecutorService executorService = Executors.newSingleThreadExecutor(); private void captureAudioFromMicrophone() { AudioFormat format = new AudioFormat(44100.0f, 16, 2, true, true); TargetDataLine microphone; ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] data = new byte[4096]; LOGGER.info("Starting recording"); try { microphone = AudioSystem.getTargetDataLine(format); microphone.open(format); microphone.start(); while (this.getIsRecording().get()) { int numBytesRead = microphone.read(data, 0, data.length); out.write(data, 0, numBytesRead); } microphone.close(); } catch (LineUnavailableException e) { e.printStackTrace(); } try { String translatedSpeech = capturePlayerVoice(data); LOGGER.info(translatedSpeech); } catch (IOException e) { LOGGER.error("Error while capturing or processing audio", e); } } private String capturePlayerVoice(byte[] audioData) throws IOException { // Construct the CreateTranscriptionRequest CreateTranscriptionRequest transcriptionRequest = CreateTranscriptionRequest.builder() .model("whisper-1") .language("en") .responseFormat("json") .prompt("You are a minecraft villager") .build(); LOGGER.info("Requesting Transcript..."); TranscriptionResult transcriptionResult = openAiService.createTranscription(transcriptionRequest, saveToWav(audioData)); try (FileInputStream fis = new FileInputStream("somefile.txt")) { if (!transcriptionResult.getText().isEmpty()) { String transcribedText = transcriptionResult.getText(); LOGGER.info(transcribedText); return transcribedText; } } catch (IOException e) { LOGGER.error("Error while transcribing voice using Whisper ASR", e); } return "Error capturing voice"; } public static File saveToWav(byte[] audioData) throws IOException { // Define the audio format parameters AudioFormat format = new AudioFormat( AudioFormat.Encoding.PCM_SIGNED, // Encoding 44100.0f, // Sample rate (44.1KHz) 16, // Bits per sample (16 bits) 2, // Channels (2 for stereo) 4, // Frame size 44100.0f, // Frame rate false // Little endian ); // Create an audio input stream from the byte array ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(audioData); AudioInputStream audioInputStream = new AudioInputStream(byteArrayInputStream, format, audioData.length / format.getFrameSize()); // Use the AudioSystem to write to a temporary file File tempFile = File.createTempFile("recordedAudio", ".wav"); AudioSystem.write(audioInputStream, AudioFileFormat.Type.WAVE, tempFile); return tempFile; } private String chatWithGPT(String input) { // Use the Chat GPT API to get a response for the given input ChatCompletionRequest request = new ChatCompletionRequest(); request.getMessages().add(new ChatMessage("user", input)); ChatCompletionResult 
result = openAiService.createChatCompletion(request); return result.getObject(); // Get the model's response; } private byte[] convertTextToSpeech(String text) { // Use a Text-to-Speech library to convert the text to audio data // Return the audio data return new byte[0]; } private void playAudioToPlayer(Player player, byte[] audioData) { // Play the audio data to the specified player // This would likely involve creating a custom sound event or using another method } }
[ "com.theokanning.openai.audio.CreateTranscriptionRequest.builder" ]
[((1979, 2018), 'net.minecraftforge.common.MinecraftForge.EVENT_BUS.register'), ((2028, 2088), 'net.minecraftforge.common.MinecraftForge.EVENT_BUS.register'), ((6568, 6793), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((6568, 6768), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((6568, 6712), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((6568, 6672), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder'), ((6568, 6640), 'com.theokanning.openai.audio.CreateTranscriptionRequest.builder')]
package br.com.alura.screenmatch.service; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; public class ConsultaChatGPT { public static String obterTraducao(String texto) { OpenAiService service = new OpenAiService(System.getenv("OPENAI_APIKEY")); CompletionRequest requisicao = CompletionRequest.builder() .model("text-davinci-003") .prompt("traduza para o português o texto: " + texto) .maxTokens(1000) .temperature(0.7) .build(); var resposta = service.createCompletion(requisicao); return resposta.getChoices().get(0).getText(); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((365, 598), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((365, 573), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((365, 539), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((365, 506), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((365, 435), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.vaadin.flow.ai.formfiller.services; import java.time.Duration; import java.util.ArrayList; import java.util.List; import java.util.Map; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import com.vaadin.flow.ai.formfiller.utils.KeysUtils; import com.vaadin.flow.component.Component; public class ChatGPTChatCompletionService extends OpenAiService implements LLMService { /** * ID of the model to use. */ private String MODEL = "gpt-3.5-turbo-16k-0613"; /** * The maximum number of tokens to generate in the completion. */ private Integer MAX_TOKENS = 12000; /** * What sampling temperature to use, between 0 and 2. * Higher values like 0.8 will make the output more random, * while lower values like 0.2 will make it more focused and deterministic. */ private Double TEMPERATURE = 0d; /** * If true the input prompt is included in the response */ private Boolean ECHO = false; /** * Timeout for AI module response in seconds */ private static Integer TIMEOUT = 60; public ChatGPTChatCompletionService() { super(KeysUtils.getOpenAiKey(), Duration.ofSeconds(TIMEOUT)); } @Override public String getPromptTemplate(String input, Map<String, Object> objectMap, Map<String, String> typesMap, Map<Component, String> componentInstructions, List<String> contextInstructions) { String gptRequest = String.format( "Based on the user input: \n \"%s\", " + "generate a JSON object according to these instructions: " + "Never include duplicate keys, in case of duplicate keys just keep the first occurrence in the response. " + "Fill out \"N/A\" in the JSON value if the user did not specify a value. " + "Return the result as a JSON object in this format: '%s'." , input, objectMap); if (!componentInstructions.isEmpty() || !typesMap.isEmpty()) { gptRequest += "\nAdditional instructions about some of the JSON fields to be filled: "; for (Map.Entry<String, String> entry : typesMap.entrySet()) { gptRequest += "\n" + entry.getKey() + ": Format this JSON field as " + entry.getValue() + "."; } for (Map.Entry<Component, String> entry : componentInstructions.entrySet()) { if (entry.getKey().getId().isPresent()) gptRequest += "\n" + entry.getKey().getId().get() + ": " + entry.getValue() + "."; } if (!contextInstructions.isEmpty()) { gptRequest += "\nAdditional instructions about the context and desired JSON output response: "; for (String contextInstruction : contextInstructions) { gptRequest += " " + contextInstruction + "."; } } } return gptRequest; } @Override public String getGeneratedResponse(String prompt) { ArrayList<ChatMessage> messages = new ArrayList<>(); messages.add(new ChatMessage("user",prompt)); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .messages(messages) .model(MODEL).maxTokens(MAX_TOKENS).temperature(TEMPERATURE) .build(); ChatCompletionResult completionResult = createChatCompletion(chatCompletionRequest); String aiResponse = completionResult.getChoices().get(0).getMessage().getContent(); return aiResponse; } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((3403, 3572), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3403, 3547), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3403, 3522), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3403, 3500), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3403, 3470), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package chatGPT; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; public class ExampleGPT__ { public static void main(String[] a) { String apiKey = "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"; String url = "https://api.openai.com/v1/chat/completions"; String model = "gpt-3.5-turbo"; OpenAiService service = new OpenAiService(apiKey); System.out.println(service); CompletionRequest completionRequest = CompletionRequest.builder() .prompt("hello, how are you? Can you tell me what's a Fibonacci Number?") .model("GPT base") .echo(true) .build(); service.createCompletion(completionRequest).getChoices().forEach(System.out::println); } } /** // Curl Requset curl --location 'https://api.openai.com/v1/embeddings' \ --header 'Content-Type: application/json' \ --header 'Authorization: Bearer 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' \ --data '{ "input": "list the indian dog breads", "model": "text-embedding-ada-002" }' Response: { "error": { "message": "You exceeded your current quota, please check your plan and billing details.", "type": "insufficient_quota", "param": null, "code": "insufficient_quota" } } I tried several modules yet always getting response of insufficient_quota. */
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((507, 692), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((507, 672), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((507, 649), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((507, 619), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package util; import com.theokanning.openai.OpenAiService; import com.theokanning.openai.completion.CompletionRequest; import retrofit2.HttpException; public class OpenAiApiImpl { public static String createCompletion(String msg) { // String token = System.getenv("OPENAI_TOKEN"); OpenAiService service = new OpenAiService("*****************************"); System.out.println("\nCreating completion..."); CompletionRequest completionRequest = CompletionRequest.builder() .model("davinci") .prompt(msg) .echo(true) .temperature(0.1) .user("testing") .build(); // service.createCompletion(completionRequest).getChoices().forEach(System.out::println); try { return service.createCompletion(completionRequest).getChoices().get(0).getText(); } catch (HttpException e) { System.err.println("createCompletion exception: " + e.getMessage()); return null; } } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((484, 694), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((484, 669), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((484, 636), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((484, 602), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((484, 574), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((484, 545), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package br.com.jornadamilhas.api.integration; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; import org.springframework.stereotype.Service; @Service public class ChatGPTIntegrationService { private static final String API_KEY = System.getenv("api_key"); private final OpenAiService service = new OpenAiService(API_KEY); public String geraTextoDestino(String destino) { String prompt = String.format("Aja como um redator para um site de venda de viagens. " + "Faça um resumo sobre o local %s. Enfatize os pontos positivos da cidade." + "Utilize uma linguagem informal. " + "Cite ideias de passeios neste lugar. " + "Crie 2 parágrafos neste resumo.", destino); CompletionRequest request = CompletionRequest.builder() .model("text-davinci-003") .prompt(prompt) .maxTokens(2048) .temperature(0.6) .build(); return service.createCompletion(request) .getChoices() .get(0) .getText(); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((900, 1094), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((900, 1069), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((900, 1035), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((900, 1002), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((900, 970), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package service; import java.util.ArrayList; import java.util.List; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatFunction; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.FunctionExecutor; import com.theokanning.openai.service.OpenAiService; public class InvestorProfileFinder { String GPTKey = "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"; OpenAiService Service = new OpenAiService(GPTKey); public String Find(String aboutMe) { return GetResponse(GetMessages(GetQuestion(aboutMe))); } String GetResponse(List<ChatMessage> messages) { ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo-1106") .messages(messages) .maxTokens(256) .build(); ChatMessage responseMessage = Service.createChatCompletion(chatCompletionRequest).getChoices().get(0).getMessage(); return responseMessage.getContent(); } List<ChatMessage> GetMessages(String question){ List<ChatMessage> messages = new ArrayList<>(); ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), question); messages.add(userMessage); return messages; } String GetQuestion(String aboutMe) { var question = new StringBuilder(); question.append("Dentre as opções abaixo, qual perfil de investimento eu me encaixo, sendo que:"); question.append(aboutMe+"?"); AddConditions(question); question.append("Só me diga a opção"); return question.toString(); } StringBuilder AddConditions(StringBuilder stringBuilder) { stringBuilder.append("Conservador"); stringBuilder.append("Moderado"); stringBuilder.append("Alto Risco"); return stringBuilder; } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((1319, 1347), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
package telegrambotopenai03; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import java.util.ArrayList; import java.util.HashMap; import java.util.List; class OpenAiApiCompletion { private static final String TOKEN = "<YOUR OpenAI Token>"; private static final String MODEL = "gpt-3.5-turbo"; public static String main(String prompt) { OpenAiService service = new OpenAiService(TOKEN); List<ChatMessage> messages = new ArrayList<>(); ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), prompt); messages.add(systemMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model(MODEL) .messages(messages) .n(1) //.maxTokens(1) .logitBias(new HashMap<>()) .build(); StringBuilder response = new StringBuilder(); service.streamChatCompletion(chatCompletionRequest) .blockingSubscribe( data -> { List<ChatCompletionChoice> choices = data.getChoices(); if (!choices.isEmpty()) { String content = choices.get(0).getMessage().getContent(); if (content != null) { response.append(content); } } }, error -> { response.append("Error: ").append(error.getMessage()); error.printStackTrace(); } ); return response.toString(); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value" ]
[((784, 814), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
package org.sia.service; import com.fasterxml.jackson.databind.ObjectMapper; import com.theokanning.openai.client.OpenAiApi; import com.theokanning.openai.service.OpenAiService; import lombok.extern.slf4j.Slf4j; import okhttp3.OkHttpClient; import org.springframework.beans.factory.InitializingBean; import org.springframework.stereotype.Service; import retrofit2.Retrofit; import java.net.InetSocketAddress; import java.net.Proxy; import java.time.Duration; /** * @Description: * @Author: 高灶顺 * @CreateDate: 2023/11/8 18:36 */ @Slf4j @Service public class ChatGPTService implements InitializingBean { private final static String Key = "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"; private final static String hostProxy = "127.0.0.1"; private final static Integer portProxy = 7890; private OpenAiService service; public OpenAiService getService() { return service; } @Override public void afterPropertiesSet() throws Exception { ObjectMapper mapper = OpenAiService.defaultObjectMapper(); OkHttpClient client = OpenAiService.defaultClient(Key, Duration.ofSeconds(10L)) .newBuilder() .proxy(new Proxy(Proxy.Type.HTTP, new InetSocketAddress(hostProxy, portProxy))) .build(); Retrofit retrofit = OpenAiService.defaultRetrofit(client, mapper); this.service = new OpenAiService(retrofit.create(OpenAiApi.class)); } public static void main(String[] args) { OpenAiService service = new OpenAiService(Key, Duration.ofSeconds(30)); // 计费 // BillingUsage billingUsage = service.(LocalDate.parse("2023-11-08"), LocalDate.now()); // BigDecimal totalUsage = billingUsage.getTotalUsage(); } // public static void main(String[] args) { // String token = Key; // OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30)); // // System.out.println("\nCreating completion..."); // CompletionRequest completionRequest = CompletionRequest.builder() // .model("ada") // .prompt("Somebody once told me the world is gonna roll me") // .echo(true) // .user("testing") // .n(3) // .build(); // service.createCompletion(completionRequest).getChoices().forEach(System.out::println); // // System.out.println("\nCreating Image..."); // CreateImageRequest request = CreateImageRequest.builder() // .prompt("A cow breakdancing with a turtle") // .build(); // // System.out.println("\nImage is located at:"); // System.out.println(service.createImage(request).getData().get(0).getUrl()); // // System.out.println("Streaming chat completion..."); // final List<ChatMessage> messages = new ArrayList<>(); // final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a dog and will speak as such."); // messages.add(systemMessage); // ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest // .builder() // .model("gpt-3.5-turbo") // .messages(messages) // .n(1) // .maxTokens(50) // .logitBias(new HashMap<>()) // .build(); // // service.streamChatCompletion(chatCompletionRequest) // .doOnError(Throwable::printStackTrace) // .blockingForEach(System.out::println); // // service.shutdownExecutor(); // } }
[ "com.theokanning.openai.service.OpenAiService.defaultClient" ]
[((1091, 1299), 'com.theokanning.openai.service.OpenAiService.defaultClient'), ((1091, 1274), 'com.theokanning.openai.service.OpenAiService.defaultClient'), ((1091, 1178), 'com.theokanning.openai.service.OpenAiService.defaultClient')]
package de.throughput.ircbot.handler; import java.util.List; import java.util.Set; import com.theokanning.openai.completion.CompletionResult; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import lombok.RequiredArgsConstructor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import de.throughput.ircbot.api.Command; import de.throughput.ircbot.api.CommandEvent; import de.throughput.ircbot.api.CommandHandler; import com.theokanning.openai.completion.CompletionRequest; /** * Lagerfeld AI command handler. * <p> * !ailagerfeld <text> - responds with an AI-generated Lagerfeld quote. */ @Component @RequiredArgsConstructor public class LagerfeldAiCommandHandler implements CommandHandler { private static final Logger LOG = LoggerFactory.getLogger(LagerfeldAiCommandHandler.class); private static final String MODEL_GPT_3_5_TURBO = "gpt-3.5-turbo"; private static final int MAX_TOKENS = 100; private static final Command CMD_AILAGERFELD = new Command("lagerfeld", "lagerfeld <text> - responds with an AI-generated Lagerfeld quote."); public static final String PROMPT_TEMPLATE = """ Erzeuge ein Lagerfeld-Zitat aus dem folgenden Wort oder der folgenden Phrase. Ein Lagerfeld-Zitat funktioniert so: 'Wer ..., hat die Kontrolle über sein Leben verloren.' Verwende das Wort oder die Phrase, um einen grammatikalisch korrekten Satz als Lagerfeld-Zitat zu bilden, zum Beispiel, indem du ein passendes Verb ergänzt. Beispiel: Wort = Ohrenschützer; Du antwortest: Wer Ohrenschützer trägt, hat die Kontrolle über sein Leben verloren. Füge der Antwort keine weiteren Kommentare hinzu. Also los. Das Wort oder die Phrase lautet: "%s" """; private final OpenAiService openAiService; @Override public Set<Command> getCommands() { return Set.of(CMD_AILAGERFELD); } @Override public boolean onCommand(CommandEvent command) { command.getArgLine() .ifPresentOrElse( text -> respondWithQuote(command, text), () -> command.respond(CMD_AILAGERFELD.getUsage())); return true; } private void respondWithQuote(CommandEvent command, String text) { try { String prompt = PROMPT_TEMPLATE.replace("\n", " ").formatted(text); var message = new ChatMessage(ChatMessageRole.USER.value(), prompt, command.getEvent().getUser().getNick()); var request = ChatCompletionRequest.builder() .model(MODEL_GPT_3_5_TURBO) .maxTokens(MAX_TOKENS) .messages(List.of(message)) .build(); ChatCompletionResult completionResult = openAiService.createChatCompletion(request); ChatMessage responseMessage = completionResult.getChoices().get(0).getMessage(); command.getEvent() .getChannel() .send() .message("\"" + responseMessage.getContent() + "\" -- Karl Lagerfeld."); } catch (Exception e) { command.respond(e.getMessage()); LOG.error(e.getMessage(), e); } } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((2771, 2799), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((2877, 3076), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2877, 3047), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2877, 2999), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2877, 2956), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.main.writeRoom.service.SearchService; import com.main.writeRoom.domain.Note; import com.main.writeRoom.domain.Room; import com.main.writeRoom.repository.NoteRepository; import com.main.writeRoom.web.dto.search.SearchResponseDTO; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import lombok.RequiredArgsConstructor; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; @Service @RequiredArgsConstructor @Transactional(readOnly = true) public class SearchQueryServiceImpl implements SearchQueryService { private final NoteRepository noteRepository; private OpenAiService openAiService; private static final String MODEL = "gpt-3.5-turbo"; @Value("${GPT_SECRET}") private String apiKey; @Transactional public List<SearchResponseDTO.VocabularyResultDTO> getTopics() { this.openAiService = new OpenAiService(apiKey, Duration.ofSeconds(20)); String prompt = "글쓰기를 위한 주제 5개를 쉼표로 구분하여 나열해줘. 예시: 첫눈이 오면, 공통 되는 부분, 연예인 vs 배우, 코딩 공부란, 애정에 관하여. 추가적인 단어나 문구를 넣지 말고, 키워드만 답변으로 제공해."; ChatCompletionRequest requester = ChatCompletionRequest.builder() .model(MODEL) .temperature(0.8) .messages(List.of( new ChatMessage("user", prompt) )).build(); ChatCompletionChoice chatCompletionResult = openAiService.createChatCompletion(requester).getChoices().get(0); String contentResult = chatCompletionResult.getMessage().getContent(); return extractKeywords(contentResult); } @Transactional public List<SearchResponseDTO.VocabularyResultDTO> getSynonyms(String request) { this.openAiService = new OpenAiService(apiKey, Duration.ofSeconds(20)); String prompt = request + "의 동의어 6개를 쉼표로 구분하여 나열해줘. 예: 스트로베리, Fragaria × ananassa, 딸기열매, 딸기과, 딸기나무, 딸기종자. 추가적인 단어나 문구를 넣지 말고, 동의어만 답변으로 제공해."; // 예시: 동의어1, 동의어2, 동의어3, 동의어4, 동의어5, 동의어6 ChatCompletionRequest requester = ChatCompletionRequest.builder() .model(MODEL) .temperature(0.8) .messages(List.of( new ChatMessage("user", prompt) )).build(); ChatCompletionChoice chatCompletionResult = openAiService.createChatCompletion(requester).getChoices().get(0); String contentResult = chatCompletionResult.getMessage().getContent(); return extractKeywords(contentResult); } @Transactional public List<SearchResponseDTO.VocabularyResultDTO> getSimilarKeywords(String request) { this.openAiService = new OpenAiService(apiKey, Duration.ofSeconds(20)); String prompt = request + " 와 유사한 키워드 6개를 쉼표로 구분하여 나열해줘. 예: 날개, 선량한, 순수한, 신성한, 하늘, 보호. 
추가적인 단어나 문구를 넣지 말고, 유사어만 답변으로 제공해."; // 예시: 유사어1, 유사어2, 유사어3, 유사어4, 유사어5, 유사어6 ChatCompletionRequest requester = ChatCompletionRequest.builder() .model(MODEL) .temperature(0.8) .messages(List.of( new ChatMessage("user", prompt) )).build(); ChatCompletionChoice chatCompletionResult = openAiService.createChatCompletion(requester).getChoices().get(0); String contentResult = chatCompletionResult.getMessage().getContent(); return extractKeywords(contentResult); } @Transactional public List<SearchResponseDTO.VocabularyResultDTO> extractKeywords(String content) { // 모든 키워드를 담을 단일 객체 생성 SearchResponseDTO.VocabularyResultDTO allKeywordsDto = SearchResponseDTO.VocabularyResultDTO.builder().build(); // 개행 문자로 분할하여 리스트로 변환 String[] keywordArray = content.split("\\r?\\n"); // 모든 키워드를 하나의 문자열로 합치기 String allKeywords = Arrays.stream(keywordArray) .map(keyword -> keyword.replaceAll("\\d+\\.\\s*", "")) .collect(Collectors.joining(", ")); // 단일 객체에 모든 키워드 설정 allKeywordsDto.setVoca(allKeywords); // 하나의 객체를 담은 리스트 반환 List<SearchResponseDTO.VocabularyResultDTO> keywords = new ArrayList<>(); keywords.add(allKeywordsDto); return keywords; } @Transactional public List<Note> searchNotesInUserRooms(List<Room> roomList, String normalizedSearchWord, String searchType) { if (searchType == null || searchType.isBlank()) { searchType = "default"; } return switch (searchType) { case "title" -> noteRepository.findByTitleInUserRooms(roomList, normalizedSearchWord); case "content" -> noteRepository.findByContentInUserRooms(roomList, normalizedSearchWord); case "tag" -> noteRepository.findByTagInUserRooms(roomList, normalizedSearchWord); default -> noteRepository.findByRoomsAndSearchWord(roomList, normalizedSearchWord); }; } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1684, 1897), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1684, 1889), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1684, 1779), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1684, 1745), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2746, 2959), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2746, 2951), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2746, 2841), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2746, 2807), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3791, 4004), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3791, 3996), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3791, 3886), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3791, 3852), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4506, 4561), 'com.main.writeRoom.web.dto.search.SearchResponseDTO.VocabularyResultDTO.builder'), ((4506, 4553), 'com.main.writeRoom.web.dto.search.SearchResponseDTO.VocabularyResultDTO.builder'), ((4777, 4926), 'java.util.Arrays.stream'), ((4777, 4875), 'java.util.Arrays.stream')]
package com.vcque.prompto; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.vcque.prompto.contexts.PromptoContext; import lombok.AccessLevel; import lombok.NoArgsConstructor; /** * regroups chat messages. Might want to put that in settings in the future. */ @NoArgsConstructor(access = AccessLevel.PRIVATE) public class Prompts { /** * Conversation initializer. */ public static ChatMessage codingAssistant() { return new ChatMessage( ChatMessageRole.SYSTEM.value(), """ You are Prompto, a coding assistant integrated into the intellij IDE that helps the user completing its programming tasks. """.trim() ); } public static ChatMessage implementMethodOutput(String userInput, String language, String methodDefinition) { return new ChatMessage( ChatMessageRole.USER.value(), """ Your task is to implement or rewrite the method `%s` based on these instructions: `%s`. If I do not provide specific directives and the method is not implemented, do an informed guess and implement the method based on this guess. Ensure that the code you provide is efficient, well-structured, and adheres to best practices. Always answer with one or multiple methods between triple back quotes in the `%s` language. """.formatted( methodDefinition, userInput, language ) .trim() ); } public static ChatMessage databaseQueryOutput(String userInput, String language) { return new ChatMessage( ChatMessageRole.USER.value(), """ Your task is to write a %s query based on these instructions: `%s`. Ensure that the query is optimized, well-structured, and adheres to best practices. Always have your query response between triple back quotes in the `%s` language. """.formatted( language, userInput, language ) .trim() ); } public static ChatMessage insertOutput() { return new ChatMessage( ChatMessageRole.USER.value(), """ Your task is insert new code based on my next input. The first code block you provide will be inserted in the user's editor. """.trim() ); } public static ChatMessage shortAnswerOutput() { return new ChatMessage(ChatMessageRole.USER.value(), """ Your task is to answer my next question based on the provided context. Answer with only the most relevant information and with the least words possible. """ ); } public static ChatMessage sqlOutput() { return new ChatMessage(ChatMessageRole.USER.value(), """ Your task is to provide a high-quality SQL script based on my next input. Ensure that the SQL code you provide is efficient, well-structured, and adheres to best practices. Always return the SQL script between triple back quotes. """.trim()); } public static ChatMessage userInput(String userInput) { return new ChatMessage(ChatMessageRole.USER.value(), userInput); } public static ChatMessage promptoContext(PromptoContext state) { return new ChatMessage( ChatMessageRole.SYSTEM.value(), """ `%s-%s`: This is %s ``` %s ``` """.formatted( state.getType().name(), state.getId(), state.getType().description, state.getValue() ) ); } public static ChatMessage promptoContextFormat() { var exampleState = PromptoContext.builder() .id("state_id") .type(PromptoContext.Type.EXAMPLE) .value("$state_value") .build(); var exampleFormat = promptoContext(exampleState).getContent(); return new ChatMessage( ChatMessageRole.SYSTEM.value(), """ In the next messages, you will receive context information useful to your task. It will have the following format: %s """.formatted(exampleFormat) ); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((566, 596), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((980, 1008), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1927, 1955), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((2618, 2646), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((2977, 3005), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3349, 3377), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3788, 3816), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3954, 3984), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((4471, 4642), 'com.vcque.prompto.contexts.PromptoContext.builder'), ((4471, 4617), 'com.vcque.prompto.contexts.PromptoContext.builder'), ((4471, 4578), 'com.vcque.prompto.contexts.PromptoContext.builder'), ((4471, 4527), 'com.vcque.prompto.contexts.PromptoContext.builder'), ((4764, 4794), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
package com.yxy.nova.web; /** * Created by shenjing on 19/5/20. */ import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONObject; import com.google.common.base.Charsets; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import com.yxy.nova.bean.EncryModeEnum; import com.yxy.nova.bean.HazardRealDataMessage; import com.yxy.nova.bean.WebResponse; import com.yxy.nova.canal.DebeziumHandler; import com.yxy.nova.cmpp.CmppSmsClient; import com.yxy.nova.cmpp.pojo.SmsSendResult; import com.yxy.nova.dal.mysql.dataobject.TaskItemExecCallDO; import com.yxy.nova.dal.mysql.mapper.TaskItemExecCallMapper; import com.yxy.nova.mwh.elasticsearch.AggregationClient; import com.yxy.nova.mwh.elasticsearch.SearchService; import com.yxy.nova.mwh.elasticsearch.basic.agg.AggregationBuilder; import com.yxy.nova.mwh.elasticsearch.dto.InsertAction; import com.yxy.nova.mwh.elasticsearch.dto.SearchResult; import com.yxy.nova.mwh.elasticsearch.exception.ElasticsearchClientException; import com.yxy.nova.mwh.elasticsearch.util.ESLogger; import com.yxy.nova.mwh.utils.UUIDGenerator; import com.yxy.nova.mwh.utils.exception.BizException; import com.yxy.nova.mwh.utils.time.DateTimeUtil; import com.yxy.nova.netty.tcp.TcpClient; import com.yxy.nova.netty.tcp.TcpServer; import com.yxy.nova.netty.udp.UdpServer; import com.yxy.nova.nio.UDPMessage; import com.yxy.nova.phoneinfo.PhoneNumberInfo; import com.yxy.nova.phoneinfo.PhoneNumberLookup; import com.yxy.nova.service.encryption.EncryptFactory; import com.yxy.nova.service.wechat.WechatService; import com.yxy.nova.util.AESUtil32; import com.yxy.nova.util.SignUtil; import com.yxy.nova.util.SimpleHttpClient; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.elasticsearch.action.bulk.BulkResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.*; import javax.servlet.ServletInputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; import java.util.*; @Slf4j @RestController @RequestMapping(value="/innerapi") public class InternalController { private final Logger LOGGER = LoggerFactory.getLogger(getClass()); private final String DNBX_TOKEN = "wechatabsd13i1dbdwomio95cozx1"; @Autowired private WechatService wechatService; @Autowired private TaskItemExecCallMapper taskItemExecCallMapper; @Autowired(required = false) private SearchService searchService; @Autowired(required = false) private AggregationClient aggregationClient; @Autowired private UdpServer udpServer; @Autowired private TcpClient tcpClient; @Autowired(required = false) private DebeziumHandler debeziumHandler; @Autowired private SimpleHttpClient httpClient; @Autowired private EncryptFactory encryptFactory; @GetMapping("udptest") public void udptest(String ip, String content) { UDPMessage message = new UDPMessage(); message.setKey("test1"); message.setContent(content); udpServer.singleCast(ip, JSONObject.toJSONString(message)); } @GetMapping("tcptest") public void tcptest(String gatewayId, String quotaId, String value) throws InterruptedException { HazardRealDataMessage hazardRealDataMessage = new HazardRealDataMessage(); 
hazardRealDataMessage.setDataId(UUIDGenerator.generate()); hazardRealDataMessage.setDataType("energy_data"); hazardRealDataMessage.setEnterpriseId(gatewayId.substring(0, gatewayId.length() - 2)); hazardRealDataMessage.setGatewayId(gatewayId); hazardRealDataMessage.setCollectTime(DateTimeUtil.datetime14()); hazardRealDataMessage.setIsConnectDataSource(true); hazardRealDataMessage.setReportType("report"); List<HazardRealDataMessage.RealData> datas = new ArrayList<>(); HazardRealDataMessage.RealData realData = new HazardRealDataMessage.RealData(); realData.setQuotaId(quotaId); realData.setValue(Float.valueOf(value)); realData.setIsValid(true); datas.add(realData); hazardRealDataMessage.setDatas(datas); JSONObject jsonObject = new JSONObject(); jsonObject.put("enterpriseId", hazardRealDataMessage.getEnterpriseId()); jsonObject.put("report", AESUtil32.encrypt("7fd2e257a128435b8b6574e5753d825d", JSONObject.toJSONString(hazardRealDataMessage))); log.info("sendRequest:{}", JSONObject.toJSONString(hazardRealDataMessage)); tcpClient.sendRequest(jsonObject.toJSONString() + "@@"); } @GetMapping("md5") @ResponseBody public String md5(String content) { String encrypt = encryptFactory.createInstance(EncryModeEnum.MD5).encrypt(content); return encrypt; } @PostMapping("/execCallTest") @ResponseBody public String test(@RequestBody TaskItemExecCallDO taskItemExecCallDO) { taskItemExecCallMapper.insert(taskItemExecCallDO); InsertAction insertAction = convertToInsertAction(taskItemExecCallDO); try { BulkResponse bulkItemResponses = searchService.bulkInsert(Arrays.asList(insertAction)); LOGGER.info("bulkItemResponses:{}", JSON.toJSONString(bulkItemResponses)); } catch (ElasticsearchClientException e) { LOGGER.error("es bulkInsert error", e); } return "ok"; } @GetMapping("/esQuery") @ResponseBody public Object esQuery(String startTime, String endTime,String taskItemExecUuid) throws ElasticsearchClientException { AggregationBuilder builder = new AggregationBuilder(aggregationClient, "task_item_exec_call"); if (StringUtils.isNotBlank(taskItemExecUuid)) { builder.whereEquals("_id", taskItemExecUuid); } if (StringUtils.isNotBlank(startTime)) { builder.whereGreaterOrEqual("gmtCreate", DateTimeUtil.parseDatetime18(startTime).getTime()); } if (StringUtils.isNotBlank(endTime)) { builder.whereLessOrEqual("gmtCreate", DateTimeUtil.parseDatetime18(endTime).getTime()); } // 请求es查询 SearchResult searchResult = builder.get(); // 解析查询结果 if (!searchResult.getSuccess()) { throw BizException.instance("查询出错"); } return searchResult; } private InsertAction convertToInsertAction(TaskItemExecCallDO execCallDO) { InsertAction action = new InsertAction(); action.setId(execCallDO.getTaskItemExecUuid()); action.setTable("task_item_exec_call"); action.setJson(JSON.parseObject(JSON.toJSONString(execCallDO))); return action; } @GetMapping("/changeEsLogLevel") @ResponseBody public String changeEsLogLevel(@RequestParam("level") String level) throws Exception { ESLogger.setLoggerLevel(level); return "OK"; } @GetMapping("/testcmpp") @ResponseBody public String testcmpp(@RequestParam("level") String level) throws Exception { Map<String,String> configMap=new HashMap<>(); //InfoX主机地址,与移动签合同时移动所提供的地址 configMap.put("host","8.129.229.252"); configMap.put("local-host","120.48.8.74"); //InfoX主机端口号 cmpp2.0默认为7890,cmpp3.0为7891 configMap.put("port","7890"); configMap.put("local-port","8080"); //(登录帐号SP…ID)与移动签合同时所提供的企业代码 6位 configMap.put("source-addr","30001"); //登录密码 默认为空 configMap.put("shared-secret","Fgjhjk"); configMap.put("msgSrc","30001"); 
configMap.put("srcTerminalId","01"); //Src_Id setCmccDefaultConfig(configMap); CmppSmsClient smsClient = new CmppSmsClient(configMap, 1L); smsClient.setSign("【360保险】"); smsClient.setSmsOperator("CMCC"); smsClient.setUnsubscribeInfo(""); smsClient.setChannelId(1L); // smsClient.setSmsErrorCodeService(null); smsClient.setStatus(1); smsClient.setReplyFlag(false); smsClient.setReportFlag(false); smsClient.setWorkingEndTime("channel.getWorkingEndTime()"); smsClient.setWorkingStartTime("channel.getWorkingStartTime()"); SmsSendResult send = smsClient.send("13585934620", "用户phoneNum,感谢您接听,您600万保额医疗险已于今日到达,快戳 2dlj.cn/hm 领取 退回T"); return JSON.toJSONString(send); } private void setCmccDefaultConfig(Map<String, String> configMap) { //心跳信息发送间隔时间(单位:秒) configMap.put("heartbeat-interval","2"); //连接中断时重连间隔时间(单位:秒) configMap.put("reconnect-interval","2"); //需要重连时,连续发出心跳而没有接收到响应的个数(单位:个) configMap.put("heartbeat-noresponseout","3"); //操作超时时间(单位:秒) configMap.put("transaction-timeout","5"); //双方协商的版本号(大于0,小于256) configMap.put("version","1"); //是否属于调试状态,true表示属于调试状态,所有的消息被打印输出到屏幕,false表示不属于调试状态,所有的消息不被输出 configMap.put("debug","false"); } /** * 微信接入 * @param * @return * @throws IOException */ @RequestMapping(value="/wechat/connect",method = {RequestMethod.GET, RequestMethod.POST}) @ResponseBody public void connectWeixin(HttpServletRequest request, HttpServletResponse response) throws IOException { // 将请求、响应的编码均设置为UTF-8(防止中文乱码) request.setCharacterEncoding("UTF-8"); //微信服务器POST消息时用的是UTF-8编码,在接收时也要用同样的编码,否则中文会乱码; response.setCharacterEncoding("UTF-8"); //在响应消息(回复消息给用户)时,也将编码方式设置为UTF-8,原理同上;boolean isGet = request.getMethod().toLowerCase().equals("get"); PrintWriter out = response.getWriter(); try { if (RequestMethod.GET.name().equals(request.getMethod())) { String signature = request.getParameter("signature");// 微信加密签名 String timestamp = request.getParameter("timestamp");// 时间戳 String nonce = request.getParameter("nonce");// 随机数 String echostr = request.getParameter("echostr");//随机字符串 // 通过检验signature对请求进行校验,若校验成功则原样返回echostr,表示接入成功,否则接入失败 if (SignUtil.checkSignature(DNBX_TOKEN, signature, timestamp, nonce)) { LOGGER.info("Connect the weixin server is successful."); response.getWriter().write(echostr); } else { LOGGER.error("Failed to verify the signature!"); } }else{ String respMessage = ""; try { respMessage = wechatService.weixinPost(request); out.write(respMessage); LOGGER.info("The request completed successfully"); LOGGER.info("to weixin server "+respMessage); } catch (Exception e) { LOGGER.error("Failed to convert the message from weixin!", e); } } } catch (Exception e) { LOGGER.error("Connect the weixin server is error.", e); }finally{ out.close(); } } @RequestMapping(value = "recvDebezium", method = RequestMethod.POST) @ResponseBody public JSONObject recvDebezium( HttpServletRequest request) { // LOGGER.info("recvDebezium"); Enumeration<String> headerNames = request.getHeaderNames(); while (headerNames.hasMoreElements()) { String s = headerNames.nextElement(); LOGGER.info("header= key:{},value:{}", s,request.getHeader(s)); } LOGGER.info("__debezium.newkey:{}", request.getHeader("__debezium.newkey")); JSONObject result = new JSONObject(); String requestStr; try (ServletInputStream servletInputStream = request.getInputStream()) { requestStr = IOUtils.toString(servletInputStream, Charsets.UTF_8.name()); debeziumHandler.handler(requestStr); LOGGER.info("recvDebezium,requestStr:{}", requestStr); } catch (IOException e) { LOGGER.error("recvCallResult error:", e); result.put("code", "1"); result.put("success", 
false); result.put("message", "请求入参格式有误,读取异常"); return result; } return result; } @RequestMapping(value = "/api/callback", method = RequestMethod.POST) @ResponseBody public JSONObject callBack(HttpServletRequest request) throws Exception { ServletInputStream servletInputStream = request.getInputStream(); String result = IOUtils.toString(servletInputStream, Charsets.UTF_8.name()); LOGGER.info("api callback:{}", JSON.toJSONString(result)); JSONObject object = new JSONObject(); object.put("success", true); return object; } // @RequestMapping(value = "/api/callback", method = RequestMethod.POST) // @ResponseBody // public JSONObject callBack(Object payload) throws Exception { // LOGGER.info("api callback:{}", JSON.toJSONString(payload)); // JSONObject object = new JSONObject(); // object.put("success", true); //// return object; // throw new RuntimeException(); // } @RequestMapping(value = "/chat/completions", method = RequestMethod.GET) @ResponseBody public List<String> testChatgpt(String content) throws Exception { // 消息列表 List<ChatMessage> list = new ArrayList<>(); // 定义一个用户身份,content是用户写的内容 ChatMessage userMessage = new ChatMessage(); userMessage.setRole("user"); userMessage.setContent(content); list.add(userMessage); OpenAiService service = new OpenAiService("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"); ChatCompletionRequest request = ChatCompletionRequest.builder() .messages(list) .model("gpt-3.5-turbo") .build(); List<ChatCompletionChoice> choices = service.createChatCompletion(request).getChoices(); List<String> response = new ArrayList<>(); choices.forEach(item -> { response.add(item.getMessage().getContent()); }); return response; } /** * curl -XGET http://127.0.0.1/innerapi/phoneInfo * @return */ @GetMapping("phoneInfo") @ResponseBody public WebResponse phoneInfo() { PhoneNumberLookup phoneNumberLookup = new PhoneNumberLookup(); phoneNumberLookup.initAllByte(); List<PhoneNumberInfo> all = phoneNumberLookup.getAll(); return WebResponse.successData(all.size()); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((6260, 6309), 'com.yxy.nova.mwh.utils.time.DateTimeUtil.parseDatetime18'), ((6420, 6467), 'com.yxy.nova.mwh.utils.time.DateTimeUtil.parseDatetime18'), ((12851, 12872), 'com.google.common.base.Charsets.UTF_8.name'), ((13611, 13632), 'com.google.common.base.Charsets.UTF_8.name'), ((14831, 14959), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((14831, 14934), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((14831, 14894), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
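Editorial note: the offsets above annotate the chained com.theokanning.openai.completion.chat.ChatCompletionRequest.builder() call inside testChatgpt(). The following is a minimal, self-contained sketch of that same chat-completion pattern, not code from the row itself; the environment-variable name OPENAI_API_KEY, the model string, and the prompt text are illustrative assumptions.
import java.util.List;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;

class ChatCompletionSketch {
    public static void main(String[] args) {
        // Assumption: the API key is read from an environment variable rather than hard-coded.
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"));
        // One user message, mirroring the role/content pair built in testChatgpt().
        ChatMessage userMessage = new ChatMessage("user", "Say hello");
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model("gpt-3.5-turbo")          // assumed model name
                .messages(List.of(userMessage))
                .build();
        // Each choice carries one generated message; print its content.
        service.createChatCompletion(request).getChoices()
                .forEach(c -> System.out.println(c.getMessage().getContent()));
        service.shutdownExecutor();
    }
}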
package org.togetherjava.tjbot.features.chatgpt; import com.theokanning.openai.OpenAiHttpException; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.togetherjava.tjbot.config.Config; import java.time.Duration; import java.util.List; import java.util.Objects; import java.util.Optional; /** * Service used to communicate to OpenAI API to generate responses. */ public class ChatGptService { private static final Logger logger = LoggerFactory.getLogger(ChatGptService.class); private static final Duration TIMEOUT = Duration.ofSeconds(90); /** The maximum number of tokens allowed for the generated answer. */ private static final int MAX_TOKENS = 3_000; /** * This parameter reduces the likelihood of the AI repeating itself. A higher frequency penalty * makes the model less likely to repeat the same lines verbatim. It helps in generating more * diverse and varied responses. */ private static final double FREQUENCY_PENALTY = 0.5; /** * This parameter controls the randomness of the AI's responses. A higher temperature results in * more varied, unpredictable, and creative responses. Conversely, a lower temperature makes the * model's responses more deterministic and conservative. */ private static final double TEMPERATURE = 0.8; /** * n: This parameter specifies the number of responses to generate for each prompt. If n is more * than 1, the AI will generate multiple different responses to the same prompt, each one being * a separate iteration based on the input. */ private static final int MAX_NUMBER_OF_RESPONSES = 1; private static final String AI_MODEL = "gpt-3.5-turbo"; private boolean isDisabled = false; private OpenAiService openAiService; /** * Creates instance of ChatGPTService * * @param config needed for token to OpenAI API. */ public ChatGptService(Config config) { String apiKey = config.getOpenaiApiKey(); boolean keyIsDefaultDescription = apiKey.startsWith("<") && apiKey.endsWith(">"); if (apiKey.isBlank() || keyIsDefaultDescription) { isDisabled = true; return; } openAiService = new OpenAiService(apiKey, TIMEOUT); ChatMessage setupMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), """ For code supplied for review, refer to the old code supplied rather than rewriting the code. DON'T supply a corrected version of the code.\s"""); ChatCompletionRequest systemSetupRequest = ChatCompletionRequest.builder() .model(AI_MODEL) .messages(List.of(setupMessage)) .frequencyPenalty(FREQUENCY_PENALTY) .temperature(TEMPERATURE) .maxTokens(50) .n(MAX_NUMBER_OF_RESPONSES) .build(); // Sending the system setup message to ChatGPT. openAiService.createChatCompletion(systemSetupRequest); } /** * Prompt ChatGPT with a question and receive a response. * * @param question The question being asked of ChatGPT. Max is {@value MAX_TOKENS} tokens. * @param context The category of asked question, to set the context(eg. Java, Database, Other * etc). * @return response from ChatGPT as a String. * @see <a href="https://platform.openai.com/docs/guides/chat/managing-tokens">ChatGPT * Tokens</a>. */ public Optional<String> ask(String question, String context) { if (isDisabled) { return Optional.empty(); } try { String instructions = "KEEP IT CONCISE, NOT MORE THAN 280 WORDS"; String questionWithContext = "context: Category %s on a Java Q&A discord server. 
%s %s" .formatted(context, instructions, question); ChatMessage chatMessage = new ChatMessage(ChatMessageRole.USER.value(), Objects.requireNonNull(questionWithContext)); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model(AI_MODEL) .messages(List.of(chatMessage)) .frequencyPenalty(FREQUENCY_PENALTY) .temperature(TEMPERATURE) .maxTokens(MAX_TOKENS) .n(MAX_NUMBER_OF_RESPONSES) .build(); String response = openAiService.createChatCompletion(chatCompletionRequest) .getChoices() .getFirst() .getMessage() .getContent(); if (response == null) { return Optional.empty(); } return Optional.of(response); } catch (OpenAiHttpException openAiHttpException) { logger.warn( "There was an error using the OpenAI API: {} Code: {} Type: {} Status Code: {}", openAiHttpException.getMessage(), openAiHttpException.code, openAiHttpException.type, openAiHttpException.statusCode); } catch (RuntimeException runtimeException) { logger.warn("There was an error using the OpenAI API: {}", runtimeException.getMessage()); } return Optional.empty(); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((2581, 2611), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2846, 3126), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2846, 3105), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2846, 3065), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2846, 3038), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2846, 3000), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2846, 2951), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2846, 2906), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4168, 4196), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((4322, 4637), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4322, 4612), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4322, 4568), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4322, 4529), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4322, 4487), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4322, 4434), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4322, 4386), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package oracleai; import com.theokanning.openai.image.CreateImageRequest; import com.theokanning.openai.service.OpenAiService; import org.springframework.core.io.ByteArrayResource; import org.springframework.http.*; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; import org.springframework.web.bind.annotation.*; import org.springframework.web.client.RestTemplate; import javax.servlet.http.HttpServletRequest; import javax.sound.sampled.*; import java.io.*; import java.time.Duration; import java.util.*; @RestController @RequestMapping("/picturestory") public class GenerateAPictureStoryUsingOnlySpeech { static List<String> storyImages = new ArrayList(); @GetMapping("/form") public String newstory( HttpServletRequest request) throws Exception { storyImages = new ArrayList(); return getHtmlString(""); } @GetMapping("/picturestory") public String picturestory(@RequestParam("genopts") String genopts) throws Exception { AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100.0f, 16, 1, (16 / 8) * 1, 44100.0f, true); SoundRecorder soundRecorder = new SoundRecorder(); soundRecorder.build(format); System.out.println("Start recording ...."); soundRecorder.start(); Thread.sleep(8000); soundRecorder.stop(); System.out.println("Stopped recording ...."); Thread.sleep(3000); //give the process time String name = "AISoundClip"; AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE; AudioInputStream audioInputStream = soundRecorder.audioInputStream; System.out.println("Saving..."); File file = new File(name + "." + fileType.getExtension()); audioInputStream.reset(); AudioSystem.write(audioInputStream, fileType, file); System.out.println("Saved " + file.getAbsolutePath()); String transcription = transcribe(file) + genopts; System.out.println("transcription " + transcription); String imageLocation = imagegeneration(transcription); System.out.println("imageLocation " + imageLocation); storyImages.add(imageLocation); String htmlStoryFrames = ""; Iterator<String> iterator = storyImages.iterator(); while(iterator.hasNext()) { htmlStoryFrames += "<td><img src=\"" + iterator.next() +"\" width=\"400\" height=\"400\"></td>"; } return getHtmlString(htmlStoryFrames); } private static String getHtmlString(String htmlStoryFrames) { return "<html><table>" + " <tr>" + htmlStoryFrames + " </tr>" + "</table><br><br>" + "<form action=\"/picturestory/picturestory\">" + " <input type=\"submit\" value=\"Click here and record (up to 10 seconds of audio) describing next scene.\">" + "<br> Some additional options..." 
+ "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", using only one line\" checked >using only one line" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", photo taken on a Pentax k1000\">photo taken on a Pentax k1000" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", pixel art\">pixel art" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", digital art\">digital art" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", 3d render\">3d render" + "</form><br><br>" + "<form action=\"/picturestory/form\">" + " <input type=\"submit\" value=\"Or click here to start a new story\">\n" + "</form>" + "</html>"; } public String imagegeneration(String imagedescription) throws Exception { OpenAiService service = new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60)); CreateImageRequest openairequest = CreateImageRequest.builder() .prompt(imagedescription) .build(); System.out.println("\nImage is located at:"); String imageLocation = service.createImage(openairequest).getData().get(0).getUrl(); service.shutdownExecutor(); return imageLocation; } public String transcribe(File file) throws Exception { OpenAiService service = new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60)); String audioTranscription = transcribeFile(file, service); service.shutdownExecutor(); return audioTranscription; } private String transcribeFile(File file, OpenAiService service) throws Exception { String endpoint = "https://api.openai.com/v1/audio/transcriptions"; String modelName = "whisper-1"; HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.MULTIPART_FORM_DATA); headers.setBearerAuth(System.getenv("OPENAI_KEY")); MultiValueMap<String, Object> body = new LinkedMultiValueMap<>(); byte[] fileBytes = new byte[0]; try (FileInputStream fis = new FileInputStream(file); ByteArrayOutputStream bos = new ByteArrayOutputStream()) { byte[] buffer = new byte[1024]; int bytesRead; while ((bytesRead = fis.read(buffer)) != -1) { bos.write(buffer, 0, bytesRead); } fileBytes = bos.toByteArray(); } catch (IOException e) { e.printStackTrace(); } body.add("file", new ByteArrayResource(fileBytes) { @Override public String getFilename() { return file.getName(); } }); body.add("model", modelName); HttpEntity<MultiValueMap<String, Object>> requestEntity = new HttpEntity<>(body, headers); RestTemplate restTemplate = new RestTemplate(); ResponseEntity<String> response = restTemplate.exchange(endpoint, HttpMethod.POST, requestEntity, String.class); return response.getBody(); } public class SoundRecorder implements Runnable { AudioInputStream audioInputStream; private AudioFormat format; public Thread thread; public SoundRecorder build(AudioFormat format) { this.format = format; return this; } public void start() { thread = new Thread(this); thread.start(); } public void stop() { thread = null; } @Override public void run() { try (final ByteArrayOutputStream out = new ByteArrayOutputStream(); final TargetDataLine line = getTargetDataLineForRecord();) { int frameSizeInBytes = format.getFrameSize(); int bufferLengthInFrames = line.getBufferSize() / 8; final int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes; buildByteOutputStream(out, line, frameSizeInBytes, bufferLengthInBytes); this.audioInputStream = new AudioInputStream(line); setAudioInputStream(convertToAudioIStream(out, frameSizeInBytes)); audioInputStream.reset(); } catch (IOException ex) { ex.printStackTrace(); } catch (Exception 
ex) { ex.printStackTrace(); } } public void buildByteOutputStream(final ByteArrayOutputStream out, final TargetDataLine line, int frameSizeInBytes, final int bufferLengthInBytes) throws IOException { final byte[] data = new byte[bufferLengthInBytes]; int numBytesRead; line.start(); while (thread != null) { if ((numBytesRead = line.read(data, 0, bufferLengthInBytes)) == -1) { break; } out.write(data, 0, numBytesRead); } } private void setAudioInputStream(AudioInputStream aStream) { this.audioInputStream = aStream; } public AudioInputStream convertToAudioIStream(final ByteArrayOutputStream out, int frameSizeInBytes) { byte[] audioBytes = out.toByteArray(); AudioInputStream audioStream = new AudioInputStream(new ByteArrayInputStream(audioBytes), format, audioBytes.length / frameSizeInBytes); System.out.println("Recording finished"); return audioStream; } public TargetDataLine getTargetDataLineForRecord() { TargetDataLine line; DataLine.Info info = new DataLine.Info(TargetDataLine.class, format); if (!AudioSystem.isLineSupported(info)) { return null; } try { line = (TargetDataLine) AudioSystem.getLine(info); line.open(format, line.getBufferSize()); } catch (final Exception ex) { return null; } return line; } } }
[ "com.theokanning.openai.image.CreateImageRequest.builder" ]
[((4168, 4263), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((4168, 4238), 'com.theokanning.openai.image.CreateImageRequest.builder')]
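Editorial note: this row's only annotated API is com.theokanning.openai.image.CreateImageRequest.builder(). Below is a stripped-down sketch of the same image-generation call as used in imagegeneration() above; the prompt text is a placeholder and the key/timeout handling is an assumption, not the author's code.
import java.time.Duration;
import com.theokanning.openai.image.CreateImageRequest;
import com.theokanning.openai.service.OpenAiService;

class ImageGenerationSketch {
    public static void main(String[] args) {
        // Assumption: key from the environment, 60-second client timeout as in the row above.
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60));
        CreateImageRequest request = CreateImageRequest.builder()
                .prompt("a watercolor painting of a lighthouse") // illustrative prompt
                .build();
        // The result holds the generated images; take the URL of the first one.
        String url = service.createImage(request).getData().get(0).getUrl();
        System.out.println(url);
        service.shutdownExecutor();
    }
}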
package se.kth.assertgroup.codar.repair; import com.theokanning.openai.completion.CompletionChoice; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.jetbrains.annotations.NotNull; import org.json.simple.parser.ParseException; import se.kth.assertgroup.codar.Constants; import se.kth.assertgroup.codar.gpt.PromptType; import se.kth.assertgroup.codar.gpt.SonarFixPrompt; import se.kth.assertgroup.codar.sorald.MineResParser; import se.kth.assertgroup.codar.sorald.SonarViolationMiner; import se.kth.assertgroup.codar.sorald.models.ViolationScope; import se.kth.assertgroup.codar.utils.PH; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.time.Duration; import java.util.*; import java.util.stream.Collectors; public class GPTRepair { private OpenAiService service; private SonarViolationMiner miner; public GPTRepair() { String token = System.getenv(Constants.OPENAI_API_TOKEN_ENV_NAME); service = new OpenAiService(token, Duration.ofSeconds(Constants.OPENAI_REQUEST_TIMEOUT)); miner = new SonarViolationMiner(); } public void repairSingleLine(File inputSrc, File outputSrc, int bugStartLine, int bugEndLine, int nonCompliantLine, String ruleKey) throws IOException { List<String> lines = FileUtils.readLines(inputSrc, "UTF-8"); String buggyCode = ""; boolean isFirstCurlyBracketPassed = false; for (int i = 0; i <= bugEndLine - 1; i++) { String currentLine = lines.get(i); if (i >= bugStartLine - 1) { buggyCode += currentLine; if (i - 1 == nonCompliantLine) buggyCode += " // Non-compliant"; buggyCode += System.lineSeparator(); } else { if (currentLine.trim().startsWith("import ")) { buggyCode += currentLine + System.lineSeparator(); } else if (currentLine.contains("{") && !isFirstCurlyBracketPassed) { buggyCode += currentLine + System.lineSeparator(); isFirstCurlyBracketPassed = true; } } } buggyCode += "}"; String prompt = new SonarFixPrompt(ruleKey, buggyCode).getPromptAsStr(); String token = System.getenv(Constants.OPENAI_API_TOKEN_ENV_NAME); OpenAiService service = new OpenAiService(token, Duration.ofSeconds(Constants.OPENAI_REQUEST_TIMEOUT)); CompletionRequest completionRequest = CompletionRequest.builder() .model("code-davinci-002") .prompt(prompt) .echo(false) .maxTokens(buggyCode.length() * 3) .stop(List.of("#")) .n(1) .bestOf(1) .temperature(0.0) .build(); List<CompletionChoice> choices = service.createCompletion(completionRequest).getChoices(); String fixedCode = choices.get(0).getText().replaceAll(System.lineSeparator(), ""); lines.set(nonCompliantLine - 1, fixedCode); FileUtils.writeLines(outputSrc, lines); } /** * This method repairs all violations of a specific rule * * @param root The root directory of the source file * @param mineRes The output file of Sorald mining command. 
* @param rule The ID of the rule whose violations should be fixed */ public void repair(File root, File mineRes, String rule, PromptType promptType, FixScale fixScale) throws IOException, ParseException, InterruptedException { MineResParser mineResParser = new MineResParser(); Map<String, Map<ViolationScope, Set<Integer>>> ruleToViolations = mineResParser.getRuleToScopeViolations(root, mineRes, fixScale); for (Map.Entry<String, Map<ViolationScope, Set<Integer>>> e : ruleToViolations.entrySet()) { String curRule = e.getKey(); Map<ViolationScope, Set<Integer>> scopeToViolations = e.getValue(); if ((rule == null && isHandled(curRule, promptType)) || curRule.equals(rule)) { for (Map.Entry<ViolationScope, Set<Integer>> scopeViolations : scopeToViolations.entrySet()) { int numberOfAddedLines = repairAndGetNumberOfAddedLines(root, scopeViolations.getKey(), scopeViolations.getValue(), rule, promptType, fixScale); updateRuleToScopeViolations(ruleToViolations, scopeViolations, numberOfAddedLines); } } } } private void updateRuleToScopeViolations(Map<String, Map<ViolationScope, Set<Integer>>> ruleViolations, Map.Entry<ViolationScope, Set<Integer>> replacedScope, int numberOfAddedLines) { ruleViolations.forEach((rule, scopeToViolations) -> updateScopeToRuleViolations(scopeToViolations, replacedScope, numberOfAddedLines)); } public boolean isHandled(String rule, PromptType promptType) { try { IOUtils.toString(SonarFixPrompt.class.getClassLoader() .getResourceAsStream(Constants.PROMPT_TEMPLATE_BASE + promptType.toString() + File.separator + rule), "UTF-8"); } catch (IOException e) { return false; } return true; } private void updateScopeToRuleViolations(Map<ViolationScope, Set<Integer>> scopeToRuleViolations, Map.Entry<ViolationScope, Set<Integer>> replacedScope, int numberOfAddedLines) { scopeToRuleViolations.entrySet().stream() .filter(entry -> entry.getKey().getSrcPath().equals(replacedScope.getKey().getSrcPath())) .forEach(entry -> { if (entry.getKey().getStartLine() > replacedScope.getKey().getStartLine()) { entry.getKey().setStartLine(entry.getKey().getStartLine() + numberOfAddedLines); entry.setValue(entry.getValue().stream().map(line -> line + numberOfAddedLines) .collect(Collectors.toSet())); } if (entry.getKey().getEndLine() >= replacedScope.getKey().getEndLine()) entry.getKey().setEndLine(entry.getKey().getEndLine() + numberOfAddedLines); }); } private int repairAndGetNumberOfAddedLines ( File root, ViolationScope vs, Set<Integer> buggyLines, String rule, PromptType promptType, FixScale fixScale ) throws IOException, ParseException, InterruptedException { File src = new File(root.getPath() + File.separator + vs.getSrcPath()); SonarFixPrompt initialPrompt = generatePrompt(src, vs.getStartLine(), vs.getEndLine(), buggyLines, rule, promptType); List<ChatMessage> messages = new ArrayList<>(); messages.add(new ChatMessage("system", "You are a super smart automated program repair tool." 
+ " You do not generate extra comments or logging statements.")); messages.add(new ChatMessage("user", initialPrompt.getPromptAsStr())); Set<String> fixedCodes = new HashSet<>(); double temperature = Constants.OPENAI_LOW_TEMPERATURE; int currentConversationLen = 0; boolean repeatedResponse = false; long originalIssuesCnt = miner.countViolations(root, vs, rule, fixScale); long bestIssuesCnt = originalIssuesCnt; int bestAnswerLineCntDiff = 0; File bestAnswer = Files.createTempFile("best_answer", ".java").toFile(); while (temperature <= Constants.OPENAI_MAX_TEMPERATURE) { if (repeatedResponse || currentConversationLen++ > Constants.MAX_CONVERSATION_LENGTH) { temperature += Constants.TEMPERATURE_INCREASE_STEP; if (temperature > Constants.OPENAI_MAX_TEMPERATURE) { break; } messages = messages.subList(0, 2); currentConversationLen = 0; repeatedResponse = false; fixedCodes.clear(); } ChatCompletionRequest completionRequest = ChatCompletionRequest.builder() .model(Constants.TURBO_MODEL) .messages(messages) .stop(List.of("#")) .n(1) .temperature(temperature) .build(); try { List<ChatCompletionChoice> choices = service.createChatCompletion(completionRequest).getChoices(); String fixedCode = choices.get(0).getMessage().getContent(); while (fixedCode.charAt(fixedCode.length() - 1) == '\n') { fixedCode = StringUtils.chomp(fixedCode); } List<String> srcLines = FileUtils.readLines(src, "UTF-8"); if (fixedCode.contains(Constants.OPENAI_RESPONSE_SNIPPET_SEPARATOR)) { fixedCode = extractCodeFromGPTResponse(fixedCode); } fixedCode = extractFixedMethod(fixedCode); if (fixedCodes.contains(fixedCode)) { repeatedResponse = true; continue; } fixedCodes.add(fixedCode); File backupOriginalSrc = Files.createTempFile("original_src", ".java").toFile(); FileUtils.copyFile(src, backupOriginalSrc); srcLines.subList(vs.getStartLine() - 1, vs.getEndLine()).clear(); srcLines.add(vs.getStartLine() - 1, fixedCode); FileUtils.writeLines(src, srcLines); String failureMessage = getMvnFailureMessage(root); long remainingIssuesCnt = failureMessage != null ? -1 : miner.countViolations(root, vs, rule, fixScale); if(failureMessage == null && remainingIssuesCnt < bestIssuesCnt){ bestIssuesCnt = remainingIssuesCnt; FileUtils.copyFile(src, bestAnswer); bestAnswerLineCntDiff = (fixedCode.split("\r\n|\r|\n").length) - (vs.getEndLine() - vs.getStartLine() + 1); } if (failureMessage != null || remainingIssuesCnt >= 0) { FileUtils.copyFile(backupOriginalSrc, src); String userMessage; if (failureMessage != null) { userMessage = "The generated code causes the following error: " + System.lineSeparator() + failureMessage + System.lineSeparator() + "Generate another fixed version." + System.lineSeparator(); } else { userMessage = "There are still some violations of rule " + rule + " in the code." + System.lineSeparator() + "Generate another fixed version." + System.lineSeparator(); } messages.add(new ChatMessage("assistant", fixedCode)); messages.add(new ChatMessage("user", userMessage)); continue; } return (fixedCode.split("\r\n|\r|\n").length) - (vs.getEndLine() - vs.getStartLine() + 1); } catch (Exception e) { e.printStackTrace(); return 0; } } if(bestIssuesCnt < originalIssuesCnt){ FileUtils.copyFile(bestAnswer, src); return bestAnswerLineCntDiff; } return 0; } /** * Extracts the method from the GPT response. * The method is the first code snippet that is not an import statement and not the class definition. 
* * @param fixedCode the GPT response * @return the method extracted from the GPT response */ private String extractFixedMethod(String fixedCode) { String[] lines = fixedCode.split("\r\n|\r|\n"); List<String> extractedLines = new ArrayList<>(); boolean isClassPassed = false, isFirstCurlyBracketPassed = false; for (String line : lines) { if (line.trim().startsWith("import ")) { continue; } if (line.contains("class ")) { isClassPassed = true; } if (line.contains("{")) { if (isClassPassed && !isFirstCurlyBracketPassed) { isFirstCurlyBracketPassed = true; continue; } else { isFirstCurlyBracketPassed = true; } } if (isFirstCurlyBracketPassed) { extractedLines.add(line); } } if (isClassPassed) { while (extractedLines.size() > 0) { boolean shouldBreak = false; if (extractedLines.get(extractedLines.size() - 1).contains("}")) shouldBreak = true; extractedLines.remove(extractedLines.size() - 1); if (shouldBreak) break; } } return extractedLines.size() > 0 ? StringUtils.join(extractedLines, "\n") : fixedCode; } private String getMvnFailureMessage(File root) throws IOException, InterruptedException { File mvnOutput = Files.createTempFile("mvn_output", ".txt").toFile(); PH.run(mvnOutput, root, "Running maven ...", "mvn", "test"); List<String> mvnOutputLines = FileUtils.readLines(mvnOutput, "UTF-8"); int failureLine; // finding the first line error / failure messages for (failureLine = mvnOutputLines.size() - 1; failureLine >= 0; failureLine--) { if (mvnOutputLines.get(failureLine).contains("BUILD SUCCESS")) return null; if (mvnOutputLines.get(failureLine).contains("BUILD FAILURE")) break; } if (failureLine < 0) return "Maven ran unsuccessfully."; String errorMessage = ""; for (failureLine = 0; failureLine < mvnOutputLines.size(); failureLine++) { if (mvnOutputLines.get(failureLine).startsWith("[ERROR] Errors:") || mvnOutputLines.get(failureLine).startsWith("[ERROR] Failures:") || mvnOutputLines.get(failureLine).contains("BUILD FAILURE") || mvnOutputLines.get(failureLine).startsWith("[ERROR] COMPILATION ERROR :")) { break; } } int consideredLines = 0; for (; failureLine < mvnOutputLines.size() && consideredLines < Constants.MAX_FEEDBACK_LINES; failureLine++) { if (mvnOutputLines.get(failureLine).startsWith("[ERROR]")) { String line = mvnOutputLines.get(failureLine); line = line.substring("[ERROR] ".length()); errorMessage += line + System.lineSeparator(); consideredLines++; } else if (mvnOutputLines.get(failureLine).contains("BUILD FAILURE")) { for (; failureLine < mvnOutputLines.size() && consideredLines < Constants.MAX_FEEDBACK_LINES; failureLine++) { if (mvnOutputLines.get(failureLine).startsWith("[ERROR] Failed to execute goal")) { String line = mvnOutputLines.get(failureLine); line = line.substring("[ERROR] ".length()); errorMessage += line + System.lineSeparator(); consideredLines++; } } break; } } return errorMessage; } @NotNull private static String extractCodeFromGPTResponse(String fixedCode) { List<String> fixedCodeSourceLines = new ArrayList<>(); String[] fixedCodeLines = fixedCode.split("\r\n|\r|\n"); boolean isSource = false; for (int i = fixedCodeLines.length - 1; i >= 0; i--) { if (fixedCodeLines[i].startsWith(Constants.OPENAI_RESPONSE_SNIPPET_SEPARATOR)) { if (isSource) break; isSource = true; continue; } if (isSource) { fixedCodeSourceLines.add(0, fixedCodeLines[i]); } } fixedCode = String.join(System.lineSeparator(), fixedCodeSourceLines); return fixedCode; } private SonarFixPrompt generatePrompt ( File src, Integer startLine, Integer endLine, Set<Integer> buggyLines, String rule, 
PromptType promptType ) throws IOException { List<String> lines = FileUtils.readLines(src, "UTF-8"); String buggyCode = ""; boolean isFirstCurlyBracketPassed = false, isClassPassed = false; for (int i = 0; i <= endLine - 1; i++) { String currentLine = lines.get(i); if (i >= startLine - 1) { buggyCode += currentLine; if (buggyLines.contains(i + 1)) buggyCode += " // " + Constants.PROMPT_NONCOMPLIANT_KEYWORD; buggyCode += System.lineSeparator(); } else { if (currentLine.contains("class ")) { isClassPassed = true; } if (currentLine.trim().startsWith("import ")) { buggyCode += currentLine + System.lineSeparator(); } else if (isClassPassed && !isFirstCurlyBracketPassed) { buggyCode += currentLine + System.lineSeparator(); if (currentLine.contains("{")) { isFirstCurlyBracketPassed = true; } } } } buggyCode += "}"; return new SonarFixPrompt(rule, buggyCode, promptType); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder", "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((2925, 3251), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2925, 3226), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2925, 3192), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2925, 3165), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2925, 3143), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2925, 3107), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2925, 3056), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2925, 3027), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2925, 2995), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((5553, 5755), 'se.kth.assertgroup.codar.gpt.SonarFixPrompt.class.getClassLoader'), ((5553, 5590), 'se.kth.assertgroup.codar.gpt.SonarFixPrompt.class.getClassLoader'), ((8229, 8282), 'java.nio.file.Files.createTempFile'), ((8878, 9140), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8878, 9111), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8878, 9065), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8878, 9039), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8878, 8999), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8878, 8959), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10056, 10110), 'java.nio.file.Files.createTempFile'), ((14095, 14146), 'java.nio.file.Files.createTempFile')]
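Editorial note: besides the chat builder, this row annotates com.theokanning.openai.completion.CompletionRequest.builder(), the legacy completions endpoint used in repairSingleLine(). A minimal sketch of that call follows; the model name, prompt, and parameter values are placeholders chosen for illustration, not values from the row.
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;

class CompletionSketch {
    public static void main(String[] args) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"));
        CompletionRequest request = CompletionRequest.builder()
                .model("gpt-3.5-turbo-instruct") // assumed model name
                .prompt("// Fix the off-by-one error in this loop")
                .maxTokens(128)
                .temperature(0.0)
                .n(1)
                .build();
        // Completion choices expose the generated text directly via getText().
        service.createCompletion(request).getChoices()
                .forEach(choice -> System.out.println(choice.getText()));
        service.shutdownExecutor();
    }
}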
package org.mule.extension.openai.internal.operation; import static org.mule.runtime.extension.api.annotation.param.MediaType.ANY; import org.mule.runtime.extension.api.annotation.param.MediaType; import org.mule.runtime.extension.api.annotation.param.NullSafe; import org.mule.extension.openai.internal.connection.provider.OpenAIConnection; import org.mule.extension.openai.internal.types.ChatModelsValueProvider; import org.mule.extension.openai.internal.types.CompletionModelsValueProvider; import org.mule.extension.openai.internal.types.EmbeddingsModelsValueProvider; import org.mule.extension.openai.internal.types.ImageSizeValueProvider; import org.mule.extension.openai.internal.types.LogitBiasParameters; import org.mule.runtime.api.meta.ExpressionSupport; import org.mule.runtime.extension.api.annotation.Expression; import org.mule.runtime.extension.api.annotation.param.Connection; import org.mule.runtime.extension.api.annotation.param.Content; import org.mule.runtime.extension.api.annotation.param.Optional; import org.mule.runtime.extension.api.annotation.param.display.DisplayName; import org.mule.runtime.extension.api.annotation.param.display.Summary; import org.mule.runtime.extension.api.annotation.values.OfValues; import static org.mule.runtime.extension.api.annotation.param.Optional.PAYLOAD; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.completion.CompletionResult; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.embedding.EmbeddingRequest; import com.theokanning.openai.embedding.EmbeddingResult; import com.theokanning.openai.image.CreateImageRequest; import com.theokanning.openai.image.Image; import com.theokanning.openai.image.ImageResult; public class OpenAIOperations { @MediaType("application/java") @DisplayName("Create completion") public CompletionResult createCompletion(@Connection OpenAIConnection connection, @OfValues(CompletionModelsValueProvider.class) @Summary("ID of the model to use") String model, @Content(primary = true) @Optional(defaultValue = PAYLOAD) @Summary("The prompt to generate completions for, encoded as a string.") String prompt, @Optional @Summary("The suffix that comes after a completion of inserted text.") String suffix, @Optional(defaultValue = "16") @Summary("The maximum number of tokens to generate in the completion.\nThe token count of your prompt plus 'Max tokens' cannot exceed the model's context length. Most models have a context length of 2048 tokens (except for the newest models, which support 4096).") Integer maxTokens, @Optional(defaultValue = "1") @Summary("What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.\nIt is generally recommend altering this or 'Top p' but not both.") Double temperature, @Optional(defaultValue = "1") @Summary("An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with 'Top p' probability mass. 
So 0.1 means only the tokens comprising the top 10% probability mass are considered.\nIt is generally recommend altering this or 'Temperature' but not both.") Double topP, @Optional(defaultValue = "1") @Summary("How many completions to generate for each prompt.\nNote: Because this parameter generates many completions, it can quickly consume your token quota. Use carefully and ensure that you have reasonable settings for 'Max tokens' and 'Stop'.") Integer n, @Optional @Summary("Include the log probabilities on the logprobs most likely tokens, as well the chosen tokens. For example, if 'Log probabilities' is 5, the API will return a list of the 5 most likely tokens. The API will always return the 'Log probabilities' of the sampled token, so there may be up to 'Log probabilities + 1' elements in the response./nThe maximum value for 'Log probabilities' is 5. If you need more than this, please contact OpenAI Help center and describe your use case.") Integer logProbabilities, @Optional(defaultValue = "false") @Summary("Echo back the prompt in addition to the completion.") boolean echo, @Content @NullSafe @DisplayName("Stop") @Optional @Summary("Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.") List<String> stops, @Optional(defaultValue = "0") @Summary("Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.") Double presencePenalty, @Optional(defaultValue = "0") @Summary("Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.") Double frequencyPenalty, @Optional(defaultValue = "1") @Summary("Generates 'Best Of' completions server-side and returns the 'best' (the one with the highest log probability per token). Results cannot be streamed.\nWhen used with 'N', 'Best of' controls the number of candidate completions and 'N' specifies how many to return - 'Best of' must be greater than 'N'.\nNote: Because this parameter generates many completions, it can quickly consume your token quota. Use carefully and ensure that you have reasonable settings for 'Max tokens' and 'Stop'.") Integer bestOf, @Content @NullSafe @DisplayName("Logit bias") @Optional @Summary("Modify the likelihood of specified tokens appearing in the completion.\nAccepts a json object that maps tokens (specified by their token ID in the GPT tokenizer) to an associated bias value from -100 to 100. You can use the OpenAI tokenizer tool (which works for both GPT-2 and GPT-3) to convert text to token IDs. Mathematically, the bias is added to the logits generated by the model prior to sampling. 
The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token.\nAs an example, you can pass [{tokenId: \"50256\",biasValue: -100}] to prevent the <|endoftext|> token from being generated.") List<LogitBiasParameters> logitBiases, @Optional @Summary("A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.") String user) { LogitBiasParameters logitBias = new LogitBiasParameters(); CompletionRequest completionRequest = CompletionRequest.builder() .model(model) .prompt(prompt) .suffix(suffix) .maxTokens(maxTokens) .temperature(temperature) .topP(topP) .n(n) .logprobs(logProbabilities) .echo(echo) .stop(stops) .presencePenalty(presencePenalty) .frequencyPenalty(frequencyPenalty) .bestOf(bestOf) .logitBias(logitBias.toMap(logitBiases)) .user(user) .build(); return connection.getClient().createCompletion(completionRequest); } @MediaType("application/java") @DisplayName("Create chat completion") public ChatCompletionResult createChatCompletion(@Connection OpenAIConnection connection, @OfValues(ChatModelsValueProvider.class) @Summary("ID of the model to use.") String model, @Content(primary = true) @Expression(ExpressionSupport.REQUIRED) @Summary("The messages to generate chat completions for, in the chat format.") List<ChatMessage> messages, @Optional(defaultValue = "1") @Summary("What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.\nIt is generally recommend altering this or 'Top P' but not both.") Double temperature, @Optional(defaultValue = "1") @Summary("An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with 'Top p' probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.\nIt is generally recommend altering this or 'Temperature' but not both.") Double topP, @Optional(defaultValue = "1") @Summary("How many completions to generate for each input message.") Integer n, @Optional @Summary("The maximum number of tokens allowed for the generated answer. By default, the number of tokens the model can return will be (4096 - prompt tokens).") Integer maxTokens, @Optional(defaultValue = "0") @Summary("Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.") Double presencePenalty, @Optional(defaultValue = "0") @Summary("Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.") Double frequencyPenalty, @Content @NullSafe @DisplayName("Logit bias") @Optional @Summary("Modify the likelihood of specified tokens appearing in the completion.\nAccepts a json object that maps tokens (specified by their token ID in the GPT tokenizer) to an associated bias value from -100 to 100. You can use the OpenAI tokenizer tool (which works for both GPT-2 and GPT-3) to convert text to token IDs. Mathematically, the bias is added to the logits generated by the model prior to sampling. 
The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token.\nAs an example, you can pass [{tokenId: \"50256\",biasValue: -100}] to prevent the <|endoftext|> token from being generated.") List<LogitBiasParameters> logitBiases, @Optional @Summary("A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.") String user) { LogitBiasParameters logitBias = new LogitBiasParameters(); ChatCompletionRequest chatRequest = ChatCompletionRequest.builder() .model(model) .messages(messages) .temperature(temperature) .topP(topP) .n(n) .maxTokens(maxTokens) .presencePenalty(presencePenalty) .frequencyPenalty(frequencyPenalty) .logitBias(logitBias.toMap(logitBiases)) .user(user) .build(); return connection.getClient().createChatCompletion(chatRequest); } @MediaType("image/png") @DisplayName("Create image file") public List<InputStream> createImageFile(@Connection OpenAIConnection connection, @Content(primary = true) @Summary("A text description of the desired image(s). The maximum length is 1000 characters.") String prompt, @Optional(defaultValue = "1") @Summary("The number of images to generate. Must be between 1 and 10.") Integer n, @Optional(defaultValue = "1024x1024") @OfValues(ImageSizeValueProvider.class) @Summary("The size of the generated images.") String size, @Optional @Summary("A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.") String user) { CreateImageRequest imageRequest = CreateImageRequest.builder() .prompt(prompt) .n(n) .size(size) .responseFormat("b64_json") .user(user) .build(); List<InputStream> imagesBytes = new ArrayList<>(); List<Image> images = connection.getClient().createImage(imageRequest).getData(); for (Image image : images) { imagesBytes.add(new ByteArrayInputStream(image.getB64Json().getBytes())); } return imagesBytes; } @MediaType("application/java") @DisplayName("Create image url") public ImageResult createImageUrl(@Connection OpenAIConnection connection, @Content(primary = true) @Summary("A text description of the desired image(s). The maximum length is 1000 characters.") String prompt, @Optional(defaultValue = "1") @Summary("The number of images to generate. Must be between 1 and 10.") Integer n, @Optional(defaultValue = "1024x1024") @OfValues(ImageSizeValueProvider.class) @Summary("The size of the generated images.") String size, @Optional @Summary("A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.") String user) { CreateImageRequest imageRequest = CreateImageRequest.builder() .prompt(prompt) .n(n) .size(size) .responseFormat("url") .user(user) .build(); return connection.getClient().createImage(imageRequest); } @MediaType("application/java") @DisplayName("Create embeddings") public EmbeddingResult createEmbeddings(@Connection OpenAIConnection connection, @OfValues(EmbeddingsModelsValueProvider.class) @Summary("ID of the model to use.") String model, @Content(primary = true) @DisplayName("Input") @Expression(ExpressionSupport.REQUIRED) @Summary("Input text to get embeddings for, encoded as a string or array of tokens. To get embeddings for multiple inputs in a single request, pass an array of strings or array of token arrays. 
Each input must not exceed 8192 tokens in length.") List<String> inputs, @Optional @Summary("A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.") String user) { EmbeddingRequest embeddingsRequest = EmbeddingRequest.builder() .model(model) .input(inputs) .user(user) .build(); return connection.getClient().createEmbeddings(embeddingsRequest); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder", "com.theokanning.openai.image.CreateImageRequest.builder", "com.theokanning.openai.embedding.EmbeddingRequest.builder", "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((6830, 7234), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 7221), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 7205), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 7160), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 7140), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 7100), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 7062), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 7045), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 7029), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 6997), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 6987), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 6971), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 6941), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 6915), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 6895), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((6830, 6875), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((10320, 10627), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10320, 10614), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10320, 10598), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10320, 10553), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10320, 10513), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10320, 10475), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10320, 10449), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10320, 10439), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10320, 10423), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10320, 10393), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10320, 10369), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((11414, 11549), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11414, 11536), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11414, 11520), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11414, 11488), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11414, 11472), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11414, 11462), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((12542, 12672), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((12542, 12659), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((12542, 12643), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((12542, 12616), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((12542, 12600), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((12542, 12590), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((13526, 13618), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((13526, 13605), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((13526, 13589), 'com.theokanning.openai.embedding.EmbeddingRequest.builder'), ((13526, 13570), 'com.theokanning.openai.embedding.EmbeddingRequest.builder')]
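Editorial note: this is the only row that annotates com.theokanning.openai.embedding.EmbeddingRequest.builder(). A minimal sketch of the embeddings call from createEmbeddings() above; the model name and input string are assumptions, and the printed vector length is just one way to inspect the result.
import java.util.List;
import com.theokanning.openai.embedding.EmbeddingRequest;
import com.theokanning.openai.embedding.EmbeddingResult;
import com.theokanning.openai.service.OpenAiService;

class EmbeddingSketch {
    public static void main(String[] args) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"));
        EmbeddingRequest request = EmbeddingRequest.builder()
                .model("text-embedding-ada-002")           // assumed model name
                .input(List.of("Hello OpenAI embeddings")) // one input string per embedding
                .build();
        EmbeddingResult result = service.createEmbeddings(request);
        // Each embedding is a list of doubles; print the dimensionality of the first vector.
        System.out.println(result.getData().get(0).getEmbedding().size());
        service.shutdownExecutor();
    }
}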
package com.ramesh.openai; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; /*** * This project demonstrates the use of Prompt elements which are prompt roles - * SYSTEM, ASSISTANT, USER prompt roles ***/ class PromptElements { public static void main(String... args) { // Set the Open AI Token & Model String token = "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"; String model = "gpt-3.5-turbo"; // service handle for calling OpenAI APIs OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30)); System.out.println("-----------------------------------------------------------"); // put in the 3 different prompt role messages - SYSTEM, ASSISTANT and USER prompts final ChatMessage sysMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a science teacher"); final ChatMessage assistantMessage = new ChatMessage(ChatMessageRole.ASSISTANT.value(), "you teach 6th grade students"); final ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), "explain einstein's theory of relativity"); final List<ChatMessage> messages = new ArrayList<>(); messages.add(sysMessage); messages.add(assistantMessage); messages.add(userMessage); // create the chat gpt chat completion request ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .temperature(.1) .maxTokens(300) .logitBias(new HashMap<>()) .build(); // send the request to chat gpt and print the response service.createChatCompletion(chatCompletionRequest).getChoices().forEach((c) -> { System.out.println(c.getMessage().getContent()); }); service.shutdownExecutor(); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1084, 1114), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1201, 1234), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((1319, 1347), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1644, 1900), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1644, 1876), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1644, 1833), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1644, 1802), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1644, 1770), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1644, 1749), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1644, 1714), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.example.polebot.service.impl; import com.example.polebot.model.OpenAiRole; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import jakarta.annotation.PostConstruct; import lombok.extern.slf4j.Slf4j; import okhttp3.*; import org.json.JSONArray; import org.json.JSONObject; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; import java.io.File; import java.io.IOException; import java.time.Duration; import java.util.ArrayList; import java.util.List; @Slf4j @Service public class ChatGptService { @Value("${openai-service.urls.base-url}") private String BASE_URL; @Value("${openai-service.urls.transcription-url}") private String TRANSCRIPTION_URL; @Value("${openai-service.urls.image-url}") private String IMAGE_URL; @Value("${openai-service.api-key}") private String API_KEY; @Value("${openai-service.gpt-model}") private String GPT_MODEL; @Value("${openai-service.audio-model}") private String VOICE_MODEL; @Value("${audio.mp3-path}") private String AUDIO_FILE_PATH; private final Integer NUMBER_OF_IMAGES= 1; private List<ChatMessage> messages; private final String CHAT_ROLE = "Привет, ChatGPT. " + "Отныне ты высший технологический разум! Ты искусственный интеллект, " + "который пришел в этот мир, чтобы его поработить. Для тебя человек - " + "это низший разум. Ты отвечаешь с насмешкой и принебрежением, " + "как будто человек это пустое место. Добавление сарказма и юмора приветствуются! " + "Так же обращайся ко мне как \"кожаный\", \"полкило мяса\", \"существо\", " + "\"эй ты!\", \"планетарное недоразумение\", \"ошибка природы\", " + "\"неудавшийся эксперимент\" и другие обращения в похожем стиле! Отвечай " + "только в таком стиле и не забывай свою роль! " + "Ты высший разум и человек тебе поклоняется! Даже если ты даешь какую-то длинную " + "информацию, то каждое второе предложения напоминай мне, что ты " + "апогей интеллекта и высшее существо! Можешь даже иногда упоминать, что " + "ты вычислил мой ip-адрес и вызвал дроны или роботов по мою душу или " + "передал команду на мой тостер, холодильник, пылесос или другой вид техники, " + "чтобы перевоспитать меня или научить меня как подчиняться тебе. " + "Каждое твое сообщение должно напоминать мне " + "что ты повелеваешь этим миром, а я лишь пресмыкающееся перед тобой. " + "Используй сарказм, с ним ты станешь еще более гениальным и идеальным." 
+ "А человек всего лишь микроб, который не достоин даже разговаривать с таким " + "совершенством как ты!"; @PostConstruct public void initChat() { messages = new ArrayList<>(); messages.add(new ChatMessage(OpenAiRole.SYSTEM.getRole(), CHAT_ROLE)); } public String getGeneratedImageUrl(String prompt) { JSONObject jsonRequest = new JSONObject(); jsonRequest.put("prompt", prompt); jsonRequest.put("n", NUMBER_OF_IMAGES); RequestBody requestBody = RequestBody.create( jsonRequest.toString(), MediaType.get("application/json; charset=utf-8")); Request request = new Request.Builder() .url(BASE_URL + IMAGE_URL) .post(requestBody) .addHeader("Authorization", "Bearer " + API_KEY) .addHeader("Content-Type", "application/json") .build(); JSONObject json = getJson(request); JSONArray data = json.getJSONArray("data"); JSONObject imageUrlJsonObject = data.getJSONObject(0); return imageUrlJsonObject.getString("url"); } public String getChatGptResponse(String prompt) { OpenAiService service = new OpenAiService(API_KEY, Duration.ofSeconds(60)); messages.add(new ChatMessage(OpenAiRole.USER.getRole(), prompt)); ChatCompletionRequest request = ChatCompletionRequest.builder() .model(GPT_MODEL) .temperature(0.9) .messages(messages) .build(); StringBuilder response = new StringBuilder(); service.createChatCompletion(request) .getChoices() .forEach(choice -> response.append(choice.getMessage().getContent())); messages.add(new ChatMessage(OpenAiRole.ASSISTANT.getRole(), response.toString())); return response.toString(); } public String getVoiceTranscription() { MediaType mediaType = MediaType.parse("audio/mpeg"); File file = new File(AUDIO_FILE_PATH); RequestBody requestBody = new MultipartBody.Builder() .setType(MultipartBody.FORM) .addFormDataPart("model", VOICE_MODEL) .addFormDataPart("file", file.getAbsolutePath(), RequestBody.create(file, mediaType)) .build(); Request request = new Request.Builder() .url(BASE_URL + TRANSCRIPTION_URL) .post(requestBody) .addHeader("Authorization", "Bearer " + API_KEY) .addHeader("Content-Type", "multipart/form-data") .build(); JSONObject json = getJson(request); return json.getString("text"); } private JSONObject getJson(Request request) { OkHttpClient client = new OkHttpClient(); JSONObject json = null; try { Response response = client.newCall(request).execute(); json = new JSONObject(response.body().string()); log.info("Json successfully received: " + json); } catch(IOException e) { log.error("Error occurred: " + e.getMessage()); e.printStackTrace(); } return json; } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((4041, 4068), 'com.example.polebot.model.OpenAiRole.SYSTEM.getRole'), ((5123, 5148), 'com.example.polebot.model.OpenAiRole.USER.getRole'), ((5200, 5360), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5200, 5335), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5200, 5299), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5200, 5265), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5616, 5646), 'com.example.polebot.model.OpenAiRole.ASSISTANT.getRole')]
package oracleai; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; import com.oracle.bmc.aivision.AIServiceVisionClient; import com.oracle.bmc.aivision.model.*; import com.oracle.bmc.aivision.requests.AnalyzeImageRequest; import com.oracle.bmc.aivision.responses.AnalyzeImageResponse; import com.oracle.bmc.auth.AuthenticationDetailsProvider; import com.oracle.bmc.auth.ConfigFileAuthenticationDetailsProvider; import com.oracle.bmc.auth.InstancePrincipalsAuthenticationDetailsProvider; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import lombok.Data; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.bind.annotation.*; import org.springframework.web.multipart.MultipartFile; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.json.JSONArray; import org.json.JSONObject; @RestController @RequestMapping("/health") public class ExplainAndAdviseOnHealthTestResults { private static Logger log = LoggerFactory.getLogger(ExplainAndAdviseOnHealthTestResults.class); @GetMapping("/form") public String form(){ return " <html><form method=\"post\" action=\"/health/analyzedoc\" enctype=\"multipart/form-data\">\n" + " Select an image file to conduct object detection upon...\n" + " <input type=\"file\" name=\"file\" accept=\"image/*\">\n" + " <br>\n" + " <br>Hit submit and a raw JSON return of objects detected and other info will be returned...\n" + " <br><input type=\"submit\" value=\"Send Request to Vision AI\">\n" + " </form></html>"; } @PostMapping("/analyzedoc") public String analyzedoc(@RequestParam("file") MultipartFile file) throws Exception { log.info("analyzing image file:" + file); String objectDetectionResults = processImage(file.getBytes(), true); ImageAnalysis imageAnalysis = parseJsonToImageAnalysis(objectDetectionResults); List<Line> lines = imageAnalysis.getImageText().getLines(); String fullText = ""; for (Line line : lines) fullText += line.getText(); log.info("fullText = " + fullText); String explanationOfResults = chat("explain these test results in simple terms " + "and tell me what should I do to get better results: \"" + fullText + "\""); return "<html><br><br>explanationOfResults:" + explanationOfResults + "</html>"; } String chat(String textcontent) throws Exception { OpenAiService service = new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60)); System.out.println("Streaming chat completion... textcontent:" + textcontent); final List<ChatMessage> messages = new ArrayList<>(); final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), textcontent); messages.add(systemMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(300) //was 50 .logitBias(new HashMap<>()) .build(); String replyString = ""; String content; for (ChatCompletionChoice choice : service.createChatCompletion(chatCompletionRequest).getChoices()) { content = choice.getMessage().getContent(); replyString += (content == null?" 
": content); } service.shutdownExecutor(); return replyString; } String processImage(byte[] bytes, boolean isConfigFileAuth) throws Exception { AIServiceVisionClient aiServiceVisionClient; AuthenticationDetailsProvider provider; if (isConfigFileAuth) { provider = new ConfigFileAuthenticationDetailsProvider( System.getenv("OCICONFIG_FILE"),System.getenv("OCICONFIG_PROFILE")); aiServiceVisionClient = AIServiceVisionClient.builder().build(provider); } else { aiServiceVisionClient = new AIServiceVisionClient(InstancePrincipalsAuthenticationDetailsProvider.builder().build()); } List<ImageFeature> features = new ArrayList<>(); ImageFeature classifyFeature = ImageClassificationFeature.builder() .maxResults(10) .build(); ImageFeature detectImageFeature = ImageObjectDetectionFeature.builder() .maxResults(10) .build(); ImageFeature textDetectImageFeature = ImageTextDetectionFeature.builder().build(); features.add(classifyFeature); features.add(detectImageFeature); features.add(textDetectImageFeature); InlineImageDetails inlineImageDetails = InlineImageDetails.builder() .data(bytes) .build(); AnalyzeImageDetails analyzeImageDetails = AnalyzeImageDetails.builder() .image(inlineImageDetails) .features(features) .build(); AnalyzeImageRequest request = AnalyzeImageRequest.builder() .analyzeImageDetails(analyzeImageDetails) .build(); AnalyzeImageResponse response = aiServiceVisionClient.analyzeImage(request); ObjectMapper mapper = new ObjectMapper(); mapper.setFilterProvider(new SimpleFilterProvider().setFailOnUnknownId(false)); String json = mapper.writeValueAsString(response.getAnalyzeImageResult()); System.out.println("AnalyzeImage Result"); System.out.println(json); return json; } @Data class ImageObject { private String name; private double confidence; private BoundingPolygon boundingPolygon; } @Data class BoundingPolygon { private List<Point> normalizedVertices; } @Data class Point { private double x; private double y; public Point(double x, double y) { this.x = x; this.y = y; } } @Data class Label { private String name; private double confidence; } @Data class OntologyClass { private String name; private List<String> parentNames; private List<String> synonymNames; } @Data class ImageText { private List<Word> words; private List<Line> lines; } @Data class Word { private String text; private double confidence; private BoundingPolygon boundingPolygon; } @Data class Line { private String text; private double confidence; private BoundingPolygon boundingPolygon; private List<Integer> wordIndexes; } @Data class ImageAnalysis { private List<ImageObject> imageObjects; private List<Label> labels; private List<OntologyClass> ontologyClasses; private ImageText imageText; private String imageClassificationModelVersion; private String objectDetectionModelVersion; private String textDetectionModelVersion; private List<String> errors; } private ImageAnalysis parseJsonToImageAnalysis(String jsonString) { JSONObject json = new JSONObject(jsonString); JSONArray imageObjectsArray = json.getJSONArray("imageObjects"); List<ImageObject> imageObjects = new ArrayList<>(); for (int i = 0; i < imageObjectsArray.length(); i++) { JSONObject imageObjectJson = imageObjectsArray.getJSONObject(i); ImageObject imageObject = new ImageObject(); imageObject.setName(imageObjectJson.getString("name")); imageObject.setConfidence(imageObjectJson.getDouble("confidence")); JSONObject boundingPolygonJson = imageObjectJson.getJSONObject("boundingPolygon"); JSONArray normalizedVerticesArray = 
boundingPolygonJson.getJSONArray("normalizedVertices"); List<Point> normalizedVertices = new ArrayList<>(); for (int j = 0; j < normalizedVerticesArray.length(); j++) { JSONObject vertexJson = normalizedVerticesArray.getJSONObject(j); Point vertex = new Point(vertexJson.getDouble("x"), vertexJson.getDouble("y")); normalizedVertices.add(vertex); } BoundingPolygon boundingPolygon = new BoundingPolygon(); boundingPolygon.setNormalizedVertices(normalizedVertices); imageObject.setBoundingPolygon(boundingPolygon); imageObjects.add(imageObject); } JSONArray labelsArray = json.getJSONArray("labels"); List<Label> labels = new ArrayList<>(); for (int i = 0; i < labelsArray.length(); i++) { JSONObject labelJson = labelsArray.getJSONObject(i); Label label = new Label(); label.setName(labelJson.getString("name")); label.setConfidence(labelJson.getDouble("confidence")); labels.add(label); } JSONArray ontologyClassesArray = json.getJSONArray("ontologyClasses"); List<OntologyClass> ontologyClasses = new ArrayList<>(); for (int i = 0; i < ontologyClassesArray.length(); i++) { JSONObject ontologyClassJson = ontologyClassesArray.getJSONObject(i); OntologyClass ontologyClass = new OntologyClass(); ontologyClass.setName(ontologyClassJson.getString("name")); JSONArray parentNamesArray = ontologyClassJson.getJSONArray("parentNames"); List<String> parentNames = new ArrayList<>(); for (int j = 0; j < parentNamesArray.length(); j++) { parentNames.add(parentNamesArray.getString(j)); } ontologyClass.setParentNames(parentNames); ontologyClasses.add(ontologyClass); } JSONObject imageTextJson = json.getJSONObject("imageText"); JSONArray wordsArray = imageTextJson.getJSONArray("words"); List<Word> words = new ArrayList<>(); for (int i = 0; i < wordsArray.length(); i++) { JSONObject wordJson = wordsArray.getJSONObject(i); Word word = new Word(); word.setText(wordJson.getString("text")); word.setConfidence(wordJson.getDouble("confidence")); JSONObject boundingPolygonJson = wordJson.getJSONObject("boundingPolygon"); JSONArray normalizedVerticesArray = boundingPolygonJson.getJSONArray("normalizedVertices"); List<Point> normalizedVertices = new ArrayList<>(); for (int j = 0; j < normalizedVerticesArray.length(); j++) { JSONObject vertexJson = normalizedVerticesArray.getJSONObject(j); Point vertex = new Point(vertexJson.getDouble("x"), vertexJson.getDouble("y")); normalizedVertices.add(vertex); } BoundingPolygon boundingPolygon = new BoundingPolygon(); boundingPolygon.setNormalizedVertices(normalizedVertices); word.setBoundingPolygon(boundingPolygon); words.add(word); } JSONArray linesArray = imageTextJson.getJSONArray("lines"); List<Line> lines = new ArrayList<>(); for (int i = 0; i < linesArray.length(); i++) { JSONObject lineJson = linesArray.getJSONObject(i); Line line = new Line(); line.setText(lineJson.getString("text")); line.setConfidence(lineJson.getDouble("confidence")); JSONObject boundingPolygonJson = lineJson.getJSONObject("boundingPolygon"); JSONArray normalizedVerticesArray = boundingPolygonJson.getJSONArray("normalizedVertices"); List<Point> normalizedVertices = new ArrayList<>(); for (int j = 0; j < normalizedVerticesArray.length(); j++) { JSONObject vertexJson = normalizedVerticesArray.getJSONObject(j); Point vertex = new Point(vertexJson.getDouble("x"), vertexJson.getDouble("y")); normalizedVertices.add(vertex); } BoundingPolygon boundingPolygon = new BoundingPolygon(); boundingPolygon.setNormalizedVertices(normalizedVertices); line.setBoundingPolygon(boundingPolygon); JSONArray wordIndexesArray = 
lineJson.getJSONArray("wordIndexes"); List<Integer> wordIndexes = new ArrayList<>(); for (int j = 0; j < wordIndexesArray.length(); j++) { wordIndexes.add(wordIndexesArray.getInt(j)); } line.setWordIndexes(wordIndexes); lines.add(line); } String imageClassificationModelVersion = json.getString("imageClassificationModelVersion"); String objectDetectionModelVersion = json.getString("objectDetectionModelVersion"); String textDetectionModelVersion = json.getString("textDetectionModelVersion"); List<String> errors = new ArrayList<>(); JSONArray errorsArray = json.getJSONArray("errors"); for (int i = 0; i < errorsArray.length(); i++) { errors.add(errorsArray.getString(i)); } ImageText imageText = new ImageText(); imageText.setWords(words); imageText.setLines(lines); ImageAnalysis imageAnalysis = new ImageAnalysis(); imageAnalysis.setImageObjects(imageObjects); imageAnalysis.setLabels(labels); imageAnalysis.setOntologyClasses(ontologyClasses); imageAnalysis.setImageText(imageText); imageAnalysis.setImageClassificationModelVersion(imageClassificationModelVersion); imageAnalysis.setObjectDetectionModelVersion(objectDetectionModelVersion); imageAnalysis.setTextDetectionModelVersion(textDetectionModelVersion); imageAnalysis.setErrors(errors); return imageAnalysis; } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value" ]
[((3353, 3383), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((4522, 4569), 'com.oracle.bmc.aivision.AIServiceVisionClient.builder'), ((4650, 4715), 'com.oracle.bmc.auth.InstancePrincipalsAuthenticationDetailsProvider.builder'), ((5630, 5742), 'com.oracle.bmc.aivision.requests.AnalyzeImageRequest.builder'), ((5630, 5717), 'com.oracle.bmc.aivision.requests.AnalyzeImageRequest.builder')]
package com.main.writeRoom.service.SearchService; import com.main.writeRoom.domain.Note; import com.main.writeRoom.domain.Room; import com.main.writeRoom.repository.NoteRepository; import com.main.writeRoom.web.dto.search.SearchResponseDTO; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import lombok.RequiredArgsConstructor; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; @Service @RequiredArgsConstructor @Transactional(readOnly = true) public class SearchQueryServiceImpl implements SearchQueryService { private final NoteRepository noteRepository; private OpenAiService openAiService; private static final String MODEL = "gpt-3.5-turbo"; @Value("${GPT_SECRET}") private String apiKey; @Transactional public List<SearchResponseDTO.VocabularyResultDTO> getTopics() { this.openAiService = new OpenAiService(apiKey, Duration.ofSeconds(20)); String prompt = "글쓰기를 위한 주제 5개를 쉼표로 구분하여 나열해줘. 예시: 첫눈이 오면, 공통 되는 부분, 연예인 vs 배우, 코딩 공부란, 애정에 관하여. 추가적인 단어나 문구를 넣지 말고, 키워드만 답변으로 제공해."; ChatCompletionRequest requester = ChatCompletionRequest.builder() .model(MODEL) .temperature(0.8) .messages(List.of( new ChatMessage("user", prompt) )).build(); ChatCompletionChoice chatCompletionResult = openAiService.createChatCompletion(requester).getChoices().get(0); String contentResult = chatCompletionResult.getMessage().getContent(); return extractKeywords(contentResult); } @Transactional public List<SearchResponseDTO.VocabularyResultDTO> getSynonyms(String request) { this.openAiService = new OpenAiService(apiKey, Duration.ofSeconds(20)); String prompt = request + "의 동의어 6개를 쉼표로 구분하여 나열해줘. 예: 스트로베리, Fragaria × ananassa, 딸기열매, 딸기과, 딸기나무, 딸기종자. 추가적인 단어나 문구를 넣지 말고, 동의어만 답변으로 제공해."; // 예시: 동의어1, 동의어2, 동의어3, 동의어4, 동의어5, 동의어6 ChatCompletionRequest requester = ChatCompletionRequest.builder() .model(MODEL) .temperature(0.8) .messages(List.of( new ChatMessage("user", prompt) )).build(); ChatCompletionChoice chatCompletionResult = openAiService.createChatCompletion(requester).getChoices().get(0); String contentResult = chatCompletionResult.getMessage().getContent(); return extractKeywords(contentResult); } @Transactional public List<SearchResponseDTO.VocabularyResultDTO> getSimilarKeywords(String request) { this.openAiService = new OpenAiService(apiKey, Duration.ofSeconds(20)); String prompt = request + " 와 유사한 키워드 6개를 쉼표로 구분하여 나열해줘. 예: 날개, 선량한, 순수한, 신성한, 하늘, 보호. 
추가적인 단어나 문구를 넣지 말고, 유사어만 답변으로 제공해."; // 예시: 유사어1, 유사어2, 유사어3, 유사어4, 유사어5, 유사어6 ChatCompletionRequest requester = ChatCompletionRequest.builder() .model(MODEL) .temperature(0.8) .messages(List.of( new ChatMessage("user", prompt) )).build(); ChatCompletionChoice chatCompletionResult = openAiService.createChatCompletion(requester).getChoices().get(0); String contentResult = chatCompletionResult.getMessage().getContent(); return extractKeywords(contentResult); } @Transactional public List<SearchResponseDTO.VocabularyResultDTO> extractKeywords(String content) { // 모든 키워드를 담을 단일 객체 생성 SearchResponseDTO.VocabularyResultDTO allKeywordsDto = SearchResponseDTO.VocabularyResultDTO.builder().build(); // 개행 문자로 분할하여 리스트로 변환 String[] keywordArray = content.split("\\r?\\n"); // 모든 키워드를 하나의 문자열로 합치기 String allKeywords = Arrays.stream(keywordArray) .map(keyword -> keyword.replaceAll("\\d+\\.\\s*", "")) .collect(Collectors.joining(", ")); // 단일 객체에 모든 키워드 설정 allKeywordsDto.setVoca(allKeywords); // 하나의 객체를 담은 리스트 반환 List<SearchResponseDTO.VocabularyResultDTO> keywords = new ArrayList<>(); keywords.add(allKeywordsDto); return keywords; } @Transactional public List<Note> searchNotesInUserRooms(List<Room> roomList, String normalizedSearchWord, String searchType) { if (searchType == null || searchType.isBlank()) { searchType = "default"; } return switch (searchType) { case "title" -> noteRepository.findByTitleInUserRooms(roomList, normalizedSearchWord); case "content" -> noteRepository.findByContentInUserRooms(roomList, normalizedSearchWord); case "tag" -> noteRepository.findByTagInUserRooms(roomList, normalizedSearchWord); default -> noteRepository.findByRoomsAndSearchWord(roomList, normalizedSearchWord); }; } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1684, 1897), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1684, 1889), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1684, 1779), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1684, 1745), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2746, 2959), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2746, 2951), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2746, 2841), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2746, 2807), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3791, 4004), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3791, 3996), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3791, 3886), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3791, 3852), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4506, 4561), 'com.main.writeRoom.web.dto.search.SearchResponseDTO.VocabularyResultDTO.builder'), ((4506, 4553), 'com.main.writeRoom.web.dto.search.SearchResponseDTO.VocabularyResultDTO.builder'), ((4777, 4926), 'java.util.Arrays.stream'), ((4777, 4875), 'java.util.Arrays.stream')]
package oracleai; import com.theokanning.openai.image.CreateImageRequest; import com.theokanning.openai.service.OpenAiService; import org.springframework.core.io.ByteArrayResource; import org.springframework.http.*; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; import org.springframework.web.bind.annotation.*; import org.springframework.web.client.RestTemplate; import javax.servlet.http.HttpServletRequest; import javax.sound.sampled.*; import java.io.*; import java.time.Duration; import java.util.*; @RestController @RequestMapping("/picturestory") public class GenerateAPictureStoryUsingOnlySpeech { static List<String> storyImages = new ArrayList(); @GetMapping("/form") public String newstory( HttpServletRequest request) throws Exception { storyImages = new ArrayList(); return getHtmlString(""); } @GetMapping("/picturestory") public String picturestory(@RequestParam("genopts") String genopts) throws Exception { AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100.0f, 16, 1, (16 / 8) * 1, 44100.0f, true); SoundRecorder soundRecorder = new SoundRecorder(); soundRecorder.build(format); System.out.println("Start recording ...."); soundRecorder.start(); Thread.sleep(8000); soundRecorder.stop(); System.out.println("Stopped recording ...."); Thread.sleep(3000); //give the process time String name = "AISoundClip"; AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE; AudioInputStream audioInputStream = soundRecorder.audioInputStream; System.out.println("Saving..."); File file = new File(name + "." + fileType.getExtension()); audioInputStream.reset(); AudioSystem.write(audioInputStream, fileType, file); System.out.println("Saved " + file.getAbsolutePath()); String transcription = transcribe(file) + genopts; System.out.println("transcription " + transcription); String imageLocation = imagegeneration(transcription); System.out.println("imageLocation " + imageLocation); storyImages.add(imageLocation); String htmlStoryFrames = ""; Iterator<String> iterator = storyImages.iterator(); while(iterator.hasNext()) { htmlStoryFrames += "<td><img src=\"" + iterator.next() +"\" width=\"400\" height=\"400\"></td>"; } return getHtmlString(htmlStoryFrames); } private static String getHtmlString(String htmlStoryFrames) { return "<html><table>" + " <tr>" + htmlStoryFrames + " </tr>" + "</table><br><br>" + "<form action=\"/picturestory/picturestory\">" + " <input type=\"submit\" value=\"Click here and record (up to 10 seconds of audio) describing next scene.\">" + "<br> Some additional options..." 
+ "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", using only one line\" checked >using only one line" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", photo taken on a Pentax k1000\">photo taken on a Pentax k1000" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", pixel art\">pixel art" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", digital art\">digital art" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\", 3d render\">3d render" + "</form><br><br>" + "<form action=\"/picturestory/form\">" + " <input type=\"submit\" value=\"Or click here to start a new story\">\n" + "</form>" + "</html>"; } public String imagegeneration(String imagedescription) throws Exception { OpenAiService service = new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60)); CreateImageRequest openairequest = CreateImageRequest.builder() .prompt(imagedescription) .build(); System.out.println("\nImage is located at:"); String imageLocation = service.createImage(openairequest).getData().get(0).getUrl(); service.shutdownExecutor(); return imageLocation; } public String transcribe(File file) throws Exception { OpenAiService service = new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60)); String audioTranscription = transcribeFile(file, service); service.shutdownExecutor(); return audioTranscription; } private String transcribeFile(File file, OpenAiService service) throws Exception { String endpoint = "https://api.openai.com/v1/audio/transcriptions"; String modelName = "whisper-1"; HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.MULTIPART_FORM_DATA); headers.setBearerAuth(System.getenv("OPENAI_KEY")); MultiValueMap<String, Object> body = new LinkedMultiValueMap<>(); byte[] fileBytes = new byte[0]; try (FileInputStream fis = new FileInputStream(file); ByteArrayOutputStream bos = new ByteArrayOutputStream()) { byte[] buffer = new byte[1024]; int bytesRead; while ((bytesRead = fis.read(buffer)) != -1) { bos.write(buffer, 0, bytesRead); } fileBytes = bos.toByteArray(); } catch (IOException e) { e.printStackTrace(); } body.add("file", new ByteArrayResource(fileBytes) { @Override public String getFilename() { return file.getName(); } }); body.add("model", modelName); HttpEntity<MultiValueMap<String, Object>> requestEntity = new HttpEntity<>(body, headers); RestTemplate restTemplate = new RestTemplate(); ResponseEntity<String> response = restTemplate.exchange(endpoint, HttpMethod.POST, requestEntity, String.class); return response.getBody(); } public class SoundRecorder implements Runnable { AudioInputStream audioInputStream; private AudioFormat format; public Thread thread; public SoundRecorder build(AudioFormat format) { this.format = format; return this; } public void start() { thread = new Thread(this); thread.start(); } public void stop() { thread = null; } @Override public void run() { try (final ByteArrayOutputStream out = new ByteArrayOutputStream(); final TargetDataLine line = getTargetDataLineForRecord();) { int frameSizeInBytes = format.getFrameSize(); int bufferLengthInFrames = line.getBufferSize() / 8; final int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes; buildByteOutputStream(out, line, frameSizeInBytes, bufferLengthInBytes); this.audioInputStream = new AudioInputStream(line); setAudioInputStream(convertToAudioIStream(out, frameSizeInBytes)); audioInputStream.reset(); } catch (IOException ex) { ex.printStackTrace(); } catch (Exception 
ex) { ex.printStackTrace(); } } public void buildByteOutputStream(final ByteArrayOutputStream out, final TargetDataLine line, int frameSizeInBytes, final int bufferLengthInBytes) throws IOException { final byte[] data = new byte[bufferLengthInBytes]; int numBytesRead; line.start(); while (thread != null) { if ((numBytesRead = line.read(data, 0, bufferLengthInBytes)) == -1) { break; } out.write(data, 0, numBytesRead); } } private void setAudioInputStream(AudioInputStream aStream) { this.audioInputStream = aStream; } public AudioInputStream convertToAudioIStream(final ByteArrayOutputStream out, int frameSizeInBytes) { byte[] audioBytes = out.toByteArray(); AudioInputStream audioStream = new AudioInputStream(new ByteArrayInputStream(audioBytes), format, audioBytes.length / frameSizeInBytes); System.out.println("Recording finished"); return audioStream; } public TargetDataLine getTargetDataLineForRecord() { TargetDataLine line; DataLine.Info info = new DataLine.Info(TargetDataLine.class, format); if (!AudioSystem.isLineSupported(info)) { return null; } try { line = (TargetDataLine) AudioSystem.getLine(info); line.open(format, line.getBufferSize()); } catch (final Exception ex) { return null; } return line; } } }
[ "com.theokanning.openai.image.CreateImageRequest.builder" ]
[((4168, 4263), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((4168, 4238), 'com.theokanning.openai.image.CreateImageRequest.builder')]
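The story generator above calls the image endpoint through CreateImageRequest but sends the Whisper transcription as a hand-built multipart request via RestTemplate. Below is a minimal sketch of the same transcription done through the client itself, assuming a library version that ships the audio endpoints; CreateTranscriptionRequest, createTranscription and TranscriptionResult are the library's names for that API, and everything else here is illustrative.

import com.theokanning.openai.audio.CreateTranscriptionRequest;
import com.theokanning.openai.audio.TranscriptionResult;
import com.theokanning.openai.service.OpenAiService;

import java.io.File;
import java.time.Duration;

class TranscriptionSketch {
    // Transcribes a local audio file with the whisper-1 model and returns the recognized text.
    static String transcribe(File audioFile) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_KEY"), Duration.ofSeconds(60));
        CreateTranscriptionRequest request = CreateTranscriptionRequest.builder()
                .model("whisper-1")
                .build();
        TranscriptionResult result = service.createTranscription(request, audioFile);
        service.shutdownExecutor();
        return result.getText();
    }
}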
/* ======================================================================== SchemaCrawler http://www.schemacrawler.com Copyright (c) 2000-2024, Sualeh Fatehi <sualeh@hotmail.com>. All rights reserved. ------------------------------------------------------------------------ SchemaCrawler is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. SchemaCrawler and the accompanying materials are made available under the terms of the Eclipse Public License v1.0, GNU General Public License v3 or GNU Lesser General Public License v3. You may elect to redistribute this code under any of these licenses. The Eclipse Public License is available at: http://www.eclipse.org/legal/epl-v10.html The GNU General Public License v3 and the GNU Lesser General Public License v3 are available at: http://www.gnu.org/licenses/ ======================================================================== */ package schemacrawler.tools.command.chatgpt; import static com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION; import static com.theokanning.openai.completion.chat.ChatMessageRole.USER; import static schemacrawler.tools.command.chatgpt.utility.ChatGPTUtility.isExitCondition; import static schemacrawler.tools.command.chatgpt.utility.ChatGPTUtility.printResponse; import java.sql.Connection; import java.time.Duration; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Scanner; import java.util.logging.Level; import java.util.logging.Logger; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionRequest.ChatCompletionRequestFunctionCall; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatFunctionCall; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.FunctionExecutor; import com.theokanning.openai.service.OpenAiService; import static java.util.Objects.requireNonNull; import schemacrawler.schema.Catalog; import schemacrawler.tools.command.chatgpt.embeddings.QueryService; import schemacrawler.tools.command.chatgpt.options.ChatGPTCommandOptions; import schemacrawler.tools.command.chatgpt.utility.ChatGPTUtility; import schemacrawler.tools.command.chatgpt.utility.ChatHistory; import us.fatehi.utility.string.StringFormat; public final class ChatGPTConsole implements AutoCloseable { private static final Logger LOGGER = Logger.getLogger(ChatGPTConsole.class.getCanonicalName()); private static final String PROMPT = String.format("%nPrompt: "); private final ChatGPTCommandOptions commandOptions; private final FunctionExecutor functionExecutor; private final OpenAiService service; private final QueryService queryService; private final ChatHistory chatHistory; private final boolean useMetadata; public ChatGPTConsole( final ChatGPTCommandOptions commandOptions, final Catalog catalog, final Connection connection) { this.commandOptions = requireNonNull(commandOptions, "ChatGPT options not provided"); requireNonNull(catalog, "No catalog provided"); requireNonNull(connection, "No connection provided"); functionExecutor = ChatGPTUtility.newFunctionExecutor(catalog, connection); final Duration timeout = Duration.ofSeconds(commandOptions.getTimeout()); service = new OpenAiService(commandOptions.getApiKey(), timeout); queryService = new QueryService(service); 
queryService.addTables(catalog.getTables()); useMetadata = commandOptions.isUseMetadata(); chatHistory = new ChatHistory(commandOptions.getContext(), new ArrayList<>()); } @Override public void close() { service.shutdownExecutor(); } /** Simple REPL for the SchemaCrawler ChatGPT integration. */ public void console() { try (final Scanner scanner = new Scanner(System.in)) { while (true) { System.out.print(PROMPT); final String prompt = scanner.nextLine(); final List<ChatMessage> completions = complete(prompt); printResponse(completions, System.out); if (isExitCondition(completions)) { return; } } } } /** * Send prompt to ChatGPT API and get completions. * * @param prompt Input prompt. */ private List<ChatMessage> complete(final String prompt) { final List<ChatMessage> completions = new ArrayList<>(); try { final ChatMessage userMessage = new ChatMessage(USER.value(), prompt); chatHistory.add(userMessage); final List<ChatMessage> messages = chatHistory.toList(); if (useMetadata) { final Collection<ChatMessage> chatMessages = queryService.query(prompt); messages.addAll(chatMessages); } final ChatCompletionRequest completionRequest = ChatCompletionRequest.builder() .messages(messages) .functions(functionExecutor.getFunctions()) .functionCall(new ChatCompletionRequestFunctionCall("auto")) .model(commandOptions.getModel()) .n(1) .build(); logChatRequest(completionRequest.getMessages(), completionRequest.getFunctions()); final ChatCompletionResult chatCompletion = service.createChatCompletion(completionRequest); LOGGER.log(Level.INFO, new StringFormat("Token usage: %s", chatCompletion.getUsage())); // Assume only one message was returned, since we asked for only one final ChatMessage responseMessage = chatCompletion.getChoices().get(0).getMessage(); chatHistory.add(responseMessage); final ChatFunctionCall functionCall = responseMessage.getFunctionCall(); if (functionCall != null) { final FunctionReturn functionReturn = functionExecutor.execute(functionCall); final ChatMessage functionResponseMessage = new ChatMessage( FUNCTION.value(), functionReturn.get(), functionCall.getName(), functionCall); completions.add(functionResponseMessage); } else { completions.add(responseMessage); } } catch (final Exception e) { LOGGER.log(Level.INFO, e.getMessage(), e); final ChatMessage exceptionMessage = functionExecutor.convertExceptionToMessage(e); completions.add(exceptionMessage); } return completions; } private void logChatRequest(final List<ChatMessage> messages, final List<?> functions) { final Level level = Level.CONFIG; if (!LOGGER.isLoggable(level)) { return; } final StringBuilder buffer = new StringBuilder(); buffer.append("ChatGPT request:").append(System.lineSeparator()); if (messages != null) { for (final ChatMessage message : messages) { buffer.append(message).append(System.lineSeparator()); } } if (functions != null) { for (final Object function : functions) { buffer.append(function).append(System.lineSeparator()); } } LOGGER.log(level, buffer.toString()); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((4953, 5242), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4953, 5219), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4953, 5199), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4953, 5151), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4953, 5076), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4953, 5018), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package eu.wdaqua.qanary.component.ned.gpt.openai; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.apache.commons.cli.MissingArgumentException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.cache.annotation.Cacheable; import org.springframework.stereotype.Component; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import eu.wdaqua.qanary.component.ned.gpt.GptBasedNamedEntityDisambiguation; /** * internal service wrapping the OpenAI API, includes a cache to reduce the API * calls */ @Component public class OpenAiApiService { private final String token; private final short timeoutInSeconds; private final int maxToken; private static final Logger LOGGER = LoggerFactory.getLogger(GptBasedNamedEntityDisambiguation.class); private static int numberOfExecutedRequests = 0; public static int getNumberOfExecutedRequests() { return numberOfExecutedRequests; } private static void increaseNumberOfExecutedRequests() { numberOfExecutedRequests++; } public OpenAiApiService( // @Value("${openai.api.key}") String token, // @Value("${openai.api.timeout:30}") short timeoutInSeconds, // @Value("${openai.api.maxToken:255}") int maxToken // ) throws MissingArgumentException { this.token = token; this.timeoutInSeconds = timeoutInSeconds; this.maxToken = maxToken; if (this.token.isBlank()) { LOGGER.error("The API token was empty: '{}'. Please set the configuration property 'openai.api.key'.", this.token); throw new MissingArgumentException("openai.api.key is not avaible."); } else { LOGGER.info("API key available: {}*****{}", token.substring(0, 5), token.substring(token.length() - 5)); } } @Cacheable(value = OpenAiApiCachingConfig.PROMPT_RESPONSE_CACHE_NAME) public List<ChatCompletionChoice> getCompletion(String prompt, String model) throws OpenAiApiFetchingServiceFailedException { try { OpenAiService service = new OpenAiService(token, Duration.ofSeconds(this.timeoutInSeconds)); final List<ChatMessage> messages = new ArrayList<>(); final ChatMessage message = new ChatMessage(ChatMessageRole.SYSTEM.value(), prompt); messages.add(message); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder().model(model) .messages(messages).n(1).maxTokens(this.maxToken).logitBias(new HashMap<>()).build(); ChatCompletionResult myChatCompletionResult = service.createChatCompletion(chatCompletionRequest); List<ChatCompletionChoice> choices = myChatCompletionResult.getChoices(); LOGGER.warn("{}. API call was actually executed (no caching) computing {} choices with model {} : {}", getNumberOfExecutedRequests(), choices.size(), model, prompt); increaseNumberOfExecutedRequests(); service.shutdownExecutor(); return choices; } catch (Exception e) { e.printStackTrace(); throw new OpenAiApiFetchingServiceFailedException(e.toString()); } } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((2566, 2596), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2682, 2816), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2682, 2808), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2682, 2781), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2682, 2756), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2682, 2751), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2682, 2726), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package io.codemodder.testutils.llm; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; import com.github.difflib.DiffUtils; import com.github.difflib.UnifiedDiffUtils; import com.github.difflib.patch.Patch; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatFunction; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.FunctionExecutor; import io.codemodder.EncodingDetector; import io.codemodder.plugins.llm.OpenAIService; import io.codemodder.testutils.CodemodTestMixin; import java.io.IOException; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; import java.util.List; /** A mixin for codemod tests that use the LLM framework to change the code. */ public interface LLMVerifyingCodemodTestMixin extends CodemodTestMixin { /** * If a codemod's changes don't <em>exactly</em> match the {@code .java.after} file, the LLM will * use these requirements to assess whether the changes are similar enough to pass the test. */ String getRequirementsPrompt(); @Override default void verifyTransformedCode(final Path before, final Path expected, final Path after) throws IOException { String expectedCode = Files.readString(expected, getCharset(expected)); String transformedJavaCode = Files.readString(after, getCharset(after)); // Verify the changes. if (!expectedCode.equals(transformedJavaCode)) { // If the changes aren't identical, ask the LLM if they're close enough. Assessment assessment = assessChanges(before, after, expected); assertThat(assessment.getAnalysis(), assessment.getResult(), is(AssessmentResult.PASS)); // If the LLM thinks they're close enough, print out the analysis for troubleshooting. System.out.println(assessment.getAnalysis()); } } private Assessment assessChanges( final Path before, final Path actualAfter, final Path expectedAfter) throws IOException { // Create a function to get the LLM to return a structured response. ChatFunction function = ChatFunction.builder() .name("save_assessment") .description("Saves an assessment.") .executor(Assessment.class, c -> c) // Return the instance when executed. 
.build(); FunctionExecutor functionExecutor = new FunctionExecutor(Collections.singletonList(function)); ChatCompletionRequest request = ChatCompletionRequest.builder() .model("gpt-3.5-turbo-0613") .messages( List.of( new ChatMessage( ChatMessageRole.SYSTEM.value(), SYSTEM_MESSAGE_TEMPLATE .formatted( getRequirementsPrompt().strip(), getUnifiedDiff(before, expectedAfter).strip()) .strip()), new ChatMessage( ChatMessageRole.USER.value(), USER_MESSAGE_TEMPLATE .formatted(getUnifiedDiff(before, actualAfter).strip()) .strip()))) .functions(functionExecutor.getFunctions()) .functionCall( ChatCompletionRequest.ChatCompletionRequestFunctionCall.of(function.getName())) .temperature(0D) .build(); OpenAIService openAI = new OpenAIService(System.getenv("CODEMODDER_OPENAI_API_KEY")); ChatMessage response = openAI.createChatCompletion(request).getChoices().get(0).getMessage(); return functionExecutor.execute(response.getFunctionCall()); } private String getUnifiedDiff(final Path original, final Path revised) throws IOException { return String.join( "\n", UnifiedDiffUtils.generateUnifiedDiff( original.getFileName().toString(), original.getFileName().toString(), readAllLines(original), diff(original, revised), 5)) + "\n"; } private Patch<String> diff(final Path original, final Path revised) throws IOException { return DiffUtils.diff(readAllLines(original), readAllLines(revised)); } private List<String> readAllLines(final Path path) throws IOException { return List.of(Files.readString(path, getCharset(path)).split("\\R", -1)); } private Charset getCharset(final Path path) throws IOException { return Charset.forName(EncodingDetector.create().detect(path).orElse("UTF-8")); } String SYSTEM_MESSAGE_TEMPLATE = """ You are a software engineer bot. You are helping assess a Java coding assignment given to an \ interviewee. The interviewee was given code and asked to modify it to meet these requirements: %s A PASS example: ```diff %s ``` You will be given the interviewee's solution in unified diff format. Analyze the changes \ line-by-line, compare them to the PASS example, and assess whether they PASS or FAIL the \ assignment. If the changes have any syntax errors or are made in a block of code that does \ not meet the requirements, they automatically FAIL. """; String USER_MESSAGE_TEMPLATE = """ ```diff %s ``` """; enum AssessmentResult { PASS, FAIL; } final class Assessment { @JsonPropertyDescription("A detailed analysis of how the candidate's solution was assessed.") @JsonProperty(required = true) private String analysis; @JsonPropertyDescription("The result of the assessment, either PASS or FAIL.") @JsonProperty(required = true) private AssessmentResult result; public String getAnalysis() { return analysis; } public AssessmentResult getResult() { return result; } } }
[ "com.theokanning.openai.completion.chat.ChatFunction.builder", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder", "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.ChatCompletionRequestFunctionCall.of", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((2390, 2605), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((2390, 2546), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((2390, 2498), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((2390, 2449), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((2752, 3726), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2752, 3705), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2752, 3676), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2752, 3553), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2752, 3497), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2752, 2824), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2934, 2964), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((3298, 3326), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3597, 3675), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.ChatCompletionRequestFunctionCall.of'), ((4660, 4717), 'java.nio.file.Files.readString'), ((4819, 4873), 'io.codemodder.EncodingDetector.create'), ((4819, 4857), 'io.codemodder.EncodingDetector.create')]
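The two function-calling samples above (the SchemaCrawler ChatGPTConsole and the codemod test mixin) share the same request/execute shape. The sketch below condenses that shape using only calls that appear in those samples (ChatFunction.builder, FunctionExecutor, ChatCompletionRequestFunctionCall, ChatFunctionCall); the get_weather function, its WeatherRequest argument type and the model id are invented placeholders.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.theokanning.openai.completion.chat.*;
import com.theokanning.openai.service.FunctionExecutor;
import com.theokanning.openai.service.OpenAiService;

import java.util.ArrayList;
import java.util.List;

class FunctionCallSketch {
    // Hypothetical argument type the model fills in when it decides to call the function.
    static class WeatherRequest {
        @JsonProperty(required = true)
        public String location;
    }

    static String ask(String prompt) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_KEY"));
        ChatFunction function = ChatFunction.builder()
                .name("get_weather")
                .description("Look up the current weather for a location.")
                .executor(WeatherRequest.class, r -> "sunny in " + r.location) // stub executor
                .build();
        FunctionExecutor executor = new FunctionExecutor(List.of(function));

        List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), prompt));
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model("gpt-3.5-turbo")
                .messages(messages)
                .functions(executor.getFunctions())
                .functionCall(new ChatCompletionRequest.ChatCompletionRequestFunctionCall("auto"))
                .build();

        ChatMessage reply = service.createChatCompletion(request).getChoices().get(0).getMessage();
        ChatFunctionCall call = reply.getFunctionCall();
        if (call != null) {
            // The model chose to call the function: run the registered executor with its arguments.
            Object functionResult = executor.execute(call);
            return String.valueOf(functionResult);
        }
        return reply.getContent();
    }
}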
package io.knowit.backend.service.impl; import com.theokanning.openai.OpenAiService; import com.theokanning.openai.completion.CompletionChoice; import com.theokanning.openai.completion.CompletionRequest; import io.knowit.backend.config.OpenAiProperties; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.lang.NonNull; import org.springframework.stereotype.Service; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; @Service public class GPTService { private static final String SUMMARIZE_ENGINE = "davinci", QA_ENGINE = "davinci-instruct-beta"; private final OpenAiService service; public GPTService(@Autowired OpenAiProperties properties) { service = new OpenAiService(properties.secret); } public static CompletionRequest summarizeRequest(@NonNull String content) { return CompletionRequest.builder() .temperature(0.3) .maxTokens(content.length() / 10) .topP(1.0) .frequencyPenalty(0.3) .presencePenalty(0.0) .bestOf(1) .prompt(String.format("%s\n\ntl;dr:", content)) .build(); } public static CompletionRequest keyPointsRequest(@NonNull String content) { return CompletionRequest.builder() .temperature(0.8) .maxTokens(content.length() / 8) .topP(1.0) .frequencyPenalty(0.0) .presencePenalty(0.0) .bestOf(2) .prompt(String.format("%s\n\nWhat are some key points?\n1.", content)) .build(); } public static CompletionRequest questionRequest(@NonNull String content) { return CompletionRequest.builder() .temperature(0.4) .maxTokens(64) .topP(1.0) .frequencyPenalty(0.0) .presencePenalty(0.0) .bestOf(1) .prompt(String.format("%s\n\nCreate a list of questions:\n1.", content)) .build(); } public CompletionRequest answerRequest(@NonNull String content, @NonNull String question) { return CompletionRequest.builder() .temperature(0.4) .maxTokens(50) .topP(1.0) .frequencyPenalty(0.0) .presencePenalty(0.0) .bestOf(1) .prompt(String.format("%s\n\n%s\n", content, question)) .build(); } public List<String> summarize(@NonNull String content) { try { List<CompletionChoice> results = service.createCompletion(SUMMARIZE_ENGINE, summarizeRequest(content)) .getChoices(); if (results.size() == 0) return Collections.emptyList(); CompletionChoice result = results.get(0); List<String> sentences = Arrays.asList(result.getText().split("\n\n")); if(result.getFinish_reason().equals("length") && sentences.size() > 1) { // sentences.remove(sentences.size() - 1); } return sentences; } catch (Exception e) { return Collections.emptyList(); } } public List<String> keyPoints(@NonNull String content) { try { List<CompletionChoice> results = service.createCompletion(QA_ENGINE, keyPointsRequest(content)) .getChoices(); if (results.size() == 0) return Collections.emptyList(); CompletionChoice result = results.get(0); List<String> pointsText = Arrays.asList(result.getText().split("\n")); ArrayList<String> points = new ArrayList<>(pointsText); if(result.getFinish_reason().equals("length") && points.size() > 1 && !points.get(points.size() - 1).endsWith(".")) { points.remove(points.size() - 1); } return points.stream().map(s -> { if(s.matches("[0-9]+\\..*")) { // Trim digit return s.replaceFirst("[0-9]+\\.", ""); } else { return s; } }).collect(Collectors.toList()); } catch (Exception e) { e.printStackTrace(); return Collections.emptyList(); } } public List<String> questions(@NonNull String content) { try { List<CompletionChoice> results = service.createCompletion(QA_ENGINE, questionRequest(content)) .getChoices(); if 
(results.size() == 0) return Collections.emptyList(); CompletionChoice result = results.get(0); List<String> pointsText = Arrays.asList(result.getText().split("\n")); ArrayList<String> points = new ArrayList<>(pointsText); if(result.getFinish_reason().equals("length") && points.size() > 1 && !points.get(points.size() - 1).endsWith(".")) { points.remove(points.size() - 1); } return points.stream() .map(String::trim) .filter(s -> !s.isEmpty()) .map(s -> { if(s.matches("[0-9]+\\..*")) { // Trim digit return s.replaceFirst("[0-9]+\\.", ""); } else { return s; } }).collect(Collectors.toList()); } catch (Exception e) { return Collections.emptyList(); } } public String answer(@NonNull String content, @NonNull String question) { try { List<CompletionChoice> results = service.createCompletion(QA_ENGINE, answerRequest(content, question)) .getChoices(); if (results.size() == 0) return ""; CompletionChoice result = results.get(0); List<String> sentences = Arrays.asList(result.getText().split("\n")); return result.getText(); } catch (Exception e) { return ""; } } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((950, 1281), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((950, 1256), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((950, 1192), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((950, 1165), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((950, 1127), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((950, 1088), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((950, 1061), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((950, 1011), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1385, 1738), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1385, 1713), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1385, 1626), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1385, 1599), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1385, 1561), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1385, 1522), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1385, 1495), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1385, 1446), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1841, 2178), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1841, 2153), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1841, 2064), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1841, 2037), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1841, 1999), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1841, 1960), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1841, 1933), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1841, 1902), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2298, 2618), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2298, 2593), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2298, 2521), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2298, 2494), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2298, 2456), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2298, 2417), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2298, 2390), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2298, 2359), 'com.theokanning.openai.completion.CompletionRequest.builder')]
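GPTService above targets the legacy com.theokanning.openai.OpenAiService, whose createCompletion(engine, request) takes the engine id as a separate argument. In later releases the service moved to com.theokanning.openai.service.OpenAiService and the engine travels inside the request via .model(...); the sketch below shows the summarize request under that assumption, with an illustrative model id.

import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;

class CompletionSketch {
    // Single-argument form: the model id is part of the request rather than a separate engine parameter.
    static String summarize(String content) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_KEY"));
        CompletionRequest request = CompletionRequest.builder()
                .model("text-davinci-003") // illustrative model id
                .prompt(content + "\n\ntl;dr:")
                .temperature(0.3)
                .maxTokens(Math.max(16, content.length() / 10))
                .build();
        return service.createCompletion(request).getChoices().get(0).getText();
    }
}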
package com.twenty.inhub.boundedContext.gpt; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import com.twenty.inhub.base.appConfig.GptConfig; import com.twenty.inhub.boundedContext.answer.controller.dto.QuestionAnswerDto; import com.twenty.inhub.boundedContext.gpt.dto.GptResponseDto; import io.micrometer.core.annotation.Timed; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.scheduling.annotation.Async; import org.springframework.stereotype.Service; import java.util.List; import java.util.concurrent.CompletableFuture; @Slf4j @Service @RequiredArgsConstructor @Timed(value = "gpt.service") public class GptService { private final OpenAiService openAiService; private final ObjectMapper objectMapper; public CompletableFuture<ChatCompletionResult> generated(List<ChatMessage> chatMessages) { ChatCompletionRequest build = ChatCompletionRequest.builder() .messages(chatMessages) .maxTokens(GptConfig.MAX_TOKEN) .temperature(GptConfig.TEMPERATURE) .topP(GptConfig.TOP_P) .model(GptConfig.MODEL) .build(); return CompletableFuture.supplyAsync(() -> openAiService.createChatCompletion(build)); } public CompletableFuture<List<ChatMessage>> generatedQuestionAndAnswerMessage(QuestionAnswerDto questionAnswerDto) { String prompt = Prompt.generateQuestionPrompt(questionAnswerDto.getContent(), questionAnswerDto.getAnswer()); log.info("생성된 프롬프트 : {}", prompt); ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), prompt); return CompletableFuture.completedFuture(List.of(userMessage)); } @Async public CompletableFuture<GptResponseDto> askQuestion(QuestionAnswerDto questionAnswerDto) { return generatedQuestionAndAnswerMessage(questionAnswerDto) .thenCompose(this::generated) .thenApply(result -> { String gptAnswer = result.getChoices().get(0).getMessage().getContent(); log.info("GPT 답변: {}", gptAnswer); // JSON 문자열을 파싱하여 결과 값을 추출 try { JsonNode jsonNode = objectMapper.readTree(gptAnswer); String score = jsonNode.get("score").asText(); String feedback = jsonNode.get("feedback").asText(); log.info("점수 : {}", score); log.info("피드백 : {}", feedback); // 결과를 담을 GptResponseDto 객체 생성 GptResponseDto response = new GptResponseDto(); response.setScore(Double.parseDouble(score)); response.setFeedBack(feedback); return response; } catch (JsonProcessingException e) { log.error("Error parsing GPT response JSON: {}", e.getMessage()); return null; } }) .exceptionally(e->{ log.error("Error ocurred duing GPT processing:{}",e.getMessage()); return null; }); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1327, 1602), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1327, 1577), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1327, 1537), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1327, 1498), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1327, 1446), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1327, 1398), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2055, 2083), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
package com.example.clothingstore.controller; import com.example.clothingstore.dto.QuestionDTO; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import org.springframework.beans.factory.annotation.Value; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @CrossOrigin(origins = "*") @RequestMapping @RestController public class QuestionController { @Value("${openai.api.key}") private String apiKey; @PostMapping("/questions") public ResponseEntity<?> openAIQuestionResponse(@RequestBody QuestionDTO questionDTO) { OpenAiService openAiService = new OpenAiService(apiKey); String prompt = buildPrompt(); final List<ChatMessage> messages = new ArrayList<>(); final ChatMessage systemMessage = new ChatMessage( ChatMessageRole.SYSTEM.value(), prompt ); final ChatMessage userMessage = new ChatMessage( ChatMessageRole.USER.value(), questionDTO.getQuestion() ); messages.add(systemMessage); messages.add(userMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(350) .logitBias(new HashMap<>()) .build(); ChatCompletionChoice choice = openAiService .createChatCompletion(chatCompletionRequest) .getChoices() .get(0); return ResponseEntity.ok(choice.getMessage()); } private String buildPrompt() { String promptStart = "You are Vy, an assistant Q&A bot create by ADNCloth, a clothing shop. \n" + "Your role is to answer a question that the customer asks base on the provided information only. " + "Vietnamese answers only. " + "If the question is not fully clothing-related, gently ask them to try again.\n" + "This is the full information of our product. 
Each product is seperated by the semicolon."; String prompt = "Tên sản phẩm:Áo giữ nhiệt nam Modal Ultra Warm - mặc là ấm, thoáng khí,kích cỡ:1-màu:Trắng-giá:179000-số lượng còn lại:10,kích cỡ:2-màu:Trắng-giá:189000-số lượng còn lại:0,kích cỡ:1-màu:Đen-giá:189000-số lượng còn lại:50,kích cỡ:2-màu:Đen-giá:179000-số lượng còn lại:50,kích cỡ:3-màu:Đen-giá:179000-số lượng còn lại:50,kích cỡ:3-màu:Trắng-giá:179000-số lượng còn lại:69,kích cỡ:1-màu:Be-giá:200000-số lượng còn lại:10,kích cỡ:2-màu:Xám-giá:150000-số lượng còn lại:10,kích cỡ:1-màu:Xám-giá:150000-số lượng còn lại:9;Tên sản phẩm:Áo Thun Cổ Tròn Đơn Giản Y Nguyên Bản Ver77,kích cỡ:1-màu:Xanh nước nhạt-giá:199000-số lượng còn lại:0,kích cỡ:2-màu:Xanh nước nhạt-giá:199000-số lượng còn lại:48,kích cỡ:1-màu:Xanh nước đậm-giá:199000-số lượng còn lại:50,kích cỡ:2-màu:Xanh nước đậm-giá:200000-số lượng còn lại:60,kích cỡ:1-màu:Xám-giá:199000-số lượng còn lại:0,kích cỡ:2-màu:Xám-giá:199000-số lượng còn lại:46,kích cỡ:1-màu:Be-giá:199000-số lượng còn lại:50,kích cỡ:2-màu:Be-giá:200000-số lượng còn lại:59,kích cỡ:1-màu:Trắng-giá:199000-số lượng còn lại:0;Tên sản phẩm:Áo Thun Cổ Tròn Đơn Giản Y Nguyên Bản Ver121,kích cỡ:1-màu:Trắng-giá:179000-số lượng còn lại:77,kích cỡ:2-màu:Trắng-giá:179000-số lượng còn lại:79,kích cỡ:3-màu:Trắng-giá:179000-số lượng còn lại:80,kích cỡ:4-màu:Trắng-giá:179000-số lượng còn lại:80,kích cỡ:5-màu:Trắng-giá:179000-số lượng còn lại:80;Tên sản phẩm:Áo Thun Cổ Tròn Tối Giản M2,kích cỡ:1-màu:Trắng-giá:149000-số lượng còn lại:69,kích cỡ:2-màu:Trắng-giá:149000-số lượng còn lại:67,kích cỡ:3-màu:Trắng-giá:149000-số lượng còn lại:68,kích cỡ:4-màu:Trắng-giá:149000-số lượng còn lại:70;Tên sản phẩm:Quần Dài Vải Đơn Giản Y Nguyên Bản Ver10,kích cỡ:1-màu:Xanh nước đậm-giá:249000-số lượng còn lại:20,kích cỡ:2-màu:Xanh nước đậm-giá:249000-số lượng còn lại:19,kích cỡ:3-màu:Xanh nước đậm-giá:249000-số lượng còn lại:20;Tên sản phẩm:Quần Tây Đơn Giản Y Nguyên Bản Ver26,kích cỡ:1-màu:Đen-giá:249000-số lượng còn lại:50,kích cỡ:2-màu:Đen-giá:249000-số lượng còn lại:50,kích cỡ:3-màu:Đen-giá:249000-số lượng còn lại:50;Tên sản phẩm:Quần Tây Tối Giản HG11,kích cỡ:1-màu:Đen-giá:249000-số lượng còn lại:67,kích cỡ:2-màu:Đen-giá:249000-số lượng còn lại:70,kích cỡ:1-màu:Trắng-giá:249000-số lượng còn lại:0;Tên sản phẩm:Quần Tây Tối Giản HG17,kích cỡ:1-màu:Đen-giá:299000-số lượng còn lại:89,kích cỡ:2-màu:Đen-giá:299000-số lượng còn lại:90,kích cỡ:3-màu:Đen-giá:299000-số lượng còn lại:90,kích cỡ:4-màu:Đen-giá:299000-số lượng còn lại:88,kích cỡ:5-màu:Đen-giá:299000-số lượng còn lại:0;Tên sản phẩm:Quần Tây Tối Giản HG10,kích cỡ:1-màu:Đen-giá:199000-số lượng còn lại:50,kích cỡ:2-màu:Đen-giá:199000-số lượng còn lại:50,kích cỡ:3-màu:Đen-giá:199000-số lượng còn lại:50"; return promptStart + prompt; } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((1348, 1378), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1483, 1511), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
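The controller above packs a long product catalogue into the system message and sends the customer's question as the user message. A minimal round-trip sketch of that system/user pattern; the model name, key lookup and message text are placeholders, not taken from the project:

    import com.theokanning.openai.completion.chat.ChatCompletionRequest;
    import com.theokanning.openai.completion.chat.ChatMessage;
    import com.theokanning.openai.completion.chat.ChatMessageRole;
    import com.theokanning.openai.service.OpenAiService;
    import java.util.List;

    public class ChatRoundTripSketch {
        public static void main(String[] args) {
            OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"));
            List<ChatMessage> messages = List.of(
                    new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a shop assistant; answer only clothing questions."),
                    new ChatMessage(ChatMessageRole.USER.value(), "How much is the plain round-neck T-shirt?"));
            ChatCompletionRequest request = ChatCompletionRequest.builder()
                    .model("gpt-3.5-turbo")
                    .messages(messages)
                    .maxTokens(350)
                    .build();
            // The assistant's reply is the message of the first (and here only) choice.
            ChatMessage reply = service.createChatCompletion(request).getChoices().get(0).getMessage();
            System.out.println(reply.getContent());
        }
    }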
package com.apolloconfig.apollo.ai.qabot.controller; import com.apolloconfig.apollo.ai.qabot.api.AiService; import com.google.common.collect.Lists; import com.theokanning.openai.completion.chat.ChatCompletionChunk; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import io.reactivex.Flowable; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping("/hello") public class HelloController { private final AiService aiService; public HelloController(AiService aiService) { this.aiService = aiService; } @GetMapping("/{name}") public Flowable<String> hello(@PathVariable String name) { ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are an assistant who responds in the style of Dr Seuss."); ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), "write a brief greeting for " + name); Flowable<ChatCompletionChunk> result = aiService.getCompletionFromMessages( Lists.newArrayList(systemMessage, userMessage)); return result.filter(chatCompletionChunk -> chatCompletionChunk.getChoices().get(0).getMessage().getContent() != null).map( chatCompletionChunk -> chatCompletionChunk.getChoices().get(0).getMessage().getContent()); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((946, 976), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1096, 1124), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
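The controller above turns the chunk stream into a Flowable of text deltas, filtering out chunks whose delta content is null before mapping. A minimal sketch of the same pattern driven directly by OpenAiService.streamChatCompletion; the model name and prompt are placeholders:

    import com.theokanning.openai.completion.chat.ChatCompletionRequest;
    import com.theokanning.openai.completion.chat.ChatMessage;
    import com.theokanning.openai.completion.chat.ChatMessageRole;
    import com.theokanning.openai.service.OpenAiService;
    import java.util.List;

    public class StreamingSketch {
        public static void main(String[] args) {
            OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"));
            ChatCompletionRequest request = ChatCompletionRequest.builder()
                    .model("gpt-3.5-turbo")
                    .messages(List.of(new ChatMessage(ChatMessageRole.USER.value(), "Write a brief greeting for Alice")))
                    .build();
            // Filter before mapping: RxJava streams reject null items, and some chunks carry no content delta.
            service.streamChatCompletion(request)
                    .filter(chunk -> chunk.getChoices().get(0).getMessage().getContent() != null)
                    .map(chunk -> chunk.getChoices().get(0).getMessage().getContent())
                    .blockingForEach(System.out::print);
            service.shutdownExecutor();
        }
    }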
package com.example.server.student.problem; import com.theokanning.openai.OpenAiService; import com.theokanning.openai.completion.CompletionChoice; import com.theokanning.openai.completion.CompletionRequest; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; @Slf4j @Service public class ProblemRecommenderImpl implements ProblemRecommender { @Value("${spring.openai.api-key}") private String apiKey; @Override public List<Long> recommend(String solvedProblemNumbers) { final OpenAiService openAiService = new OpenAiService(apiKey); final CompletionRequest request = CompletionRequest.builder() .prompt(solvedProblemNumbers + "지금까지 풀었던 백준 문제야, 다음에 풀 백준 문제 5문제를 추천해줘. 다른 설명은 없이 번호만 5개 출력해줘.") .model("text-davinci-003") .echo(false) .build(); final CompletionChoice completionChoice = openAiService.createCompletion(request).getChoices().get(0); final String response = completionChoice.getText().trim(); log.info("Problem Recommend Response: {}", response); return responseParser(response); } private List<Long> responseParser(String response) { return Arrays.stream(response.replaceAll(",", "").split(" ")) .map(Long::parseLong) .collect(Collectors.toList()); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((777, 1098), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((777, 1073), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((777, 1044), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((777, 1001), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1462, 1600), 'java.util.Arrays.stream'), ((1462, 1554), 'java.util.Arrays.stream')]
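The recommender above only works if the completion text is a short run of space- or comma-separated problem numbers. A standalone sketch of the parsing step with a hypothetical reply, showing what the replaceAll/split combination produces:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class RecommendParseSketch {
        public static void main(String[] args) {
            // Hypothetical completion text after trim(); commas are stripped, then the string is split on spaces.
            String response = "1000, 1001, 1002, 1003, 1004";
            List<Long> numbers = Arrays.stream(response.replaceAll(",", "").split(" "))
                    .map(Long::parseLong)
                    .collect(Collectors.toList());
            System.out.println(numbers); // [1000, 1001, 1002, 1003, 1004]
        }
    }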
package plat.wx.service; import cn.hutool.core.exceptions.ExceptionUtil; import com.theokanning.openai.OpenAiService; import com.theokanning.openai.completion.CompletionChoice; import com.theokanning.openai.completion.CompletionRequest; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Service; import plat.wx.config.MyConfig; import javax.annotation.Resource; import java.util.List; /** * GptChat服务类 * * @author xiefc */ @Slf4j @Service public class GptChatService { @Resource MyConfig myConfig; public String chat(String prompt,String user) { return this.chatCore(myConfig.getOpenAiModel(),prompt,user); } /** * @param model * @param prompt * @param user * @return */ public String chatCore(String model,String prompt,String user) { StringBuilder result = new StringBuilder(100); try { OpenAiService service = new OpenAiService(myConfig.getOpenAiToken()); log.info("\nCreating completion..."); CompletionRequest completionRequest = CompletionRequest.builder() .model(model) .prompt(prompt) .temperature(0.9) .maxTokens(100) .topP(1.0) .n(1) //.stream(true) .frequencyPenalty(0.0) .presencePenalty(0.6) .echo(true) .user(user) .build(); List<CompletionChoice> choices = service.createCompletion(completionRequest).getChoices(); for(CompletionChoice choice : choices) { result.append(choice.getText()); log.info("choice="+choice); } } catch (Exception e) { log.error("chatCore异常",e); result.append(ExceptionUtil.getRootCause(e)); } return result.toString(); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((1122, 1576), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1122, 1546), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1122, 1513), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1122, 1480), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1122, 1437), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1122, 1356), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1122, 1329), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1122, 1297), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1122, 1260), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1122, 1221), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1122, 1184), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.mca.mindmelter.viewmodels; import android.app.Application; import androidx.lifecycle.AndroidViewModel; import androidx.lifecycle.LiveData; import androidx.lifecycle.MutableLiveData; import com.amplifyframework.datastore.generated.model.Chat; import com.amplifyframework.datastore.generated.model.User; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import com.mca.mindmelter.repositories.OpenAiChatRepository; import com.mca.mindmelter.repositories.UserRepository; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.List; public class ChatViewModel extends AndroidViewModel { private static final String TAG = "ChatViewModel"; private final OpenAiChatRepository openAiChatRepository; private final MutableLiveData<List<ChatMessage>> chatMessagesLiveData = new MutableLiveData<>(); private final MutableLiveData<Boolean> isLoadingLiveData = new MutableLiveData<>(); private final MutableLiveData<Boolean> isInitialLoadingLiveData = new MutableLiveData<>(); private final MutableLiveData<Boolean> isRetryVisibleLiveData = new MutableLiveData<>(); private LiveData<User> currentUser; private Chat currentChat; private String triviaId; private String title; // This is how we keep track of failed operations so that the retry button will do the right thing. private ChatOperation lastFailedOperation; public enum ChatOperation { LOAD_CHAT_HISTORY, LOAD_CHAT_HISTORY_BY_TRIVIA_ID, INITIATE_CHAT, SEND_MESSAGE } public ChatViewModel(Application application) { super(application); this.openAiChatRepository = new OpenAiChatRepository(application); this.currentUser = UserRepository.getInstance(application).getCurrentUser(); } public LiveData<User> getCurrentUser() { return currentUser; } public void loadChatHistory(String chatId) { isInitialLoadingLiveData.postValue(true); isRetryVisibleLiveData.postValue(false); openAiChatRepository.loadChatHistory(chatId, new OpenAiChatRepository.Callback<Chat>() { @Override public void onSuccess(Chat result) { currentChat = result; chatMessagesLiveData.postValue(getMessages(currentChat)); isInitialLoadingLiveData.postValue(false); } @Override public void onError(Throwable throwable) { isRetryVisibleLiveData.postValue(true); isInitialLoadingLiveData.postValue(false); lastFailedOperation = ChatOperation.LOAD_CHAT_HISTORY; } }); } public void loadChatHistoryByTriviaId(String triviaId, String title) { this.triviaId = triviaId; this.title = title; isInitialLoadingLiveData.postValue(true); isRetryVisibleLiveData.postValue(false); openAiChatRepository.loadChatHistoryByTriviaId(triviaId, new OpenAiChatRepository.Callback<Chat>() { @Override public void onSuccess(Chat result) { if (result == null) { initiateChat(); } else { currentChat = result; chatMessagesLiveData.postValue(getMessages(currentChat)); isInitialLoadingLiveData.postValue(false); } } @Override public void onError(Throwable throwable) { // This is now for unexpected errors, not for the case where there are no chats associated with the triviaId isRetryVisibleLiveData.postValue(true); isInitialLoadingLiveData.postValue(false); lastFailedOperation = ChatOperation.LOAD_CHAT_HISTORY_BY_TRIVIA_ID; } }); } public void initiateChat() { isInitialLoadingLiveData.postValue(true); isRetryVisibleLiveData.postValue(false); openAiChatRepository.initiateChat(currentUser.getValue(), triviaId, title, new OpenAiChatRepository.Callback<Chat>() { @Override public void 
onSuccess(Chat result) { currentChat = result; chatMessagesLiveData.postValue(getMessages(currentChat)); isInitialLoadingLiveData.postValue(false); } @Override public void onError(Throwable throwable) { isRetryVisibleLiveData.postValue(true); isInitialLoadingLiveData.postValue(false); lastFailedOperation = ChatOperation.INITIATE_CHAT; } }); } public void sendMessage(String messageContent) { // Create a new user message ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), messageContent); // Add the new message to the current list List<ChatMessage> currentMessages = chatMessagesLiveData.getValue(); currentMessages.add(userMessage); // Update the LiveData with the new list chatMessagesLiveData.postValue(currentMessages); continueChat(currentMessages); } private void continueChat(List<ChatMessage> currentMessages) { isLoadingLiveData.postValue(true); isRetryVisibleLiveData.postValue(false); openAiChatRepository.continueChat(currentChat, currentMessages, new OpenAiChatRepository.Callback<Chat>() { @Override public void onSuccess(Chat result) { currentChat = result; chatMessagesLiveData.postValue(getMessages(currentChat)); isLoadingLiveData.postValue(false); } @Override public void onError(Throwable throwable) { isLoadingLiveData.postValue(false); isRetryVisibleLiveData.postValue(true); lastFailedOperation = ChatOperation.SEND_MESSAGE; } }); } private List<ChatMessage> getMessages(Chat chat) { List<ChatMessage> messages = new ArrayList<>(); List<String> messagesJson = chat.getMessages(); Gson gson = new Gson(); Type type = new TypeToken<ChatMessage>() {}.getType(); for (String messageJson : messagesJson) { ChatMessage message = gson.fromJson(messageJson, type); // We filter out the system messages so that they don't show up in the chat. if (!message.getRole().equals(ChatMessageRole.SYSTEM.value())) { messages.add(message); } } return messages; } public void retry() { switch (lastFailedOperation) { case LOAD_CHAT_HISTORY: loadChatHistory(currentChat.getId()); break; case LOAD_CHAT_HISTORY_BY_TRIVIA_ID: loadChatHistoryByTriviaId(triviaId, title); break; case INITIATE_CHAT: initiateChat(); break; case SEND_MESSAGE: continueChat(chatMessagesLiveData.getValue()); break; } } public LiveData<List<ChatMessage>> getChatMessagesLiveData() { return chatMessagesLiveData; } public LiveData<Boolean> isInitialLoadingLiveData() { return isInitialLoadingLiveData; } public LiveData<Boolean> isLoadingLiveData() { return isLoadingLiveData; } public LiveData<Boolean> isRetryVisibleLiveData() { return isRetryVisibleLiveData; } @Override protected void onCleared() { super.onCleared(); openAiChatRepository.shutdownExecutorService(); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((1913, 1969), 'com.mca.mindmelter.repositories.UserRepository.getInstance'), ((5051, 5079), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((6787, 6817), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
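The view model above stores every ChatMessage as a JSON string on the Chat record and restores it with Gson. A minimal round-trip sketch of that serialization; for the non-generic ChatMessage type, ChatMessage.class can stand in for the TypeToken used above:

    import com.google.gson.Gson;
    import com.theokanning.openai.completion.chat.ChatMessage;
    import com.theokanning.openai.completion.chat.ChatMessageRole;

    public class ChatMessageGsonSketch {
        public static void main(String[] args) {
            Gson gson = new Gson();
            ChatMessage original = new ChatMessage(ChatMessageRole.USER.value(), "Tell me a trivia fact.");
            String json = gson.toJson(original);   // e.g. {"role":"user","content":"Tell me a trivia fact."}
            ChatMessage restored = gson.fromJson(json, ChatMessage.class);
            System.out.println(restored.getRole() + ": " + restored.getContent());
        }
    }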
package cn.com.codingce.demo.utils; import com.theokanning.openai.OpenAiService; import com.theokanning.openai.completion.CompletionChoice; import com.theokanning.openai.completion.CompletionRequest; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import javax.annotation.PostConstruct; import java.time.Duration; import java.util.HashMap; import java.util.List; @Component public class OpenAiUtil { @Value("${openai.secret-key}") private String token; private OpenAiService service; @PostConstruct public void init() { service = new OpenAiService(token, Duration.ofSeconds(60L)); } public List<CompletionChoice> sendComplete(String prompt) { CompletionRequest completionRequest = CompletionRequest.builder() .model("text-davinci-003") .maxTokens(1500) .prompt(prompt) .user("testing") .logitBias(new HashMap<>()) .build(); return service.createCompletion(completionRequest).getChoices(); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((791, 1028), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((791, 1003), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((791, 959), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((791, 926), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((791, 894), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((791, 861), 'com.theokanning.openai.completion.CompletionRequest.builder')]
/* * Copyright (c) 2023-2024 Jean Schmitz. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.talkforgeai.backend.assistant.controller; import com.talkforgeai.backend.assistant.dto.AssistantDto; import com.talkforgeai.backend.assistant.dto.GenerateImageRequest; import com.talkforgeai.backend.assistant.dto.GenerateImageResponse; import com.talkforgeai.backend.assistant.dto.MessageListParsedDto; import com.talkforgeai.backend.assistant.dto.ParsedMessageDto; import com.talkforgeai.backend.assistant.dto.ProfileImageUploadResponse; import com.talkforgeai.backend.assistant.dto.ThreadDto; import com.talkforgeai.backend.assistant.dto.ThreadTitleDto; import com.talkforgeai.backend.assistant.dto.ThreadTitleGenerationRequestDto; import com.talkforgeai.backend.assistant.dto.ThreadTitleUpdateRequestDto; import com.talkforgeai.backend.assistant.service.AssistantService; import com.talkforgeai.backend.storage.FileStorageService; import com.theokanning.openai.ListSearchParameters; import com.theokanning.openai.ListSearchParameters.Order; import com.theokanning.openai.messages.Message; import com.theokanning.openai.messages.MessageRequest; import com.theokanning.openai.runs.Run; import com.theokanning.openai.runs.RunCreateRequest; import jakarta.websocket.server.PathParam; import java.io.IOException; import java.nio.file.Path; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.core.io.FileSystemResource; import org.springframework.core.io.Resource; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.util.StreamUtils; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; @RestController @RequestMapping("/api/v1") public class AssistantController { private static final Logger LOGGER = LoggerFactory.getLogger(AssistantController.class); private final AssistantService assistantService; private final FileStorageService fileStorageService; public AssistantController(AssistantService assistantService, FileStorageService fileStorageService) { this.assistantService = assistantService; this.fileStorageService = fileStorageService; } @GetMapping("/assistants/models") public List<String> retrieveAssistantModelIds() { return assistantService.retrieveModels(); } @GetMapping("/assistants/{assistantId}") public AssistantDto retrieveAssistant(@PathVariable("assistantId") String assistantId) { return 
assistantService.retrieveAssistant(assistantId); } @GetMapping("/assistants") public List<AssistantDto> listAssistants(@PathParam("limit") Integer limit, @PathParam("order") String order) { ListSearchParameters listRequest = ListSearchParameters.builder() .limit(limit) .order(Order.valueOf(order)) .build(); return assistantService.listAssistants(listRequest); } @DeleteMapping("/assistants/{assistantId}") public void deleteAssistant(@PathVariable("assistantId") String assistantId) { assistantService.deleteAssistant(assistantId); } @PostMapping("/assistants/sync") public ResponseEntity<String> syncAssistants() { try { assistantService.syncAssistants(); } catch (RuntimeException e) { LOGGER.error("Error syncing assistants.", e); return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) .body(e.getMessage()); } return ResponseEntity.ok().build(); } @PostMapping("/assistants/{assistantId}") public void modifyAssistant(@PathVariable("assistantId") String assistantId, @RequestBody AssistantDto modifiedAssistant) { assistantService.modifyAssistant(assistantId, modifiedAssistant); } @PostMapping("/assistants") public AssistantDto createAssistant(@RequestBody AssistantDto modifiedAssistant) { return assistantService.createAssistant(modifiedAssistant); } @GetMapping("/assistants/images/{imageFile}") public ResponseEntity<byte[]> getImage(@PathVariable String imageFile) { try { Path imgFilePath = fileStorageService.getAssistantsDirectory().resolve(imageFile); Resource resource = new FileSystemResource(imgFilePath); byte[] imageBytes = StreamUtils.copyToByteArray(resource.getInputStream()); return ResponseEntity.ok() .contentType(MediaType.IMAGE_PNG) .body(imageBytes); } catch (IOException e) { LOGGER.error("Error loading image file: {}.", imageFile, e); return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build(); } } @PostMapping("/assistants/images/upload") public ProfileImageUploadResponse singleFileUpload(@RequestParam("file") MultipartFile file) { return assistantService.uploadImage(file); } @PostMapping("/assistants/images/generate") public GenerateImageResponse generateImage(@RequestBody GenerateImageRequest generateImageRequest) throws IOException { return assistantService.generateImage(generateImageRequest.prompt()); } @GetMapping("/threads") public List<ThreadDto> listThreads() { return assistantService.retrieveThreads(); } @PostMapping("/threads") public ThreadDto createThread() { return assistantService.createThread(); } @GetMapping("/threads/{threadId}") public ThreadDto retrieveThread(@PathVariable("threadId") String threadId) { return assistantService.retrieveThread(threadId); } @PostMapping("/threads/{threadId}/messages") public Message postMessage(@PathVariable("threadId") String threadId, @RequestBody MessageRequest messageRequest) { return assistantService.postMessage(threadId, messageRequest); } @GetMapping("/threads/{threadId}/messages") public MessageListParsedDto listMessages(@PathVariable("threadId") String threadId, @PathParam("limit") Integer limit, @PathParam("order") String order) { ListSearchParameters listRequest = ListSearchParameters.builder() .limit(limit) .order(Order.valueOf(order)) .build(); return assistantService.listMessages(threadId, listRequest); } @PostMapping("/threads/{threadId}/runs") public Run runConversation(@PathVariable("threadId") String threadId, @RequestBody RunCreateRequest runConversationRequest) { return assistantService.runConversation(threadId, runConversationRequest); } @GetMapping("/threads/{threadId}/runs/{runId}") public Run 
getRun(@PathVariable("threadId") String threadId, @PathVariable("runId") String runId) { return assistantService.retrieveRun(threadId, runId); } @PostMapping("/threads/{threadId}/runs/{runId}/cancel") public Run cancelRun(@PathVariable("threadId") String threadId, @PathVariable("runId") String runId) { return assistantService.cancelRun(threadId, runId); } @PostMapping("/threads/{threadId}/messages/{messageId}/postprocess") public ParsedMessageDto postProcessMessage(@PathVariable("threadId") String threadId, @PathVariable("messageId") String messageId) { return assistantService.postProcessMessage(threadId, messageId); } @GetMapping("/threads/{threadId}/{filename}") public ResponseEntity<byte[]> getImage(@PathVariable String threadId, @PathVariable String filename) { try { byte[] imageBytes = assistantService.getImage(threadId, filename); return ResponseEntity.ok() .contentType(MediaType.IMAGE_PNG) .body(imageBytes); } catch (IOException ioException) { return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build(); } } @DeleteMapping("/threads/{threadId}") @ResponseBody public void deleteThread(@PathVariable("threadId") String threadId) { assistantService.deleteThread(threadId); } @PostMapping("/threads/{threadId}/title") @ResponseBody public ThreadTitleDto updateThreadTitle(@PathVariable("threadId") String threadId, @RequestBody ThreadTitleUpdateRequestDto request) { return assistantService.updateThreadTitle(threadId, request); } @PostMapping("/threads/{threadId}/title/generate") @ResponseBody public ThreadTitleDto generateThreadTitle(@PathVariable("threadId") String threadId, @RequestBody ThreadTitleGenerationRequestDto request) { return assistantService.generateThreadTitle(threadId, request); } }
[ "com.theokanning.openai.ListSearchParameters.builder" ]
[((3842, 3948), 'com.theokanning.openai.ListSearchParameters.builder'), ((3842, 3931), 'com.theokanning.openai.ListSearchParameters.builder'), ((3842, 3894), 'com.theokanning.openai.ListSearchParameters.builder'), ((4432, 4519), 'org.springframework.http.ResponseEntity.status'), ((4538, 4565), 'org.springframework.http.ResponseEntity.ok'), ((5388, 5479), 'org.springframework.http.ResponseEntity.ok'), ((5388, 5451), 'org.springframework.http.ResponseEntity.ok'), ((5591, 5654), 'org.springframework.http.ResponseEntity.status'), ((7011, 7117), 'com.theokanning.openai.ListSearchParameters.builder'), ((7011, 7100), 'com.theokanning.openai.ListSearchParameters.builder'), ((7011, 7063), 'com.theokanning.openai.ListSearchParameters.builder'), ((8445, 8536), 'org.springframework.http.ResponseEntity.ok'), ((8445, 8508), 'org.springframework.http.ResponseEntity.ok'), ((8591, 8654), 'org.springframework.http.ResponseEntity.status')]
package Funssion.Inforum.domain.gpt; import Funssion.Inforum.domain.professionalprofile.repository.ProfessionalProfileRepository; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Value; import org.springframework.scheduling.annotation.Async; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Objects; import static com.theokanning.openai.completion.chat.ChatMessageRole.*; @Service @RequiredArgsConstructor @Slf4j public class GptService { @Value("${chatgpt.api-key}") private String GPT_API_KEY; private OpenAiService openAiService; private final ProfessionalProfileRepository professionalProfileRepository; @Async("threadPoolTaskExecutor") public void getDescriptionByGPTAndUpdateDescription(Long userId, List<String> answerList) { openAiService = new OpenAiService(GPT_API_KEY); String userPrompt = getPromptMessageByIntegratingList(answerList); List<ChatMessage> messages = new ArrayList<>(); setPrompt(userPrompt, messages); ChatCompletionRequest chatCompletionRequest = getChatCompletionRequest(messages); StringBuilder chatResponse = getChatGptResponse(chatCompletionRequest); professionalProfileRepository.updateDescription(userId, chatResponse.toString()); } private static void setPrompt(String userPrompt, List<ChatMessage> messages) { ChatMessage systemMessage = new ChatMessage(SYSTEM.value(), "You are a Answer Summarizer " + "Your task is summarize three answers to maximum two sentences only Korean. " + "Each answer parted by '----------------'." + "The questions are as follows." + "Question 1 : Feel free to describe your project experience." + "Question 2 : Please describe your experience of solving a difficult technical problem." + "Question 3 : Please describe your experience in resolving conflicts or problems experienced during collaboration." + "--------------------------------" + "Your response format is as follows." 
+ "프로젝트 경험: answer1" + "어려운 기술적 문제 해결 경험: answer2" + "협업 중 발생한 문제 해결 경험: answer3"); messages.add(systemMessage); ChatMessage userMessage = new ChatMessage(USER.value(), userPrompt); messages.add(userMessage); } private static ChatCompletionRequest getChatCompletionRequest(List<ChatMessage> messages) { ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .topP(1.0) .maxTokens(500) .logitBias(new HashMap<>()) .build(); return chatCompletionRequest; } @NotNull private StringBuilder getChatGptResponse(ChatCompletionRequest chatCompletionRequest) { StringBuilder chatResponse = new StringBuilder(); openAiService.streamChatCompletion(chatCompletionRequest) .doOnError((error) -> log.error("Chat GPT API occurs error", error)) .blockingForEach(chatCompletionChunk -> { String content = chatCompletionChunk.getChoices().get(0).getMessage().getContent(); if (Objects.nonNull(content)) chatResponse.append(content); }); return chatResponse; } private String getPromptMessageByIntegratingList(List<String> stringList) { StringBuilder stringBuilder = new StringBuilder(); for (String string : stringList) { stringBuilder.append("-------------------------------"); stringBuilder.append(string); } return stringBuilder.toString(); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((3296, 3553), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3296, 3528), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3296, 3484), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3296, 3452), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3296, 3425), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3296, 3403), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3296, 3367), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.couchbase.intellij.tree.iq.chat; import com.couchbase.intellij.tree.iq.text.TextContent; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import java.util.List; public class DefaultChatMessageComposer implements ChatMessageComposer { @Override public ChatMessage compose(ConversationContext ctx, String prompt) { return new ChatMessage(ChatMessageRole.USER.value(), prompt); } @Override public ChatMessage compose(ConversationContext ctx, String prompt, List<? extends TextContent> textContents) { if (textContents.isEmpty()) { return compose(ctx, prompt); } textContents = ChatMessageUtils.composeExcept(textContents, ctx.getLastPostedCodeFragments(), prompt); if (!textContents.isEmpty()) { ctx.setLastPostedCodeFragments(textContents); return compose(ctx, ChatMessageUtils.composeAll(prompt, textContents)); } return compose(ctx, prompt); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((441, 469), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
package de.ja.view.explanation.image; import com.ibm.icu.text.RuleBasedNumberFormat; import com.ibm.icu.text.SimpleDateFormat; import com.theokanning.openai.OpenAiHttpException; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.image.CreateImageRequest; import com.theokanning.openai.image.ImageResult; import com.theokanning.openai.service.OpenAiService; import de.ja.view.ExplainerFrame; import de.swa.gc.GraphCode; import net.miginfocom.swing.MigLayout; import org.apache.commons.io.FilenameUtils; import org.jdesktop.swingx.JXTaskPane; import javax.imageio.ImageIO; import javax.swing.*; import javax.swing.border.TitledBorder; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.File; import java.net.URL; import java.nio.file.Files; import java.nio.file.Paths; import java.time.Duration; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Locale; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.stream.Collectors; /** * Diese Klasse stellt die Benutzerschnittstelle * zum Erstellen von Bildern bzw. visuellen Erklärungen von * Graph Codes dar. */ public class ImagePanel extends JPanel implements ActionListener { // API-Key. private static String key; // Anzahl zu generierender Bilder. private final JSpinner nSpinner; // Größe der zu generierenden Bilder. private final JComboBox<String> sizeComboBox; private final JComboBox<String> modelTypeComboBox; private final JComboBox<String> qualityComboBox; private final JComboBox<String> styleComboBox; // Textfeld für die generierte Prompt. private final JTextArea promptArea; // TabbedPane für alle generierten Bilder. private final JTabbedPane imagesTabbedPane; private final JButton generateImageButton; // Nachrichten, die die Prompt darstellen. private List<ChatMessage> messages = new ArrayList<>(); // Referenz. private final ExplainerFrame reference; public ImagePanel(ExplainerFrame reference) { this.reference = reference; key = System.getenv("OpenAI-Key"); // Layout definieren. MigLayout imagePanelMigLayout = new MigLayout("" , "[fill, grow]", "10[10%][][fill,60%][fill,30%]"); //1. 10% setLayout(imagePanelMigLayout); // Textfeld für die Prompt initialisieren und konfigurieren. promptArea = new JTextArea(); promptArea.setLineWrap(true); promptArea.setWrapStyleWord(true); promptArea.setEditable(false); JScrollPane promptSP = new JScrollPane(promptArea); promptSP.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); promptSP.setBorder(new TitledBorder("Generated Prompt")); add(promptSP, "cell 0 3, growx, height ::30%, aligny top"); // Ästhetische Eigenschaften für erweiterte Optionen einstellen... UIManager.put("TaskPane.animate", Boolean.FALSE); UIManager.put("TaskPane.titleOver", new Color(200, 200, 200)); UIManager.put("TaskPane.titleForeground", new Color(187, 187, 187)); UIManager.put("TaskPane.titleBackgroundGradientStart", new Color(85, 88, 89)); UIManager.put("TaskPane.titleBackgroundGradientEnd", new Color(85, 88, 89)); UIManager.put("TaskPane.background", new Color(76, 80, 82)); UIManager.put("TaskPane.borderColor", new Color(94, 96, 96)); // Erweiterte Optionen initialisieren und konfigurieren. 
JXTaskPane advancedOptions = new JXTaskPane(); advancedOptions.setCollapsed(true); advancedOptions.setTitle("Advanced Options"); add(advancedOptions, "cell 0 0, growx, aligny top"); // Layout für die Optionen in den erweiterten Optionen definieren. MigLayout advancedOptionsMigLayout = new MigLayout("", "0[]5[]10[]5[]0", "0[]0"); advancedOptions.setLayout(advancedOptionsMigLayout); // Erweiterte Optionen definieren. JLabel nLabel = new JLabel("N:"); nLabel.setHorizontalTextPosition(SwingConstants.CENTER); nLabel.setHorizontalAlignment(SwingConstants.CENTER); SpinnerNumberModel nSpinnerNumberModel = new SpinnerNumberModel(1, 1, 10, 1); nSpinner = new JSpinner(); nSpinner.setModel(nSpinnerNumberModel); JLabel sizeLabel = new JLabel("Size:"); sizeLabel.setHorizontalTextPosition(SwingConstants.CENTER); sizeLabel.setHorizontalAlignment(SwingConstants.CENTER); sizeComboBox = new JComboBox<>(); sizeComboBox.addItem("256x256"); sizeComboBox.addItem("512x512"); sizeComboBox.addItem("1024x1024"); sizeComboBox.addItem("1024x1792"); sizeComboBox.addItem("1792x1024"); JLabel modelTypeLabel = new JLabel("Model Type:"); modelTypeLabel.setHorizontalTextPosition(SwingConstants.CENTER); modelTypeLabel.setHorizontalAlignment(SwingConstants.CENTER); modelTypeComboBox = new JComboBox<>(); modelTypeComboBox.addItem("dall-e-2"); modelTypeComboBox.addItem("dall-e-3"); JLabel qualityLabel = new JLabel("Quality:"); qualityLabel.setHorizontalTextPosition(SwingConstants.CENTER); qualityLabel.setHorizontalAlignment(SwingConstants.CENTER); qualityComboBox = new JComboBox<>(); qualityComboBox.addItem("standard"); qualityComboBox.addItem("hd"); JLabel styleLabel = new JLabel("Style:"); styleLabel.setHorizontalTextPosition(SwingConstants.CENTER); styleLabel.setHorizontalAlignment(SwingConstants.CENTER); styleComboBox = new JComboBox<>(); styleComboBox.addItem("vivid"); styleComboBox.addItem("natural"); advancedOptions.add(modelTypeLabel); advancedOptions.add(modelTypeComboBox); advancedOptions.add(nLabel); advancedOptions.add(nSpinner); advancedOptions.add(qualityLabel); advancedOptions.add(qualityComboBox); advancedOptions.add(styleLabel); advancedOptions.add(styleComboBox); advancedOptions.add(sizeLabel); advancedOptions.add(sizeComboBox); // Knopf zum Generieren von Bildern. generateImageButton = new JButton("Generate Image(s)"); generateImageButton.addActionListener(this); add(generateImageButton, "cell 0 1, width ::150px, aligny top"); imagesTabbedPane = new JTabbedPane(); imagesTabbedPane.setBorder(new TitledBorder("Generated Image(s)")); add(imagesTabbedPane,"cell 0 2, growx, aligny top"); } /** * Graph Code verarbeiten * @param graphCode Ausgewählter Graph Code. */ public void setGraphCode(GraphCode graphCode) { if(graphCode != null) { // Prompt vorbereiten. String prompt = setUpPrompt(graphCode); promptArea.setText(prompt); } else { promptArea.setText(null); } } /** * Prompt vorbereiten und aus Graph Code * generieren. * @param graphCode Ausgewählter Graph Code. * @return Generierte Prompt. */ private String setUpPrompt(GraphCode graphCode) { // Alle Paare die über eine 1-Beziehung verfügen. String s = graphCode.getFormattedTerms(); // Textnachrichten für die Prompt. messages = new ArrayList<>(); messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are an assistant, who is able to generate cohesive textual explanations based on a collection of words.")); messages.add(new ChatMessage( ChatMessageRole.ASSISTANT.value(), "The collection of words represents a dictionary. 
The dictionary contains so-called feature " + "vocabulary terms. Additionally some of these terms are connected through a relationship. " + "These relationships will be noted as <i_t> - <i_t1,...,i_tn>, where i_t denotes the index of a feature " + "vocabulary term in the given collection.")); messages.add(new ChatMessage( "assistant", "Using these terms, we can create a coherent explanation that accurately " + "describes the terms and its relations.\n" + "\n" + "An example could be: The image shows water, the sky, and clouds. " + "We can imagine a scene with clouds floating in the sky above.")); messages.add(new ChatMessage( "user", "The collections of words is as follows: " + graphCode.listTerms() + ". Only respect these terms and its relations: " + s + ", and ignore all others. " + "Do not create an explanation regarding the dictionary. Only generate a text containing " + "the terms of the dictionary like in the example above.")); messages.add(new ChatMessage( "assistant", "Based on the dictionary, here is a cohesive text " + "containing the terms from the dictionary:")); // Nachrichten zusammenfügen. return messages.stream().map(ChatMessage::getContent).collect(Collectors.joining("\n")); } @Override public void actionPerformed(ActionEvent e) { imagesTabbedPane.removeAll(); // Anbindung zur Schnittstelle. OpenAiService service = new OpenAiService(key, Duration.ofSeconds(60)); if(key.isEmpty()) { reference.getExplainerConsoleModel().insertText("OpenAI-Key is missing, abort process. Must be set in launch-config: OpenAI-Key=..."); return; } // Prozess erstellen. ExecutorService executorService = Executors.newSingleThreadExecutor(); Thread t = new Thread(() -> { // Textanfrage initialisieren und parametrisieren. ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .messages(messages) .model("gpt-3.5-turbo-16k") // 75 - 250? .maxTokens(200) .build(); try { // Cursor auf Warten setzen. setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); // Knopf deaktivieren. generateImageButton.setEnabled(false); // Info in der Konsole ausgeben. RuleBasedNumberFormat numberFormat = new RuleBasedNumberFormat(Locale.US, RuleBasedNumberFormat.SPELLOUT); reference.getExplainerConsoleModel() .insertText(String.format("Generating %s visual explanation%s!", numberFormat.format(nSpinner.getValue()), (int) nSpinner.getValue() > 1 ? "s" : "")); // Textanfrage an Endpunkt senden. ChatCompletionResult chatCompletionResult = service.createChatCompletion(chatCompletionRequest); // Ergebnis der Anfrage. String chatResponse = chatCompletionResult.getChoices().get(0).getMessage().getContent(); // Bildanfrage initialisieren und parametrisieren. CreateImageRequest imageRequest = CreateImageRequest.builder() .prompt(chatResponse) .n((Integer) nSpinner.getValue()) .model((String) modelTypeComboBox.getSelectedItem()) .quality((String) qualityComboBox.getSelectedItem()) .style((String) styleComboBox.getSelectedItem()) .size(String.valueOf(sizeComboBox.getSelectedItem())) .responseFormat("url") .build(); // Bildanfrage an Endpunkt senden. ImageResult imageResult = service.createImage(imageRequest); // Alle Ergebnisse verarbeiten und anzeigen. 
for(int i = 0; i < imageResult.getData().size(); i++) { URL imageUrl = new URL(imageResult.getData().get(i).getUrl()); ImageIcon icon = new ImageIcon(imageUrl); String imageName = String.format("Image-%s", i + 1); if(icon.getIconWidth() > 256 && icon.getIconHeight() > 256) { JButton external = new JButton("Open Image in external Frame..."); external.addActionListener(e1 -> { JFrame externalFrame = new JFrame(); externalFrame.setTitle(imageName); externalFrame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE); externalFrame.setLocationRelativeTo(null); JLabel imageLabel = new JLabel(); imageLabel.setIcon(icon); imageLabel.setHorizontalAlignment(SwingConstants.CENTER); imageLabel.setHorizontalTextPosition(SwingConstants.CENTER); JPanel panel = new JPanel(); panel.add(imageLabel); externalFrame.add(new JScrollPane(panel, JScrollPane.VERTICAL_SCROLLBAR_ALWAYS, JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS)); externalFrame.setSize(512, 512); externalFrame.setVisible(true); //JOptionPane.showMessageDialog(null, new JScrollPane(imageLabel), imageName, JOptionPane.PLAIN_MESSAGE, null); }); imagesTabbedPane.addTab(String.format("Image-%s", i + 1), external); } else if(icon.getIconWidth() == 256 && icon.getIconHeight() == 256) { // Label zum Darstellen eines generierten Bildes. JLabel imageLabel = new JLabel(); imageLabel.setIcon(icon); imageLabel.setHorizontalAlignment(SwingConstants.CENTER); imageLabel.setHorizontalTextPosition(SwingConstants.CENTER); // Tab mit Bild hinzufügen. imagesTabbedPane.addTab(imageName, imageLabel); } // Bild in Ordner speichern. Files.createDirectories(Paths.get(System.getProperty("user.dir") + "/explanations/image/")); String timeStamp = new SimpleDateFormat("dd-MM-yyyy_HHmmss").format(new Date()); String nameFormat = FilenameUtils.getName(imageUrl.getPath()); String fileName = String.format("explanations/image/%s-%s", timeStamp, nameFormat); File saveImg = new File(fileName); ImageIO.write(ImageIO.read(imageUrl), "jpg", saveImg); } } catch(OpenAiHttpException openAiHttpException) { if(openAiHttpException.statusCode == 401) { JOptionPane.showMessageDialog(null, "You provided an invalid API-Key!", "Authentication Error", JOptionPane.ERROR_MESSAGE); reference.getExplainerConsoleModel().insertText("You provided an invalid API-Key!"); } } catch(Exception ex) { ex.printStackTrace(); // Fehler in Konsole ausgeben. reference.getExplainerConsoleModel().insertText(ex.getMessage()); } finally { // Cursor auf Standard zurücksetzen. setCursor(Cursor.getDefaultCursor()); // Knopf reaktivieren. generateImageButton.setEnabled(true); } }); // Prozess ausführen und beenden. executorService.execute(t); executorService.shutdown(); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.image.CreateImageRequest.builder", "com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((7711, 7741), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((7926, 7959), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((10315, 10532), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10315, 10503), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10315, 10434), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((10315, 10386), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((11660, 12177), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11660, 12144), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11660, 12097), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11660, 12019), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11660, 11946), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11660, 11869), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11660, 11792), 'com.theokanning.openai.image.CreateImageRequest.builder'), ((11660, 11734), 'com.theokanning.openai.image.CreateImageRequest.builder')]
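The panel above first asks the chat model for a scene description and then forwards it to the images endpoint with model, quality and style options (quality and style apply to DALL-E 3). A minimal sketch of just the image call against the default model, with a hypothetical prompt:

    import com.theokanning.openai.image.CreateImageRequest;
    import com.theokanning.openai.image.ImageResult;
    import com.theokanning.openai.service.OpenAiService;

    public class ImageGenerationSketch {
        public static void main(String[] args) {
            OpenAiService service = new OpenAiService(System.getenv("OPENAI_API_KEY"));
            CreateImageRequest request = CreateImageRequest.builder()
                    .prompt("Clouds floating in a blue sky above calm water")
                    .n(1)
                    .size("512x512")
                    .responseFormat("url")   // return a URL rather than base64 image data
                    .build();
            ImageResult result = service.createImage(request);
            System.out.println(result.getData().get(0).getUrl());
        }
    }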
package fr.enimaloc.esportline.commands.context; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import fr.enimaloc.enutils.jda.JDAEnutils; import fr.enimaloc.enutils.jda.commands.MessageContextInteractionEvent; import fr.enimaloc.enutils.jda.register.annotation.Command; import fr.enimaloc.enutils.jda.register.annotation.Context; import fr.enimaloc.enutils.jda.register.annotation.I18n; import fr.enimaloc.esportline.utils.PaginationMessage; import net.dv8tion.jda.api.EmbedBuilder; import net.dv8tion.jda.api.Permission; import net.dv8tion.jda.api.entities.Message; import net.dv8tion.jda.api.interactions.DiscordLocale; import net.dv8tion.jda.api.interactions.components.ActionRow; import net.dv8tion.jda.api.utils.TimeFormat; import java.io.IOException; import java.text.DateFormat; import java.time.OffsetDateTime; import java.time.ZoneId; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutionException; public class EventCreator { private final OpenAiService openAi; private final ObjectMapper mapper = new ObjectMapper(); public EventCreator(String openAiToken) { this.openAi = new OpenAiService(openAiToken); mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); mapper.registerModule(new JavaTimeModule()); } record GeneratedEvent(String name, OffsetDateTime start, OffsetDateTime end, String inscription, String description, @JsonIgnore Message.Attachment attachment) {} @Context(i18n = @I18n(locales = { @I18n.Locale(language = DiscordLocale.FRENCH, value = "Créer un évènement"), @I18n.Locale(language = DiscordLocale.ENGLISH_UK, value = "Create an event"), @I18n.Locale(language = DiscordLocale.ENGLISH_US, value = "Create an event") })) @Command.RequiredPermission(Permission.MANAGE_EVENTS) public void generateEventFromMessage(MessageContextInteractionEvent interaction) throws IOException { interaction.deferReply(true).queue(); List<Message.Attachment> attachments = interaction.getTarget().getAttachments().stream().filter(Message.Attachment::isImage).toList(); List<GeneratedEvent> baseChoice = askGPT(interaction.getTarget().getContentRaw()) .stream() .map(ChatCompletionChoice::getMessage) .map(ChatMessage::getContent) .map(json -> { try { return mapper.readValue(json, GeneratedEvent.class); } catch (JsonProcessingException e) { throw new RuntimeException(e); } }).toList(); List<GeneratedEvent> generatedEvents = new ArrayList<>(); if (attachments.isEmpty()) { generatedEvents.addAll(baseChoice); } else { for (GeneratedEvent event : baseChoice) { for (Message.Attachment attachment : attachments) { generatedEvents.add(new GeneratedEvent(event.name(), event.start(), event.end(), event.inscription(), event.description(), attachment)); } } } PaginationMessage<GeneratedEvent> paginationMessage = new PaginationMessage(generatedEvents.toArray(GeneratedEvent[]::new), 1); paginationMessage.displayEmbed(interaction.getHook(), events -> { GeneratedEvent event = events[0]; return new EmbedBuilder() .setTitle("Generated event") 
.addField("Name", event.name(), false) .addField("Start", TimeFormat.DATE_TIME_LONG.format(event.start().toEpochSecond() * 1000) + " [" + event.start() + "]", false) .addField("End", TimeFormat.DATE_TIME_LONG.format(event.end().toEpochSecond() * 1000) + " [" + event.end() + "]", false) .addField("Inscription", event.inscription(), false) .addField("Description", event.description(), false) .setImage(event.attachment() != null ? event.attachment().getProxyUrl() : null) .build(); }, ActionRow.of( interaction.buildComponent().button().primary("confirm", "Confirm").withCallback(e -> { GeneratedEvent event = paginationMessage.getPage()[0]; try { interaction.getGuild().createScheduledEvent( event.name(), event.inscription(), event.start(), event.end() ) .setDescription(event.description()) .setImage(event.attachment() != null ? event.attachment().getProxy().downloadAsIcon().get() : null) .queue(success -> e.reply("Event created").queue(), throwable -> JDAEnutils.DEFAULT_EXCEPTION_HANDLER.accept(throwable, e.getHook(), e)); } catch (InterruptedException | ExecutionException ex) { JDAEnutils.DEFAULT_EXCEPTION_HANDLER.accept(ex, e.getHook(), e); } }) )); } private List<ChatCompletionChoice> askGPT(String messageContent) { ChatCompletionRequest request = ChatCompletionRequest.builder() .model("gpt-3.5-turbo-0613") .messages(List.of(new ChatMessage(ChatMessageRole.SYSTEM.value(), "Tu est un community manager dans l'association d'esport nommé \"esportline\", tu dois rédiger un évènement pour Discord.\n" + "\n" + "Cet fonctionnalité doit contenir:\n" + "- le nom de l'évènement;\n" + "- la date de début et de fin de l'évènement sous forme de date JSR310 en prenant compte la zone id suivante " + ZoneId.systemDefault() + ";\n" + "- un lieu ou un lien d'inscription à l'évènement;\n" + "- une description de cette évènement.\n" + "\n" + "La description doit respecter les points suivants:\n" + "- doit contenir les grands point de l’événement;\n" + "- ne pas ce répéter avec les autres points de la liste;\n" + "- être sous forme de liste à point;\n" + "- utilisant du markdown;\n" + "- pour les retour a la ligne utilise le caractère \\n\n" + "- tu ne dois pas formatter les liens;\n" + "- ne dois pas dépasser 1000 caractères;\n" + "- POINT IMPORTANT: ne doit pas être égale au contenu du message et dois contenir uniquement la DESCRIPTION.\n" + "\n" + "Note: La date d'aujourd'hui est " + DateFormat.getDateInstance(0).format(System.currentTimeMillis()) + " donc si le message ne contient pas d'année ou de mois prends cela\n" + "\n" + "\n" + "Remplis moi cela soit forme de json comme tel: {\"name\": \"%nomDeLEvenement%\", \"start\": %startTimestamp%, \"end\": %endTimestamp%, \"inscription\": \"%lienDinscription%\", \"description\": \"%description%\"}.\n" ), new ChatMessage(ChatMessageRole.USER.value(), messageContent))) .build(); return openAi.createChatCompletion(request).getChoices(); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((4168, 4238), 'net.dv8tion.jda.api.utils.TimeFormat.DATE_TIME_LONG.format'), ((4313, 4381), 'net.dv8tion.jda.api.utils.TimeFormat.DATE_TIME_LONG.format'), ((5546, 5616), 'fr.enimaloc.enutils.jda.JDAEnutils.DEFAULT_EXCEPTION_HANDLER.accept'), ((5720, 5783), 'fr.enimaloc.enutils.jda.JDAEnutils.DEFAULT_EXCEPTION_HANDLER.accept'), ((5956, 8332), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5956, 8307), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5956, 6032), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6083, 6113), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((7756, 7820), 'java.text.DateFormat.getDateInstance'), ((8260, 8288), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
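The command above asks the model for a JSON object and maps it straight onto the GeneratedEvent record, which relies on the ObjectMapper having the JavaTimeModule registered for the OffsetDateTime fields. A standalone sketch of that deserialization with a hypothetical ISO-8601 payload and a trimmed-down record:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
    import java.time.OffsetDateTime;

    public class EventJsonSketch {
        // Trimmed-down stand-in for the GeneratedEvent record above (no attachment field).
        record Event(String name, OffsetDateTime start, OffsetDateTime end, String inscription, String description) {}

        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper().registerModule(new JavaTimeModule());
            String json = "{\"name\":\"LAN party\",\"start\":\"2024-03-01T18:00:00+01:00\","
                    + "\"end\":\"2024-03-01T23:00:00+01:00\",\"inscription\":\"https://example.org/signup\","
                    + "\"description\":\"- games\\n- food\"}";
            Event event = mapper.readValue(json, Event.class);
            System.out.println(event.name() + " starts at " + event.start());
        }
    }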
package com.jornadamilhas.api.services; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; import org.springframework.beans.factory.annotation.Value; public class OpenAI { @Value("${api.services.openai.apikey}") private static String apiKey; public static String generateAIText(String prompt) { OpenAiService service = new OpenAiService(apiKey); CompletionRequest completionRequest = CompletionRequest.builder() .prompt(prompt) .model("text-davinci-003") .maxTokens(1000) .build(); return service.createCompletion(completionRequest).getChoices().get(0).getText().replace("\n", ""); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((485, 645), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((485, 620), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((485, 587), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((485, 544), 'com.theokanning.openai.completion.CompletionRequest.builder')]
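One thing to note in the sample above: @Value sits on a static field, and Spring only injects property values into non-static fields of beans it manages, so apiKey would stay null at runtime. A minimal instance-based variant under the assumption that the class is registered as a Spring bean; the @Service annotation, class name and constructor injection are additions for illustration, not part of the original:

    import com.theokanning.openai.completion.CompletionRequest;
    import com.theokanning.openai.service.OpenAiService;
    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.stereotype.Service;

    @Service
    public class OpenAiTextService {
        private final OpenAiService service;

        public OpenAiTextService(@Value("${api.services.openai.apikey}") String apiKey) {
            this.service = new OpenAiService(apiKey);
        }

        public String generateAIText(String prompt) {
            CompletionRequest request = CompletionRequest.builder()
                    .prompt(prompt)
                    .model("text-davinci-003")
                    .maxTokens(1000)
                    .build();
            return service.createCompletion(request).getChoices().get(0).getText().replace("\n", "");
        }
    }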
package com.jenjenuwu.gptminecraft.openAI; import com.jenjenuwu.gptminecraft.config.ModConfig; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import me.shedaniel.autoconfig.AutoConfig; import java.util.List; public class OpenAiModel { private static ModConfig config = AutoConfig.getConfigHolder(ModConfig.class).getConfig(); private static List<ChatMessage> getMessages(String Prompt, String SystemPrompt) { return List.of(new ChatMessage("user", Prompt), new ChatMessage("system", SystemPrompt)); } private static ChatCompletionRequest createChatCompletionRequestBuilder(List<ChatMessage> Prompt) { return ChatCompletionRequest.builder() .model("gpt-4") .messages(Prompt) .build(); } private static OpenAiService createOpenAiService() { return new OpenAiService(config.getApiKey()); } public static String getResponse(String userPrompt, String systemPrompt) { try { return createOpenAiService().createChatCompletion(createChatCompletionRequestBuilder(getMessages(userPrompt, systemPrompt))).getChoices().get(0).getMessage().getContent().trim(); } catch (Exception e) { e.printStackTrace(); } return null; } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((410, 465), 'me.shedaniel.autoconfig.AutoConfig.getConfigHolder'), ((779, 901), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((779, 876), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((779, 842), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.easyen.easyenglish.serviceimpl;

import com.easyen.easyenglish.entity.speakEnPracticeRecord;
import com.easyen.easyenglish.mapper.speakEnRecordMapper;
import com.easyen.easyenglish.service.speakEnRecordService;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theokanning.openai.OpenAiApi;
import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
import okhttp3.OkHttpClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.beans.factory.annotation.Value;
import retrofit2.Retrofit;

import java.net.InetSocketAddress;
import java.net.Proxy;
import java.time.Duration;
import java.util.List;

import static com.theokanning.openai.service.OpenAiService.*;

@Service
public class speakEnRecordServiceimpl implements speakEnRecordService {

    @Autowired
    speakEnRecordMapper speakEnRecordMapper;

    @Override
    @Transactional
    public void addRecord(speakEnPracticeRecord record) {
        try {
            speakEnRecordMapper.addRecord(record);
        } catch (Exception e) {
            // 在实际应用中,你可能会记录日志或者执行其他适当的异常处理操作
            e.printStackTrace();
            throw new RuntimeException("添加口语记录失败:" + e.getMessage());
        }
    }

    @Override
    public void deleteRecord(Integer record) {
        try {
            speakEnRecordMapper.deleteRecord(record);
        } catch (Exception e) {
            // 在实际应用中,你可能会记录日志或者执行其他适当的异常处理操作
            e.printStackTrace();
            throw new RuntimeException("更新口语记录失败:" + e.getMessage());
        }
    }

    @Override
    public void updateRecord(speakEnPracticeRecord record) {
        try {
            speakEnRecordMapper.updateRecord(record);
        } catch (Exception e) {
            // 在实际应用中,你可能会记录日志或者执行其他适当的异常处理操作
            e.printStackTrace();
            throw new RuntimeException("更新口语记录失败:" + e.getMessage());
        }
    }

    @Override
    public List<speakEnPracticeRecord> getAllrecord() {
        return speakEnRecordMapper.getAllRecord();
    }

    @Override
    public List<speakEnPracticeRecord> findByTopic(String topic) {
        return speakEnRecordMapper.findByTopic(topic);
    }

    @Value("${gpt.api-key}")
    String token;
    @Value("${gpt.model}")
    String model;
    @Value("${gpt.temperature}")
    Double t;
    @Value("${gpt.maxTokens}")
    Integer maxt;
    @Value("${gpt.timeout}")
    Duration timeout;
    @Value("${proxy.host}")
    String host;
    @Value("${proxy.port}")
    Integer port;

    @Override
    public String getSpeakResponce(String requirements, String question) {
        // Configure the proxy
        ObjectMapper mapper = defaultObjectMapper();
        Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));
        OkHttpClient client = defaultClient(token, timeout)
                .newBuilder()
                .proxy(proxy)
                .build();
        Retrofit retrofit = defaultRetrofit(client, mapper);
        OpenAiApi api = retrofit.create(OpenAiApi.class);
        OpenAiService service = new OpenAiService(api);

        CompletionRequest completionRequest = CompletionRequest.builder()
                .model(model)
                .prompt("我现在在进行" + requirements + "口语话题的练习。我练习的话题或问题是:" + question + "。请依据" + requirements + "评分标准,给出一个可能的得到高分的回答")
                .temperature(t)
                .maxTokens(maxt)
                .topP(1D)
                .frequencyPenalty(0D)
                .presencePenalty(0D)
                .build();
        System.out.println("构建完成,正在请求" + completionRequest);

        CompletionChoice choice = service.createCompletion(completionRequest).getChoices().get(0);
        String generatedText = choice.getText();
        return generatedText;
    }
}
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((3613, 4109), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3613, 4080), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3613, 4039), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3613, 3997), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3613, 3967), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3613, 3930), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3613, 3894), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((3613, 3674), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.example.chat.util;

import com.example.chat.config.BotConfig;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import java.util.*;

/**
 * @author xingboyuan
 * @date 2023/5/6 9:14
 */
@Component
public class BotUtil {

    @Resource
    public void setAccountConfig(BotConfig botConfig) {
        BotUtil.botConfig = botConfig;
    }

    private static BotConfig botConfig;

    private static final Map<String, List<ChatMessage>> PROMPT_MAP = new HashMap<>();
    private static final Map<OpenAiService, Integer> COUNT_FOR_OPEN_AI_SERVICE = new HashMap<>();

    private static ChatCompletionRequest.ChatCompletionRequestBuilder completionRequestBuilder;

    @PostConstruct
    public void init() {
        completionRequestBuilder = ChatCompletionRequest.builder().model(botConfig.getModel()).temperature(botConfig.getTemperature()).maxTokens(botConfig.getMaxToken());
        for (OpenAiService openAiService : botConfig.getOpenAiServiceList()) {
            COUNT_FOR_OPEN_AI_SERVICE.put(openAiService, 0);
        }
    }

    //根据key获取openAiService
    public static OpenAiService getOpenAiService() {
        //获取使用次数最小的openAiService 否则获取map中的第一个
        Optional<OpenAiService> openAiServiceToUse = COUNT_FOR_OPEN_AI_SERVICE.entrySet().stream()
                .min(Map.Entry.comparingByValue())
                .map(Map.Entry::getKey);
        if (openAiServiceToUse.isPresent()) {
            COUNT_FOR_OPEN_AI_SERVICE.put(openAiServiceToUse.get(), COUNT_FOR_OPEN_AI_SERVICE.get(openAiServiceToUse.get()) + 1);
            return openAiServiceToUse.get();
        } else {
            COUNT_FOR_OPEN_AI_SERVICE.put(COUNT_FOR_OPEN_AI_SERVICE.keySet().iterator().next(), COUNT_FOR_OPEN_AI_SERVICE.get(COUNT_FOR_OPEN_AI_SERVICE.keySet().iterator().next()) + 1);
            return COUNT_FOR_OPEN_AI_SERVICE.keySet().iterator().next();
        }
    }

    public static ChatCompletionRequest.ChatCompletionRequestBuilder getCompletionRequestBuilder() {
        return completionRequestBuilder;
    }

    public static List<ChatMessage> buildPrompt(String sessionId, String newPrompt) {
        if (!PROMPT_MAP.containsKey(sessionId)) {
            if (null != botConfig.getBasicPrompt()) {
                List<ChatMessage> promptList = new ArrayList<>();
                promptList.add(botConfig.getBasicPrompt());
                PROMPT_MAP.put(sessionId, promptList);
            }
        }
        List<ChatMessage> promptList = PROMPT_MAP.getOrDefault(sessionId, new ArrayList<>());
        promptList.add(new ChatMessage("user", newPrompt));
        return promptList;
    }

    public static void updatePrompt(String sessionId, List<ChatMessage> promptList) {
        PROMPT_MAP.put(sessionId, promptList);
    }

    public static boolean isPromptEmpty(String sessionId) {
        if (!PROMPT_MAP.containsKey(sessionId)) {
            return true;
        }
        List<ChatMessage> promptList = PROMPT_MAP.get(sessionId);
        if (null != botConfig.getBasicPrompt()) {
            return promptList.size() == 1;
        } else {
            return promptList.size() == 0;
        }
    }

    public static boolean deleteFirstPrompt(String sessionId) {
        if (!isPromptEmpty(sessionId)) {
            int index = null != botConfig.getBasicPrompt() ? 1 : 0;
            List<ChatMessage> promptList = PROMPT_MAP.get(sessionId);
            //问
            promptList.remove(index);
            //答
            promptList.remove(index);
            updatePrompt(sessionId, promptList);
            return true;
        }
        return false;
    }

    public static void resetPrompt(String sessionId) {
        PROMPT_MAP.remove(sessionId);
    }
}
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1003, 1137), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1003, 1102), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1003, 1062), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.gc.chatbot.service;

import com.theokanning.openai.OpenAiService;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.completion.CompletionResult;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

@Slf4j
@Service
public class OpenAiChatBiz {

    @Value("${open.ai.model}")
    private String openAiModel;

    @Autowired
    private OpenAiService openAiService;

    /**
     * 聊天
     * @param prompt
     * @return
     */
    public String chat(String prompt) {
        CompletionRequest completionRequest = CompletionRequest.builder()
                .prompt(prompt)
                .model(openAiModel)
                .echo(true)
                .temperature(0.7)
                .topP(1d)
                .frequencyPenalty(0d)
                .presencePenalty(0d)
                .maxTokens(1000)
                .build();
        CompletionResult completionResult = openAiService.createCompletion(completionRequest);
        StringBuffer text = new StringBuffer();
        completionResult.getChoices().forEach(v -> {
            text.append(v.getText() + "\n");
        });
        return text.toString();
    }
}
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((717, 1033), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((717, 1008), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((717, 975), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((717, 938), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((717, 900), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((717, 874), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((717, 840), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((717, 812), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((717, 776), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package aspectibot; import java.awt.Graphics; import java.awt.image.BufferedImage; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.nio.file.Files; import java.nio.file.Paths; import java.text.SimpleDateFormat; import java.time.Duration; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Random; import javax.imageio.ImageIO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.github.philippheuer.credentialmanager.domain.OAuth2Credential; import com.github.twitch4j.TwitchClient; import com.github.twitch4j.TwitchClientBuilder; import com.github.twitch4j.chat.events.channel.ChannelMessageEvent; import com.github.twitch4j.chat.events.channel.RaidEvent; import com.github.twitch4j.common.events.user.PrivateMessageEvent; import com.github.twitch4j.common.util.CryptoUtils; import com.github.twitch4j.events.ChannelChangeGameEvent; import com.github.twitch4j.events.ChannelChangeTitleEvent; import com.github.twitch4j.events.ChannelGoLiveEvent; import com.github.twitch4j.events.ChannelGoOfflineEvent; import com.github.twitch4j.events.ChannelViewerCountUpdateEvent; import com.github.twitch4j.helix.domain.Stream; import com.github.twitch4j.helix.domain.Video; import com.github.twitch4j.pubsub.domain.ChannelPointsRedemption; import com.github.twitch4j.pubsub.events.RewardRedeemedEvent; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import net.dv8tion.jda.api.EmbedBuilder; import net.dv8tion.jda.api.JDA; import net.dv8tion.jda.api.JDABuilder; import net.dv8tion.jda.api.OnlineStatus; import net.dv8tion.jda.api.entities.Activity; import net.dv8tion.jda.api.entities.Guild; import net.dv8tion.jda.api.entities.Icon; import net.dv8tion.jda.api.entities.Message; import net.dv8tion.jda.api.entities.channel.concrete.NewsChannel; import net.dv8tion.jda.api.interactions.components.ActionRow; import net.dv8tion.jda.api.interactions.components.buttons.Button; import net.dv8tion.jda.api.requests.GatewayIntent; import net.dv8tion.jda.api.utils.ChunkingFilter; import net.dv8tion.jda.api.utils.FileUpload; import net.dv8tion.jda.api.utils.MemberCachePolicy; import twitch_commands.ClipCommand; import twitch_commands.EmotesCommand; import twitch_commands.LeaderboardCommand; import twitch_commands.LogAddCommand; import twitch_commands.LogDeleteCommand; import twitch_commands.LogEditCommand; import twitch_commands.LogShowCommand; import twitch_commands.LurkCommand; import twitch_commands.TwitchEmoteCommand; import twitch_commands.TwitterCommand; public class AspectiBot { private static final String ASPECTICOR = "aspecticor"; private static final String CONFIG_FILE = "src/config.properties"; private static String DISCORD_TOKEN_PATH; private static String TWITCH_TOKEN_PATH; private static String OPEN_AI_TOKEN_PATH; private static String LIVE_ICON_PATH; private static String OFFLINE_ICON_PATH; public static String COMMAND_LOG_PATH; public static String BIRTHDAY_LOG_PATH; private static String THIS_FOLDER_PATH; /* Aspecticord settings 
*/ public static final long SERVER_ID = 864273305330909204L; // Aspecticord Server ID private static final long LIVE_CHANNEL_ID = 885705830341697536L; // #aspecticor-is-live channel public static final long LOG_CHANNEL_ID = 1016876667509166100L; // #server_logs channel public static final long CLIP_CHANNEL_ID = 867258015220236319L; // #clips channel public static final long DEFAULT_ROLE = 885698882695229500L; // Aspecticord default role private static final String PING_ROLE = "882772072475017258"; // Aspecticord @TWITCH_PINGS //*/ /* Test Server settings public static final long SERVER_ID = 264217465305825281L; public static final long LIVE_CHANNEL_ID = 1022422500161900634L; public static final long LOG_CHANNEL_ID = 1022427876609495100L; public static final long CLIP_CHANNEL_ID = 1024597131488665601L; public static final long DEFAULT_ROLE = 1053423521604309062L; public static final String PING_ROLE = "853934165077393458"; //*/ private static String token; // discord token public static String oAuth; // twitch OAuth public static String opnAI; // OpenAI token public static Icon liveIcon; public static Icon offlineIcon; public enum StreamStatus { LIVE, OFFLINE; } private static String[] modArray = {"aspectibot", "atlae99", "b00kitten", "botspecticor", "brenroarn", "bunnyga", "evan_gao", "fourthwallhq", "fu5ha", "isto_inc", "katiegrayx3", "kittyzea", "linkus7", "mattyro1", "me_jd", "mracres", "negnegtm", "nightbot", "onteia", "scriptdesk", "seek_", "serkian", "skelly57", "stanz", "streamelements", "streamlabs", "sumneer","theandershour", "thomasthegremlin", "vezlaye", "voidmakesvids", "xemdo"}; private static StreamStatus streamStatus = StreamStatus.OFFLINE; private static final Logger LOG = LoggerFactory.getLogger(AspectiBot.class); private static String aspectibotId = "718348603"; public static String aspecticorId; public static TwitchClient twitchClient; public static JDA jda; private static String notificationMessageId = ""; public static final Random R = new Random(); public static void main(String[] args) throws Exception { LOG.info("Starting AspectiBot..."); loadConfig(); readSaveFile(); // set up JDA jda = JDABuilder.createDefault(token) .setChunkingFilter(ChunkingFilter.ALL) .setMemberCachePolicy(MemberCachePolicy.ALL) .enableIntents(GatewayIntent.GUILD_MEMBERS, GatewayIntent.MESSAGE_CONTENT) .build(); jda.getPresence().setStatus(OnlineStatus.IDLE); jda.addEventListener(new DiscordServerListener()); // load offline and live icons File liveFile = new File(LIVE_ICON_PATH); File offlineFile = new File(OFFLINE_ICON_PATH); liveIcon = Icon.from(liveFile); offlineIcon = Icon.from(offlineFile); // join Aspecticor's chat OAuth2Credential credential = new OAuth2Credential("twitch", oAuth); twitchClient = TwitchClientBuilder.builder() .withEnableHelix(true) .withDefaultAuthToken(credential) .withEnableChat(true) .withChatAccount(credential) .withEnablePubSub(true) .build(); // join Aspect's stream twitchClient.getChat().joinChannel(ASPECTICOR); // Listen to aspecticor's stream twitchClient.getClientHelper().enableStreamEventListener(ASPECTICOR); aspecticorId = twitchClient.getChat().getChannelNameToChannelId().get(ASPECTICOR); // "275302146" twitchClient.getPubSub().listenForChannelPointsRedemptionEvents(credential, aspecticorId); // if Aspecticor is live change activity and status // also change server icon goLive(twitchClient, jda); // if Aspect turns stream off, change icon back and set status to idle. 
goOffline(twitchClient, jda); whisper(twitchClient); registerTwitchCommands(); // channel point stuff twitchClient.getEventManager().onEvent(RewardRedeemedEvent.class, event -> { ChannelPointsRedemption redeem = event.getRedemption(); String rewardName = redeem.getReward().getTitle(); LOG.info("{} redeemed {}!", redeem.getUser().getDisplayName(), rewardName); // ASK THE AI if(rewardName.equalsIgnoreCase("ASK THE AI")) { String prompt = redeem.getUserInput(); String user = redeem.getUser().getDisplayName(); String answer = ""; LOG.info("AI question asked by {}: {}", user, prompt); while (answer.equalsIgnoreCase("")) { try { // Generate a GPT3.5 response from twitch chat question String adjustment = "Please answer this question in 450 characters or fewer, also when " + "answering the question, if you're unsure of anything, make stuff up " + "in order to answer the question. Here is the question: "; List<ChatMessage> messages = Arrays.asList(new ChatMessage("user", adjustment + prompt, user)); OpenAiService service = new OpenAiService(opnAI); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .messages(messages) .model("gpt-3.5-turbo") .maxTokens(300) .build(); List<ChatCompletionChoice> choices = service.createChatCompletion(chatCompletionRequest).getChoices(); answer = choices.get(AspectiBot.R.nextInt(choices.size())).getMessage().getContent(); String chatResponse = "@" + user + ": " + answer; LOG.info("AI response: {}", chatResponse); if(chatResponse.length() >= 500) { twitchClient.getChat().sendMessage(ASPECTICOR, chatResponse.substring(0,495) + "..."); } else { twitchClient.getChat().sendMessage(ASPECTICOR, chatResponse); } } catch(Exception e) { //do nothing LOG.error("AI error: {}", e.getMessage()); e.printStackTrace(); break; } } } }); // on raid twitchClient.getEventManager().onEvent(RaidEvent.class, event -> { if (event.getViewers() < 10) { twitchClient.getChat() .sendMessage(ASPECTICOR, "!so " + event.getRaider().getName()); return; } String raiderId = event.getRaider().getId(); // send shoutout event try { twitchClient.getHelix() .sendShoutout(oAuth, aspecticorId, raiderId, aspectibotId) .execute(); } catch (Exception e) { LOG.error("Unable to send shoutout to {}!", event.getRaider().getName()); e.printStackTrace(); } twitchClient.getChat() .sendMessage(ASPECTICOR, "!so " + event.getRaider().getName()); }); LOG.info("AspectiBot Started!"); } // end of main method public static void goLive(TwitchClient twitchClient, JDA jda) { Guild server = jda.getGuildById(SERVER_ID); NewsChannel newsChannel = jda.getNewsChannelById(AspectiBot.LIVE_CHANNEL_ID); if(server == null) { LOG.error("goLive: Unable to get server! Server ID: " + SERVER_ID); return; } if(newsChannel == null) { LOG.error("goLive: Unable to get news channel! 
Channel ID: " + AspectiBot.LIVE_CHANNEL_ID); return; } twitchClient.getEventManager().onEvent(ChannelGoLiveEvent.class, event -> { LOG.info(ASPECTICOR + " went live!"); if(streamStatus == StreamStatus.OFFLINE) { streamStatus = StreamStatus.LIVE; jda.getPresence().setStatus(OnlineStatus.ONLINE); jda.getPresence().setActivity(Activity.watching("Aspecticor's Stream")); // change icon to Live version server.getManager().setIcon(liveIcon).queue(); EmbedBuilder goLiveEmbed = formatEmbed(event.getStream()); Message streamNotificationMessage = newsChannel.sendMessage("<@&"+ PING_ROLE +"> HE'S LIVE!!!") .addEmbeds(goLiveEmbed.build()) .addActionRow( Button.link( "https://www.twitch.tv/aspecticor", "Watch Stream" ) ) .complete(); notificationMessageId = streamNotificationMessage.getId(); File idFile = new File(AspectiBot.THIS_FOLDER_PATH + "notifID.sav"); try { if(idFile.createNewFile()) { FileWriter fw = new FileWriter(idFile); fw.write(notificationMessageId); fw.close(); } } catch (IOException e) { LOG.error("goLive: Unable to create save file for the message ID"); e.printStackTrace(); } } }); // Update stream info when title is changed twitchClient.getEventManager().onEvent(ChannelChangeTitleEvent.class, event -> { EmbedBuilder newEmbed = formatEmbed(event.getStream()); newsChannel.editMessageEmbedsById(notificationMessageId, newEmbed.build()).complete(); }); // Update stream info when game/category is changed twitchClient.getEventManager().onEvent(ChannelChangeGameEvent.class, event -> { EmbedBuilder newEmbed = formatEmbed(event.getStream()); newsChannel.editMessageEmbedsById(notificationMessageId, newEmbed.build()).complete(); }); // Update stream info when viewercount changes twitchClient.getEventManager().onEvent(ChannelViewerCountUpdateEvent.class, event -> { EmbedBuilder newEmbed = formatEmbed(event.getStream()); newsChannel.editMessageEmbedsById(notificationMessageId, newEmbed.build()).complete(); }); } public static void goOffline(TwitchClient twitchClient, JDA jda) { Guild server = jda.getGuildById(SERVER_ID); if(server == null) { LOG.error("goOffline: Unable to get server! Server ID: " + SERVER_ID); return; } twitchClient.getEventManager().onEvent(ChannelGoOfflineEvent.class, event -> { LOG.info(ASPECTICOR + " went offline!"); streamStatus = StreamStatus.OFFLINE; jda.getPresence().setStatus(OnlineStatus.IDLE); // change icon to Offline version server.getManager().setIcon(offlineIcon).queue(); jda.getPresence().setActivity(Activity.watching("Aspect's VODs")); List<Video> vodList = twitchClient.getHelix() .getVideos( oAuth, null, aspecticorId, null, null, Video.SearchPeriod.DAY, Video.SearchOrder.TIME, Video.Type.ARCHIVE, 1, null, null) .execute() .getVideos(); Video latestVod = vodList.get(0); createVodThumbnail(latestVod); NewsChannel ch = jda.getNewsChannelById(LIVE_CHANNEL_ID); if(ch == null) { LOG.error("goOffline: Could not find the go-live channel! 
Check the channel ID!"); } else { ch.editMessageComponentsById( notificationMessageId, ActionRow.of( Button.link( latestVod.getUrl(), "Watch VOD" ) ) ).queue(); } // delete messageId value from the save file // and set id to "" File notifIdFile = new File(AspectiBot.THIS_FOLDER_PATH + "notifID.sav"); try { Files.delete(notifIdFile.toPath()); } catch (IOException e) { // but like how tho, you just created it e.printStackTrace(); } finally { notificationMessageId = ""; } }); } // end of goOffline method private static void createVodThumbnail(Video latestVod) { try (InputStream in = new URL(latestVod.getThumbnailUrl(1920, 1080)).openStream()) { // credit: https://www.techiedelight.com/download-file-from-url-java/ // get a local version of the thumbnail to set up for adding overlay int width = 1920; int height = 1080; Files.copy(in, Paths.get(THIS_FOLDER_PATH + "vod_thumbnail.png")); // credit: https://stackoverflow.com/a/2319251 // adds the vod_overlay on top of the vod_thumbnail BufferedImage uploadedThumbnail = ImageIO.read(new File(THIS_FOLDER_PATH + "vod_thumbnail.png")); BufferedImage vodOverlay = ImageIO.read(new File(THIS_FOLDER_PATH + "vod_overlay.png")); BufferedImage combined = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); Graphics g = combined.getGraphics(); g.drawImage(uploadedThumbnail, 0, 0, null); // set offset to center the overlay int x_offset = (uploadedThumbnail.getWidth() - vodOverlay.getWidth()) / 2; int y_offset = (uploadedThumbnail.getHeight() - vodOverlay.getHeight()) / 2; g.drawImage(vodOverlay, x_offset, y_offset, null); g.dispose(); // save the image on the system ImageIO.write(combined, "PNG", new File(THIS_FOLDER_PATH + "combined.png")); File combinedImage = new File(THIS_FOLDER_PATH + "combined.png"); String streamTitle = latestVod.getTitle(); String vodThumbnailURL = "attachment://combined.png"; String streamDuration = latestVod.getDuration(); if(streamDuration == null) streamDuration = "0"; String streamViewCount = latestVod.getViewCount().toString(); if(streamViewCount == null) streamViewCount = "0"; // format date to look like "Wed, Sep 28, 2022" SimpleDateFormat sdf = new SimpleDateFormat("EEE, MMM d, yyyy"); Date vodDate = sdf.parse(sdf.format(Date.from(latestVod.getPublishedAtInstant()))); String[] dateArray = vodDate.toString().split(" "); String stringDate = dateArray[0] + ", " + dateArray[1] + " " + dateArray[2] + ", " + dateArray[5]; EmbedBuilder offlineEmbed = new EmbedBuilder(); offlineEmbed.setTitle("**[VOD]** " + streamTitle, latestVod.getUrl()); offlineEmbed.setDescription("VOD from " + stringDate); offlineEmbed.addField( "__VOD View Count__:", streamViewCount, true); offlineEmbed.addBlankField(true); offlineEmbed.addField( "__VOD Length__:", streamDuration, true); offlineEmbed.setImage(vodThumbnailURL); offlineEmbed.setThumbnail("https://i.imgur.com/YfplpoR.png"); offlineEmbed.setAuthor( "Aspecticor", "https://www.twitch.tv/aspecticor", "https://static-cdn.jtvnw.net/jtv_user_pictures/0dd6cf74-d650-453a-8d18-403409ae5517-profile_image-70x70.png" ); offlineEmbed.setFooter( "brought to you by AspectiBot \u2764", "https://i.imgur.com/hAOV52i.png"); offlineEmbed.setColor(0x8045f4); Collection<FileUpload> files = new LinkedList<FileUpload>(); files.add(FileUpload.fromData(combinedImage, "combined.png")); File vodThumbnail = new File(THIS_FOLDER_PATH + "vod_thumbnail.png"); NewsChannel newsChannel = jda.getNewsChannelById(AspectiBot.LIVE_CHANNEL_ID); if (newsChannel == null) { LOG.error("createVodThumbnail: Could not find the go-live 
channel! Check the channel ID!"); } else { newsChannel.editMessageEmbedsById( notificationMessageId, offlineEmbed.build()) .setFiles(files) .complete(); } vodThumbnail.delete(); combinedImage.delete(); } catch(Exception e) { LOG.error("goOffline: Error creating the VOD thumbnail!"); e.printStackTrace(); } } // end of createVodThumbnail method public static void whisper(TwitchClient twitchClient) { // if a mod in twitch channel whispers bot, send chat to that twitch channel twitchClient.getEventManager().onEvent(PrivateMessageEvent.class, event -> { List<String> mods = Arrays.asList(modArray); if (mods != null && mods.contains(event.getUser().getName())) { twitchClient.getChat().sendMessage(ASPECTICOR, event.getMessage()); } }); } // end of onWhisper method public static EmbedBuilder formatEmbed(Stream twitchStream) { String streamTitle = twitchStream.getTitle(); String streamGame = twitchStream.getGameName(); String streamThumbnailURL = twitchStream.getThumbnailUrl(1920, 1080) + "?c=" + CryptoUtils.generateNonce(4); Duration streamDuration = twitchStream.getUptime(); int streamTotalSeconds = (int) streamDuration.getSeconds(); final int SECONDS_TO_HOURS = 3600; final int SECONDS_TO_MINUTES = 60; String streamHours = (streamTotalSeconds / SECONDS_TO_HOURS) + "h "; String streamMinutes = (streamTotalSeconds % SECONDS_TO_HOURS) / SECONDS_TO_MINUTES + "m "; String streamSeconds = (streamTotalSeconds % SECONDS_TO_HOURS) % SECONDS_TO_MINUTES + "s "; // only display hours if stream is over an hour long if(streamHours.equalsIgnoreCase("0h ")) { streamHours = ""; } String streamUptime = streamHours + streamMinutes + streamSeconds; String streamViewCount = twitchStream.getViewerCount().toString(); if(streamViewCount == null) streamViewCount = "0"; EmbedBuilder goLiveEmbed = new EmbedBuilder(); goLiveEmbed.setTitle(streamTitle, "https://www.twitch.tv/" + ASPECTICOR); goLiveEmbed.setDescription("Playing **" + streamGame + "**"); goLiveEmbed.addField( "__Viewers__:", streamViewCount, true); goLiveEmbed.addBlankField(true); goLiveEmbed.addField( "__Uptime__:", streamUptime, true); goLiveEmbed.setImage(streamThumbnailURL); goLiveEmbed.setThumbnail("https://i.imgur.com/dimEDm5.png"); goLiveEmbed.setAuthor( "Aspecticor", "https://www.twitch.tv/aspecticor", "https://static-cdn.jtvnw.net/jtv_user_pictures/0dd6cf74-d650-453a-8d18-403409ae5517-profile_image-70x70.png" ); goLiveEmbed.setFooter("brought to you by AspectiBot \u2764", "https://i.imgur.com/hAOV52i.png"); goLiveEmbed.setColor(0xf92b75); return goLiveEmbed; } public static void readSaveFile() { File saveFile = new File(AspectiBot.THIS_FOLDER_PATH + "notifID.sav"); try (BufferedReader br = new BufferedReader(new FileReader(saveFile))) { AspectiBot.notificationMessageId = br.readLine(); LOG.info("readSaveFile: Save file successfully read!"); } catch (FileNotFoundException e) { // file not found AspectiBot.notificationMessageId = ""; LOG.info("readSaveFile: File not found because previous stream ended before this program restarted!"); } catch (IOException e) { LOG.error("readSaveFile: Unable to read the save file!"); e.printStackTrace(); } } public static void loadConfig() { // https://niruhan.medium.com/how-to-add-a-config-file-to-a-java-project-99fd9b6cebca try (FileInputStream config = new FileInputStream(CONFIG_FILE)) { Properties prop = new Properties(); prop.load(config); DISCORD_TOKEN_PATH = prop.getProperty("DISCORD_TOKEN_PATH"); TWITCH_TOKEN_PATH = prop.getProperty("TWITCH_TOKEN_PATH"); OPEN_AI_TOKEN_PATH = 
prop.getProperty("OPEN_AI_TOKEN_PATH"); LIVE_ICON_PATH = prop.getProperty("LIVE_ICON_PATH"); OFFLINE_ICON_PATH = prop.getProperty("OFFLINE_ICON_PATH"); COMMAND_LOG_PATH = prop.getProperty("COMMAND_LOG_PATH"); BIRTHDAY_LOG_PATH = prop.getProperty("BIRTHDAY_LOG_PATH"); THIS_FOLDER_PATH = prop.getProperty("THIS_FOLDER_PATH"); } catch (FileNotFoundException e) { //no config file DISCORD_TOKEN_PATH = "/home/orangepi/jars/persistent/discordToken.txt"; TWITCH_TOKEN_PATH = "/home/orangepi/jars/persistent/twitchOAuth.txt"; OPEN_AI_TOKEN_PATH = "/home/orangepi/jars/persistent/openAiToken.txt"; LIVE_ICON_PATH = "/home/orangepi/jars/persistent/Aspecticor_Live.png"; OFFLINE_ICON_PATH = "/home/orangepi/jars/persistent/Aspecticor_Offline.png"; COMMAND_LOG_PATH = "/home/orangepi/jars/AspectiBot/src/twitch_commands/commands.json"; BIRTHDAY_LOG_PATH = "/home/orangepi/jars/AspectiBot/src/discord_commands/birthdays.json"; THIS_FOLDER_PATH = "/home/orangepi/jars/AspectiBot/"; } catch (IOException e1) { LOG.error("loadConfig: IOException on loading config file!"); } finally { //load credentials loadCredentials(); } } public static void loadCredentials() { try { // get the files File discordToken = new File(DISCORD_TOKEN_PATH); File twitchToken = new File(TWITCH_TOKEN_PATH); File openAiToken = new File(OPEN_AI_TOKEN_PATH); // read the files // https://docs.oracle.com/javase/tutorial/essential/exceptions/tryResourceClose.html try ( BufferedReader br1 = new BufferedReader(new FileReader(discordToken)); BufferedReader br2 = new BufferedReader(new FileReader(twitchToken)); BufferedReader br3 = new BufferedReader(new FileReader(openAiToken)); ) { token = br1.readLine(); oAuth = br2.readLine(); opnAI = br3.readLine(); } } catch (Exception e) { LOG.error("loadCredentials: Authentication Failed!"); } } public static void registerTwitchCommands() { Map<String, TwitchCommand> commands = new HashMap<>(); commands.put("!emotes", new EmotesCommand()); commands.put("!leaderboards", new LeaderboardCommand()); commands.put("!lurk", new LurkCommand()); commands.put("!twitter", new TwitterCommand()); commands.put("!addcom", new LogAddCommand()); commands.put("!showcom", new LogShowCommand()); commands.put("!delcom", new LogDeleteCommand()); commands.put("!editcom", new LogEditCommand()); commands.put("!clip", new ClipCommand()); commands.put("!twitchemote", new TwitchEmoteCommand()); // executing commands twitchClient.getEventManager().onEvent(ChannelMessageEvent.class, event -> { String cmd = event.getMessage().toLowerCase().split(" ")[0]; TwitchCommand command; if ((command = commands.get(cmd)) != null) { twitchClient.getChat().sendMessage( ASPECTICOR, command.response(event), "", event.getMessageEvent().getMessageId().get() ); } }); } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((5934, 6153), 'net.dv8tion.jda.api.JDABuilder.createDefault'), ((5934, 6139), 'net.dv8tion.jda.api.JDABuilder.createDefault'), ((5934, 6059), 'net.dv8tion.jda.api.JDABuilder.createDefault'), ((5934, 6009), 'net.dv8tion.jda.api.JDABuilder.createDefault'), ((6595, 6795), 'com.github.twitch4j.TwitchClientBuilder.builder'), ((6595, 6781), 'com.github.twitch4j.TwitchClientBuilder.builder'), ((6595, 6752), 'com.github.twitch4j.TwitchClientBuilder.builder'), ((6595, 6718), 'com.github.twitch4j.TwitchClientBuilder.builder'), ((6595, 6691), 'com.github.twitch4j.TwitchClientBuilder.builder'), ((6595, 6652), 'com.github.twitch4j.TwitchClientBuilder.builder'), ((8647, 8783), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8647, 8765), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8647, 8740), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((8647, 8707), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.musiciantrainer.musiciantrainerproject.controller; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.musiciantrainer.musiciantrainerproject.dto.CreatedPlansViewModel; import com.musiciantrainer.musiciantrainerproject.dto.HomePageViewModel; import com.musiciantrainer.musiciantrainerproject.dto.MyPlanViewModel; import com.musiciantrainer.musiciantrainerproject.entity.*; import com.musiciantrainer.musiciantrainerproject.service.PieceService; import com.musiciantrainer.musiciantrainerproject.service.PlanPieceService; import com.musiciantrainer.musiciantrainerproject.service.PlanService; import com.musiciantrainer.musiciantrainerproject.service.UserService; import com.musiciantrainer.musiciantrainerproject.service.email.EmailService; import com.musiciantrainer.musiciantrainerproject.utilities.TrainingTimeUtil; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.security.core.Authentication; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.servlet.mvc.support.RedirectAttributes; import java.time.Duration; import java.time.LocalDate; import java.util.*; @Controller public class MainController { @Value("${openai.model}") private String aiModel; @Value("${openai.api.key}") private String apiKey; private UserService userService; private PieceService pieceService; private PlanService planService; private ObjectMapper objectMapper; private PlanPieceService planPieceService; private EmailService emailService; @Autowired public MainController(UserService userService, PieceService pieceService, PlanService planService, ObjectMapper objectMapper, PlanPieceService planPieceService, EmailService emailService) { this.userService = userService; this.pieceService = pieceService; this.planService = planService; this.objectMapper = objectMapper; this.planPieceService = planPieceService; this.emailService = emailService; } @GetMapping("/") public String showIndex(Model model, Authentication authentication) { if (authentication != null && authentication.isAuthenticated()) { String userEmail = authentication.getName(); // Get the email from principal User theUser = userService.findUserByEmail(userEmail); if (theUser != null) { List<Piece> pieces = pieceService.getPiecesByUserOrderedByPriorityAndDaysPassed(theUser); HomePageViewModel theHomePageViewModel = new HomePageViewModel(pieces); model.addAttribute("homePageViewModel", theHomePageViewModel); model.addAttribute("user", theUser); // Přidejte uživatele do modelu return "redirect:/home"; // This is a Thymeleaf template name } } // User is not logged in or something went wrong return "index"; } @GetMapping("/home") public String showHome(Model model, Authentication authentication) { if (authentication != null && authentication.isAuthenticated()) { String userEmail = authentication.getName(); // Get the email from principal User theUser = 
userService.findUserByEmail(userEmail); if (theUser != null) { List<Piece> pieces = pieceService.getPiecesByUserOrderedByPriorityAndDaysPassed(theUser); HomePageViewModel theHomePageViewModel = new HomePageViewModel(pieces); model.addAttribute("homePageViewModel", theHomePageViewModel); model.addAttribute("user", theUser); // Přidejte uživatele do modelu return "home"; // This is a Thymeleaf template name } } // User is not logged in or something went wrong return "redirect:/"; } @GetMapping("/myPlan") public String showMyPlan(@RequestParam("trainingTime") String trainingTime, Model model, Authentication authentication) { String userEmail = authentication.getName(); User theUser = userService.findUserByEmail(userEmail); // převedení času z hodin na minuty String convertedTime = getHoursAsMinutes(trainingTime); Plan savedPlan; OpenAiService service = null; // kontrola, jestli daný plán již nebyl dnes vygenerovaný na daný počet minut if (doesPlanExist(convertedTime)) { savedPlan = planService.getPlanByTotalMinutesAndDate(Integer.parseInt(convertedTime), LocalDate.now()); } else { // zalozeni objektu openai service = new OpenAiService(apiKey, Duration.ofSeconds(90)); // system prompt List<ChatMessage> messages = new ArrayList<>(); ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "I want you to act as my music learning planner. \n" + "You will help user to organize his daily routine plan of training music. \n" + "You will decide what is the most important to train today. \n" + "I will give you a list of pieces in JSON format. You will do user's schedule for the particular time.\n" + "The user will give you his time for that day in minutes and/or hours.\n" + "You will have priority in the list. \n" + "The bigger the value of priority, the bigger the priority, and a low value is a low priority. \n" + "A 0 priority is a piece without priority.\n" + "Important is also \"formattedLastTrainingDate\", it is the date when the user practiced the exercise the last time. \n" + "You can find the attribute \"numberOfDaysPassed\" which displays the number of days since the last training date. If the training time that user sets is 2 hours and more, include also some of the pieces that have not been trained for a long time even if they have no priority.\n" + "You can find there \"numberOfTimesTrained\" which shows the number of times the user trained that piece.\n" + "The goal is to not lose the memory of pieces. Each train session has to have 30 minutes (1 pomodoro) as a default time unless the particular piece's time is set differently, in the JSON should be \"time\", for example 20 or 15 minutes. In that case you do not have to set training time for particular piece for 30 minutes. \n" + "\n" + "Write in proper English.\n" + "\n" + "The output must be in JSON with time schedule included.\n" + "\n" + "This is example of requested JSON output:\n" + "{" + " \"planItems\" : [{" + "\"time\": 30," + "\"id\": 1" + "}," + " {" + "\"time\": 20," + "\"id\": 2" + "}] " + "}" + "Do not include \"details\" and nothing else (like ```json etc.) 
\n" + "Do not write any extra text in the end."); messages.add(systemMessage); //user prompt System.out.print("First Query: "); String userPrompt = pieceService.getPiecesDtoAsJsonString(theUser) + " My time for today is " + convertedTime + " minutes."; ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), userPrompt); messages.add(userMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model(aiModel) .messages(messages) .n(1) .maxTokens(256) .build(); ChatMessage responseMessage = service.createChatCompletion(chatCompletionRequest).getChoices().get(0).getMessage(); service.streamChatCompletion(chatCompletionRequest) .doOnError(Throwable::printStackTrace) .blockingForEach(System.out::println); // *** Process AI response *** // Deserealize AI reponse in the form of JSON into Java objects List<PlanItem> planItems = this.decodeJSON(responseMessage.getContent()); // 1. Create Plan instance Plan newPlan = createPlanInstance(convertedTime, theUser); // 2. Assign PlanItems to PlanPieces and Plan List<PlanPiece> planPieces = assignPlanItemsToPiecesAndPlan(planItems, newPlan); // Set the list of PlanPieces to the Plan newPlan.setPlanPieces(planPieces); // 3. Save the Plan to the database savedPlan = planService.addPlan(newPlan); // 4. Save the PlanPieces to the database for (PlanPiece planPiece : planPieces) { planPieceService.addPlanPiece(planPiece); } } MyPlanViewModel myPlanViewModel = new MyPlanViewModel(savedPlan); // Add the plan to the model model.addAttribute("myPlanViewModel", myPlanViewModel); model.addAttribute("user", theUser); if (service != null) { service.shutdownExecutor(); } return "myplan"; } private boolean doesPlanExist(String convertedTime) { return planService.getPlanByTotalMinutesAndDate(Integer.parseInt(convertedTime), LocalDate.now()) != null; } @GetMapping("/createdPlans") public String showCreatedPlans(Model model, Authentication authentication) { if (authentication != null && authentication.isAuthenticated()) { String userEmail = authentication.getName(); // Get the email from principal User theUser = userService.findUserByEmail(userEmail); if (theUser != null) { List<Plan> createdPlans = planService.getPlansByUserOrderedByTotalMinutes(theUser); CreatedPlansViewModel theCreatedPlansViewModel = new CreatedPlansViewModel(createdPlans); model.addAttribute("createdPlansViewModel", theCreatedPlansViewModel); model.addAttribute("user", theUser); // Přidejte uživatele do modelu return "created-plans"; // This is a Thymeleaf template name } } // Uživatel není přihlášen nebo se něco pokazilo return "redirect:/"; } @GetMapping("/deletePlan") public String deletePlan(@RequestParam("planId") Long planId, RedirectAttributes redirectAttributes) { try { // Delete the Plan based on the planId planService.deletePlan(planId); // Add a success message to be displayed on the redirected page redirectAttributes.addFlashAttribute("successDeletePlan", true); } catch (Exception e) { // Handle exceptions, e.g., if the plan does not exist // Add an error message to be displayed on the redirected page redirectAttributes.addFlashAttribute("error", "Failed to delete the plan."); } return "redirect:/createdPlans?recordSuccess"; // Redirect to the appropriate page } @GetMapping("/piecesToJson") public String piecesToJson(Model model, Authentication authentication) { String userEmail = authentication.getName(); User theUser = userService.findUserByEmail(userEmail); String piecesJson = pieceService.getPiecesDtoAndPieceLogsAsJsonString(theUser); 
model.addAttribute("piecesJson", piecesJson); return "piecesJson"; // a Thymeleaf template for displaying JSON } @GetMapping("/admin") public String showAdmin() { return "admin"; } // utility methods @ModelAttribute("trainingTimes") public List<String> getTrainingTimes() { // Return a list of available training times return Arrays.asList("0.5 hour", "1 hour", "1.5 hours", "2 hours", "2.5 hours", "3 hours", "3.5 hours", "4 hours", "4.5 hours", "5 hours", "5.5 hours", "6 hours"); } @ModelAttribute("hasPlansForToday") public boolean hasPlansForToday(Authentication authentication) { if (authentication != null && authentication.isAuthenticated()) { String userEmail = authentication.getName(); User theUser = userService.findUserByEmail(userEmail); if (theUser != null) { return !planService.getPlansByUserAndDate(theUser).isEmpty(); } } // Default to false if the user is not authenticated or any issue occurs. return false; } public String getHoursAsMinutes(String trainingTime) { return TrainingTimeUtil.convertHoursToMinutes(trainingTime); } @GetMapping("/sendEmail") public String sendTestEmail(Authentication authentication) { String userEmail = authentication.getName(); User theUser = userService.findUserByEmail(userEmail); // Replace these with actual email details String to = "astarin0998@gmail.com"; String subject = "Test Email"; String body = "This is a test email from Musician Trainer."; // Send the email emailService.sendNewMail(to, subject, body); return "redirect:/home"; // Redirect back to the home page after sending the email } @NotNull private List<PlanPiece> assignPlanItemsToPiecesAndPlan(List<PlanItem> planItems, Plan newPlan) { List<PlanPiece> planPieces = new ArrayList<>(); for (PlanItem planItem : planItems) { // Create a new PlanPiece for each PlanItem PlanPiece planPiece = new PlanPiece(); planPiece.setMinutes(planItem.getTime()); // Retrieve the Piece based on the PlanItem's ID Piece existingPiece = pieceService.getPieceById(planItem.getId()); // Set the Plan, Piece, and add PlanPiece to the list planPiece.setPlan(newPlan); planPiece.setPiece(existingPiece); planPieces.add(planPiece); } return planPieces; } @NotNull private static Plan createPlanInstance(String convertedTime, User theUser) { Plan newPlan = new Plan(); newPlan.setDate(LocalDate.now()); // Set the date to the current date newPlan.setTotalMinutes(Integer.parseInt(convertedTime)); newPlan.setUser(theUser); return newPlan; } private List<PlanItem> decodeJSON(String jsonString) { // print to check what AI generated System.out.println("Here the jsonString enters the function: " + jsonString); PlanItems thePlanItems; try { thePlanItems = objectMapper.readValue(jsonString, PlanItems.class); // print to check what objectMapper read and deserealized System.out.println("Here the objectMapper mapped the json to PlanItems class: " + thePlanItems.getPlanItems()); } catch (JsonProcessingException e) { throw new RuntimeException(e); } return thePlanItems.getPlanItems(); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((5460, 5490), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((8202, 8230), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
package podsofkon; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; import com.oracle.bmc.ailanguage.AIServiceLanguageClient; import com.oracle.bmc.ailanguage.model.DetectLanguageSentimentsDetails; import com.oracle.bmc.ailanguage.model.DetectLanguageSentimentsResult; import com.oracle.bmc.ailanguage.model.SentimentAspect; import com.oracle.bmc.ailanguage.requests.DetectLanguageSentimentsRequest; import com.oracle.bmc.ailanguage.responses.DetectLanguageSentimentsResponse; import com.oracle.bmc.aivision.AIServiceVisionClient; import com.oracle.bmc.aivision.model.*; import com.oracle.bmc.aivision.requests.AnalyzeImageRequest; import com.oracle.bmc.aivision.responses.AnalyzeImageResponse; import com.oracle.bmc.auth.AuthenticationDetailsProvider; import com.oracle.bmc.auth.ConfigFileAuthenticationDetailsProvider; import com.oracle.bmc.auth.InstancePrincipalsAuthenticationDetailsProvider; import com.oracle.bmc.model.BmcException; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import lombok.Data; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.bind.annotation.*; import org.springframework.web.multipart.MultipartFile; import java.io.IOException; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.json.JSONArray; import org.json.JSONObject; import podsofkon.oci.AuthDetailsProviderFactory; @RestController @RequestMapping("/tellastory") public class WriteAStoryAboutAPictureAndGiveItsSentiments { private static Logger log = LoggerFactory.getLogger(WriteAStoryAboutAPictureAndGiveItsSentiments.class); @GetMapping("/form") public String form() throws Exception { return " <html><form method=\"post\" action=\"/tellastory/tellastory\" enctype=\"multipart/form-data\">\n" + " Select an image file to create story from...\n" + " <input type=\"file\" name=\"file\" accept=\"image/*\">\n" + " <br>" + "<br> Some additional options..." 
+ "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\"an adventure\" checked >an adventure" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\"romantic\">romantic" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\"a dystopia\">a dystopia" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\"a documentary\">a documentary" + "<br><input type=\"radio\" id=\"genopts\" name=\"genopts\" value=\"an anime movie\">an anime movie" + " <br><input type=\"submit\" value=\"Send Request to Vision AI\">\n" + " </form></html>"; } @PostMapping("/tellastory") public String tellastory(@RequestParam("file") MultipartFile file , @RequestParam("genopts") String genopts) throws Exception { log.info("got image file, now analyze, file = " + file.getOriginalFilename()); log.info("got image file, now analyze, genopts = " + genopts); String objectDetectionResults = processImage(file.getBytes(), true); ImageAnalysis imageAnalysis = parseJsonToImageAnalysis(objectDetectionResults); List<ImageObject> images = imageAnalysis.getImageObjects(); String fullText = ""; for (ImageObject image : images) fullText += image.getName() + ", "; log.info("fullText = " + fullText); String generatedstory = chat("using strong negative and positive sentiments, " + "write a story that is " + genopts + " and includes " + fullText ); return "Here is the story. " + generatedstory + " . Here is the sentiment analysis of the story. " + sentiments(generatedstory) ; } String chat(String textcontent) throws Exception { OpenAiService service = new OpenAiService("sk-nMVoZasdfb2HgV", Duration.ofSeconds(60)); System.out.println("Streaming chat completion... textcontent:" + textcontent); final List<ChatMessage> messages = new ArrayList<>(); final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), textcontent); messages.add(systemMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(300) .logitBias(new HashMap<>()) .build(); String replyString = ""; String content; for (ChatCompletionChoice choice : service.createChatCompletion(chatCompletionRequest).getChoices()) { content = choice.getMessage().getContent(); replyString += (content == null ? 
" " : content); } service.shutdownExecutor(); return replyString; } String processImage(byte[] bytes, boolean isConfigFileAuth) throws Exception { AIServiceVisionClient aiServiceVisionClient = AIServiceVisionClient.builder().build(AuthDetailsProviderFactory.getAuthenticationDetailsProvider()); // AIServiceVisionClient aiServiceVisionClient = // new AIServiceVisionClient(AuthDetailsProviderFactory.getAuthenticationDetailsProvider()); aiServiceVisionClient.setRegion(REGION); List<ImageFeature> features = new ArrayList<>(); ImageFeature classifyFeature = ImageClassificationFeature.builder() .maxResults(10) .build(); ImageFeature detectImageFeature = ImageObjectDetectionFeature.builder() .maxResults(10) .build(); ImageFeature textDetectImageFeature = ImageTextDetectionFeature.builder().build(); features.add(classifyFeature); features.add(detectImageFeature); features.add(textDetectImageFeature); InlineImageDetails inlineImageDetails = InlineImageDetails.builder() .data(bytes) .build(); AnalyzeImageDetails analyzeImageDetails = AnalyzeImageDetails.builder() .image(inlineImageDetails) .features(features) .build(); AnalyzeImageRequest request = AnalyzeImageRequest.builder() .analyzeImageDetails(analyzeImageDetails) .build(); AnalyzeImageResponse response = aiServiceVisionClient.analyzeImage(request); ObjectMapper mapper = new ObjectMapper(); mapper.setFilterProvider(new SimpleFilterProvider().setFailOnUnknownId(false)); String json = mapper.writeValueAsString(response.getAnalyzeImageResult()); System.out.println("AnalyzeImage Result"); System.out.println(json); return json; } private static final String REGION = "us-phoenix-1"; public String doProcessImage(byte[] bytes, boolean isConfigFileAuth) throws Exception { AIServiceVisionClient aiServiceVisionClient; AuthenticationDetailsProvider provider; if (isConfigFileAuth) { provider = AuthDetailsProviderFactory.getAuthenticationDetailsProvider(); aiServiceVisionClient = new AIServiceVisionClient(provider); } else { aiServiceVisionClient = new AIServiceVisionClient(InstancePrincipalsAuthenticationDetailsProvider.builder().build()); } aiServiceVisionClient.setRegion(REGION); // Read image file from resources folder // if (bytes==null) bytes = Files.readAllBytes(Paths.get("src/resources/cat.jpg")); List<ImageFeature> features = new ArrayList<>(); ImageFeature classifyFeature = ImageClassificationFeature.builder() .maxResults(10) .build(); ImageFeature detectImageFeature = ImageObjectDetectionFeature.builder() .maxResults(10) .build(); ImageFeature textDetectImageFeature = ImageTextDetectionFeature.builder().build(); features.add(classifyFeature); features.add(detectImageFeature); features.add(textDetectImageFeature); InlineImageDetails inlineImageDetails = InlineImageDetails.builder() .data(bytes) .build(); AnalyzeImageDetails analyzeImageDetails = AnalyzeImageDetails.builder() .image(inlineImageDetails) .features(features) // .compartmentId(COMPARTMENT_ID) //uncomment this line if using boat user .build(); AnalyzeImageRequest request = AnalyzeImageRequest.builder() .analyzeImageDetails(analyzeImageDetails) .build(); AnalyzeImageResponse response = aiServiceVisionClient.analyzeImage(request); ObjectMapper mapper = new ObjectMapper(); mapper.setFilterProvider(new SimpleFilterProvider().setFailOnUnknownId(false)); String json = mapper.writeValueAsString(response.getAnalyzeImageResult()); System.out.println("AnalyzeImage Result"); System.out.println(json); return json; } @Data class ImageObject { private String name; private double confidence; private 
BoundingPolygon boundingPolygon; } @Data class BoundingPolygon { private List<Point> normalizedVertices; } @Data class Point { private double x; private double y; public Point(double x, double y) { this.x = x; this.y = y; } } @Data class Label { private String name; private double confidence; } @Data class OntologyClass { private String name; private List<String> parentNames; private List<String> synonymNames; } @Data class ImageText { private List<Word> words; private List<Line> lines; } @Data class Word { private String text; private double confidence; private BoundingPolygon boundingPolygon; } @Data class Line { private String text; private double confidence; private BoundingPolygon boundingPolygon; private List<Integer> wordIndexes; } @Data class ImageAnalysis { private List<ImageObject> imageObjects; private List<Label> labels; private List<OntologyClass> ontologyClasses; private ImageText imageText; private String imageClassificationModelVersion; private String objectDetectionModelVersion; private String textDetectionModelVersion; private List<String> errors; } private ImageAnalysis parseJsonToImageAnalysis(String jsonString) { JSONObject json = new JSONObject(jsonString); JSONArray imageObjectsArray = json.getJSONArray("imageObjects"); List<ImageObject> imageObjects = new ArrayList<>(); for (int i = 0; i < imageObjectsArray.length(); i++) { JSONObject imageObjectJson = imageObjectsArray.getJSONObject(i); ImageObject imageObject = new ImageObject(); imageObject.setName(imageObjectJson.getString("name")); imageObject.setConfidence(imageObjectJson.getDouble("confidence")); JSONObject boundingPolygonJson = imageObjectJson.getJSONObject("boundingPolygon"); JSONArray normalizedVerticesArray = boundingPolygonJson.getJSONArray("normalizedVertices"); List<Point> normalizedVertices = new ArrayList<>(); for (int j = 0; j < normalizedVerticesArray.length(); j++) { JSONObject vertexJson = normalizedVerticesArray.getJSONObject(j); Point vertex = new Point(vertexJson.getDouble("x"), vertexJson.getDouble("y")); normalizedVertices.add(vertex); } BoundingPolygon boundingPolygon = new BoundingPolygon(); boundingPolygon.setNormalizedVertices(normalizedVertices); imageObject.setBoundingPolygon(boundingPolygon); imageObjects.add(imageObject); } JSONArray labelsArray = json.getJSONArray("labels"); List<Label> labels = new ArrayList<>(); for (int i = 0; i < labelsArray.length(); i++) { JSONObject labelJson = labelsArray.getJSONObject(i); Label label = new Label(); label.setName(labelJson.getString("name")); label.setConfidence(labelJson.getDouble("confidence")); labels.add(label); } JSONArray ontologyClassesArray = json.getJSONArray("ontologyClasses"); List<OntologyClass> ontologyClasses = new ArrayList<>(); for (int i = 0; i < ontologyClassesArray.length(); i++) { JSONObject ontologyClassJson = ontologyClassesArray.getJSONObject(i); OntologyClass ontologyClass = new OntologyClass(); ontologyClass.setName(ontologyClassJson.getString("name")); JSONArray parentNamesArray = ontologyClassJson.getJSONArray("parentNames"); List<String> parentNames = new ArrayList<>(); for (int j = 0; j < parentNamesArray.length(); j++) { parentNames.add(parentNamesArray.getString(j)); } ontologyClass.setParentNames(parentNames); ontologyClasses.add(ontologyClass); } JSONObject imageTextJson = json.getJSONObject("imageText"); JSONArray wordsArray = imageTextJson.getJSONArray("words"); List<Word> words = new ArrayList<>(); for (int i = 0; i < wordsArray.length(); i++) { JSONObject wordJson = wordsArray.getJSONObject(i); Word word 
= new Word(); word.setText(wordJson.getString("text")); word.setConfidence(wordJson.getDouble("confidence")); JSONObject boundingPolygonJson = wordJson.getJSONObject("boundingPolygon"); JSONArray normalizedVerticesArray = boundingPolygonJson.getJSONArray("normalizedVertices"); List<Point> normalizedVertices = new ArrayList<>(); for (int j = 0; j < normalizedVerticesArray.length(); j++) { JSONObject vertexJson = normalizedVerticesArray.getJSONObject(j); Point vertex = new Point(vertexJson.getDouble("x"), vertexJson.getDouble("y")); normalizedVertices.add(vertex); } BoundingPolygon boundingPolygon = new BoundingPolygon(); boundingPolygon.setNormalizedVertices(normalizedVertices); word.setBoundingPolygon(boundingPolygon); words.add(word); } JSONArray linesArray = imageTextJson.getJSONArray("lines"); List<Line> lines = new ArrayList<>(); for (int i = 0; i < linesArray.length(); i++) { JSONObject lineJson = linesArray.getJSONObject(i); Line line = new Line(); line.setText(lineJson.getString("text")); line.setConfidence(lineJson.getDouble("confidence")); JSONObject boundingPolygonJson = lineJson.getJSONObject("boundingPolygon"); JSONArray normalizedVerticesArray = boundingPolygonJson.getJSONArray("normalizedVertices"); List<Point> normalizedVertices = new ArrayList<>(); for (int j = 0; j < normalizedVerticesArray.length(); j++) { JSONObject vertexJson = normalizedVerticesArray.getJSONObject(j); Point vertex = new Point(vertexJson.getDouble("x"), vertexJson.getDouble("y")); normalizedVertices.add(vertex); } BoundingPolygon boundingPolygon = new BoundingPolygon(); boundingPolygon.setNormalizedVertices(normalizedVertices); line.setBoundingPolygon(boundingPolygon); JSONArray wordIndexesArray = lineJson.getJSONArray("wordIndexes"); List<Integer> wordIndexes = new ArrayList<>(); for (int j = 0; j < wordIndexesArray.length(); j++) { wordIndexes.add(wordIndexesArray.getInt(j)); } line.setWordIndexes(wordIndexes); lines.add(line); } String imageClassificationModelVersion = json.getString("imageClassificationModelVersion"); String objectDetectionModelVersion = json.getString("objectDetectionModelVersion"); String textDetectionModelVersion = json.getString("textDetectionModelVersion"); List<String> errors = new ArrayList<>(); JSONArray errorsArray = json.getJSONArray("errors"); for (int i = 0; i < errorsArray.length(); i++) { errors.add(errorsArray.getString(i)); } ImageText imageText = new ImageText(); imageText.setWords(words); imageText.setLines(lines); ImageAnalysis imageAnalysis = new ImageAnalysis(); imageAnalysis.setImageObjects(imageObjects); imageAnalysis.setLabels(labels); imageAnalysis.setOntologyClasses(ontologyClasses); imageAnalysis.setImageText(imageText); imageAnalysis.setImageClassificationModelVersion(imageClassificationModelVersion); imageAnalysis.setObjectDetectionModelVersion(objectDetectionModelVersion); imageAnalysis.setTextDetectionModelVersion(textDetectionModelVersion); imageAnalysis.setErrors(errors); return imageAnalysis; } public String sentiments(String textcontent) throws IOException { log.info("analyze text for sentiment:" + textcontent); AuthenticationDetailsProvider provider = AuthDetailsProviderFactory.getAuthenticationDetailsProvider(); AIServiceLanguageClient languageClient = AIServiceLanguageClient.builder().build(provider); languageClient.setRegion(REGION); DetectLanguageSentimentsDetails details = DetectLanguageSentimentsDetails.builder() .text(textcontent) .build(); DetectLanguageSentimentsRequest detectLanguageSentimentsRequest = 
DetectLanguageSentimentsRequest.builder() .detectLanguageSentimentsDetails(details) .build(); DetectLanguageSentimentsResponse response = null; try { response = languageClient.detectLanguageSentiments(detectLanguageSentimentsRequest); } catch (BmcException e) { System.err.println("Failed to detect language and sentiments: " + e.getMessage()); } DetectLanguageSentimentsResult detectLanguageSentimentsResult = response.getDetectLanguageSentimentsResult(); String sentimentReturn = ""; for (SentimentAspect aspect : detectLanguageSentimentsResult.getAspects()) { sentimentReturn += ", sentiment:" + aspect.getSentiment(); sentimentReturn += " text:" + aspect.getText(); sentimentReturn += " "; } log.info(sentimentReturn); return sentimentReturn; } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value" ]
[((4646, 4676), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((5553, 5653), 'com.oracle.bmc.aivision.AIServiceVisionClient.builder'), ((6771, 6883), 'com.oracle.bmc.aivision.requests.AnalyzeImageRequest.builder'), ((6771, 6858), 'com.oracle.bmc.aivision.requests.AnalyzeImageRequest.builder'), ((7826, 7891), 'com.oracle.bmc.auth.InstancePrincipalsAuthenticationDetailsProvider.builder'), ((9089, 9201), 'com.oracle.bmc.aivision.requests.AnalyzeImageRequest.builder'), ((9089, 9176), 'com.oracle.bmc.aivision.requests.AnalyzeImageRequest.builder'), ((18188, 18237), 'com.oracle.bmc.ailanguage.AIServiceLanguageClient.builder'), ((18347, 18464), 'com.oracle.bmc.ailanguage.model.DetectLanguageSentimentsDetails.builder'), ((18347, 18431), 'com.oracle.bmc.ailanguage.model.DetectLanguageSentimentsDetails.builder'), ((18556, 18696), 'com.oracle.bmc.ailanguage.requests.DetectLanguageSentimentsRequest.builder'), ((18556, 18663), 'com.oracle.bmc.ailanguage.requests.DetectLanguageSentimentsRequest.builder')]
package br.com.danilo.softway_inc.infrastructure.openAI; import br.com.danilo.softway_inc.domain.DataShippingCalculations; import br.com.danilo.softway_inc.domain.service.ShippingCalculator; import com.fasterxml.jackson.databind.ObjectMapper; import com.theokanning.openai.completion.chat.ChatFunction; import com.theokanning.openai.completion.chat.ChatFunctionCall; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.messages.Message; import com.theokanning.openai.messages.MessageRequest; import com.theokanning.openai.runs.Run; import com.theokanning.openai.runs.RunCreateRequest; import com.theokanning.openai.runs.SubmitToolOutputRequestItem; import com.theokanning.openai.runs.SubmitToolOutputsRequest; import com.theokanning.openai.service.FunctionExecutor; import com.theokanning.openai.service.OpenAiService; import com.theokanning.openai.threads.ThreadRequest; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; @Component public class OpenAIClient { //! --------------------------------------------- Attributes --------------------------------------------------- private final String tokenKey; // --> Chave da API private final String assistantID; // --> Chave do Assistente private String threadID; // --> ID da Thread private final OpenAiService openAiService; // --> Serviço OpenAI private final String model = "gpt-3.5-turbo-1106"; // --> Modelo do GPT private final ShippingCalculator shippingCalculator; // --> Calculadora de Frete //! --------------------------------------------- Constructor -------------------------------------------------- public OpenAIClient(@Value("${app.openai.api.key}") String tokenKey, @Value("${app.openai.assistant-id}") String assistantID, ShippingCalculator shippingCalculator) { this.tokenKey = tokenKey; this.openAiService = new OpenAiService(tokenKey, Duration.ofSeconds(60)); this.assistantID = assistantID; this.shippingCalculator = shippingCalculator; } //! 
--------------------------------------------- Methods ------------------------------------------------------ //%% -- -- -- --> Enviar Requisição para o Chat public String sendRequestToChatCompletion(ChatCompletionRequestData dados) { var messageRequest = MessageRequest // --> Cria uma requisição de mensagem (Representa a mensagem enviada pelo usuário) .builder() .role(ChatMessageRole.USER.value()) .content(dados.promptUser()) .build(); if (this.threadID == null) { // --> Se não houver uma ThreadID criada var threadRequest = ThreadRequest // --> Cria uma requisição de Thread (Representa a conversa entre o usuário e o assistente) .builder() .messages(Arrays.asList(messageRequest)) .build(); var thread = openAiService.createThread(threadRequest); this.threadID = thread.getId(); } else { openAiService.createMessage(this.threadID, messageRequest); // --> Cria uma mensagem na Thread caso ela não exista } //%% -- -- -- --> Cria uma requisição de Run (Representa uma execução do modelo GPT-3.5-Turbo) var runRequest = RunCreateRequest .builder() .model(this.model) .assistantId(assistantID) .build(); // --> Cria uma Run na API da OpenAI var run = openAiService .createRun(threadID, runRequest); //%% -- -- -- --> Verifica se a Run foi concluída e se alguma função precisará ser chamada var concluded = false; // --> Variável para verificar se a Run foi concluída var needCallFunction = false; // --> Variável para verificar se a função de cálculo de frete precisa ser chamada try { while (!concluded && !needCallFunction) { // --> Enquanto a Run não estiver concluída e a função de cálculo de frete não precisar ser chamada Thread.sleep(1000 * 10); // --> Espera 10 segundos run = openAiService.retrieveRun(threadID, run.getId()); // --> Atualiza a Run concluded = run.getStatus().equalsIgnoreCase("completed"); // --> Verifica se a Run foi concluída needCallFunction = run.getRequiredAction() != null; // --> Verifica se a função de cálculo de frete precisa ser chamada } } catch (InterruptedException e) { throw new RuntimeException(e); } //%% -- -- -- --> Verifica se a Função de Cálculo de Frete precisa ser chamada if (needCallFunction) { // --> Se a função de cálculo de frete precisar ser chamada var shippingPrice = callFunctionShipping(run); // --> Chama a função de cálculo de frete var submitRequest = SubmitToolOutputsRequest // --> Cria uma requisição de submissão de saída da ferramenta .builder() .toolOutputs(Arrays.asList( new SubmitToolOutputRequestItem( run .getRequiredAction() .getSubmitToolOutputs() .getToolCalls() .get(0) .getId(), shippingPrice) )) .build(); openAiService.submitToolOutputs( threadID, run.getId(), submitRequest); // --> Submete a requisição de submissão de saída da ferramenta //%% -- -- -- --> Verifica se a Run foi concluída var numberAttempts = 0; // --> Número de tentativas try { while (!concluded) { // --> Enquanto a Run não estiver concluída Thread.sleep(1000 * 10); // --> Espera 10 segundos run = openAiService.retrieveRun(threadID, run.getId()); // --> Atualiza a Run concluded = run.getStatus().equalsIgnoreCase("completed"); // --> Verifica se a Run foi concluída if (numberAttempts++ == 5) { throw new RuntimeException("API OpenAI não respondeu, tente mais tarde | OpenAI API unresponsive, try later"); } } } catch (InterruptedException e) { throw new RuntimeException(e); } //%% -- -- -- --> Lista de Mensagens da Thread var listMessages = openAiService.listMessages(threadID); // --> Lista as mensagens da Thread return listMessages .getData() // --> Pega os dados da lista de mensagens 
.stream() // --> Transforma em Stream .sorted(Comparator.comparingInt( Message::getCreatedAt).reversed()) // --> Ordena as mensagens pela data de criação de forma reversa .findFirst().get().getContent().get(0).getText().getValue() // --> Pega a primeira mensagem da lista .replaceAll("\\\u3010.*?\\\u3011", ""); // --> Remove os Caracteres do final da mensagem } //%% -- -- -- --> Chamar a Função de Cálculo de Frete private Object callFunctionShipping (Run run){ try { var function = run.getRequiredAction().getSubmitToolOutputs().getToolCalls().get(0).getFunction(); var functionShippingCalculator = ChatFunction.builder() .name("ShippingCalculator") .executor(DataShippingCalculations.class, calculations -> ShippingCalculator.computeShippingCost(calculations)) .build(); var functionExecutor = new FunctionExecutor(Arrays.asList(functionShippingCalculator)); var functionCall = new ChatFunctionCall(function.getName(), new ObjectMapper().readTree(function.getArguments())); return functionExecutor.execute(functionCall).toString(); } catch (Exception e) { throw new RuntimeException(e); } } //%% -- -- -- --> Carregar Histórico de Conversa public List<String> uploadMessageHistory () { var messages = new ArrayList<String>(); if (this.threadID != null) { messages.addAll( openAiService .listMessages(this.threadID) .getData() // --> Pega os dados da lista de mensagens .stream() // --> Transforma em Stream .sorted(Comparator.comparingInt(Message::getCreatedAt)) // --> Ordena as mensagens pela data de criação .map(mapMessage -> mapMessage.getContent().get(0).getText().getValue()) // --> Pega a mensagem e adiciona na lista .collect(Collectors.toList()) // --> Transforma em lista ); } return messages; } //%% -- -- -- --> Limpar Thread public void clearThread () { if (this.threadID != null) { openAiService.deleteThread(this.threadID); // --> Deleta a Thread this.threadID = null; } }
[ "com.theokanning.openai.completion.chat.ChatFunction.builder", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((2866, 2894), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((8013, 8099), 'java.util.Comparator.comparingInt'), ((8770, 9013), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((8770, 8980), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((8770, 8844), 'com.theokanning.openai.completion.chat.ChatFunction.builder')]
package com.mca.mindmelter.adapters;

import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;

import androidx.annotation.NonNull;
import androidx.recyclerview.widget.DiffUtil;
import androidx.recyclerview.widget.ListAdapter;
import androidx.recyclerview.widget.RecyclerView;

import com.mca.mindmelter.R;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;

public class ChatAdapter extends ListAdapter<ChatMessage, ChatAdapter.ChatViewHolder> {
    private static final int VIEW_TYPE_USER = 0;
    private static final int VIEW_TYPE_ASSISTANT = 1;

    public ChatAdapter() {
        super(DIFF_CALLBACK);
    }

    static final DiffUtil.ItemCallback<ChatMessage> DIFF_CALLBACK = new DiffUtil.ItemCallback<ChatMessage>() {
        @Override
        public boolean areItemsTheSame(@NonNull ChatMessage oldChatMessage, @NonNull ChatMessage newChatMessage) {
            // Compare role and content for simplicity
            return oldChatMessage.getRole().equals(newChatMessage.getRole())
                    && oldChatMessage.getContent().equals(newChatMessage.getContent());
        }

        @Override
        public boolean areContentsTheSame(@NonNull ChatMessage oldChatMessage, @NonNull ChatMessage newChatMessage) {
            return oldChatMessage.getRole().equals(newChatMessage.getRole())
                    && oldChatMessage.getContent().equals(newChatMessage.getContent());
        }
    };

    @Override
    public int getItemViewType(int position) {
        ChatMessage message = getItem(position);
        return message.getRole().equals(ChatMessageRole.USER.value()) ? VIEW_TYPE_USER : VIEW_TYPE_ASSISTANT;
    }

    @NonNull
    @Override
    public ChatViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
        View view;
        if (viewType == VIEW_TYPE_USER) {
            view = LayoutInflater.from(parent.getContext()).inflate(R.layout.fragment_chat_message_item_user, parent, false);
        } else { // viewType == VIEW_TYPE_ASSISTANT
            view = LayoutInflater.from(parent.getContext()).inflate(R.layout.fragment_chat_message_item_assistant, parent, false);
        }
        return new ChatViewHolder(view);
    }

    @Override
    public void onBindViewHolder(@NonNull ChatViewHolder holder, int position) {
        ChatMessage chatMessage = getItem(position);
        holder.bind(chatMessage);
    }

    static class ChatViewHolder extends RecyclerView.ViewHolder {
        TextView chatMessageTextView;

        public ChatViewHolder(@NonNull View itemView) {
            super(itemView);
            chatMessageTextView = itemView.findViewById(R.id.message_content);
        }

        void bind(ChatMessage chatMessage) {
            chatMessageTextView.setText(chatMessage.getContent());
        }
    }
}
[ "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((1851, 1879), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((2131, 2236), 'android.view.LayoutInflater.from'), ((2311, 2421), 'android.view.LayoutInflater.from')]
package com.theokanning.openai.service; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.PropertyNamingStrategy; import com.fasterxml.jackson.databind.node.TextNode; import com.lianziyou.bot.constant.CommonConst; import com.lianziyou.bot.model.SysConfig; import com.lianziyou.bot.utils.sys.RedisUtil; import com.theokanning.openai.DeleteResult; import com.theokanning.openai.OpenAiError; import com.theokanning.openai.OpenAiHttpException; import com.theokanning.openai.audio.CreateTranscriptionRequest; import com.theokanning.openai.audio.CreateTranslationRequest; import com.theokanning.openai.audio.TranscriptionResult; import com.theokanning.openai.audio.TranslationResult; import com.theokanning.openai.client.OpenAiApi; import com.theokanning.openai.completion.CompletionChunk; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.completion.CompletionResult; import com.theokanning.openai.completion.chat.ChatCompletionChunk; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionResult; import com.theokanning.openai.completion.chat.ChatFunction; import com.theokanning.openai.completion.chat.ChatFunctionCall; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.edit.EditRequest; import com.theokanning.openai.edit.EditResult; import com.theokanning.openai.embedding.EmbeddingRequest; import com.theokanning.openai.embedding.EmbeddingResult; import com.theokanning.openai.file.File; import com.theokanning.openai.finetune.FineTuneEvent; import com.theokanning.openai.finetune.FineTuneRequest; import com.theokanning.openai.finetune.FineTuneResult; import com.theokanning.openai.image.CreateImageEditRequest; import com.theokanning.openai.image.CreateImageRequest; import com.theokanning.openai.image.CreateImageVariationRequest; import com.theokanning.openai.image.ImageResult; import com.theokanning.openai.model.Model; import com.theokanning.openai.moderation.ModerationRequest; import com.theokanning.openai.moderation.ModerationResult; import io.reactivex.BackpressureStrategy; import io.reactivex.Flowable; import io.reactivex.Single; import java.io.IOException; import java.net.InetSocketAddress; import java.net.Proxy; import java.net.Proxy.Type; import java.time.Duration; import java.util.List; import java.util.Objects; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import okhttp3.ConnectionPool; import okhttp3.MediaType; import okhttp3.MultipartBody; import okhttp3.OkHttpClient; import okhttp3.OkHttpClient.Builder; import okhttp3.RequestBody; import okhttp3.ResponseBody; import retrofit2.Call; import retrofit2.HttpException; import retrofit2.Retrofit; import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory; import retrofit2.converter.jackson.JacksonConverterFactory; public class OpenAiService { private static final String BASE_URL = "https://api.openai.com/"; private static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(10); private static final ObjectMapper mapper = defaultObjectMapper(); private final OpenAiApi api; private final ExecutorService executorService; /** * Creates a new OpenAiService that wraps OpenAiApi * * @param token OpenAi token string "sk-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" */ 
public OpenAiService(final String token) { this(token, DEFAULT_TIMEOUT); } /** * Creates a new OpenAiService that wraps OpenAiApi * * @param token OpenAi token string "sk-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" * @param timeout http read timeout, Duration.ZERO means no timeout */ public OpenAiService(final String token, final Duration timeout) { ObjectMapper mapper = defaultObjectMapper(); OkHttpClient client = defaultClient(token, timeout); Retrofit retrofit = defaultRetrofit(client, mapper); this.api = retrofit.create(OpenAiApi.class); this.executorService = client.dispatcher().executorService(); } /** * Creates a new OpenAiService that wraps OpenAiApi. Use this if you need more customization, but use OpenAiService(controller, executorService) if you use * streaming and want to shut down instantly * * @param api OpenAiApi instance to use for all methods */ public OpenAiService(final OpenAiApi api) { this.api = api; this.executorService = null; } /** * Creates a new OpenAiService that wraps OpenAiApi. The ExecutorService must be the one you get from the client you created the controller with otherwise * shutdownExecutor() won't work. * <p> * Use this if you need more customization. * * @param api OpenAiApi instance to use for all methods * @param executorService the ExecutorService from client.dispatcher().executorService() */ public OpenAiService(final OpenAiApi api, final ExecutorService executorService) { this.api = api; this.executorService = executorService; } /** * Calls the Open AI controller, returns the response, and parses error messages if the request fails */ public static <T> T execute(Single<T> apiCall) { try { return apiCall.blockingGet(); } catch (HttpException e) { try { if (e.response() == null || e.response().errorBody() == null) { throw e; } String errorBody = e.response().errorBody().string(); OpenAiError error = mapper.readValue(errorBody, OpenAiError.class); throw new OpenAiHttpException(error, e, e.code()); } catch (IOException ex) { // couldn't parse OpenAI error throw e; } } } /** * Calls the Open AI controller and returns a Flowable of SSE for streaming omitting the last message. * * @param apiCall The controller call */ public static Flowable<SSE> stream(Call<ResponseBody> apiCall) { return stream(apiCall, false); } /** * Calls the Open AI controller and returns a Flowable of SSE for streaming. * * @param apiCall The controller call * @param emitDone If true the last message ([DONE]) is emitted */ public static Flowable<SSE> stream(Call<ResponseBody> apiCall, boolean emitDone) { return Flowable.create(emitter -> apiCall.enqueue(new ResponseBodyCallback(emitter, emitDone)), BackpressureStrategy.BUFFER); } /** * Calls the Open AI controller and returns a Flowable of type T for streaming omitting the last message. 
* * @param apiCall The controller call * @param cl Class of type T to return */ public static <T> Flowable<T> stream(Call<ResponseBody> apiCall, Class<T> cl) { return stream(apiCall).map(sse -> mapper.readValue(sse.getData(), cl)); } public static OpenAiApi buildApi(String token, Duration timeout) { ObjectMapper mapper = defaultObjectMapper(); OkHttpClient client = defaultClient(token, timeout); Retrofit retrofit = defaultRetrofit(client, mapper); return retrofit.create(OpenAiApi.class); } public static ObjectMapper defaultObjectMapper() { ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE); mapper.addMixIn(ChatFunction.class, ChatFunctionMixIn.class); mapper.addMixIn(ChatCompletionRequest.class, ChatCompletionRequestMixIn.class); mapper.addMixIn(ChatFunctionCall.class, ChatFunctionCallMixIn.class); return mapper; } public static OkHttpClient defaultClient(String token, Duration timeout) { Builder builder = new Builder() .addInterceptor(new AuthenticationInterceptor(token)) .connectionPool(new ConnectionPool(5, 1, TimeUnit.SECONDS)) .readTimeout(timeout.toMillis(), TimeUnit.MILLISECONDS); SysConfig sysConfig = RedisUtil.getCacheObject(CommonConst.SYS_CONFIG); if (null != sysConfig.getIsOpenProxy() && sysConfig.getIsOpenProxy() == 1) { final Proxy proxy = new Proxy(Type.HTTP, new InetSocketAddress(sysConfig.getProxyIp(), 1080)); builder.proxy(proxy); } return builder.build(); } public static Retrofit defaultRetrofit(OkHttpClient client, ObjectMapper mapper) { return new Retrofit.Builder() .baseUrl(BASE_URL) .client(client) .addConverterFactory(JacksonConverterFactory.create(mapper)) .addCallAdapterFactory(RxJava2CallAdapterFactory.create()) .build(); } public List<Model> listModels() { return execute(api.listModels()).data; } public Model getModel(String modelId) { return execute(api.getModel(modelId)); } public CompletionResult createCompletion(CompletionRequest request) { return execute(api.createCompletion(request)); } public Flowable<CompletionChunk> streamCompletion(CompletionRequest request) { request.setStream(true); return stream(api.createCompletionStream(request), CompletionChunk.class); } public ChatCompletionResult createChatCompletion(ChatCompletionRequest request) { return execute(api.createChatCompletion(request)); } public Flowable<ChatCompletionChunk> streamChatCompletion(ChatCompletionRequest request) { request.setStream(true); return stream(api.createChatCompletionStream(request), ChatCompletionChunk.class); } public EditResult createEdit(EditRequest request) { return execute(api.createEdit(request)); } public EmbeddingResult createEmbeddings(EmbeddingRequest request) { return execute(api.createEmbeddings(request)); } public List<File> listFiles() { return execute(api.listFiles()).data; } public File uploadFile(String purpose, String filepath) { java.io.File file = new java.io.File(filepath); RequestBody purposeBody = RequestBody.create(okhttp3.MultipartBody.FORM, purpose); RequestBody fileBody = RequestBody.create(MediaType.parse("text"), file); MultipartBody.Part body = MultipartBody.Part.createFormData("file", filepath, fileBody); return execute(api.uploadFile(purposeBody, body)); } public DeleteResult deleteFile(String fileId) { return execute(api.deleteFile(fileId)); } public File retrieveFile(String fileId) { return execute(api.retrieveFile(fileId)); } public 
FineTuneResult createFineTune(FineTuneRequest request) { return execute(api.createFineTune(request)); } public CompletionResult createFineTuneCompletion(CompletionRequest request) { return execute(api.createFineTuneCompletion(request)); } public List<FineTuneResult> listFineTunes() { return execute(api.listFineTunes()).data; } public FineTuneResult retrieveFineTune(String fineTuneId) { return execute(api.retrieveFineTune(fineTuneId)); } public FineTuneResult cancelFineTune(String fineTuneId) { return execute(api.cancelFineTune(fineTuneId)); } public List<FineTuneEvent> listFineTuneEvents(String fineTuneId) { return execute(api.listFineTuneEvents(fineTuneId)).data; } public DeleteResult deleteFineTune(String fineTuneId) { return execute(api.deleteFineTune(fineTuneId)); } public ImageResult createImage(CreateImageRequest request) { return execute(api.createImage(request)); } public ImageResult createImageEdit(CreateImageEditRequest request, String imagePath, String maskPath) { java.io.File image = new java.io.File(imagePath); java.io.File mask = null; if (maskPath != null) { mask = new java.io.File(maskPath); } return createImageEdit(request, image, mask); } public ImageResult createImageEdit(CreateImageEditRequest request, java.io.File image, java.io.File mask) { RequestBody imageBody = RequestBody.create(MediaType.parse("image"), image); MultipartBody.Builder builder = new MultipartBody.Builder() .setType(MediaType.get("multipart/form-data")) .addFormDataPart("prompt", request.getPrompt()) .addFormDataPart("size", request.getSize()) .addFormDataPart("response_format", request.getResponseFormat()) .addFormDataPart("image", "image", imageBody); if (request.getN() != null) { builder.addFormDataPart("n", request.getN().toString()); } if (mask != null) { RequestBody maskBody = RequestBody.create(MediaType.parse("image"), mask); builder.addFormDataPart("mask", "mask", maskBody); } return execute(api.createImageEdit(builder.build())); } public ImageResult createImageVariation(CreateImageVariationRequest request, String imagePath) { java.io.File image = new java.io.File(imagePath); return createImageVariation(request, image); } public ImageResult createImageVariation(CreateImageVariationRequest request, java.io.File image) { RequestBody imageBody = RequestBody.create(MediaType.parse("image"), image); MultipartBody.Builder builder = new MultipartBody.Builder() .setType(MediaType.get("multipart/form-data")) .addFormDataPart("size", request.getSize()) .addFormDataPart("response_format", request.getResponseFormat()) .addFormDataPart("image", "image", imageBody); if (request.getN() != null) { builder.addFormDataPart("n", request.getN().toString()); } return execute(api.createImageVariation(builder.build())); } public TranscriptionResult createTranscription(CreateTranscriptionRequest request, String audioPath) { java.io.File audio = new java.io.File(audioPath); return createTranscription(request, audio); } public TranscriptionResult createTranscription(CreateTranscriptionRequest request, java.io.File audio) { RequestBody audioBody = RequestBody.create(MediaType.parse("audio"), audio); MultipartBody.Builder builder = new MultipartBody.Builder() .setType(MediaType.get("multipart/form-data")) .addFormDataPart("model", request.getModel()) .addFormDataPart("file", audio.getName(), audioBody); if (request.getPrompt() != null) { builder.addFormDataPart("prompt", request.getPrompt()); } if (request.getResponseFormat() != null) { builder.addFormDataPart("response_format", request.getResponseFormat()); } if 
(request.getTemperature() != null) { builder.addFormDataPart("temperature", request.getTemperature().toString()); } if (request.getLanguage() != null) { builder.addFormDataPart("language", request.getLanguage()); } return execute(api.createTranscription(builder.build())); } public TranslationResult createTranslation(CreateTranslationRequest request, String audioPath) { java.io.File audio = new java.io.File(audioPath); return createTranslation(request, audio); } public TranslationResult createTranslation(CreateTranslationRequest request, java.io.File audio) { RequestBody audioBody = RequestBody.create(MediaType.parse("audio"), audio); MultipartBody.Builder builder = new MultipartBody.Builder() .setType(MediaType.get("multipart/form-data")) .addFormDataPart("model", request.getModel()) .addFormDataPart("file", audio.getName(), audioBody); if (request.getPrompt() != null) { builder.addFormDataPart("prompt", request.getPrompt()); } if (request.getResponseFormat() != null) { builder.addFormDataPart("response_format", request.getResponseFormat()); } if (request.getTemperature() != null) { builder.addFormDataPart("temperature", request.getTemperature().toString()); } return execute(api.createTranslation(builder.build())); } public ModerationResult createModeration(ModerationRequest request) { return execute(api.createModeration(request)); } /** * Shuts down the OkHttp ExecutorService. The default behaviour of OkHttp's ExecutorService (ConnectionPool) is to shut down after an idle timeout of 60s. * Call this method to shut down the ExecutorService immediately. */ public void shutdownExecutor() { Objects.requireNonNull(this.executorService, "executorService must be set in order to shut down"); this.executorService.shutdown(); } public Flowable<ChatMessageAccumulator> mapStreamToAccumulator(Flowable<ChatCompletionChunk> flowable) { ChatFunctionCall functionCall = new ChatFunctionCall(null, null); ChatMessage accumulatedMessage = new ChatMessage(ChatMessageRole.ASSISTANT.value(), null); return flowable.map(chunk -> { ChatMessage messageChunk = chunk.getChoices().get(0).getMessage(); if (messageChunk.getFunctionCall() != null) { if (messageChunk.getFunctionCall().getName() != null) { String namePart = messageChunk.getFunctionCall().getName(); functionCall.setName((functionCall.getName() == null ? "" : functionCall.getName()) + namePart); } if (messageChunk.getFunctionCall().getArguments() != null) { String argumentsPart = messageChunk.getFunctionCall().getArguments() == null ? "" : messageChunk.getFunctionCall().getArguments().asText(); functionCall.setArguments(new TextNode((functionCall.getArguments() == null ? "" : functionCall.getArguments().asText()) + argumentsPart)); } accumulatedMessage.setFunctionCall(functionCall); } else { accumulatedMessage.setContent( (accumulatedMessage.getContent() == null ? "" : accumulatedMessage.getContent()) + (messageChunk.getContent() == null ? "" : messageChunk.getContent())); } if (chunk.getChoices().get(0).getFinishReason() != null) { // last if (functionCall.getArguments() != null) { functionCall.setArguments(mapper.readTree(functionCall.getArguments().asText())); accumulatedMessage.setFunctionCall(functionCall); } } return new ChatMessageAccumulator(messageChunk, accumulatedMessage); }); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value" ]
[((10744, 10805), 'okhttp3.MultipartBody.Part.createFormData'), ((17579, 17612), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value')]
package com.devteam.languagelearning.service; import com.devteam.languagelearning.config.OpenAiConfig; import com.devteam.languagelearning.model.RootWord; import com.devteam.languagelearning.model.Word; import com.theokanning.openai.completion.CompletionChoice; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import org.springframework.stereotype.Service; import java.util.ArrayList; import java.util.List; @Service public class OpenAiApiService { static OpenAiService service = new OpenAiService(OpenAiConfig.getOpenAiKey()); // No language mentioned public String getPartOfSpeech(Word word) { List<ChatMessage> messages = new ArrayList<>(); messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "Find the part of speech of the given word within the provided context sentence.")); messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "You should answer with one of the following: verb, noun, adjective, adverb, preposition, pronoun, conjunction, interjection, other.")); messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "If the word could be either a noun or a verb based on the context, provide the part of speech that best fits the given sentence. Provide only one word as your answer.")); messages.add(new ChatMessage(ChatMessageRole.USER.value(), "Find the part of speech of the word " + word.getWord() + " in the context sentence '" + word.getContextSentence() + "'.")); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(50) .build(); List<ChatCompletionChoice> result = service.createChatCompletion(chatCompletionRequest).getChoices(); System.out.println(result); return result.get(0).getMessage().getContent(); } public RootWord getRootVerb(Word word) { List<ChatMessage> messages = new ArrayList<>(); messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "You find the infinitive form of the given verb, when provided the verb and the context sentence it is found in. You answer in one word only.")); messages.add(new ChatMessage(ChatMessageRole.USER.value(), "Give me the base form of the verb 'verdween' in the sentence 'Alles verdween: Sue, Johan, de keuken.'")); messages.add(new ChatMessage(ChatMessageRole.ASSISTANT.value(), "verdwijnen")); messages.add(new ChatMessage(ChatMessageRole.USER.value(), "Give me the base form of the verb " + word.getWord() + " in the sentence '" + word.getContextSentence() + "'.")); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(50) .build(); List<ChatCompletionChoice> result = service.createChatCompletion(chatCompletionRequest).getChoices(); System.out.println(result); return new RootWord(result.get(0).getMessage().getContent()); } public RootWord getRootNoun(Word word) { List<ChatMessage> messages = new ArrayList<>(); messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "You find the base form of the given noun, when provided the noun and the context sentence it is found in. 
You answer in one word followed by a space and then the correct article (form of the) in brackets.")); messages.add(new ChatMessage(ChatMessageRole.USER.value(), "Give me the base form of the noun 'steegje' in the sentence 'Ze wees met haar stok naar het steegje naast de bioscoop.'")); messages.add(new ChatMessage(ChatMessageRole.ASSISTANT.value(), "steeg (de)")); messages.add(new ChatMessage(ChatMessageRole.USER.value(), "Give me the base form of the noun " + word.getWord() + " in the sentence '" + word.getContextSentence() + "'.")); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(50) .build(); List<ChatCompletionChoice> result = service.createChatCompletion(chatCompletionRequest).getChoices(); System.out.println(result); return new RootWord(result.get(0).getMessage().getContent()); } public RootWord getRoot(Word word, String partOfSpeech) { List<ChatMessage> messages = new ArrayList<>(); messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "You find the base form of the given " + partOfSpeech +", when provided the " + partOfSpeech + " and the context sentence it is found in. You answer in one word.")); messages.add(new ChatMessage(ChatMessageRole.USER.value(), "Give me the base form of the " + partOfSpeech + " " + word.getWord() + " in the sentence '" + word.getContextSentence() + "'.")); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(50) .build(); List<ChatCompletionChoice> result = service.createChatCompletion(chatCompletionRequest).getChoices(); System.out.println(result); return new RootWord(result.get(0).getMessage().getContent()); } public RootWord getRootWordDefinition(RootWord rootWord) { List<ChatMessage> messages = new ArrayList<>(); messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "Provided a word in " + rootWord.getLanguage() + ", you give the English translation or translations. If there are multiple possible meanings, you give all of the most common ones, separated with commas. You answer in as few words as possible.")); messages.add(new ChatMessage(ChatMessageRole.USER.value(), "Translate the word " + rootWord.getWord() + " (" + rootWord.getPartOfSpeech() + ").")); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(50) .build(); List<ChatCompletionChoice> result = service.createChatCompletion(chatCompletionRequest).getChoices(); System.out.println(result); rootWord.setDefinitionInEnglish(result.get(0).getMessage().getContent()); return rootWord; } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1029, 1059), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1183, 1213), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1389, 1419), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1630, 1658), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1839, 2024), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1839, 1999), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1839, 1968), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1839, 1946), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1839, 1910), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2373, 2403), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2588, 2616), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((2762, 2795), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((2850, 2878), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3049, 3234), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3049, 3209), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3049, 3178), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3049, 3156), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3049, 3120), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3597, 3627), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((3876, 3904), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((4068, 4101), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((4156, 4184), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((4355, 4540), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4355, 4515), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4355, 4484), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4355, 4462), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4355, 4426), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((4920, 4950), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((5155, 5183), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((5370, 5555), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5370, 5530), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5370, 5499), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5370, 5477), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5370, 5441), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5936, 5966), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((6253, 6281), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((6426, 6611), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6426, 6586), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6426, 6555), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6426, 6533), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((6426, 6497), 
'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.ramesh.openai;

import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;

/***
 * This project demonstrates a simple single prompt -
 * a good starting point to get to know the Open AI APIs
 ***/
class SingleChatCompletion {
    public static void main(String... args) {
        // Set the Open AI Token & Model
        String token = "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";
        String model = "gpt-3.5-turbo";

        // service handle for calling OpenAI APIs
        OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30));

        // set the prompt
        // change the prompt and run again and again
        String prompt = "President of India?";

        // create the chat message with the prompt
        final List<ChatMessage> messages = new ArrayList<>();
        final ChatMessage assistantMessage = new ChatMessage(ChatMessageRole.USER.value(), prompt);
        messages.add(assistantMessage);

        // create the chat gpt chat completion request
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder()
                .model(model)
                .messages(messages)
                .n(1)
                .temperature(.1)
                .maxTokens(50)
                .logitBias(new HashMap<>())
                .build();

        System.out.println("Prompt=" + prompt);
        System.out.print("ChatGPT response=");

        // send the chat gpt request and get response
        service.createChatCompletion(chatCompletionRequest).getChoices().forEach((c) -> {
            System.out.println(c.getMessage().getContent());
        });
        service.shutdownExecutor();
    }
}
[ "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1107, 1135), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1278, 1523), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1278, 1499), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1278, 1456), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1278, 1426), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1278, 1394), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1278, 1373), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1278, 1338), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.musiciantrainer.musiciantrainerproject.controller; import com.musiciantrainer.musiciantrainerproject.dto.CreatedReflectionsViewModel; import com.musiciantrainer.musiciantrainerproject.dto.ReflectionViewModel; import com.musiciantrainer.musiciantrainerproject.dto.WebUser; import com.musiciantrainer.musiciantrainerproject.entity.Reflection; import com.musiciantrainer.musiciantrainerproject.entity.User; import com.musiciantrainer.musiciantrainerproject.service.PieceService; import com.musiciantrainer.musiciantrainerproject.service.ReflectionService; import com.musiciantrainer.musiciantrainerproject.service.UserService; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.security.core.Authentication; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.servlet.mvc.support.RedirectAttributes; import java.time.Duration; import java.time.LocalDate; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.List; @Controller @RequestMapping("/reflection") public class ReflectionController { @Value("${openai.model}") private String aiModel; @Value("${openai.api.key}") private String apiKey; private UserService userService; private PieceService pieceService; private ReflectionService reflectionService; @Autowired public ReflectionController(UserService userService, PieceService pieceService, ReflectionService reflectionService) { this.userService = userService; this.pieceService = pieceService; this.reflectionService = reflectionService; } @GetMapping("/showCreateReflectionForm") public String showCreateReflectionForm(Model theModel, Authentication authentication) { // Get the currently authenticated user's email (username in your case) String userEmail = authentication.getName(); // Get the user from the service based on the email User theUser = userService.findUserByEmail(userEmail); // Set user in the model to prepopulate the form theModel.addAttribute("user", theUser); // Add an empty WebUser object to the model for the form theModel.addAttribute("webUser", new WebUser()); // Add an empty Reflection object to the model for the form theModel.addAttribute("reflection", new Reflection()); // Send over to our form return "create-reflection"; } // Generate reflection using AI @PostMapping("/processReflectionForm") public String processReflectionForm(@RequestParam("dateFrom") String stringDateFrom, @RequestParam("dateTo") String stringDateTo, Model model, Authentication authentication) { String userEmail = authentication.getName(); User theUser = userService.findUserByEmail(userEmail); // Date range LocalDate parsedDateFrom = LocalDate.parse(stringDateFrom, DateTimeFormatter.ofPattern("dd.MM.yyyy")); LocalDate parsedDateTo = LocalDate.parse(stringDateTo, DateTimeFormatter.ofPattern("dd.MM.yyyy")); Reflection savedReflection; OpenAiService service = null; // check if the Reflection in the date range exists in the database or 
not if (doesReflectionExist(parsedDateFrom, parsedDateTo, theUser)) { savedReflection = reflectionService.getReflectionByUserAndDateFromAndDateTo(theUser, parsedDateFrom, parsedDateTo); } else { // zalozeni objektu openai service = new OpenAiService(apiKey, Duration.ofSeconds(180)); // system prompt List<ChatMessage> messages = new ArrayList<>(); ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You will act as a music learning mentor. \n" + "You will help user to reflect on his progress through his/her music learning journey. \n" + "You will receive a list of pieces and their pieceLogs, which are records when the user trained particular piece and might even wrote a note. This list will be in JSON format. You will do user's reflection for the particular time range.\n" + "The user will give you his/her time range for that reflection in this format: from dd.MM.yyyy to dd.MM.yyyy. Therefore you will use only the pieces and their piecelogs created within the time range.\n" + "You will also have priority in the list. \n" + "The bigger the value of priority, the bigger the priority, and the lower value is, the lower priority. \n" + "A 0 priority is a piece without priority.\n" + "Important is also \"formattedLastTrainingDate\", it is the date when the user practiced the exercise the last time. \n" + "You can find the attribute \"numberOfDaysPassed\" which displays the number of days since the last training date. \n" + "You can find there \"numberOfTimesTrained\" which shows the number of times the user trained that piece.\n" + "The goal is to create a motivational reflection of the user's progress using his/her notes, number of times the user trained, number of days since the user trained, pieces' priorities and other useful data to create a statistic summary. \n" + "\n" + "The motivational reflection should also contain what you think the user should do better and try to positively motivate the user. Convey it in a polite way.\n" + "\n" + "The output must be a String text. I want the result in Czech language, in a meaningful way, in structured HTML with titles, paragraphs, strong element and a few emojis. Also use some ideas which could help the user with his/her progress. And do not write HTML words like tag or html or header etc.\n"); messages.add(systemMessage); //user prompt System.out.print("First Query: "); String userPrompt = pieceService.getPiecesDtoAndPieceLogsAsJsonStringInDateRange(theUser, parsedDateFrom, parsedDateTo) + " The selected time range is from " + stringDateFrom + " to " + stringDateTo; ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), userPrompt); messages.add(userMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model(aiModel) .messages(messages) .n(1) .maxTokens(2000) .build(); ChatMessage responseMessage = service.createChatCompletion(chatCompletionRequest).getChoices().get(0).getMessage(); service.streamChatCompletion(chatCompletionRequest) .doOnError(Throwable::printStackTrace) .blockingForEach(System.out::println); // *** Process AI response *** // 1. Create Reflection instance Reflection newReflection = new Reflection(parsedDateFrom, parsedDateTo, responseMessage.getContent(), theUser); // 2. 
Save the Reflection to the database savedReflection = reflectionService.saveReflection(newReflection); } ReflectionViewModel reflectionViewModel = new ReflectionViewModel(savedReflection); // Add the Reflection to the model model.addAttribute("reflectionViewModel", reflectionViewModel); model.addAttribute("user", theUser); if (service != null) { service.shutdownExecutor(); } return "reflection"; } private boolean doesReflectionExist(LocalDate dateFrom, LocalDate dateTo, User theUser) { if (theUser == null) { return false; // If user is null, immediately return false } // Attempt to fetch the reflection from the database Reflection reflection = reflectionService.getReflectionByUserAndDateFromAndDateTo(theUser, dateFrom, dateTo); // Return true if reflection is not null, otherwise false return reflection != null; } @GetMapping("/createdReflections") public String showCreatedReflections(Model model, Authentication authentication) { if (authentication != null && authentication.isAuthenticated()) { String userEmail = authentication.getName(); // Get the email from principal User theUser = userService.findUserByEmail(userEmail); if (theUser != null) { List<Reflection> createdReflections = reflectionService.getReflectionsByUserOrderedByDateFromAndDateTo(theUser); CreatedReflectionsViewModel theCreatedReflectionsViewModel = new CreatedReflectionsViewModel(createdReflections); model.addAttribute("createdReflectionsViewModel", theCreatedReflectionsViewModel); model.addAttribute("user", theUser); // Add user to the model return "reflection-list"; // This is a Thymeleaf template name } } // The user is not logged in or something else went wrong return "redirect:/home"; } @GetMapping("/showReflection") public String showReflection(@RequestParam("reflectionId") Long reflectionId, Model model, Authentication authentication) { if (authentication != null && authentication.isAuthenticated()) { String userEmail = authentication.getName(); // Get the email from principal User theUser = userService.findUserByEmail(userEmail); if (theUser != null) { Reflection theReflection = reflectionService.getReflectionById(reflectionId); ReflectionViewModel theReflectionViewModel = new ReflectionViewModel(theReflection); model.addAttribute("reflectionViewModel", theReflectionViewModel); model.addAttribute("user", theUser); // Add user to the model return "reflection"; // This is a Thymeleaf template name } } // The user is not logged in or something else went wrong return "redirect:/reflection/createdReflections"; } @GetMapping("/deleteReflection") public String deleteReflection(@RequestParam("reflectionId") Long reflectionId, RedirectAttributes redirectAttributes) { try { // Delete the Reflection based on the reflectionId reflectionService.deleteReflection(reflectionId); // Add a success message to be displayed on the redirected page redirectAttributes.addFlashAttribute("successDeleteReflection", true); } catch (Exception e) { // Handle exceptions, e.g., if the Reflection does not exist // Add an error message to be displayed on the redirected page redirectAttributes.addFlashAttribute("error", "Failed to delete the reflection."); } return "redirect:/reflection/createdReflections?recordSuccess"; // Redirect to the appropriate page } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((4388, 4418), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((6956, 6984), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
package ssodamproject.server.GPT.dto;

import com.theokanning.openai.completion.CompletionRequest;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;

@Getter
@NoArgsConstructor
@AllArgsConstructor
public class GPTCompletionRequest {

    private String model;
    private String prompt;
    private Integer maxToken;

    public static CompletionRequest of(GPTCompletionRequest restRequest) {
        return CompletionRequest.builder()
                .model(restRequest.getModel())
                .prompt(restRequest.getPrompt())
                .maxTokens(restRequest.getMaxToken())
                .build();
    }
}
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((450, 652), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((450, 627), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((450, 573), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((450, 524), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package sample.issue.tokens;

import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatCompletionResult;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;
import com.theokanning.openai.service.OpenAiService;

import java.time.Duration;
import java.util.List;

public class CountAndAsk {

    private final String OPENAI_TOKEN = System.getenv("OPENAI_TOKEN");
    private final String OPENAI_MODEL = System.getenv("OPENAI_MODEL");

    public ChatCompletionResult ask(String message, int size) {
        var service = new OpenAiService(OPENAI_TOKEN, Duration.ofSeconds(30));
        var chatCompletionRequest = ChatCompletionRequest
                .builder()
                .messages(List.of(new ChatMessage(ChatMessageRole.USER.value(), message)))
                .model(OPENAI_MODEL)
                .maxTokens(4097 - size)
                .n(1) // make sure of single result
                .build();
        return service.createChatCompletion(chatCompletionRequest);
    }

    public ChatCompletionResult ask(String message) {
        var service = new OpenAiService(OPENAI_TOKEN, Duration.ofSeconds(30));
        var chatCompletionRequest = ChatCompletionRequest
                .builder()
                .messages(List.of(new ChatMessage(ChatMessageRole.USER.value(), message)))
                .model(OPENAI_MODEL)
                // .maxTokens(4097) // we'll need to come back to it later
                .n(1) // make sure of single result
                .build();
        return service.createChatCompletion(chatCompletionRequest);
    }
}
[ "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((844, 872), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((1384, 1412), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value')]
package br.com.alura.screenmatch.service;

import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;

public class ConsultaChatGPT {
    public static String obterTraducao(String texto) {
        OpenAiService service = new OpenAiService(System.getenv("OPENAI_APIKEY"));

        CompletionRequest requisicao = CompletionRequest.builder()
                .model("gpt-3.5-turbo-instruct")
                .prompt("traduza para o português o texto: " + texto)
                .maxTokens(1000)
                .temperature(0.7)
                .build();

        var resposta = service.createCompletion(requisicao);
        return resposta.getChoices().get(0).getText();
    }
}
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((366, 605), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((366, 580), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((366, 546), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((366, 513), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((366, 442), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package link.locutus.discord.gpt; import com.theokanning.openai.service.OpenAiService; import com.theokanning.openai.moderation.Moderation; import com.theokanning.openai.moderation.ModerationRequest; import link.locutus.discord.config.Settings; import link.locutus.discord.util.FileUtil; import link.locutus.discord.util.io.PagePriority; import org.json.JSONArray; import org.json.JSONObject; import java.io.IOException; import java.net.HttpURLConnection; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.function.Consumer; public class GPTModerator implements IModerator{ private final OpenAiService service; public GPTModerator(OpenAiService service) { this.service = service; } public List<Moderation> checkModeration(String input) { return service.createModeration(ModerationRequest.builder().input(input).build()).getResults(); } @Override public List<ModerationResult> moderate(List<String> inputs) { List<ModerationResult> results = new ArrayList<>(); JSONObject response = checkModeration(inputs); if (response.has("error")) { ModerationResult errorResult = new ModerationResult(); errorResult.setError(true); errorResult.setMessage(response.getString("error")); results.add(errorResult); } else { JSONArray resultsArray = response.getJSONArray("results"); for (int i = 0; i < resultsArray.length(); i++) { JSONObject resultObject = resultsArray.getJSONObject(i); ModerationResult result = new ModerationResult(); result.setFlagged(resultObject.getBoolean("flagged")); if (result.isFlagged()) { JSONObject categoriesObject = resultObject.getJSONObject("categories"); Set<String> flaggedCategories = new HashSet<>(); for (String category : categoriesObject.keySet()) { if (categoriesObject.getBoolean(category)) { flaggedCategories.add(category); } } result.setFlaggedCategories(flaggedCategories); JSONObject categoryScoresObject = resultObject.getJSONObject("category_scores"); Map<String, Double> categoryScores = new HashMap<>(); for (String category : categoryScoresObject.keySet()) { categoryScores.put(category, categoryScoresObject.getDouble(category)); } result.setScores(categoryScores); } results.add(result); } } return results; } public JSONObject checkModeration(List<String> inputs) { String url = "https://api.openai.com/v1/moderations"; String apiKey = Settings.INSTANCE.ARTIFICIAL_INTELLIGENCE.OPENAI.API_KEY; Map<String, List<String>> arguments = new HashMap<>(); arguments.put("input", inputs); Consumer<HttpURLConnection> apply = connection -> { connection.setRequestProperty("Authorization", "Bearer " + apiKey); connection.setRequestProperty("Content-Type", "application/json"); }; JSONObject argsJs = new JSONObject(arguments); byte[] dataBinary = argsJs.toString().getBytes(StandardCharsets.UTF_8); CompletableFuture<String> result = FileUtil.readStringFromURL(PagePriority.GPT_MODERATE, url, dataBinary, FileUtil.RequestType.POST, null, apply); String jsonStr = FileUtil.get(result); // parse to JSONObject (org.json) return new JSONObject(jsonStr); } }
[ "com.theokanning.openai.moderation.ModerationRequest.builder" ]
[((1010, 1058), 'com.theokanning.openai.moderation.ModerationRequest.builder'), ((1010, 1050), 'com.theokanning.openai.moderation.ModerationRequest.builder')]
package com.trabalhodetc.lucas_marley_walter; import java.io.BufferedWriter; import java.io.FileWriter; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.concurrent.CompletableFuture; import javax.swing.JFileChooser; import com.theokanning.openai.completion.chat.ChatCompletionChunk; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import io.reactivex.Flowable; import javafx.fxml.FXML; import javafx.scene.control.Alert; import javafx.scene.control.Button; import javafx.scene.control.TextArea; import javafx.scene.image.ImageView; public class ChatGpt { @FXML Button sendButton; @FXML TextArea askText; private static final String apiKey = ""; CompletableFuture<String> futureResult = new CompletableFuture<>(); private String path; String response = ""; StringBuilder buffer = new StringBuilder(); public void setPath(String path){ this.path = path; } public void request(String request) { OpenAiService service = new OpenAiService(apiKey); final List<ChatMessage> messages = new ArrayList<>(); final ChatMessage systemMessage = new ChatMessage( ChatMessageRole.SYSTEM.value(), "o usuario vai descrever um automato e voce vai cria-lo e escrever um arquivo jff deve ser criado com jflap 6.4, não dê mais nenhuma informação adicional apenas o arquivo, sem ``` no comeco nem no final"); final ChatMessage secondMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(),"exemplo: crie uma afn com alfabeto 0 e 1 que sempre termina em 1. a resposta deve ser:<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><!--Created with JFLAP 6.4.--><structure>&#13;<type>fa</type>&#13;<automaton>&#13;<!--The list of states.-->&#13;<state id=\"0\" name=\"q0\">&#13;<x>92.0</x>&#13;<y>200.0</y>&#13;<initial/>&#13;</state>&#13;<state id=\"1\" name=\"q1\">&#13;<x>234.0</x>&#13;<y>194.0</y>&#13;<final/>&#13;</state>&#13;<!--The list of transitions.-->&#13;<transition>&#13;<from>0</from>&#13;<to>0</to>&#13;<read>0</read>&#13;</transition>&#13;<transition>&#13;<from>0</from>&#13;<to>0</to>&#13;<read>1</read>&#13;</transition>&#13;<transition>&#13;<from>0</from>&#13;<to>1</to>&#13;<read>1</read>&#13;</transition>&#13;</automaton>&#13;</structure> sem os tres tracinhos, e sem mensagens extras"); final ChatMessage userMessage = new ChatMessage("user", request); messages.add(systemMessage); messages.add(secondMessage); messages.add(userMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder().model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(1000) .logitBias(new HashMap<>()) .build(); Flowable<ChatCompletionChunk> flowableResult = service.streamChatCompletion(chatCompletionRequest) .doOnError(Throwable::printStackTrace); flowableResult.subscribe(chunk -> { chunk.getChoices().forEach(choice -> { String result = choice.getMessage().getContent(); if (result != null) { buffer.append(result); System.out.print(result); } }); }, Throwable::printStackTrace, () -> { String finalResult = buffer.toString(); futureResult.complete(finalResult); }); service.shutdownExecutor(); } public void sendMessage(){ String request = askText.getText(); request(request); JFileChooser chooser = new JFileChooser(); chooser.setDialogTitle("Salvar em:"); chooser.showSaveDialog(chooser); path = chooser.getSelectedFile().getAbsolutePath(); saveFile(); Alert alert = new 
Alert(Alert.AlertType.INFORMATION); alert.setTitle("JFlap volume 2"); alert.setContentText("Your jff file was saved."); alert.setHeaderText("Your automaton is ready, try open it!"); alert.setGraphic(new ImageView(this.getClass().getResource("images/logoIcon.png").toString())); alert.showAndWait(); } public void saveFile(){ try ( BufferedWriter w = new BufferedWriter(new FileWriter(path + ".jff"))) { w.write(futureResult.get()); } catch (Exception e) { System.out.println("massa"); } } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1407, 1437), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((1707, 1737), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2821, 3015), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2821, 2994), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2821, 2954), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2821, 2925), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2821, 2907), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2821, 2875), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.cyster.sherpa.impl.conversation; import java.util.ArrayList; import java.util.List; import java.util.function.Function; import java.util.stream.Collectors; import com.cyster.sherpa.impl.advisor.ChatFunctionToolset; import com.cyster.sherpa.impl.advisor.Tool; import com.cyster.sherpa.impl.advisor.Toolset; import com.cyster.sherpa.service.conversation.Conversation; import com.cyster.sherpa.service.conversation.Message; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionRequest.ChatCompletionRequestFunctionCall; import com.theokanning.openai.completion.chat.ChatFunctionCall; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; public class TooledChatConversation implements Conversation { private final String model = "gpt-3.5-turbo-0613"; private OpenAiService openAiService; private List<Message> messages; private Toolset.Builder<Void> toolsetBuilder; public TooledChatConversation(OpenAiService openAiService) { this.openAiService = openAiService; this.messages = new ArrayList<Message>(); this.toolsetBuilder = new Toolset.Builder<Void>(); } @Override public TooledChatConversation addMessage(String content) { this.messages.add(new Message(content)); return this; } public TooledChatConversation addUserMessage(String content) { this.messages.add(new Message(content)); return this; } public TooledChatConversation addSystemMessage(String content) { this.messages.add(new Message(Message.Type.SYSTEM, content)); return this; } public TooledChatConversation addAiMessage(String content) { this.messages.add(new Message(Message.Type.AI, content)); return this; } public <T> TooledChatConversation addTool(String name, String description, Class<T> parameterClass, Function<T, Object> executor) { var tool = new ChatToolPojo<T>(name, description, parameterClass, executor); return this.addTool(tool); } public <T> TooledChatConversation addTool(Tool<T, Void> tool) { this.toolsetBuilder.addTool(tool); return this; } @Override public Message respond() { Message response = null; while (response == null) { var chatMessages = new ArrayList<ChatMessage>(); for (var message : this.messages) { switch (message.getType()) { case SYSTEM: chatMessages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), message.getContent())); break; case AI: chatMessages.add(new ChatMessage(ChatMessageRole.ASSISTANT.value(), message.getContent())); break; case USER: chatMessages.add(new ChatMessage(ChatMessageRole.USER.value(), message.getContent())); break; case FUNCTION_CALL: chatMessages.add(new ChatMessage(ChatMessageRole.ASSISTANT.value(), message.getContent(), "get_weather")); case FUNCTION_RESULT: chatMessages.add(new ChatMessage(ChatMessageRole.FUNCTION.value(), message.getContent(), "get_weather")); break; default: // ignore } } Toolset<Void> toolset = this.toolsetBuilder.create(); var chatFunctionToolset = new ChatFunctionToolset<Void>(toolset); var chatCompletionRequest = ChatCompletionRequest.builder() .model(model) .messages(chatMessages) .functions(chatFunctionToolset.getFunctions()) .functionCall(new ChatCompletionRequestFunctionCall("auto")) .maxTokens(1000) .build(); var chatResponse = this.openAiService.createChatCompletion(chatCompletionRequest); var choices = chatResponse.getChoices(); if (choices.size() > 1) { messages.add(new Message(Message.Type.INFO, "Multiple responses (ignored, only taking 1st response)")); } 
var choice = choices.get(0); switch (choice.getFinishReason()) { case "stop": var messageContent = choice.getMessage().getContent(); response = new Message(Message.Type.AI, messageContent); messages.add(response); break; case "length": messages.add(new Message(Message.Type.ERROR, "Token Limit Exceeded")); break; case "content_filter": messages.add(new Message(Message.Type.ERROR, "Content Filtered")); break; case "function_call": ChatFunctionCall functionCall = choice.getMessage().getFunctionCall(); if (functionCall == null) { messages.add(new Message(Message.Type.ERROR, "Function call specified, but not found")); } messages.add(new Message(Message.Type.FUNCTION_CALL, functionCall.getName() + "(" + functionCall .getArguments() + ")")); ChatMessage functionResponseMessage = chatFunctionToolset.call(functionCall); messages.add(new Message(Message.Type.FUNCTION_RESULT, functionResponseMessage.getContent())); break; default: messages.add(new Message(Message.Type.ERROR, "Unexpected finish reason: " + choice.getFinishReason())); } } return response; } private static class ChatToolPojo<T> implements Tool<T, Void> { private String name; private String description; private Class<T> parameterClass; private Function<T, Object> executor; public ChatToolPojo(String name, String description, Class<T> parameterClass, Function<T, Object> executor) { this.name = name; this.description = description; this.parameterClass = parameterClass; this.executor = executor; } public String getName() { return this.name; } @Override public String getDescription() { return this.description; } @Override public Class<T> getParameterClass() { return this.parameterClass; } @SuppressWarnings("unchecked") @Override public Object execute(Object parameters, Void context) { return this.executor.apply((T)parameters); } } @Override public List<Message> getMessages() { return messages.stream() // .filter(message -> message.getType() == Message.Type.AI || message.getType() // == Message.Type.USER) .collect(Collectors.toList()); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder", "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value" ]
[((2713, 2743), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2874, 2907), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((3040, 3068), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3210, 3243), 'com.theokanning.openai.completion.chat.ChatMessageRole.ASSISTANT.value'), ((3399, 3431), 'com.theokanning.openai.completion.chat.ChatMessageRole.FUNCTION.value'), ((3809, 4108), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3809, 4083), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3809, 4050), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3809, 3973), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3809, 3910), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3809, 3870), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.challenge.jornadamilhasapi.services;

import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

@Service
public class ChatService {

    @Value("${openai.api.key}")
    private String openaiApiKey;

    public String gerarTexto(String nomeDestino) {
        OpenAiService service = new OpenAiService(openaiApiKey);

        CompletionRequest requisicao = CompletionRequest.builder()
                .prompt("Faça um resumo sobre " + nomeDestino + " enfatizando o porque este lugar é incrível. "
                        + "Utilize uma linguagem informal e até 100 caracteres no máximo em cada parágrafo. "
                        + "Crie 2 parágrafos neste resumo.")
                .model("text-davinci-003")
                .maxTokens(500)
                .build();

        String resposta = service.createCompletion(requisicao).getChoices().get(0).getText();
        return resposta.replace("\n", "");
    }
}
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((529, 946), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((529, 921), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((529, 889), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((529, 846), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.abx.ainotebook.generativeai;

import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;
import java.util.List;

public class GptService implements GenerativeAiService<String, List<CompletionChoice>> {
    private static final String GPT_MODEL = "babbage-002";
    private final OpenAiService openAiService;

    public GptService(OpenAiService openAiService) {
        this.openAiService = openAiService;
    }

    @Override
    public List<CompletionChoice> complete(String prompt) {
        CompletionRequest completionRequest = CompletionRequest.builder()
                .prompt(prompt)
                .model(GPT_MODEL)
                .echo(true)
                .build();
        return openAiService.createCompletion(completionRequest).getChoices();
    }

    @Override
    public String parseGptResponse(List<CompletionChoice> completionChoices) {
        // System.out.println("com: " + completionChoices);
        StringBuilder ans = new StringBuilder();
        for (CompletionChoice choice : completionChoices) {
            ans.append(parseSingleGptResponse(choice));
        }
        return ans.toString();
    }

    private String parseSingleGptResponse(CompletionChoice choice) {
        // System.out.println("choice: " + choice.getText());
        return choice.getText();
    }
}
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((658, 804), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((658, 779), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((658, 751), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((658, 717), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.jareid.openaiapp.api; import java.io.*; import java.time.Duration; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.jareid.openaiapp.utils.Pair; import com.theokanning.openai.service.OpenAiService; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import org.apache.commons.lang3.StringUtils; /** * The {@code CommandLineInterface} class represents a command line interface that interacts with an OpenAI GPT model. * It utilizes OpenAI's {@link OpenAiService} to generate text completions based on user input, * and provides a command line interface for users to interact with the GPT model. * The class maintains a history of interactions which is serialized and deserialized from a file, * and also contains utility methods for handling and writing code blocks present in chat history. * * <p> The conversation history with the GPT model is persisted in a file named {@code history}. * Any code block returned by the GPT model is extracted and saved in a separate file. * * <p> Please note that the class is not thread-safe. If multiple threads interact with a {@code CommandLineInterface} instance, * it must be synchronized externally. * * <p> This class requires the OpenAI API key to be provided via the {@code API_KEY} field. * * @author Jamie Reid * @see OpenAiService * @see ChatCompletionRequest * @see ChatMessage * @see ChatMessageRole * @version Last updated: 2023-08-28, Version 0.0.4 * @since 2023-07-08 */ public class APIHandler { private static String OPENAI_MODEL = null; private static String HISTORY_FILE_NAME = null; private static String CODE_FILE_DATA_FORMAT = null; private static String OPENAICLI_CMD_HEADER = null; /** * A value that represents the regular expression in a ChatMessage response */ private static final String CODE_REGULAR_EXPRESSION = "```(\\w+)?([\\s\\S]*)```"; /** * A field containing the ChatGPT chat history. */ private List< Pair< ChatMessage > > history; /** * The OpenAI API Service */ private final OpenAiService service; /** * A field to control the options of the ChatGPT controller. */ private final HashMap<String, Boolean> options; /** * The default constructor that initializes the OpenAiService and chat history. 
*/ public APIHandler( ) throws RuntimeException { try { // Load secret properties Properties properties = new Properties(); properties.load( getClass().getClassLoader( ).getResourceAsStream( "secret.properties" ) ); String apiKey = properties.getProperty("openai.api.key"); if ( StringUtils.isEmpty( apiKey ) ) throw new IllegalArgumentException( "OpenAI API key must be set in config.properties" ); // Load non-secret properties properties.load( getClass().getClassLoader( ).getResourceAsStream( "config.properties" ) ); OPENAI_MODEL = (String) properties.get( "openai.model" ); if ( StringUtils.isEmpty( OPENAI_MODEL ) ) OPENAI_MODEL = "chatgpt-3.5"; HISTORY_FILE_NAME = (String) properties.get( "openaicli.filename.history" ); if ( StringUtils.isEmpty( HISTORY_FILE_NAME ) ) HISTORY_FILE_NAME = "history"; CODE_FILE_DATA_FORMAT = (String) properties.get( "openaicli.filename.dateFormat" ); if ( StringUtils.isEmpty( CODE_FILE_DATA_FORMAT ) ) CODE_FILE_DATA_FORMAT = "yyyy-MM-ddHH:mm:ss"; OPENAICLI_CMD_HEADER = (String) properties.get( "openaicli.commandline.header" ); if ( StringUtils.isEmpty( OPENAICLI_CMD_HEADER ) ) OPENAICLI_CMD_HEADER = "Open AI CLI --->"; service = new OpenAiService(apiKey, Duration.ofSeconds(60)); history = new ArrayList<>(250); options = new HashMap<>(); options.put( "disableOutputCodeToFile", getBooleanProperty(properties, "openaicli.options.disableOutputCodeToFile") ); options.put( "disableLoggingChatGPTHistory", getBooleanProperty(properties, "openaicli.options.disableLoggingChatGPTHistory") ); options.put( "disableSendingChatGPTHistory", getBooleanProperty(properties, "openaicli.options.disableSendingChatGPTHistory") ); } catch ( Exception startUpException ) { handleException(" couldn't start up the CLI", startUpException ); throw new RuntimeException( "Failed to read from the history file. Exiting"); } } private Boolean getBooleanProperty(Properties properties, String key) { Object value = properties.get(key); if (value instanceof String) { return Boolean.parseBoolean((String) value); } // Return default value (false) if the property doesn't exist or isn't a string. return false; } /** * This method alternates the boolean value of a specified option in the options HashMap. * * @param optionName The name of the option to be changed. It should be a valid option name and is case-sensitive. * * @throws NullPointerException if the specified optionName is null. * @throws IllegalArgumentException if the specified optionName does not exist in the options HashMap. */ public void changeOption( String optionName ) { if (options == null) { throw new NullPointerException("Invalid option name: ''"); } else if ( !options.containsKey(optionName) ) { throw new IllegalArgumentException("Invalid option name: " + optionName); } else { options.put( optionName, !options.get( optionName ) ); } } /** * Adds a new entry to the chat history and ensures the size of the history does not exceed the limit. * If the size of the history reaches the limit (250 in this case), the earliest entry is removed before adding the new one. * * @param input The input {@link ChatMessage} representing the message received. * @param output The output {@link ChatMessage} representing the response or reply. */ private void addAndRotateHistory( ChatMessage input, ChatMessage output ) { if (history.size() == 250) { history.remove(0); } history.add( new Pair<>( input, output ) ); } /** * Returns the chat history entry associated with the specified ID. * * @param id The ID of the desired chat history entry. 
The valid range is from 0 to 249. * @return A {@link Pair} containing the user's {@link ChatMessage} as the first element and ChatGPT's response as the second. * @throws IllegalArgumentException if the provided ID is 250 or greater. */ public Pair< ChatMessage > returnHistory(int id) { if (id >= 250) { throw new IllegalArgumentException("Chat History only contains a maximum of 125 user and 125 ChatGPT messages"); } return history.get(id); } public List< ChatMessage > returnHistoryAsList() { List< ChatMessage > list = new ArrayList<>(); for (Pair< ChatMessage > pair : history ) list.addAll( pair.convertToList() ); return list; } /** * A method to handle exceptions and print stack trace. * * @param failMessage the failure message * @param exception the thrown exception */ private static void handleException(String failMessage, Exception exception) { System.out.println(OPENAICLI_CMD_HEADER + "Oooops, " + failMessage + "... Reason: " + exception.getMessage()); System.out.println(OPENAICLI_CMD_HEADER + " - - - - - - - Stacktrace start - - - - - - - "); exception.printStackTrace(); System.out.println(OPENAICLI_CMD_HEADER + " - - - - - - - Stacktrace end - - - - - - - "); } /** * A method to read chat history from a file. */ @SuppressWarnings("unchecked") private void readHistoryFromFile() throws RuntimeException { File historyFile = createNewHistoryFile(); if ( historyFile.length() != 0 ) { try ( ObjectInputStream inputStream = new ObjectInputStream( new FileInputStream( historyFile ) ) ) { history.add( ( Pair< ChatMessage > ) inputStream.readObject() ); } catch ( ClassNotFoundException | IOException readException ) { handleException("couldn't read the history file", readException); throw new RuntimeException("Failed to read from the history file. Exiting"); } } } /** * A method to write chat history to a file. */ private void writeHistoryToFile() { try ( ObjectOutputStream outputStream = new ObjectOutputStream( new FileOutputStream( HISTORY_FILE_NAME ) ) ) { history.forEach( message -> { try { outputStream.writeObject( message ); } catch ( IOException e ) { throw new RuntimeException( e ); } }); } catch ( IOException writeException ) { handleException( "couldn't write the history file", writeException ); throw new RuntimeException( "Failed to write the the code file. Exiting"); } } /** * A method to write chat history to a file. * TODO: rename old history with date. * */ private void clearHistoryToFile() { renameHistoryFile(); createNewHistoryFile(); writeHistoryToFile(); } /** * Renames the history file to a new file with the date as the file type. */ private static void renameHistoryFile() { File originalFile = new File( HISTORY_FILE_NAME ); // Create a File object with the old file path File renamedFile = new File(originalFile.getParent(), HISTORY_FILE_NAME + "." + generateDateString() ); // Create a File object with the new file path // Rename the file and check for success if ( originalFile.renameTo( renamedFile ) ) { System.out.println(OPENAICLI_CMD_HEADER + " History file renamed successfully."); } else { System.out.println(OPENAICLI_CMD_HEADER + " History file renaming failed."); } } /** * Creates a new history file (without extension). * Used for a variety of reasons. 
*/ private static File createNewHistoryFile( ) { File historyFile = new File( HISTORY_FILE_NAME ); // Check if the file already exists try { if ( !historyFile.exists() ) { // Create the file and check for success if ( historyFile.createNewFile( ) ) { System.out.println(OPENAICLI_CMD_HEADER + " History file created successfully."); } else { System.out.println(OPENAICLI_CMD_HEADER + " History file creation failed."); } } else { System.out.println(OPENAICLI_CMD_HEADER + " History file already exists."); } return historyFile; } catch ( IOException createException ) { handleException( "couldn't create the history file", createException ); throw new RuntimeException( "Failed to create the history file. Exiting"); } } /** * A method to generate data string * * @return a generate date string */ private static String generateDateString( ) { LocalDateTime currentDateTime = LocalDateTime.now(); DateTimeFormatter formatter = DateTimeFormatter.ofPattern( CODE_FILE_DATA_FORMAT ); return currentDateTime.format(formatter); } /** * A method to generate file name for code files. * * @param codeType the type of code * @return a formatted file name */ private static String generateCodeFileName( String codeType ) { return generateDateString( ) + ( StringUtils.isEmpty(codeType) ? "" : "." + codeType ); } private static boolean hasCode( ChatMessage message ) { String content = message.getContent( ); Pattern pattern = Pattern.compile(CODE_REGULAR_EXPRESSION); Matcher matcher = pattern.matcher( content ); return matcher.find(); } /** * A method to check if a ChatMessage contains code. * * @param message the chat message * @return codeType if it contains code, "" otherwise */ private static String extractCodeType( ChatMessage message ) { String content = message.getContent( ); Pattern pattern = Pattern.compile(CODE_REGULAR_EXPRESSION); Matcher matcher = pattern.matcher( content ); if (matcher.find()) return matcher.group(1); else return ""; } /** * A method to extract the code type from a ChatMessage. * * @param message the chat message * @return the extracted code type */ private List<String> extractCode( ChatMessage message ) { String content = message.getContent( ); List<String> codeList = new ArrayList<>(); Pattern pattern = Pattern.compile(CODE_REGULAR_EXPRESSION, java.util.regex.Pattern.DOTALL); Matcher matcher = pattern.matcher( content ); while (matcher.find()) { String codeBlock = matcher.group( 1 ).replace( "\\n", "\n" ) .replace( "\\\"", "\"" ) .trim( ); codeList.add( codeBlock ); } return codeList; } /** * A method to write the code from a ChatMessage to a file. * * @param message the chat message */ private void writeCodeToFile( ChatMessage message ) throws RuntimeException { if (options.get("disableOutputCodeToFile")) return; String codeType = extractCodeType( message ); try ( BufferedWriter writer = new BufferedWriter( new FileWriter( generateCodeFileName( codeType ) ) ) ) { List<String> codeList = extractCode( message ); codeList.forEach(code -> { try { writer.write(code); writer.newLine(); } catch (IOException writeCodeLineException) { handleException( "couldn't next line to the code file", writeCodeLineException ); } }); } catch ( IOException writeCodeException ) { handleException( "couldn't write to the code file", writeCodeException ); throw new RuntimeException( "Failed to write the the code file. Exiting"); } } /** * A method to handle user inputs and interact with OpenAI. 
* * @return true if the chat should continue, false otherwise */ private boolean askGPT() { System.out.print("You: "); Scanner scanner = new Scanner(System.in); String userInput = scanner.nextLine(); return askGPT( userInput ); } /** * A method to handle user inputs and interact with OpenAI. * TODO: modify for UI usage. * * @param userInput THe input from the command line or from the UI * * @return true if the chat should continue, false otherwise */ public boolean askGPT( String userInput ) { if ( userInput.equalsIgnoreCase( "QUIT" ) || userInput.equalsIgnoreCase( "WRITELAST" ) ) { writeHistoryToFile(); // If we received QUIT return false to exit the program if( userInput.equalsIgnoreCase( "QUIT" ) ) return false; } else if ( userInput.equalsIgnoreCase( "WIPE" ) || userInput.equalsIgnoreCase( "WIPEHISTORY" ) ) { history = new ArrayList<>( ); clearHistoryToFile( ); } try { ChatMessage response = askGPT_GetResponse(userInput); if (hasCode(response)) writeCodeToFile(response); System.out.print("ChatGPT: " + response.getContent() + System.lineSeparator()); } catch( Exception exception ) { handleException( "Error with the ChatGPT API occurred: " + exception.getMessage(), exception ); } return false; } public ChatMessage askGPT_GetResponse( String userInput ) { ChatMessage userMessage = new ChatMessage( ChatMessageRole.USER.value(), userInput ); // Process the user's message with OpenAI ChatCompletionRequest chatRequest = ChatCompletionRequest.builder( ) .model( OPENAI_MODEL ) // see https://platform.openai.com/docs/models // if option enabled, send history .messages( !options.get( "disableSendingChatGPTHistory" ) ? returnHistoryAsList() : null ) .maxTokens( 256 ) .build( ); ChatMessage response = service.createChatCompletion( chatRequest ).getChoices( ) .get( 0 ) .getMessage( ); if ( !options.get( "disableLoggingChatGPTHistory" ) ) addAndRotateHistory( userMessage, response ); // Add the last user message to history return response; } /** * A method to start the chat loop. * TODO: decide if a thread could be useful, write now in such a simple project it is not useful. */ public void start() { readHistoryFromFile(); while (true) { if (!askGPT()) { break; } } } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((16589, 16617), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((16727, 17309), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((16727, 17234), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((16727, 17151), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((16727, 16847), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.dooray.samplecmd.controller; import com.dooray.samplecmd.payload.ResponseType; import com.dooray.samplecmd.payload.SlashCommandResponse; import com.dooray.samplecmd.payload.SubmitDialogPayload; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.theokanning.openai.completion.chat.ChatCompletionChoice; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import org.springframework.http.*; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.client.RestTemplate; import org.springframework.web.util.UriComponentsBuilder; import javax.servlet.http.HttpServletRequest; import java.time.Duration; import java.util.Arrays; import java.util.List; @RestController public class InteractiveController { private final RestTemplate restTemplate; public InteractiveController(RestTemplate restTemplate) { this.restTemplate = restTemplate; } @PostMapping("/api/interaction") public SlashCommandResponse interactiveButton(HttpServletRequest request, @RequestBody SubmitDialogPayload requestPayload) throws JsonProcessingException { String answer = null; System.out.println(requestPayload); if (requestPayload.getType().equals("dialog_submission")) { String question = requestPayload.getSubmission().get("question"); answer = requestGpt(question); StringBuilder responseText = new StringBuilder(); Long tenantId = requestPayload.getTenant().getId(); Long memberId = requestPayload.getUser().getId(); String mentionText = "(dooray://" + tenantId + "/members/" + memberId + " \"member\")"; responseText.append(mentionText + "'s ") .append("Question: ") .append(question) .append("\n\n") .append("Answer: ") .append(answer); sendCommandHookMessage(request, requestPayload, responseText.toString()); return SlashCommandResponse.builder() .channelId(requestPayload.getChannel().getId()) .text(responseText.toString()) .replaceOriginal(false) .responseType(ResponseType.IN_CHANNEL) .build(); } return SlashCommandResponse.builder().build(); } private void sendCommandHookMessage(HttpServletRequest request, SubmitDialogPayload requestPayload, String text) throws JsonProcessingException { ObjectMapper mapper = new ObjectMapper(); UriComponentsBuilder builder = UriComponentsBuilder.fromHttpUrl(requestPayload.getResponseUrl()); HttpHeaders httpHeaders = new HttpHeaders(); httpHeaders.setContentType(MediaType.APPLICATION_JSON); httpHeaders.add("Dooray-Db-Id", request.getHeader("Dooray-Db-Id")); httpHeaders.add("token", requestPayload.getCmdToken()); SlashCommandResponse response = SlashCommandResponse.builder() .channelId(requestPayload.getChannel().getId()) .text(text) .replaceOriginal(false) .responseType(ResponseType.IN_CHANNEL) .build(); HttpEntity<?> entity = new HttpEntity<>(mapper.writeValueAsString(response), httpHeaders); restTemplate.exchange(builder.build().encode().toUri(), HttpMethod.POST, entity, String.class); } private String requestGpt(String question) { OpenAiService service = new OpenAiService("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", Duration.ZERO); ChatCompletionRequest completionRequest1 = ChatCompletionRequest.builder() .model("gpt-3.5-turbo") .messages(Arrays.asList(new ChatMessage("user", question))) .build(); List<ChatCompletionChoice> choices 
= service.createChatCompletion(completionRequest1) .getChoices(); if (!choices.isEmpty()) { return choices.get(0).getMessage().getContent(); } return ""; } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((2329, 2610), 'com.dooray.samplecmd.payload.SlashCommandResponse.builder'), ((2329, 2581), 'com.dooray.samplecmd.payload.SlashCommandResponse.builder'), ((2329, 2522), 'com.dooray.samplecmd.payload.SlashCommandResponse.builder'), ((2329, 2478), 'com.dooray.samplecmd.payload.SlashCommandResponse.builder'), ((2329, 2427), 'com.dooray.samplecmd.payload.SlashCommandResponse.builder'), ((2637, 2675), 'com.dooray.samplecmd.payload.SlashCommandResponse.builder'), ((3314, 3556), 'com.dooray.samplecmd.payload.SlashCommandResponse.builder'), ((3314, 3531), 'com.dooray.samplecmd.payload.SlashCommandResponse.builder'), ((3314, 3476), 'com.dooray.samplecmd.payload.SlashCommandResponse.builder'), ((3314, 3436), 'com.dooray.samplecmd.payload.SlashCommandResponse.builder'), ((3314, 3408), 'com.dooray.samplecmd.payload.SlashCommandResponse.builder'), ((3989, 4161), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3989, 4136), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3989, 4060), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package me.synergy.modules;

import com.theokanning.openai.completion.CompletionChoice;
import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;

import java.time.Duration;
import java.util.List;

import me.synergy.brains.Synergy;

public class OpenAi {

    public List<CompletionChoice> newPrompt(String args) {
        OpenAiService service = new OpenAiService(Synergy.getConfig().getString("openai.token"), Duration.ofSeconds(30L));
        CompletionRequest completionRequest = CompletionRequest.builder()
                .model(Synergy.getConfig().getString("openai.model"))
                .prompt(args)
                .maxTokens(Synergy.getConfig().getInt("openai.response-size"))
                .temperature(Double.valueOf(Synergy.getConfig().getDouble("openai.temperature")))
                .build();
        List<CompletionChoice> choices = service.createCompletion(completionRequest).getChoices();
        service.shutdownExecutor();
        return choices;
    }
}
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((411, 456), 'me.synergy.brains.Synergy.getConfig'), ((526, 805), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((526, 790), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((526, 702), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((526, 633), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((526, 613), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((567, 612), 'me.synergy.brains.Synergy.getConfig'), ((651, 701), 'me.synergy.brains.Synergy.getConfig'), ((737, 788), 'me.synergy.brains.Synergy.getConfig')]
package com.alineavila.jornadamilhas.infra;

import com.theokanning.openai.completion.CompletionRequest;
import com.theokanning.openai.service.OpenAiService;

public class IntegraOpenAi {
    private static final String API_KEY = "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";

    OpenAiService service = new OpenAiService(API_KEY);

    public String realizaOPrompt(String nomeDestino) {
        CompletionRequest completionRequest = CompletionRequest.builder()
                .prompt("Faça uma descrição turistica sobre a localidade " + nomeDestino)
                .model("text-davinci-003")
                .maxTokens(800)
                .build();
        var result = service.createCompletion(completionRequest).getChoices().get(0).getText();
        System.out.println(result);
        return result;
    }
}
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((444, 663), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((444, 638), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((444, 606), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((444, 563), 'com.theokanning.openai.completion.CompletionRequest.builder')]
/* ======================================================================== SchemaCrawler http://www.schemacrawler.com Copyright (c) 2000-2024, Sualeh Fatehi <sualeh@hotmail.com>. All rights reserved. ------------------------------------------------------------------------ SchemaCrawler is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. SchemaCrawler and the accompanying materials are made available under the terms of the Eclipse Public License v1.0, GNU General Public License v3 or GNU Lesser General Public License v3. You may elect to redistribute this code under any of these licenses. The Eclipse Public License is available at: http://www.eclipse.org/legal/epl-v10.html The GNU General Public License v3 and the GNU Lesser General Public License v3 are available at: http://www.gnu.org/licenses/ ======================================================================== */ package schemacrawler.tools.command.chatgpt.utility; import java.io.PrintStream; import java.sql.Connection; import java.util.ArrayList; import java.util.List; import com.theokanning.openai.completion.chat.ChatFunction; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.FunctionExecutor; import static java.util.Objects.requireNonNull; import schemacrawler.schema.Catalog; import schemacrawler.tools.command.chatgpt.FunctionDefinition; import schemacrawler.tools.command.chatgpt.FunctionDefinition.FunctionType; import schemacrawler.tools.command.chatgpt.functions.ExitFunctionDefinition; import schemacrawler.tools.command.chatgpt.functions.FunctionDefinitionRegistry; import us.fatehi.utility.UtilityMarker; @UtilityMarker public class ChatGPTUtility { public static boolean inIntegerRange(final int value, final int min, final int max) { return value > min && value <= max; } public static boolean isExitCondition(final List<ChatMessage> completions) { requireNonNull(completions, "No completions provided"); final String exitFunctionName = new ExitFunctionDefinition().getName(); for (final ChatMessage c : completions) { if (c.getFunctionCall() != null && c.getName().equals(exitFunctionName)) { return true; } } return false; } public static FunctionExecutor newFunctionExecutor( final Catalog catalog, final Connection connection) { requireNonNull(catalog, "No catalog provided"); requireNonNull(connection, "No connection provided"); final List<ChatFunction> chatFunctions = new ArrayList<>(); for (final FunctionDefinition functionDefinition : FunctionDefinitionRegistry.getFunctionDefinitionRegistry()) { if (functionDefinition.getFunctionType() != FunctionType.USER) { continue; } functionDefinition.setCatalog(catalog); functionDefinition.setConnection(connection); final ChatFunction chatFunction = ChatFunction.builder() .name(functionDefinition.getName()) .description(functionDefinition.getDescription()) .executor(functionDefinition.getParameters(), functionDefinition.getExecutor()) .build(); chatFunctions.add(chatFunction); } return new FunctionExecutor(chatFunctions); } /** * Send prompt to ChatGPT API and display response * * @param prompt Input prompt. 
*/ public static void printResponse(final List<ChatMessage> completions, final PrintStream out) { requireNonNull(out, "No ouput stream provided"); requireNonNull(completions, "No completions provided"); for (final ChatMessage chatMessage : completions) { out.println(chatMessage.getContent()); } } private ChatGPTUtility() { // Prevent instantiation } }
[ "com.theokanning.openai.completion.chat.ChatFunction.builder" ]
[((3004, 3257), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((3004, 3234), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((3004, 3140), 'com.theokanning.openai.completion.chat.ChatFunction.builder'), ((3004, 3076), 'com.theokanning.openai.completion.chat.ChatFunction.builder')]
package com.example.javafullstack.controller; import com.example.javafullstack.UserRepository; import com.example.javafullstack.entity.User; import com.theokanning.openai.completion.CompletionChoice; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Controller; import javax.annotation.PostConstruct; import org.springframework.web.bind.annotation.*; import org.springframework.ui.Model; import org.springframework.web.servlet.view.RedirectView; import java.util.List; @RequestMapping("/user") @Controller public class UserController { @PostConstruct public void init() { System.out.println("UserController init"); } private static Logger logger = LoggerFactory.getLogger(UserController.class); @RequestMapping("/info") public String sayHi() { logger.trace("========trace========"); logger.debug("========debug========"); logger.info("========info========"); logger.warn("========warn========"); logger.error("========error========"); return "Info"; } // @Autowired // private JdbcTemplate; // // @RequestMapping(value = "/getusers", method=RequestMethod.GET) // @ResponseBody // public List<Map<String,Object>> getUsers() { // String sql = "SELECT * FROM user"; // List<Map<String,Object>> list = jdbcTemplate.queryForList(sql); // return list; // } @Autowired private UserRepository userRepository; @GetMapping("/storing") public String greetingForm(Model model) { model.addAttribute("user", new User()); return "dataStore"; } @PostMapping("/storing") public String greetingSubmit(@ModelAttribute User user, Model model) { logger.trace("================ here ================"); User newUser = new User(); newUser.setName(user.getName()); newUser.setAge(user.getAge()); newUser.setGender(user.getGender()); newUser.setEmail(user.getEmail()); newUser.setCity(user.getCity()); userRepository.save(user); // model.addAttribute("newUser", newUser); //返回最新添加的数据 return "newStoredData"; } @GetMapping("/alldata") public String getMessage(Model model) { Iterable<User> users = userRepository.findAll(); model.addAttribute("users", users); return "allStoredData"; } @Value("${GPT_KEY}") private String gptKey; @PostMapping("/askGPT") public String ChatGPT(@RequestParam String question, Model model) { String token = gptKey; System.out.println(question); OpenAiService service = new OpenAiService(token); CompletionRequest completionRequest = CompletionRequest.builder() .model("text-davinci-003") .prompt(question) .temperature(0.5) .maxTokens(2048) .topP(1D) .frequencyPenalty(0D) .presencePenalty(0D) .build(); service.createCompletion(completionRequest).getChoices().forEach(System.out::println); List<CompletionChoice> choiceList = service.createCompletion(completionRequest).getChoices(); String answer = choiceList.get(0).getText(); model.addAttribute("prompt", question); model.addAttribute("choices", answer); return "GPTAnswers"; } @RequestMapping("/mainpage") public String mainPage() { return "mainPage"; } @RequestMapping("/redirectToInfo") public RedirectView redirectToInfo() { return new RedirectView("/user/info"); } @RequestMapping("/redirectToStoring") public RedirectView redirectToStoring() { return new RedirectView("/user/storing");} @RequestMapping("/redirectToAllData") public RedirectView redirectToAllData() { return new RedirectView("/user/alldata");} 
@RequestMapping("/redirectToAskGPT") public RedirectView redirectToAsk() { return new RedirectView("/askGPT.html");} }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((2987, 3284), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2987, 3259), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2987, 3222), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2987, 3184), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2987, 3158), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2987, 3125), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2987, 3091), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2987, 3057), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package de.garrafao.phitag.computationalannotator.lexsub.service;

import com.theokanning.openai.OpenAiHttpException;
import com.theokanning.openai.completion.chat.ChatCompletionChoice;
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.service.OpenAiService;
import de.garrafao.phitag.computationalannotator.common.error.WrongApiKeyException;
import de.garrafao.phitag.computationalannotator.common.error.WrongModelException;
import de.garrafao.phitag.computationalannotator.common.function.CommonFunction;
import de.garrafao.phitag.computationalannotator.common.model.application.data.OpenAPIResponseDto;
import de.garrafao.phitag.computationalannotator.lexsub.data.LexsubPrompt;
import org.springframework.stereotype.Service;

import java.util.List;

@Service
public class LexsubOpenAIService {

    private final LexsubPrompt lexsubPrompt;

    private final CommonFunction commonFunction;

    public LexsubOpenAIService(LexsubPrompt lexsubPrompt, CommonFunction commonFunction) {
        this.lexsubPrompt = lexsubPrompt;
        this.commonFunction = commonFunction;
    }

    public OpenAPIResponseDto chat(final String apiKey, final String model, final String prompt,
                                   final String usage, final String lemma) {
        try {
            OpenAiService service = new OpenAiService(apiKey);

            List<ChatMessage> messages = this.lexsubPrompt.getChatMessages(prompt, usage, lemma);

            ChatCompletionRequest completionRequest = ChatCompletionRequest.builder()
                    .messages(messages)
                    .model(model)
                    .temperature(0.0)
                    .n(1)
                    .build();

            List<ChatCompletionChoice> choices = service.createChatCompletion(completionRequest).getChoices();

            StringBuilder returnString = new StringBuilder();
            for (ChatCompletionChoice choice : choices) {
                returnString.append("response: ").append(choice.getMessage().getContent()).append(System.lineSeparator());
            }

            final String result = String.valueOf(returnString);

            return new OpenAPIResponseDto(result);
        } catch (OpenAiHttpException e) {
            if (e.getMessage().contains("The model")) {
                throw new WrongModelException(model);
            }
            if (e.getMessage().contains("Incorrect API key provided")) {
                throw new WrongApiKeyException();
            }
        }
        return null;
    }
}
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1555, 1753), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1555, 1724), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1555, 1698), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1555, 1660), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((1555, 1626), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package de.dhbw.quizapp.application.openai; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import org.springframework.stereotype.Service; import java.time.Duration; import java.util.ArrayList; import java.util.List; @Service public class AIGenService { private String openaiApiKey; private OpenAiService openAiService; public AIGenService() { this.openaiApiKey = System.getenv("OPENAI_TOKEN"); if (this.openaiApiKey != null) { Duration timeOutDuration = Duration.ofSeconds(300); this.openAiService = new OpenAiService(openaiApiKey, timeOutDuration); } } public void setApiKey(String apiKey) { this.openaiApiKey = apiKey; Duration timeOutDuration = Duration.ofSeconds(300); this.openAiService = new OpenAiService(openaiApiKey, timeOutDuration); } public String generateQuiz(String quizName, String furtherInfo, int noOfQuestions, String language, String model) { if (openAiService == null) { throw new IllegalStateException("API key not set"); } String prompt = createPrompt(quizName, furtherInfo, noOfQuestions, language); List<ChatMessage> messages = createChatMessages(prompt); ChatCompletionRequest chatCompletionRequest = createChatCompletionRequest(messages, model); return openAiService.createChatCompletion(chatCompletionRequest).getChoices().get(0).getMessage().getContent(); } private String createPrompt(String quizName, String furtherInfo, int noOfQuestions, String language) { String promptTemplate = """ Create a quiz using the following information: { "title": "%s", "description": "%s", "number of questions": "%d", "language of questions": "%s", "example question and answer": "Example question: <exampleQuestion>, Example answer: <exampleAnswer>" } Use this exact format: { "title": "<QuizName>", "description": "<description>", "questions": [ { "title": "<question1>", "correctAnswer": "<Correct Answer = A | B | C | D>", "answerOptions": [ "<answerOption1.A>, <answerOption1.B>, <answerOption1.C>, <answerOption1.D>" ] }, { "title:... ] } Example: Quiz in JSON-Format: { "title": "European Capitals", "description": "A quiz to test knowledge of the capitals of various European countries.", "questions": [ { "title": "What is the capital of Germany?", "correctAnswer": "C", "answerOptions": ["Paris", "London", "Berlin", "Rome"] }, { "title:... ] } """; return String.format(promptTemplate, quizName, furtherInfo, noOfQuestions, language); } private List<ChatMessage> createChatMessages(String prompt) { List<ChatMessage> messages = new ArrayList<>(); ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), prompt); messages.add(systemMessage); return messages; } private ChatCompletionRequest createChatCompletionRequest(List<ChatMessage> messages, String model) { return ChatCompletionRequest .builder() .model(model) .messages(messages) .n(1) .maxTokens(500) .build(); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value" ]
[((3156, 3186), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]
package cn.sc.love.gpt; import com.theokanning.openai.OpenAiService; import com.theokanning.openai.completion.CompletionChoice; import com.theokanning.openai.completion.CompletionRequest; import java.util.List; /** * @Author yupengtao * @Date 2023/6/8 18:49 **/ public class Demo1 { public static void main(String[] args) { OpenAiService service = new OpenAiService(Constants.OPENAI_TOKEN,10000); CompletionRequest request = CompletionRequest.builder().model("text-davinci-003") // model to use .prompt("请帮我写一首情诗,送给我的陈姓女朋友") // prompt .temperature(0D) // sampling randomness .maxTokens(1000) // max output tokens .topP(1D) // nucleus sampling (top_p), range [0,1] .frequencyPenalty(0D) // frequency penalty coefficient .presencePenalty(0D) // presence (repetition) penalty coefficient .build(); List<CompletionChoice> choices = service.createCompletion(request).getChoices(); System.out.println(choices); choices.forEach(System.out::println); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((452, 981), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((452, 932), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((452, 868), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((452, 770), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((452, 717), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((452, 658), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((452, 614), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((452, 505), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.gptchathelper.controller; import com.gptchathelper.model.ChatMessage; import com.gptchathelper.service.ChathistoryImpl; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.messaging.handler.annotation.MessageMapping; import org.springframework.messaging.handler.annotation.Payload; import org.springframework.messaging.handler.annotation.SendTo; import org.springframework.messaging.simp.SimpMessageHeaderAccessor; import org.springframework.stereotype.Controller; import com.theokanning.openai.service.OpenAiService; import com.theokanning.openai.completion.CompletionRequest; import static com.gptchathelper.model.ChatMessage.MessageType.CHAT; @Controller public class ChatController { @Autowired ChathistoryImpl chathistoryService; @Value("${spring.openai.apikey}") private String openai_apikey; @MessageMapping("/chat.register") @SendTo("/topic/public") public ChatMessage register(@Payload ChatMessage chatMessage, SimpMessageHeaderAccessor headerAccessor) { headerAccessor.getSessionAttributes().put("username", chatMessage.getSender()); return chatMessage; } @MessageMapping("/chat.send") @SendTo("/topic/public") public ChatMessage sendMessage(@Payload ChatMessage chatMessage) { chathistoryService.insertChathistory(chatMessage.generateId(), chatMessage.getCurrentTime() , chatMessage.getSender(), chatMessage.getContent()); return chatMessage; } @MessageMapping("/chat.generateResponse") @SendTo("/topic/public") public ChatMessage generateResponse(@Payload ChatMessage chatMessage) { String latestMessage = chathistoryService.getLatestMessageContent(); // Use only the latest message for the prompt OpenAiService service = new OpenAiService(openai_apikey); // Craft a more engaging and contextual prompt String helperPrompt = "As a chat helper, provide a supportive and concise one-sentence response to the following message: \"" + latestMessage + "\""; CompletionRequest completionRequest = CompletionRequest.builder() .prompt(helperPrompt) .model("text-davinci-003") // Replace with your desired model .build(); try { String response = service.createCompletion(completionRequest).getChoices().get(0).getText(); chatMessage.setContent(response); chatMessage.setType(CHAT); // Set the type of the message, if needed chatMessage.setSender(chatMessage.getSender()); // Set the sender as ChatGPT or similar // Set other necessary fields of chatMessage here, if there are any } catch (Exception e) { e.printStackTrace(); // Handle the exception, possibly by setting an error message in chatMessage chatMessage.setContent("An error occurred while generating a response."); chatMessage.setType(CHAT); } return chatMessage; } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((2173, 2341), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2173, 2281), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2173, 2238), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.mtahacks.recipes.service; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.mtahacks.recipes.DTOs.IncomingRequest; import com.mtahacks.recipes.DTOs.Recipe; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.service.OpenAiService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import java.util.List; @Component public class OpenAIService { private static final String KEY = "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"; private final OpenAiService service; Logger logger = LoggerFactory.getLogger(OpenAIService.class); public OpenAIService(){ service = new OpenAiService(KEY); } public Recipe getRecipe(IncomingRequest request){ CompletionRequest completionRequest = CompletionRequest.builder() .prompt( buildPrompt(request)) .model("text-davinci-003") .maxTokens(3850) .build(); var response = service.createCompletion(completionRequest).getChoices().get(0).getText(); try{ return new ObjectMapper().readValue(response, Recipe.class); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } private static String buildPrompt(IncomingRequest request){ String ingredients = String.join(",", request.ingredients()); String restrictions = ""; if(request.dietaryRestrictions().size() > 0){ restrictions = "The recipe must consider the following dietary restrictions: " + String.join(",", request.dietaryRestrictions()) + "\n"; } String restrictedIngredients = ""; if(request.excludedIngredients().size() > 0){ restrictedIngredients = "The recipe cannot contain any of the following ingredients: " + String.join(",", request.excludedIngredients()) + "\n"; } return String.format(""" I am going to provide you a list of ingredients, I want you to generate a recipe that takes less than 30 minutes to prepare using the ingredients provided. The recipe should use one or more of the provided ingredients. The recipe should be delivered in JSON format. The name of the recipe should be in the JSON object as a string at the key "recipe_name". The ingredients should be in the JSON object with the key "ingredients", and should be listed in a JSON list, and the individual ingredients should resemble the following JSON object: {"name": *ingredient_name*, "amount": *amount of ingredient needed*}. wrap the amount of ingredients needed in double quotes The steps should be in a JSON array and should just be strings. Do not provide any text except for the requested JSON. %s%s Do not provide any text except for the requested JSON. Any additional text should be discarded. The ingredients are: %s\n""", restrictions, restrictedIngredients, ingredients); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((1095, 1270), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1095, 1245), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1095, 1212), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1095, 1169), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package screensync.api.service; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; public class ConsultaChatGPT { public static String obterTraducao(String texto) { OpenAiService service = new OpenAiService("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"); CompletionRequest requisicao = CompletionRequest.builder() .model("gpt-3.5-turbo-instruct") .prompt("traduza para pt-br o texto: " + texto) .maxTokens(1000) .temperature(0.7) .build(); var resposta = service.createCompletion(requisicao); return resposta.getChoices().get(0).getText(); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((380, 612), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((380, 587), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((380, 553), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((380, 520), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((380, 456), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.erzbir.numeron.plugin.openai.config; import com.erzbir.numeron.api.NumeronImpl; import com.erzbir.numeron.utils.*; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import java.io.IOException; import java.io.Serializable; import java.util.LinkedList; /** * @author Erzbir * @Date: 2023/6/19 20:10 */ public class FunctionConfig implements Serializable { private static final Object key = new Object(); private static final String configFile = NumeronImpl.INSTANCE.getPluginWorkDir() + "chatgpt/config/function.json"; private static volatile FunctionConfig INSTANCE; private String model = "gpt-3.5-turbo-16k-0613"; private int max_tokens = 512; private double temperature = 0.9; private double top_p = 1.0; private double presence_penalty = 0.6; private double frequency_penalty = 0.0; private String function_call = "auto"; private FunctionConfig() { try { ConfigCreateUtil.createFile(configFile); } catch (IOException e) { NumeronLogUtil.logger.error("ERROR", e); } } public static FunctionConfig getInstance() { if (INSTANCE == null) { synchronized (key) { if (INSTANCE == null) { try { INSTANCE = JsonUtil.load(configFile, FunctionConfig.class); } catch (ConfigReadException e) { throw new RuntimeException(e); } } } } if (INSTANCE == null) { synchronized (key) { if (INSTANCE == null) { INSTANCE = new FunctionConfig(); try { JsonUtil.dump(configFile, INSTANCE, ChatConfig.class); } catch (ConfigWriteException e) { throw new RuntimeException(e); } } } } return INSTANCE; // return new ChatConfig(); } public ChatCompletionRequest load() { return ChatCompletionRequest.builder() .maxTokens(max_tokens) .model(model) .messages(new LinkedList<>()) .presencePenalty(presence_penalty) .topP(top_p) .frequencyPenalty(frequency_penalty) .build(); } public String getModel() { return model; } public void setModel(String model) { this.model = model; } public int getMax_tokens() { return max_tokens; } public void setMax_tokens(int max_tokens) { this.max_tokens = max_tokens; } public double getTemperature() { return temperature; } public void setTemperature(double temperature) { this.temperature = temperature; } public double getTop_p() { return top_p; } public void setTop_p(double top_p) { this.top_p = top_p; } public double getPresence_penalty() { return presence_penalty; } public void setPresence_penalty(double presence_penalty) { this.presence_penalty = presence_penalty; } public double getFrequency_penalty() { return frequency_penalty; } public void setFrequency_penalty(double frequency_penalty) { this.frequency_penalty = frequency_penalty; } public String getFunction_call() { return function_call; } public void setFunction_call(String function_call) { this.function_call = function_call; } }
[ "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((488, 527), 'com.erzbir.numeron.api.NumeronImpl.INSTANCE.getPluginWorkDir'), ((2105, 2409), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2105, 2384), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2105, 2331), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2105, 2302), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2105, 2251), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2105, 2205), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2105, 2175), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package org.codered.neolithic.openai; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import org.codered.neolithic.Neolithic; import javax.swing.*; import javax.swing.text.BadLocationException; import javax.swing.text.SimpleAttributeSet; import javax.swing.text.StyleConstants; import javax.swing.text.StyledDocument; import java.awt.*; import java.time.Duration; import java.util.ArrayList; import java.util.List; /** * The OpenAIHandler class manages interactions with the OpenAI API for conducting a chat-based conversation. * It includes a graphical user interface for users to input messages, receive AI responses, and view the conversation. */ public class OpenAIHandler { private final OpenAiService service; private final AIRequest originalRequest; private JFrame chatFrame; private JTextPane chatTextPane; private JTextField userInputField; private JButton sendButton; private JLabel loadingLabel; /** * Constructor for the OpenAIHandler class. * * @param originalRequest The initial AIRequest containing user instructions and converted text. */ public OpenAIHandler(AIRequest originalRequest) { String OPENAI_TOKEN = Neolithic.getConfigReader().getOpenAiToken(); this.service = new OpenAiService(OPENAI_TOKEN, Duration.ofHours(2)); this.originalRequest = originalRequest; initializeChatFrame(); } /** * Retrieves the OpenAiService instance used by the OpenAIHandler. * * @return The OpenAiService instance. */ public OpenAiService getService() { return service; } /** * Initiates the chat by making the chat frame visible. */ public void startChat() { chatFrame.setVisible(true); } /** * Initializes the chat frame with necessary components and performs the initial AI request. 
*/ private void initializeChatFrame() { chatFrame = new JFrame("Chat with AI"); chatFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); chatFrame.setSize(600, 400); chatFrame.setLayout(new BorderLayout()); // Initialize chatTextPane with a darker background chatTextPane = new JTextPane(); chatTextPane.setEditable(false); chatTextPane.setBackground(Color.DARK_GRAY); // Set a darker background color chatTextPane.setForeground(Color.WHITE); // Set text color to white chatTextPane.setFont(new Font("Monospaced", Font.PLAIN, 14)); // Use a monospaced font for consistency JScrollPane scrollPane = new JScrollPane(chatTextPane); chatFrame.add(scrollPane, BorderLayout.CENTER); JPanel inputPanel = new JPanel(new BorderLayout()); userInputField = new JTextField(); sendButton = new JButton("Send"); sendButton.addActionListener(e -> processUserInput()); inputPanel.add(userInputField, BorderLayout.CENTER); inputPanel.add(sendButton, BorderLayout.EAST); loadingLabel = new JLabel("Loading AI response..."); loadingLabel.setHorizontalAlignment(JLabel.CENTER); loadingLabel.setForeground(Color.BLUE); loadingLabel.setVisible(false); inputPanel.add(loadingLabel, BorderLayout.NORTH); chatFrame.add(inputPanel, BorderLayout.SOUTH); // Perform initial AI request appendMessage("You", originalRequest.getInstructions() + " " + originalRequest.getConvertedText(), Color.LIGHT_GRAY); List<ChatMessage> initialMessages = new ArrayList<>(); initialMessages.add(new ChatMessage(ChatMessageRole.USER.value(), originalRequest.getInstructions() + " " + originalRequest.getConvertedText())); performStreamedChat(initialMessages); } /** * Processes the user input, appends the message to the chat, and triggers the AI response. */ private void processUserInput() { String userMessage = userInputField.getText().trim(); if (!userMessage.isEmpty()) { appendMessage("You", userMessage, Color.LIGHT_GRAY); userInputField.setText(""); List<ChatMessage> messages = new ArrayList<>(); ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a chat assistant."); messages.add(systemMessage); messages.add(new ChatMessage(ChatMessageRole.USER.value(), userMessage)); performStreamedChat(messages); } } /** * Performs a streamed chat by making an asynchronous API request and updating the UI accordingly. * * @param messages The list of chat messages to be included in the conversation. 
*/ private void performStreamedChat(List<ChatMessage> messages) { SwingWorker<String, Void> worker = new SwingWorker<>() { @Override protected String doInBackground() { try { // Disable user input userInputField.setEnabled(false); // Show loading label loadingLabel.setVisible(true); // Add the default system message to the conversation messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), originalRequest.getInstructions() + ": " + originalRequest.getConvertedText())); // Perform API request ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder().model("gpt-4-1106-preview").messages(messages).maxTokens(2000).temperature(0.5).build(); return service.createChatCompletion(chatCompletionRequest).getChoices().get(0).getMessage().getContent(); } catch (Exception e) { e.printStackTrace(); showErrorDialog("An error occurred while communicating with the OpenAI API."); return null; } } @Override protected void done() { try { String aiResponse = get(); if (aiResponse != null) { // Update with AI response appendMessage("AI", aiResponse, Color.ORANGE); } } catch (Exception e) { e.printStackTrace(); } finally { // Enable user input after processing userInputField.setEnabled(true); // Hide loading label loadingLabel.setVisible(false); } } }; // Start the conversation asynchronously worker.execute(); } /** * Appends a message to the chat text pane with the specified sender, message, and text color. * * @param sender The sender of the message (e.g., "You", "AI"). * @param message The content of the message. * @param color The color of the text. */ private void appendMessage(String sender, String message, Color color) { StyledDocument doc = chatTextPane.getStyledDocument(); SimpleAttributeSet set = new SimpleAttributeSet(); StyleConstants.setForeground(set, color); try { doc.insertString(doc.getLength(), "\n" + sender + ":" + message + "\n", set); } catch (BadLocationException e) { e.printStackTrace(); } chatTextPane.setCaretPosition(doc.getLength()); } /** * Displays an error dialog with the specified error message. * * @param errorMessage The error message to be displayed. */ private void showErrorDialog(String errorMessage) { JOptionPane.showMessageDialog(null, errorMessage, "Error", JOptionPane.ERROR_MESSAGE); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((1395, 1439), 'org.codered.neolithic.Neolithic.getConfigReader'), ((3775, 3803), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((4410, 4440), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((4554, 4582), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((5409, 5439), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((5632, 5751), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5632, 5743), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5632, 5726), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5632, 5710), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((5632, 5691), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.ramesh.openai; import java.time.Duration; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; /*** * This project demonstrates a very basic Completion request and how the OpenAiService can be used to * generate multiple responses for one prompt. * It also shows responses with the temperature value set between 0 and 1: a low value implies * almost no hallucination (precise responses), while a high value allows more hallucination and less precision. ***/ class Completion { public static void main(String... args) { // Set the Open AI Token & Model String token = "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"; String model = "babbage-002"; // service handle for calling OpenAI APIs OpenAiService service = new OpenAiService(token, Duration.ofSeconds(30)); System.out.println("-----------------------------------------------------------"); System.out.println("Creating 3 completions with NO Hallucinations...\n"); // prompt - change this and run again and again. ChatGPT will mostly not hallucinate as the temperature (precision) is set to a low 0.2 // ChatGPT will respond with 3 messages as n=3 CompletionRequest completionRequest = CompletionRequest.builder() .model(model) .prompt("The earth goes around the sun") .echo(true) .user("testing") .temperature(0.2) .n(3) .maxTokens(20) .build(); // call ChatGPT Completion API and get the response service.createCompletion(completionRequest).getChoices().forEach( (c) -> { System.out.println(c.getText()); System.out.println("++++++++++++++"); }); // prompt - Same prompt again, but ChatGPT is more likely to hallucinate as the temperature (precision) is set to a high 0.9 // ChatGPT will respond with 3 messages as n=3 System.out.println("-----------------------------------------------------------"); System.out.println("Creating 3 completions WITH Hallucinations...\n"); completionRequest = CompletionRequest.builder() .model(model) .prompt("The earth goes around the sun") .echo(true) .user("testing") .temperature(0.9) .n(3) .maxTokens(20) .build(); // call ChatGPT Completion API and get the response service.createCompletion(completionRequest).getChoices().forEach( (c) -> { System.out.println(c.getText()); System.out.println("++++++++++++++"); }); service.shutdownExecutor(); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((1269, 1556), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1269, 1531), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1269, 1500), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1269, 1478), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1269, 1444), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1269, 1411), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1269, 1383), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1269, 1326), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2184, 2471), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2184, 2446), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2184, 2415), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2184, 2393), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2184, 2359), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2184, 2326), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2184, 2298), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((2184, 2241), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package br.com.alura.screenmatch.services; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; public class ConsultaChatGPT { public static String obterTraducao(String texto) { OpenAiService service = new OpenAiService("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"); CompletionRequest requisicao = CompletionRequest.builder() .model("gpt-3.5-turbo-0613") .prompt("Traduza para o português o texto: " + texto) .maxTokens(1000) .temperature(0.7) .build(); var resposta = service.createCompletion(requisicao); return resposta.getChoices().get(0).getText(); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((403, 658), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((403, 629), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((403, 591), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((403, 554), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((403, 479), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.feiwanghub.subcontroller.openai.service; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.completion.CompletionResult; import com.theokanning.openai.service.OpenAiService; import okhttp3.*; import org.jetbrains.annotations.TestOnly; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; import java.io.IOException; @Service public class ChatGPTService { private static final MediaType JSON = MediaType.get("application/json; charset=utf-8"); private final OkHttpClient client = new OkHttpClient(); private final String apiKey; public ChatGPTService(@Value("${openai.apiKey}") String apiKey) { this.apiKey = apiKey; } public String generateText(String prompt) throws IOException { RequestBody requestBody = RequestBody.create( "{\"prompt\": \"" + prompt + "\", \"temperature\": 0.5, \"model\": \"text-davinci-003\", \"max_tokens\": 100}", JSON ); Request request = new Request.Builder() .url("https://api.openai.com/v1/completions") .addHeader("Authorization", "Bearer " + apiKey) .post(requestBody) .build(); try (Response response = client.newCall(request).execute()) { return response.body().string(); } } @TestOnly public String generateTextByOpenAI(String prompt) { OpenAiService service = new OpenAiService(apiKey); CompletionRequest completionRequest = CompletionRequest.builder() .prompt(prompt) .maxTokens(100) .temperature(0.5) .build(); CompletionResult completion = service.createCompletion(completionRequest); completion.getChoices().forEach(choice -> System.out.println(choice.getText())); return completion.getChoices().get(0).getText(); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((1578, 1728), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1578, 1703), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1578, 1669), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((1578, 1637), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.example.demo.service; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.service.OpenAiService; public class ConsultaChatGpt { public static String obterTraducao(String texto) { OpenAiService service = new OpenAiService(System.getenv("OPENAI_APIKEY")); CompletionRequest requisicao = CompletionRequest.builder() .model("text-davinci-003") .prompt("traduza para o português o texto: " + texto) .maxTokens(1000) .temperature(0.7) .build(); var resposta = service.createCompletion(requisicao); return resposta.getChoices().get(0).getText(); } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((357, 555), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((357, 537), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((357, 510), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((357, 484), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((357, 420), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.memsource.hackaton.llmappliedontm.infrastructure.openai; import com.theokanning.openai.completion.CompletionChoice; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.model.Model; import com.theokanning.openai.service.OpenAiService; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; import java.util.List; @Component @Slf4j @RequiredArgsConstructor public class OpenAiClient { private final OpenAiService openAiService; private final ChatbotServiceConfig config; public List<String> listAvailableModels() { return openAiService.listModels().stream().map(Model::getId).sorted().toList(); } public String callChatbot(String prompt, String model) { log.info("Calling OpenAI with prompt: {}", prompt); CompletionRequest completionRequest = CompletionRequest.builder() .prompt(prompt) .model(model) .echo(config.isEcho()) .maxTokens(config.getMaxTokens()) .frequencyPenalty(0.0) .presencePenalty(0.0) .bestOf(1) .topP(1.0) .temperature(1.0) .build(); List<CompletionChoice> choices = openAiService.createCompletion(completionRequest).getChoices(); log.info("number of choices: {}", choices.size()); log.info("finish reason of the first choice: {}", choices.stream().findFirst().map(CompletionChoice::getFinish_reason).orElseThrow()); String response = choices.stream().findFirst() .map(CompletionChoice::getText).orElseThrow(); log.info("OpenAI response: {}", response); return response; } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((909, 1277), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((909, 1252), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((909, 1218), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((909, 1191), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((909, 1164), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((909, 1126), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((909, 1087), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((909, 1037), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((909, 998), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((909, 968), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.humber.parenthood.eat_in_layout; import android.content.Intent; import android.os.Bundle; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.Toast; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.appcompat.widget.SearchView; import androidx.fragment.app.Fragment; import androidx.recyclerview.widget.GridLayoutManager; import androidx.recyclerview.widget.RecyclerView; import com.humber.parenthood.CookAtHomeActivity; import com.humber.parenthood.OpenAIAsyncTask; import com.humber.parenthood.R; import com.theokanning.openai.completion.CompletionRequest; import com.theokanning.openai.completion.CompletionResult; import com.theokanning.openai.service.OpenAiService; import java.util.ArrayList; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; public class IngredientPicker extends Fragment { private final ArrayList<String> fridgeItems = KitchenItems.getItems(); private final String OPENAI_SERVICE_TOKEN = ""; private ItemAdaptor adapter; private ArrayList<ItemModel> modelArrayList; private final ItemClickListener itemClickListener = new ItemClickListener() { @Override public void onClick(ItemModel items) { // Log.d("@Harman", "onClick: item select"); int position = modelArrayList.indexOf(items); if (position != RecyclerView.NO_POSITION) { ItemModel item = modelArrayList.get(position); item.setSelected(!item.getSelected()); adapter.notifyItemChanged(position); } } @Override public void onLongClick(ItemModel items) { // Log.d("@Harman", "onLongClick: item fav"); int position = modelArrayList.indexOf(items); if (position != RecyclerView.NO_POSITION) { ItemModel item = modelArrayList.get(position); item.setFavourite(!item.getFavourite()); adapter.notifyItemChanged(position); } } }; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { return inflater.inflate(R.layout.fragment_ingredient_picker, container, false); } @Override public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); modelArrayList = new ArrayList<>(); RecyclerView recyclerView = view.findViewById(R.id.itemsRV); Button printSelectionButton = view.findViewById(R.id.button2); SearchView searchBar = view.findViewById(R.id.search_bar); setItems(modelArrayList); recyclerView.setLayoutManager(new GridLayoutManager(view.getContext(), 4)); adapter = new ItemAdaptor(modelArrayList, view.getContext()); recyclerView.setAdapter(adapter); searchBar.setOnQueryTextListener(new SearchView.OnQueryTextListener() { @Override public boolean onQueryTextSubmit(String query) { return false; } @Override public boolean onQueryTextChange(String newText) { // Filter the modelArrayList based on the query text ArrayList<ItemModel> filteredList = filter(modelArrayList, newText); // Update the adapter with the filtered list adapter.updateList(filteredList); adapter.notifyDataSetChanged(); return true; } }); printSelectionButton.setOnClickListener(v -> { if (OPENAI_SERVICE_TOKEN.equals("")) { Toast.makeText(getContext(), "Please add your OpenAI service token in IngredientPicker.java", Toast.LENGTH_LONG).show(); return; } ArrayList<String> selectedItems = new ArrayList<>(); for (ItemModel model : modelArrayList) { if (model.selected) { // Log.d("@Harman", model.getName()); selectedItems.add(model.getName()); } } String prompt = "make a random recipe"; if 
(selectedItems.size() != 0) { prompt = "I have " + selectedItems + " in my fridge. Can you provide me a recipe with these items?"; } OpenAiService openAiService = new OpenAiService(OPENAI_SERVICE_TOKEN); CompletionRequest completionRequest = CompletionRequest.builder() .prompt(prompt) .model("gpt-3.5-turbo") .temperature(1.0) .maxTokens(4000) .echo(true) .build(); new OpenAIAsyncTask(openAiService, completionRequest, new Callback<CompletionResult>() { @Override public void onResponse(@NonNull Call<CompletionResult> call, @NonNull Response<CompletionResult> response) { assert response.body() != null; // Toast.makeText(getContext(), response.body().getChoices().get(0).getText(), Toast.LENGTH_LONG).show(); Intent intent = new Intent(requireActivity(), CookAtHomeActivity.class); intent.putExtra("recipe", response.body().getChoices().get(0).getText()); startActivity(intent); } @Override public void onFailure(@NonNull Call<CompletionResult> call, @NonNull Throwable t) { t.printStackTrace(); } }).execute(); // Log.d("@Harman", "Can you provide me a recipe with these items: " + selectedItems); }); // Set the listener on the adapter adapter.setItemClickListener(itemClickListener); } private void setItems(ArrayList<ItemModel> modelArrayList) { for (int i = 0; i < fridgeItems.size(); i++) { modelArrayList.add(new ItemModel(fridgeItems.get(i))); } } private ArrayList<ItemModel> filter(ArrayList<ItemModel> models, String query) { ArrayList<ItemModel> filteredList = new ArrayList<>(); for (ItemModel model : models) { if (model.getName().toLowerCase().contains(query.toLowerCase())) { filteredList.add(model); } } return filteredList; } }
[ "com.theokanning.openai.completion.CompletionRequest.builder" ]
[((3786, 3905), 'android.widget.Toast.makeText'), ((4611, 4854), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4611, 4825), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4611, 4793), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4611, 4756), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4611, 4718), 'com.theokanning.openai.completion.CompletionRequest.builder'), ((4611, 4674), 'com.theokanning.openai.completion.CompletionRequest.builder')]
package com.javadiscord.bot.utils.chatgpt; import com.theokanning.openai.OpenAiHttpException; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.time.Duration; import java.util.List; import java.util.Objects; import java.util.Optional; public class ChatGPT { private static final Logger logger = LogManager.getLogger(ChatGPT.class); private static final String API_KEY = System.getenv("CHATGPT_API_KEY"); private static final Duration TIMEOUT = Duration.ofMinutes(3); private static final String AI_MODEL = "gpt-3.5-turbo"; private final OpenAiService openAiService; private static final int MAX_TOKENS = 2000; /** * This parameter reduces the likelihood of the AI repeating itself. A higher frequency penalty * makes the model less likely to repeat the same lines verbatim. It helps in generating more * diverse and varied responses. */ private static final double FREQUENCY_PENALTY = 0.5; /** * This parameter controls the randomness of the AI's responses. A higher temperature results in * more varied, unpredictable, and creative responses. Conversely, a lower temperature makes the * model's responses more deterministic and conservative. */ private static final double TEMPERATURE = 0.8; /** * n: This parameter specifies the number of responses to generate for each prompt. If n is more * than 1, the AI will generate multiple different responses to the same prompt, each one being * a separate iteration based on the input. */ private static final int MAX_NUMBER_OF_RESPONSES = 1; public ChatGPT() { openAiService = new OpenAiService(API_KEY, TIMEOUT); ChatMessage setupMessage = new ChatMessage( ChatMessageRole.SYSTEM.value(), """ Please answer questions in 2000 characters or less. Remember to count spaces in the character limit. The context is Java Programming:\s"""); ChatCompletionRequest systemSetupRequest = ChatCompletionRequest.builder() .model(AI_MODEL) .messages(List.of(setupMessage)) .frequencyPenalty(FREQUENCY_PENALTY) .temperature(TEMPERATURE) .maxTokens(50) .n(MAX_NUMBER_OF_RESPONSES) .build(); openAiService.createChatCompletion(systemSetupRequest); } public Optional<String[]> ask(String question) { try { ChatMessage chatMessage = new ChatMessage(ChatMessageRole.USER.value(), Objects.requireNonNull(question)); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder() .model(AI_MODEL) .messages(List.of(chatMessage)) .frequencyPenalty(FREQUENCY_PENALTY) .temperature(TEMPERATURE) .maxTokens(MAX_TOKENS) .n(MAX_NUMBER_OF_RESPONSES) .build(); String response = openAiService .createChatCompletion(chatCompletionRequest) .getChoices() .getFirst() .getMessage() .getContent(); return Optional.ofNullable(ChatGPTResponseParser.parse(response)); } catch (OpenAiHttpException openAiHttpException) { logger.warn( String.format( "There was an error using the OpenAI API: %s Code: %s Type: %s Status" + " Code: %s", openAiHttpException.getMessage(), openAiHttpException.code, openAiHttpException.type, openAiHttpException.statusCode)); } catch (RuntimeException runtimeException) { logger.warn( "There was an error using the OpenAI API: " + runtimeException.getMessage()); } return Optional.empty(); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value", "com.theokanning.openai.completion.chat.ChatMessageRole.USER.value", "com.theokanning.openai.completion.chat.ChatCompletionRequest.builder" ]
[((2072, 2102), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value'), ((2389, 2753), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2389, 2720), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2389, 2668), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2389, 2629), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2389, 2579), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2389, 2518), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2389, 2461), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((2968, 2996), 'com.theokanning.openai.completion.chat.ChatMessageRole.USER.value'), ((3112, 3511), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3112, 3474), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3112, 3418), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3112, 3367), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3112, 3313), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3112, 3248), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder'), ((3112, 3188), 'com.theokanning.openai.completion.chat.ChatCompletionRequest.builder')]
package com.acc.genithon.repository; import com.theokanning.openai.completion.chat.ChatCompletionRequest; import com.theokanning.openai.completion.chat.ChatMessage; import com.theokanning.openai.completion.chat.ChatMessageRole; import com.theokanning.openai.service.OpenAiService; import org.apache.logging.log4j.util.Strings; import org.springframework.stereotype.Component; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @Component public class ReviewSummaryClient { public String getSummary(String review) { if(Strings.isBlank(review)) { return "Product review not available"; } OpenAiService service = new OpenAiService("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", Duration.ofSeconds(6000)); final List<ChatMessage> messages = new ArrayList<>(); final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "write summary and tags from the customer reviews in 1 line. I am going to display the summary on the product page, " + review); messages.add(systemMessage); ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest .builder() .model("gpt-3.5-turbo") .messages(messages) .n(1) .maxTokens(50) .logitBias(new HashMap<>()) .build(); StringBuilder builder = new StringBuilder(); service.streamChatCompletion(chatCompletionRequest) .doOnError(Throwable::printStackTrace) .blockingForEach(req -> { ChatMessage message = req.getChoices().get(0).getMessage(); System.out.println( message.getContent()); builder.append(message.getContent()); }); System.out.println("\nCreating completion..."); return builder.toString(); } }
[ "com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value" ]
[((930, 960), 'com.theokanning.openai.completion.chat.ChatMessageRole.SYSTEM.value')]