PauldeLav commited on
Commit
8f969b7
·
verified ·
1 Parent(s): 1507b97

Upload 2 files

Browse files
Files changed (2) hide show
  1. comfyui-chatgpt-node.py +48 -0
  2. comfyui-claude-node.py +84 -0
comfyui-chatgpt-node.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import requests
2
+ from typing import Dict, Any
3
+
4
class OpenAIChatNode:
    """ComfyUI node that sends a chat-completion request to the OpenAI API.

    Takes a system prompt, a user message, and an API key, and exposes the
    assistant's reply (or an error string) as a single STRING output.
    """

    @classmethod
    def INPUT_TYPES(cls):
        """Declare the node's input sockets for the ComfyUI graph editor."""
        return {
            "required": {
                "system_prompt": ("STRING", {"multiline": True}),
                "user_input": ("STRING", {"multiline": True}),
                "api_key": ("STRING", {"multiline": False}),
            },
        }

    RETURN_TYPES = ("STRING",)
    FUNCTION = "chat_with_openai"
    CATEGORY = "OpenAI"

    def chat_with_openai(self, system_prompt: str, user_input: str, api_key: str) -> tuple:
        """Call the OpenAI chat-completions endpoint and return the reply.

        Args:
            system_prompt: Content of the "system" message.
            user_input: Content of the "user" message.
            api_key: OpenAI API key, sent as a Bearer token.

        Returns:
            A 1-tuple containing the assistant's message on success, or an
            ``Error ...`` string on a non-200 response / network failure —
            the node reports errors as its string output rather than raising.
        """
        url = "https://api.openai.com/v1/chat/completions"

        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}",
        }

        payload = {
            "model": "gpt-4",
            "messages": [
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": user_input},
            ],
        }

        # Fix: the original call had no timeout, so a stalled connection would
        # block the ComfyUI execution queue indefinitely. Network-level
        # failures are also mapped to the node's error-string convention.
        try:
            response = requests.post(url, headers=headers, json=payload, timeout=120)
        except requests.RequestException as exc:
            return (f"Error: request failed: {exc}",)

        if response.status_code == 200:
            data = response.json()
            assistant_message = data['choices'][0]['message']['content']
            return (assistant_message,)
        else:
            error_message = f"Error {response.status_code}: {response.text}"
            return (error_message,)
44
+
45
# Register the node so ComfyUI can discover it by its internal name.
NODE_CLASS_MAPPINGS = {
    "OpenAIChatNode": OpenAIChatNode
}
comfyui-claude-node.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import json
3
+ import anthropic
4
+ from typing import Dict, Any
5
+
6
class ClaudeCustomPrompt:
    """
    ComfyUI node that generates image-generation prompts using the Claude API.

    The API key can be supplied either via the optional ``api_key`` input or
    through the ``ANTHROPIC_API_KEY`` environment variable.
    """

    def __init__(self):
        # Environment variable acts as the fallback when no key is wired in.
        self.api_key = os.getenv("ANTHROPIC_API_KEY", "")
        self.client = None
        # Key the current client was built with, so we only rebuild on change.
        self._client_key = None

    @classmethod
    def INPUT_TYPES(cls) -> Dict[str, Any]:
        """Declare the node's input sockets for the ComfyUI graph editor."""
        return {
            "required": {
                "system_prompt": ("STRING", {
                    "default": "You are an AI art prompt generator. Reply only with a prompt for image generation, no explanations.",
                    "multiline": True
                }),
                "user_input": ("STRING", {
                    "default": "Generate a prompt for: space cat",
                    "multiline": True
                }),
            },
            "optional": {
                "api_key": ("STRING", {
                    "default": "",
                    "multiline": False
                }),
            }
        }

    RETURN_TYPES = ("STRING",)
    FUNCTION = "generate_prompt"
    CATEGORY = "prompt generation"

    def generate_prompt(self, system_prompt: str, user_input: str, api_key: str = "") -> tuple[str]:
        """Send ``user_input`` to Claude under ``system_prompt`` and return the reply.

        Args:
            system_prompt: System instruction for the model.
            user_input: The user message (e.g. a subject to turn into a prompt).
            api_key: Optional key overriding the environment variable.

        Returns:
            A 1-tuple with the stripped text of Claude's first content block.

        Raises:
            ValueError: If no API key is available from either source.
            RuntimeError: If the API call fails (original cause is chained).
        """
        # Use provided API key or fall back to the environment variable.
        key_to_use = api_key if api_key else self.api_key
        if not key_to_use:
            raise ValueError("No API key provided. Please set ANTHROPIC_API_KEY environment variable or provide it as input.")

        # Fix: the original recreated the client on EVERY call whenever
        # api_key was supplied; rebuild only when the effective key changes.
        if self.client is None or key_to_use != getattr(self, "_client_key", None):
            self.client = anthropic.Anthropic(api_key=key_to_use)
            self._client_key = key_to_use

        try:
            # Make API call to Claude.
            message = self.client.messages.create(
                model="claude-3-opus-20240229",
                max_tokens=1000,
                temperature=0,
                system=system_prompt,
                messages=[
                    {
                        "role": "user",
                        "content": [
                            {
                                "type": "text",
                                "text": user_input
                            }
                        ]
                    }
                ]
            )

            # Extract the generated prompt from the first content block.
            generated_prompt = message.content[0].text.strip()
            return (generated_prompt,)

        except Exception as e:
            # Chain the cause so the original traceback is preserved.
            raise RuntimeError(f"Error generating prompt: {str(e)}") from e
76
+
77
# Node registration: map the internal class name to the implementation, and
# the internal name to the label shown in ComfyUI's node menu.
NODE_CLASS_MAPPINGS = {
    "ClaudeCustomPrompt": ClaudeCustomPrompt
}

NODE_DISPLAY_NAME_MAPPINGS = {
    "ClaudeCustomPrompt": "Claude Prompt Generator"
}