Commit • d859e23
Parent(s): 65b1bdb
progress on the glif viewer
- src/components/editors/WorkflowEditor/clapWorkflowToReactWorkflow.ts +13 -0
- src/components/editors/WorkflowEditor/index.tsx +4 -0
- src/components/editors/WorkflowEditor/samples/glif.ts +193 -0
- src/components/editors/WorkflowEditor/specialized/comfyui/types.ts +60 -0
- src/components/editors/WorkflowEditor/specialized/falai/types.ts +64 -0
- src/components/editors/WorkflowEditor/specialized/glif/glifToReactWorkflow.ts +37 -0
- src/components/editors/WorkflowEditor/specialized/glif/types.ts +67 -0
- src/components/editors/WorkflowEditor/types.ts +32 -0
- src/components/editors/WorkflowEditor/viewer/{Node.tsx → NodeView.tsx} +10 -20
- src/components/editors/WorkflowEditor/viewer/WorkflowView.tsx +21 -42
src/components/editors/WorkflowEditor/clapWorkflowToReactWorkflow.ts
ADDED
@@ -0,0 +1,13 @@
+import { ClapWorkflow, ClapWorkflowEngine } from '@aitube/clap'
+
+import { ReactWorkflow } from './types'
+import { glifToReactWorkflow } from './specialized/glif/glifToReactWorkflow'
+
+export function clapWorkflowToReactWorkflow(
+  clapWorkflow: ClapWorkflow
+): ReactWorkflow {
+  if (clapWorkflow.engine === ClapWorkflowEngine.GLIF_WORKFLOW) {
+    return glifToReactWorkflow(JSON.parse(clapWorkflow.data))
+  }
+  return { nodes: [], edges: [] }
+}
src/components/editors/WorkflowEditor/index.tsx
CHANGED
@@ -4,6 +4,7 @@ import { FormInput } from '@/components/forms/FormInput'
 import { FormSection } from '@/components/forms/FormSection'
 import { useWorkflowEditor } from '@/services/editors'
 import { useUI } from '@/services'
+import { WorkflowView } from './viewer/WorkflowView'
 
 export function WorkflowEditor() {
   const current = useWorkflowEditor((s) => s.current)
@@ -14,6 +15,9 @@ export function WorkflowEditor() {
 
   const hasBetaAccess = useUI((s) => s.hasBetaAccess)
 
+  if (hasBetaAccess) {
+    return <WorkflowView />
+  }
   if (!current) {
     return (
       <FormSection label={'Workflow Editor'} className="p-4">
src/components/editors/WorkflowEditor/samples/glif.ts
ADDED
@@ -0,0 +1,193 @@
+import { GlifWorkflow } from '../specialized/glif/types'
+
+export const glifs: GlifWorkflow[] = [
+  {
+    id: 'clz0abcl70000lqaglpuocakq',
+    name: 'Any Image Animator',
+    imageUrl: null,
+    description: 'Based on @angrypenguin AnimateDiff LCM',
+    createdAt: '2024-07-24T20:15:53.467Z',
+    updatedAt: '2024-07-25T19:46:59.431Z',
+    publishedAt: '2024-07-25T16:22:10.029Z',
+    output:
+      'https://res.cloudinary.com/dkpfhyd71-comfy/image/upload/v1721925035/glif-comfy/b38bd8b3-2ced-4ab5-86da-6f1b14211e82.webp',
+    outputType: 'IMAGE',
+    forkedFromId: 'clyzh5p3e0000fol0z19xo2fe',
+    featuredAt: null,
+    userId: 'clvcyis0h000aowumdmmdij5l',
+    completedSpellRunCount: 37,
+    averageDuration: 88506,
+    user: {
+      id: 'clvcyis0h000aowumdmmdij5l',
+      name: 'Anibaaal',
+      image:
+        'https://lh3.googleusercontent.com/a/ACg8ocKi97yHdJFp8o6gXOWiWSN6eepnpR4XBHbQaT3MJGmJfymnrjIqfg=s96-c',
+      username: 'Anibaaal',
+    },
+    _count: {
+      likes: 1,
+      comments: 0,
+    },
+    spheres: [],
+    data: {
+      nodes: [
+        {
+          name: 'image',
+          type: 'ImageInputBlock',
+          params: {
+            label: null,
+            value: null,
+            source: 'upload',
+          },
+        },
+        {
+          name: 'add-prompt',
+          type: 'TextInputBlock',
+          params: {
+            label:
+              'Additional details (e.g. character name, specific style) (optional)',
+            value: ' ',
+          },
+        },
+        {
+          name: 'creativity',
+          type: 'MultipickBlock',
+          params: {
+            label: 'Creativity (Lower keeps more the original image)',
+            value: 'Low',
+            options: ['Very low', 'Low', 'Medium', 'High', 'Very high', 'Max'],
+            randomize: false,
+          },
+        },
+        {
+          name: 'format',
+          type: 'MultipickBlock',
+          params: {
+            label: 'Format',
+            value: 'MP4 Video (High quality - High res)',
+            options: [
+              'MP4 Video (High quality - High res)',
+              'Animated GIF (Low quality - Low res)',
+              'Animated WebP (High quality - High res. Not supported everywhere)',
+            ],
+            randomize: false,
+          },
+        },
+        {
+          name: 'cfg',
+          type: 'CombinerBlock',
+          params: {
+            value: '1.5',
+          },
+        },
+        {
+          name: 'vision',
+          type: 'GlifBlock',
+          params: {
+            id: 'clu9u812w0000egv40lzjg0gi',
+            inputValues: [
+              'What is a concise and precise prompt for this image applying deep knowledge of visual arts?',
+              '{image}',
+              '300',
+            ],
+          },
+        },
+        {
+          name: 'design',
+          type: 'GPTBlock',
+          params: {
+            model: 'gpt-4',
+            prompt:
+              'You are a creative AI designed to generate detailed image and animation prompts based on an image description. For each image, provide the following details in JSON format:\n- An image prompt detailing the entity\'s appearance and surroundings\n- An animation prompt fitting for the image\n\nExample image description: "A cartoon image of a dog is depicted, with muscles visible on its chest. The background features mountains, a building, and a sun in the sky. The overall color scheme is orange and red, with a pinkish hue to the dog."\n\nYour output should follow this JSON template:\n\n{\n "image_prompt": "A cartoon image of a muscular dog with visible chest muscles. The dog has a pinkish hue. The background features majestic mountains, a building, and a bright sun in the sky. The overall color scheme is dominated by vibrant shades of orange and red.",\n "animation_prompt": "The muscular dog flexes its chest muscles proudly. The sun shines brightly in the sky, casting warm hues over the scene. The mountains in the background appear to slowly move past, giving a sense of depth, while the building stands firmly. The dog occasionally wags its tail and barks."\n}\n\nProvide the JSON output for the following image description: "{add-prompt}. {vision}"\n',
+            jsonMode: false,
+            maxTokens: 1000,
+            temperature: 1,
+            systemPrompt: 'You are a helpful assistant.',
+          },
+        },
+        {
+          name: 'set-format',
+          type: 'GPTBlock',
+          params: {
+            model: 'gpt-4o-mini',
+            prompt:
+              'Follow this:\n\nFor input = MP4 Video (High quality - High res), output = \n{"format": "video/h264-mp4", "crf": "25", "size": "1"}\nFor input = Animated GIF (Low quality - Low res), output = \n{"format": "image/gif", "crf": "300", "size": "0.3125"}\nFor input = Animated WebP (High quality - High res. Not supported everywhere), output = \n{"format": "image/webp", "crf": "100", "size": "1"}\n\nNo intro, just go for input = {format}, output = \n',
+            jsonMode: false,
+            maxTokens: 100,
+            temperature: 0.5,
+            systemPrompt: 'You are a helpful assistant.',
+          },
+        },
+        {
+          name: 'set-creativity',
+          type: 'GPTBlock',
+          params: {
+            model: 'gpt-4o-mini',
+            prompt:
+              'Follow this:\n\nFor input = Very low, output = {"min": "70", "max": "80"}\nFor input = Low, output = {"min": "35", "max": "45"}\nFor input = Medium, output = {"min": "20", "max": "30"}\nFor input = High, output = {"min": "10", "max": "20"}\nFor input = Very high, output = {"min": "3", "max": "13"}\nFor input = Max, output = {"min": "0", "max": "10"}\n\nNo intro, just go for input = {creativity}, output = \n',
+            jsonMode: false,
+            maxTokens: 100,
+            temperature: 0.5,
+            systemPrompt: 'You are a helpful assistant.',
+          },
+        },
+        {
+          name: 'prompts',
+          type: 'JSONBlock',
+          params: {
+            paths: {
+              image: {
+                path: 'image_prompt',
+                fallback: null,
+              },
+              animation: {
+                path: 'animation_prompt',
+                fallback: null,
+              },
+            },
+            value: '{design}',
+          },
+        },
+        {
+          name: 'settings',
+          type: 'JSONBlock',
+          params: {
+            paths: {
+              crf: {
+                path: 'output.crf',
+                fallback: null,
+              },
+              max: {
+                path: 'creativity.max',
+                fallback: null,
+              },
+              min: {
+                path: 'creativity.min',
+                fallback: null,
+              },
+              size: {
+                path: 'output.size',
+                fallback: null,
+              },
+              format: {
+                path: 'output.format',
+                fallback: null,
+              },
+            },
+            value: '{"output": {set-format}, "creativity": {set-creativity}}',
+          },
+        },
+        {
+          name: 'animate',
+          type: 'ComfyBlock',
+          params: {
+            seed: null,
+            value:
+              '{\n "1": {\n "inputs": {\n "ckpt_name": "DreamShaper8_LCM.safetensors",\n "vae_name": "Baked VAE",\n "clip_skip": -1,\n "lora_name": "None",\n "lora_model_strength": -2.2600000000000002,\n "lora_clip_strength": 0.14,\n "positive": "{prompts.image} {prompts.animation}, 4k",\n "negative": "watermark, text, signature, blurry, nsfw, children, nude, naked",\n "token_normalization": "none",\n "weight_interpretation": "comfy",\n "empty_latent_width": 512,\n "empty_latent_height": 512,\n "batch_size": 32\n },\n "class_type": "Efficient Loader",\n "_meta": {\n "title": "Efficient Loader"\n }\n },\n "3": {\n "inputs": {\n "lora_name": "glimmerAnimateDiff_v3.safetensors",\n "strength": 0.8\n },\n "class_type": "ADE_AnimateDiffLoRALoader",\n "_meta": {\n "title": "Load AnimateDiff LoRA 🎭🅐🅓"\n }\n },\n "4": {\n "inputs": {\n "context_length": 16,\n "context_stride": 1,\n "context_overlap": 4,\n "closed_loop": false,\n "fuse_method": "pyramid",\n "use_on_equal_length": false,\n "start_percent": 0,\n "guarantee_steps": 1\n },\n "class_type": "ADE_LoopedUniformContextOptions",\n "_meta": {\n "title": "Context Options◆Looped Uniform 🎭🅐🅓"\n }\n },\n "6": {\n "inputs": {\n "frame_rate": 14,\n "loop_count": 0,\n "filename_prefix": "AnimateDiff",\n "format": "{settings.format}",\n "pingpong": false,\n "save_output": true,\n "pix_fmt": "yuv420p",\n "crf": {settings.crf},\n "save_metadata": true,\n "images": [\n "32",\n 0\n ]\n },\n "class_type": "VHS_VideoCombine",\n "_meta": {\n "title": "Video Combine 🎥🅥🅗🅢"\n }\n },\n "7": {\n "inputs": {\n "upscale_type": "latent",\n "hires_ckpt_name": "(use same)",\n "latent_upscaler": "ttl_nn.SD 1.x",\n "pixel_upscaler": "4x-AnimeSharp.pth",\n "upscale_by": 2,\n "use_same_seed": true,\n "seed": 0,\n "hires_steps": 8,\n "denoise": 0.6,\n "iterations": 1,\n "use_controlnet": false,\n "control_net_name": "OpenPoseXL2.safetensors",\n "strength": 1,\n "preprocessor": "none",\n "preprocessor_imgs": false\n },\n "class_type": "HighRes-Fix Script",\n "_meta": {\n "title": "HighRes-Fix Script"\n }\n },\n "8": {\n "inputs": {\n "model_name": "AnimateLCM_sd15_t2v.ckpt",\n "beta_schedule": "lcm \u003E\u003E sqrt_linear",\n "motion_scale": 1.2,\n "apply_v2_models_properly": false,\n "model": [\n "1",\n 0\n ],\n "context_options": [\n "4",\n 0\n ],\n "motion_lora": [\n "3",\n 0\n ]\n },\n "class_type": "ADE_AnimateDiffLoaderWithContext",\n "_meta": {\n "title": "AnimateDiff Loader [Legacy] 🎭🅐🅓①"\n }\n },\n "9": {\n "inputs": {\n "image": "{image}"\n },\n "class_type": "LoadImage",\n "_meta": {\n "title": "Load Image"\n }\n },\n "12": {\n "inputs": {\n "pixels": [\n "40",\n 0\n ],\n "vae": [\n "1",\n 4\n ]\n },\n "class_type": "VAEEncode",\n "_meta": {\n "title": "VAE Encode"\n }\n },\n "13": {\n "inputs": {\n "multiply_by": 32,\n "latents": [\n "12",\n 0\n ]\n },\n "class_type": "VHS_DuplicateLatents",\n "_meta": {\n "title": "Duplicate Latent Batch 🎥🅥🅗🅢"\n }\n },\n "28": {\n "inputs": {\n "add_noise": "enable",\n "noise_seed": 0,\n "steps": {settings.max},\n "cfg": {cfg},\n "sampler_name": "lcm",\n "scheduler": "sgm_uniform",\n "start_at_step": {settings.min},\n "end_at_step": 10000,\n "return_with_leftover_noise": "disable",\n "preview_method": "auto",\n "vae_decode": "true",\n "model": [\n "8",\n 0\n ],\n "positive": [\n "1",\n 1\n ],\n "negative": [\n "1",\n 2\n ],\n "latent_image": [\n "13",\n 0\n ],\n "optional_vae": [\n "1",\n 4\n ],\n "script": [\n "7",\n 0\n ]\n },\n "class_type": "KSampler Adv. (Efficient)",\n "_meta": {\n "title": "KSampler Adv. (Efficient)"\n }\n },\n "32": {\n "inputs": {\n "upscale_method": "lanczos",\n "scale_by": {settings.size},\n "image": [\n "28",\n 5\n ]\n },\n "class_type": "ImageScaleBy",\n "_meta": {\n "title": "Upscale Image By"\n }\n },\n "40": {\n "inputs": {\n "upscale_method": "nearest-exact",\n "megapixels": 0.26,\n "image": [\n "9",\n 0\n ]\n },\n "class_type": "ImageScaleToTotalPixels",\n "_meta": {\n "title": "ImageScaleToTotalPixels"\n }\n }\n}',
+            fixSeed: false,
+          },
+        },
+      ],
+    },
+  },
+]
src/components/editors/WorkflowEditor/specialized/comfyui/types.ts
ADDED
@@ -0,0 +1,60 @@
+export type ComfyuiWorkflow = {
+  extra: {
+    ds: {
+      scale: number
+      offset: number[]
+    }
+  }
+  links: [number, number, number, number, number, string][]
+  nodes: ComfyuiWorkflowNode[]
+  config: Record<string, any>
+  groups: ComfyuiWorkflowGroup[]
+  version: number
+  last_link_id: number
+  last_node_id: number
+}
+
+export type ComfyuiWorkflowNode = {
+  id: number
+  pos: number[]
+  mode: number
+  size: number[]
+  type: string
+  color?: string
+  flags: {
+    collapsed?: boolean
+  }
+  order: number
+  inputs?: ComfyuiWorkflowNodeInput[]
+  outputs?: ComfyuiWorkflowNodeOutput[]
+  properties: Record<string, any>
+  widgets_values?: any[]
+}
+
+export type ComfyuiWorkflowNodeInput = {
+  link?: number
+  name: string
+  type: string
+  label?: string
+  widget?: {
+    name: string
+  }
+  slot_index?: number
+}
+
+export type ComfyuiWorkflowNodeOutput = {
+  name: string
+  type: string
+  label?: string
+  links?: number[]
+  shape: number
+  slot_index?: number
+}
+
+export type ComfyuiWorkflowGroup = {
+  color: string
+  title: string
+  locked: boolean
+  bounding: number[]
+  font_size: number
+}
src/components/editors/WorkflowEditor/specialized/falai/types.ts
ADDED
@@ -0,0 +1,64 @@
+export type FalaiWorkflow = {
+  nodes: FalaiWorkflowNode[]
+  edges: FalaiWorkflowEdge[]
+  workflow: string
+}
+
+export type FalaiWorkflowNode = {
+  id: string
+  deletable: boolean
+  type: string
+  position: FalaiWorkflowNodePosition
+  data: FalaiWorkflowNodeData
+}
+
+export type FalaiWorkflowNodePosition = {
+  x: number
+  y: number
+}
+
+export type FalaiWorkflowNodeData = {
+  output?: Record<string, any>
+  app?: string
+  value?: any
+}
+
+export type FalaiWorkflowEdge = {
+  id: string
+  source: string
+  sourceHandle: string
+  target: string
+  targetHandle: string
+  type: string
+}
+
+// ---------
+
+type FalaiWorkflowBaseEvent = {
+  type: 'submit' | 'completion' | 'error' | 'output'
+  node_id: string
+}
+
+export type FalaiWorkflowSubmitEvent = FalaiWorkflowBaseEvent & {
+  type: 'submit'
+  app_id: string
+  request_id: string
+}
+
+export type FalaiWorkflowCompletionEvent<Output = any> =
+  FalaiWorkflowBaseEvent & {
+    type: 'completion'
+    app_id: string
+    output: Output
+  }
+
+export type FalaiWorkflowDoneEvent<Output = any> = FalaiWorkflowBaseEvent & {
+  type: 'output'
+  output: Output
+}
+
+export type FalaiWorkflowErrorEvent = FalaiWorkflowBaseEvent & {
+  type: 'error'
+  message: string
+  error: any
+}
src/components/editors/WorkflowEditor/specialized/glif/glifToReactWorkflow.ts
ADDED
@@ -0,0 +1,37 @@
+import { GlifWorkflow } from './types'
+
+import {
+  ReactWorkflow,
+  ReactWorkflowEdge,
+  ReactWorkflowNode,
+} from '../../types'
+
+export function glifToReactWorkflow(glif: GlifWorkflow): ReactWorkflow {
+  const nodes: ReactWorkflowNode[] = glif.data.nodes.map((node, i) => ({
+    id: node.name,
+    type: 'custom',
+    data: node,
+    position: { x: 0, y: i * 100 },
+  }))
+
+  const edges: ReactWorkflowEdge[] = []
+
+  for (let i = 0; i < nodes.length; i++) {
+    const source = `${nodes[i]?.id || ''}`
+    const target = `${nodes[i + 1]?.id || ''}`
+    if (!source || !target) {
+      continue
+    }
+    if (source === target) {
+      continue
+    }
+
+    edges.push({
+      id: `${source}->${target}`,
+      source,
+      target,
+    })
+  }
+
+  return { nodes, edges }
+}
src/components/editors/WorkflowEditor/specialized/glif/types.ts
ADDED
@@ -0,0 +1,67 @@
+export type GlifWorkflow = {
+  id: string
+  name: string
+  imageUrl: any
+  description: string
+  createdAt: string
+  updatedAt: string
+  publishedAt: string
+  output: string
+  outputType: string
+  forkedFromId: string
+  featuredAt: any
+  userId: string
+  completedSpellRunCount: number
+  averageDuration: number
+  user: GlifWorkflowUser
+  _count: GlifWorkflowCount
+  spheres: any[]
+  data: GlifWorkflowData
+}
+
+export type GlifWorkflowUser = {
+  id: string
+  name: string
+  image: string
+  username: string
+}
+
+export type GlifWorkflowCount = {
+  likes: number
+  comments: number
+}
+
+export type GlifWorkflowData = {
+  nodes: GlifWorkflowNode[]
+}
+
+export type GlifWorkflowNode = {
+  name: string
+  type: string
+  params: GlifWorkflowParams
+}
+
+// or we use a Record<string, any>
+export interface GlifWorkflowParams {
+  label?: string | null
+  value?: string | null
+  source?: string | null
+  options?: string[] | null
+  randomize?: boolean | null
+  id?: string
+  inputValues?: string[] | null
+  model?: string | null
+  prompt?: string | null
+  jsonMode?: boolean | null
+  maxTokens?: number | null
+  temperature?: number | null
+  systemPrompt?: string | null
+  paths?: Record<string, GlifWorkflowPath> | null
+  seed?: string | number | null
+  fixSeed?: boolean | null
+}
+
+export type GlifWorkflowPath = {
+  path: string
+  fallback: any
+}
src/components/editors/WorkflowEditor/types.ts
ADDED
@@ -0,0 +1,32 @@
+import { Node, Edge } from '@xyflow/react'
+
+export type ReactWorkflowNode = Node & {
+  id: string
+  type: string
+  position?: ReactWorkflowNodePosition
+  // size?: WorkflowNodeSize
+  data?: ReactWorkflowNodeData
+}
+
+export type ReactWorkflowNodePosition = {
+  x: number
+  y: number
+}
+
+/*
+export type WorkflowNodeSize = {
+  width?: number
+  height?: number
+}
+*/
+
+export type ReactWorkflowNodeData = {
+  label?: string
+} & Record<string, any>
+
+export type ReactWorkflowEdge = Edge & {}
+
+export type ReactWorkflow = {
+  nodes: ReactWorkflowNode[]
+  edges: ReactWorkflowEdge[]
+}
src/components/editors/WorkflowEditor/viewer/{Node.tsx → NodeView.tsx}
RENAMED
@@ -1,39 +1,29 @@
 import React, { memo } from 'react'
 import { Handle, Position } from '@xyflow/react'
 
-
-
-}: {
-  data: {
-    name: string
-    job: string
-    emoji: string
-  }
-}) {
+import { ReactWorkflowNode } from '../types'
+
+function NodeComponent({ data }: ReactWorkflowNode) {
   return (
-    <div className="rounded-md border-2 border-stone-400 bg-
-      <div className="flex">
-        <div className="
-          {data
-        </div>
-        <div className="ml-2">
-          <div className="text-lg font-bold">{data.name}</div>
-          <div className="text-gray-500">{data.job}</div>
+    <div className="rounded-md border-2 border-stone-400 bg-stone-50 px-4 py-2 shadow-md">
+      <div className="flex flex-col">
+        <div className="text-lg font-bold text-gray-950">
+          {data?.name || ''}
         </div>
       </div>
 
       <Handle
         type="target"
         position={Position.Top}
-        className="w-16 !bg-
+        className="w-16 !bg-indigo-500"
       />
       <Handle
         type="source"
         position={Position.Bottom}
-        className="w-16 !bg-
+        className="w-16 !bg-indigo-500"
       />
     </div>
   )
 }
 
-export const
+export const NodeView = memo(NodeComponent)
src/components/editors/WorkflowEditor/viewer/WorkflowView.tsx
CHANGED
@@ -1,4 +1,4 @@
-import React, { useCallback } from 'react'
+import React, { useCallback, useEffect } from 'react'
 import {
   ReactFlow,
   useNodesState,
@@ -7,54 +7,33 @@
   MiniMap,
   Controls,
   OnConnect,
+  Node,
+  Edge,
 } from '@xyflow/react'
 
 import '@xyflow/react/dist/base.css'
 
-import {
+import { NodeView } from './NodeView'
+import { ReactWorkflowEdge, ReactWorkflowNode } from '../types'
+import { useWorkflowEditor } from '@/services/editors'
+
+import { glifs } from '../samples/glif'
+import { glifToReactWorkflow } from '../specialized/glif/glifToReactWorkflow'
 
 const nodeTypes = {
-  custom:
+  custom: NodeView,
 }
 
-const initNodes = [
-  {
-    id: '1',
-    type: 'custom',
-    data: { name: 'Jane Doe', job: 'CEO', emoji: '😎' },
-    position: { x: 0, y: 50 },
-  },
-  {
-    id: '2',
-    type: 'custom',
-    data: { name: 'Tyler Weary', job: 'Designer', emoji: '🤓' },
-
-    position: { x: -200, y: 200 },
-  },
-  {
-    id: '3',
-    type: 'custom',
-    data: { name: 'Kristi Price', job: 'Developer', emoji: '🤩' },
-    position: { x: 200, y: 200 },
-  },
-]
-
-const initEdges = [
-  {
-    id: 'e1-2',
-    source: '1',
-    target: '2',
-  },
-  {
-    id: 'e1-3',
-    source: '1',
-    target: '3',
-  },
-]
-
 export function WorkflowView() {
-  const
-  const [
+  const current = useWorkflowEditor((s) => s.current)
+  const [nodes, setNodes, onNodesChange] = useNodesState<ReactWorkflowNode>([])
+  const [edges, setEdges, onEdgesChange] = useEdgesState<ReactWorkflowEdge>([])
+
+  useEffect(() => {
+    const { nodes, edges } = glifToReactWorkflow(glifs[0])
+    setNodes(nodes)
+    setEdges(edges)
+  }, [])
 
   const onConnect: OnConnect = useCallback(
     (params) => setEdges((eds) => addEdge(params, eds)),
@@ -62,13 +41,13 @@ export function WorkflowView() {
   )
 
   return (
-    <ReactFlow
+    <ReactFlow<ReactWorkflowNode>
       nodes={nodes}
      edges={edges}
       onNodesChange={onNodesChange}
       onEdgesChange={onEdgesChange}
       onConnect={onConnect}
-      nodeTypes={nodeTypes}
+      nodeTypes={nodeTypes as any}
       fitView
       className="bg-teal-50"
     >