@@ -70,14 +78,14 @@ export function StaticVideo({
: hasVideoContent ?
: placeholder
diff --git a/src/components/monitor/UniversalPlayer/index.tsx b/src/components/monitor/UniversalPlayer/index.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..3ada84cf92b35d55e5c6865a7f73475f2e7132d9
--- /dev/null
+++ b/src/components/monitor/UniversalPlayer/index.tsx
@@ -0,0 +1,43 @@
+import { ClapSegment } from "@aitube/clap"
+import { useTimeline } from "@aitube/timeline"
+
+import { StaticPlayer } from "../../monitor/StaticPlayer"
+
+// TODO: put this in a separate component eg @aitube-player or @aitube/monitor
+export function UniversalPlayer() {
+ const finalVideo: ClapSegment | undefined = useTimeline(s => s.finalVideo)
+
+ const assetUrl: string = finalVideo?.assetUrl || ""
+
+ console.log('finalVideo:', finalVideo)
+
+ if (assetUrl) {
+ return (
+
+
+
+ )
+ }
+
+ console.log(`TODO: render the scene dynamically`)
+
+ return (
+
+ )
+}
\ No newline at end of file
diff --git a/src/components/monitor/icons/icon-switch.tsx b/src/components/monitor/icons/icon-switch.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..1773e5ad8f0dfcdef6a85a1b05998235b5136909
--- /dev/null
+++ b/src/components/monitor/icons/icon-switch.tsx
@@ -0,0 +1,130 @@
+import { ReactNode } from "react"
+import { IconType } from "react-icons/lib"
+
+import { cn } from "@/lib/utils"
+
+import { SingleIcon } from "./single-icon"
+
+export function IconSwitch({
+ onIcon,
+ onIconAlt,
+ offIcon,
+ offIconAlt,
+ onClick,
+ isToggledOn = false,
+ isAlt = false,
+ disabled = false,
+ className = "",
+ size = "md",
+ thickOnHover = false,
+ children = null,
+ iconClass = "",
+}: {
+ onIcon: IconType
+ onIconAlt?: IconType
+ offIcon: IconType
+ offIconAlt?: IconType
+ onClick?: () => void
+ isToggledOn?: boolean
+ isAlt?: boolean
+ disabled?: boolean
+ className?: string
+ size?: "2xs" | "xs" | "sm" | "md"
+ thickOnHover?: boolean
+ children?: ReactNode
+ iconClass?: string
+}) {
+
+ const iconSize =
+ size === "2xs" ? "w-4 h-4" :
+ size === "xs" ? "w-5 h-5" :
+ size === "sm" ? "w-6 h-6" :
+ "w-8 h-8"
+
+ return (
+
{
+ if (!disabled) {
+ onClick()
+ }
+ } : undefined}
+ >
+
+
+
+
+
+
+ {children
+ ?
+ {children}
+
: null}
+
+ )
+}
\ No newline at end of file
diff --git a/src/components/monitor/icons/single-icon.tsx b/src/components/monitor/icons/single-icon.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..b7fa12020dc81b3b1504d90a31cb0ea7bf547e57
--- /dev/null
+++ b/src/components/monitor/icons/single-icon.tsx
@@ -0,0 +1,33 @@
+import { IconType } from "react-icons/lib"
+
+import { cn } from "@/lib/utils"
+
+export function SingleIcon({
+ type,
+ className = "",
+ thickOnHover = false,
+}: {
+ type?: IconType
+ className?: string
+ thickOnHover?: boolean
+}) {
+ if (!type) {
+ return null
+ }
+
+ const Icon = type
+
+ return (
+
+)
+}
\ No newline at end of file
diff --git a/src/components/monitor/index.tsx b/src/components/monitor/index.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..0f5a84b0a388b5f7b96ee20df4968482d02da6a6
--- /dev/null
+++ b/src/components/monitor/index.tsx
@@ -0,0 +1,33 @@
+
+import { MdZoomOutMap } from "react-icons/md"
+
+import { cn } from "@/lib/utils"
+import { useFullscreenStatus } from "@/lib/hooks"
+
+import { UniversalPlayer } from "./UniversalPlayer"
+import { PlayerControls } from "./PlayerControls"
+
+export function Monitor() {
+ const [isFullscreen, setFullscreen, ref] = useFullscreenStatus()
+
+ return (
+
+
+
+
+
setFullscreen()}
+ className={cn(
+ `p-2 rounded-full cursor-pointer`,
+ `transition-all duration-100 ease-in-out`,
+ isFullscreen ? `opacity-0` : `opacity-70 hover:opacity-100 scale-95 hover:scale-100`
+ )}>
+ {/**/}
+
+
+
+
+ )
+}
diff --git a/src/components/monitor/utils/splitElapsedTime.tsx b/src/components/monitor/utils/splitElapsedTime.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..a5e29cad6251e4f2c7de6f99ccd049b01ee500f9
--- /dev/null
+++ b/src/components/monitor/utils/splitElapsedTime.tsx
@@ -0,0 +1,14 @@
+export function splitElapsedTime(elapsedTime: number) {
+ const hours = Math.floor(elapsedTime / (1000 * 60 * 60)).toFixed(0);
+ elapsedTime %= 1000 * 60 * 60;
+ const minutes = Math.floor(elapsedTime / (1000 * 60)).toFixed(0);
+ elapsedTime %= 1000 * 60;
+ const seconds = Math.floor(elapsedTime / 1000).toFixed(0);
+ const milliseconds = (elapsedTime % 1000).toFixed(0);
+ return {
+ hours: parseInt(hours),
+ minutes: parseInt(minutes),
+ seconds: parseInt(seconds),
+ milliseconds: parseInt(milliseconds)
+ };
+}
\ No newline at end of file
diff --git a/src/components/monitor/utils/zeroPad.ts b/src/components/monitor/utils/zeroPad.ts
new file mode 100644
index 0000000000000000000000000000000000000000..fc44f56715c4863189b512f5e743e4a6d34d87a2
--- /dev/null
+++ b/src/components/monitor/utils/zeroPad.ts
@@ -0,0 +1,3 @@
+export function zeroPad(num: number, size = 2) {
+ return String(num).padStart(size, '0')
+}
diff --git a/src/components/core/tasks/TaskStatusUpdate.tsx b/src/components/tasks/TaskStatusUpdate.tsx
similarity index 100%
rename from src/components/core/tasks/TaskStatusUpdate.tsx
rename to src/components/tasks/TaskStatusUpdate.tsx
diff --git a/src/components/core/tasks/types.ts b/src/components/tasks/types.ts
similarity index 100%
rename from src/components/core/tasks/types.ts
rename to src/components/tasks/types.ts
diff --git a/src/components/core/tasks/useTasks.tsx b/src/components/tasks/useTasks.tsx
similarity index 100%
rename from src/components/core/tasks/useTasks.tsx
rename to src/components/tasks/useTasks.tsx
diff --git a/src/components/toolbars/top-bar/index.tsx b/src/components/toolbars/top-bar/index.tsx
index a9de52b203a57ba368cf8dc0a438cac0f381ef82..dfdb4b388583e6190d6f9c4bc0f1fbfbcebe3994 100644
--- a/src/components/toolbars/top-bar/index.tsx
+++ b/src/components/toolbars/top-bar/index.tsx
@@ -1,14 +1,12 @@
import { ClapProject } from "@aitube/clap"
-import { useTimelineState } from "@aitube/timeline"
+import { useTimeline } from "@aitube/timeline"
import { cn } from "@/lib/utils"
import { TopMenu } from "../top-menu"
-import { APP_DOMAIN, APP_NAME, APP_REVISION } from "@/lib/core/constants"
-
export function TopBar() {
- const clap: ClapProject = useTimelineState(s => s.clap)
+ const clap: ClapProject = useTimeline(s => s.clap)
return (
s.isLoading)
- const clap = useTimelineState(s => s.clap)
- const setClap = useTimelineState(s => s.setClap)
- //const saveClapAs = useTimelineState(s => s.saveClapAs)
- //const setFullVideo = useTimelineState(s => s.fullVideo)
+ const isTimelineLoading: boolean = useTimeline(s => s.isLoading)
+ const clap = useTimeline(s => s.clap)
+ const setClap = useTimeline(s => s.setClap)
+ //const saveClapAs = useTimeline(s => s.saveClapAs)
+ //const setFullVideo = useTimeline(s => s.fullVideo)
const clapPicker = useClapFilePicker()
const screenplayPicker = useScreenplayFilePicker()
const isLoading = isTimelineLoading || clapPicker.isLoading || screenplayPicker.isLoading
+ const openClapUrl = useIO(s => s.openClapUrl)
+ const saveClap = useIO(s => s.saveClap)
+ const saveVideoFile = useIO(s => s.saveVideoFile)
+ const saveKdenline = useIO(s => s.saveKdenline)
+
useEffect(() => {
(async () => {
if (!clapUrl) {
console.log("No clap URL provided")
return
}
- const res = await fetch(clapUrl)
- const blob = await res.blob()
- const clap = await parseClap(blob)
- await setClap(clap)
+ await openClapUrl(clapUrl)
})()
}, [clapUrl])
-
- const saveClap = async () => {
- const { clap } = useTimelineState.getState()
-
- if (!clap) { throw new Error(`cannot save a clap.. if there is no clap`) }
-
- // make sure we update the total duration
- for (const s of clap.segments) {
- if (s.endTimeInMs > clap.meta.durationInMs) {
- clap.meta.durationInMs = s.endTimeInMs
- }
- }
-
- const clapBlob: Blob = await serializeClap(clap)
-
- // Create an object URL for the compressed clap blob
- const objectUrl = URL.createObjectURL(clapBlob)
-
- // Create an anchor element and force browser download
- const anchor = document.createElement("a")
- anchor.href = objectUrl
-
- anchor.download = `my_project.clap`
-
- document.body.appendChild(anchor) // Append to the body (could be removed once clicked)
- anchor.click() // Trigger the download
-
- // Cleanup: revoke the object URL and remove the anchor element
- URL.revokeObjectURL(objectUrl)
- document.body.removeChild(anchor)
- }
-
- const renderAndSaveVideoFile = async () => {
- console.log(`rendering project using the free community server..`)
-
- const clap: ClapProject = useTimelineState.getState().clap
-
- // note: I didn't put it inside the clapper's own API,
- // because this is something a bit fragile
- // (it uses ffmpeg and puppeteer, sometimes it crashes etc)
- const result = await fetch(
- // TODO: put this into a variable
- // also rename this to so cool-sounding module
- `https://jbilcke-hf-ai-tube-clap-exporter.hf.space?f=mp4`,
- {
- method: "POST",
- body: await serializeClap(clap)
- }
- )
-
- const videoBlob = await result.blob()
-
- const videoDataUrl = await blobToBase64DataUri(videoBlob)
-
- const alreadyAnEmbeddedFinalVideo = clap.segments.filter(s =>
- s.category === ClapSegmentCategory.VIDEO &&
- s.status === ClapSegmentStatus.COMPLETED &&
- s.startTimeInMs === 0 &&
- s.endTimeInMs === clap.meta.durationInMs &&
- s.assetUrl).at(0)
-
- // inject the final mp4 video file into the .clap
- if (alreadyAnEmbeddedFinalVideo) {
- console.log(`editing the clap to update the final video`)
- alreadyAnEmbeddedFinalVideo.assetUrl = videoDataUrl
- } else {
- console.log(`editing the clap to add a new final video`)
- clap.segments.push(newSegment({
- category: ClapSegmentCategory.VIDEO,
- status: ClapSegmentStatus.COMPLETED,
- startTimeInMs: 0,
- endTimeInMs: clap.meta.durationInMs,
- assetUrl: videoDataUrl,
- assetDurationInMs: clap.meta.durationInMs,
- assetSourceType: getClapAssetSourceType(videoDataUrl),
- outputGain: 1.0,
- }))
- }
- // Create an object URL for the compressed clap blob
- // object urls are short-lived urls, with the benefit of having a short id too
- const objectUrl = URL.createObjectURL(videoBlob)
-
- console.log(`The free community server responded: ${result.status} ${result.statusText}`, objectUrl)
-
- // Create an anchor element and force browser download
- const anchor = document.createElement("a")
- anchor.href = objectUrl
-
- anchor.download = `export.mp4`
-
- document.body.appendChild(anchor) // Append to the body (could be removed once clicked)
- anchor.click() // Trigger the download
-
- // Cleanup: revoke the object URL and remove the anchor element
- URL.revokeObjectURL(objectUrl)
- document.body.removeChild(anchor)
-
- }
-
// const setShowSettings = useUISettings(s => s.setShowSettings)
useHotkeys('ctrl+o', () => clapPicker.openFilePicker(), { preventDefault: true }, [])
useHotkeys('meta+o', () => clapPicker.openFilePicker(), { preventDefault: true }, [])
@@ -168,7 +70,7 @@ export function TopMenuFile() {
{
- renderAndSaveVideoFile()
+ saveVideoFile()
}}>
Render project (.mp4)
@@ -179,6 +81,14 @@ export function TopMenuFile() {
Import screenplay (.txt)
+ {/*
+
{
+ saveKdenline()
+ }}>
+ Export .kdenlive
+
+
+ */}
{
diff --git a/src/components/toolbars/top-menu/index.tsx b/src/components/toolbars/top-menu/index.tsx
index e5c2a92787027959bbf52c8594c90555b75f90b3..8b471d6c401fb35cbc3eb6cff54675d4e1310897 100644
--- a/src/components/toolbars/top-menu/index.tsx
+++ b/src/components/toolbars/top-menu/index.tsx
@@ -11,12 +11,19 @@ import { TopMenuMusic } from "./music"
import { TopMenuView } from "./view"
import { cn } from "@aitube/timeline"
import { APP_REVISION } from "@/lib/core/constants"
+import Image from "next/image"
+import logo from "../../../app/logo-v2.png"
export function TopMenu() {
return (
- Clapperai
+ {/*
+ it doesn't look great when minified like this
+ */}
+ Clapper.ai
diff --git a/src/controllers/audio/README.md b/src/controllers/audio/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..ac2fbfe99d7c7157eacb9551317e61908e42a0a8
--- /dev/null
+++ b/src/controllers/audio/README.md
@@ -0,0 +1,3 @@
+
TODO: move these utilities into a separate @aitube/audio module
+
diff --git a/src/controllers/audio/analyzeAudio.ts b/src/controllers/audio/analyzeAudio.ts
new file mode 100644
index 0000000000000000000000000000000000000000..8da94085043fb67ca81f701d6943fa9be57706e2
--- /dev/null
+++ b/src/controllers/audio/analyzeAudio.ts
@@ -0,0 +1,30 @@
+import { DEFAULT_DURATION_IN_MS_PER_STEP } from "@aitube/timeline"
+
+import { getAudioBuffer } from "./getAudioBuffer"
+import { AudioAnalysis } from "./types"
+import { detectBPM } from "./detectBPM"
+
+export async function analyzeAudio(file: File): Promise {
+ const audioBuffer = await getAudioBuffer(file)
+ const durationInMs = audioBuffer.duration * 1000
+ const durationInSteps = Math.ceil(durationInMs / DEFAULT_DURATION_IN_MS_PER_STEP)
+
+ try {
+ const bpm = await detectBPM(audioBuffer)
+
+ return {
+ audioBuffer,
+ bpm,
+ durationInMs,
+ durationInSteps
+ }
+ } catch (err) {
+ console.error(`failed to detect the BPM will fall back to 120 BPM (${err})`)
+ return {
+ audioBuffer,
+ bpm: 120,
+ durationInMs,
+ durationInSteps
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/controllers/audio/detectBPM.ts b/src/controllers/audio/detectBPM.ts
new file mode 100644
index 0000000000000000000000000000000000000000..287bb6d40d5ac70cb09d62af2eb117fd937bb8d1
--- /dev/null
+++ b/src/controllers/audio/detectBPM.ts
@@ -0,0 +1,15 @@
+// for some reason VS Code is confused when using this
+// import { analyze } from "web-audio-beat-detector"
+
+// but this one works
+import { analyze } from "web-audio-beat-detector/build/es2019/module"
+
+export async function detectBPM(audioBuffer: AudioBuffer): Promise {
+ try {
+ const bpm: number = await analyze(audioBuffer)
+
+ return bpm
+ } catch (err) {
+ return 120
+ }
+}
\ No newline at end of file
diff --git a/src/controllers/audio/getAudioBuffer.ts b/src/controllers/audio/getAudioBuffer.ts
new file mode 100644
index 0000000000000000000000000000000000000000..8ece6a6e854d603826a4eb3c2d7372ed822643b9
--- /dev/null
+++ b/src/controllers/audio/getAudioBuffer.ts
@@ -0,0 +1,15 @@
+import { readFileAsArrayBuffer } from "./readFileAsArrayBuffer"
+
+export async function getAudioBuffer(file: File): Promise {
+ const audioContext = new AudioContext() // initialize AudioContext
+ const arrayBuffer = await readFileAsArrayBuffer(file)
+
+ // decode audio data from your arrayBuffer
+ return new Promise((resolve, reject) => {
+ audioContext.decodeAudioData(arrayBuffer, (buffer) => {
+ resolve(buffer)
+ }, (err) => {
+ reject(err)
+ })
+ })
+}
\ No newline at end of file
diff --git a/src/controllers/audio/getDefaultAudioState.ts b/src/controllers/audio/getDefaultAudioState.ts
new file mode 100644
index 0000000000000000000000000000000000000000..c5f2b956fafa9bf3e7b0479b0a9f9838e39cc786
--- /dev/null
+++ b/src/controllers/audio/getDefaultAudioState.ts
@@ -0,0 +1,20 @@
+import { AudioState } from "./types"
+
+export function getDefaultAudioState(): AudioState {
+
+ const audioContext = typeof AudioContext !== "undefined"
+ ? (new AudioContext())
+ : undefined as unknown as AudioContext
+
+ if (!audioContext) {
+ console.log("Note: the audio context isn't available in the current environment")
+ }
+
+ const state: AudioState = {
+ isMuted: false,
+ audioContext,
+ currentlyPlaying: [],
+ }
+
+ return state
+}
diff --git a/src/controllers/audio/readFileAsArrayBuffer.ts b/src/controllers/audio/readFileAsArrayBuffer.ts
new file mode 100644
index 0000000000000000000000000000000000000000..68926e2966aa4b9176665b02a60c0d3ab5d7afca
--- /dev/null
+++ b/src/controllers/audio/readFileAsArrayBuffer.ts
@@ -0,0 +1,16 @@
+
+
+export async function readFileAsArrayBuffer(file: File): Promise {
+ return new Promise((resolve, reject) => {
+ let reader = new FileReader();
+ reader.onload = () => {
+ // when the reader has loaded, resolve the Promise with the result
+ resolve(reader.result as ArrayBuffer);
+ };
+ reader.onerror = (error) => {
+ // if there's an error, reject the Promise with the error
+ reject(error);
+ };
+ reader.readAsArrayBuffer(file);
+ });
+}
\ No newline at end of file
diff --git a/src/controllers/audio/startAudioSourceNode.ts b/src/controllers/audio/startAudioSourceNode.ts
new file mode 100644
index 0000000000000000000000000000000000000000..9cd1f168e5bde35ac0d9fa172c1b0bb9309d62b6
--- /dev/null
+++ b/src/controllers/audio/startAudioSourceNode.ts
@@ -0,0 +1,93 @@
+import { UUID } from "@aitube/clap"
+
+import { RuntimeSegment } from "@/types"
+
+import { CurrentlyPlayingAudioSource } from "./types"
+
+/**
+ * Create an audio source node from a segment
+ *
+ * This will instantly play the node, at the given position
+ *
+ * This means we can play a segment "late" eg. if the segment is 3 min long, we can play it at 1 min 2 min etc
+ */
+export function startAudioSourceNode({
+ audioContext,
+ segment,
+ elapsedTimeInMs,
+ onEnded
+}: {
+ /**
+ * The AudioContext to use
+ */
+ audioContext: AudioContext
+
+ /**
+ * The segment to play (this is a ClapSegment with some extra fields)
+ */
+ segment: RuntimeSegment
+
+ /**
+ * The current elapsed playback time
+ *
+ * This is the position of the playback cursor in the project, in milliseconds (eg. 20000ms)
+ */
+ elapsedTimeInMs: number
+
+ /**
+ * Called whenever the audio source will finish playing
+ *
+ * Be careful, this callback may be called in a long time,
+ * So make sure it uses fresh data when it is finally executed
+ */
+ onEnded: (sourceId: string) => void
+}): CurrentlyPlayingAudioSource {
+ if (!segment.audioBuffer) {
+ throw new Error(`Cannot playAudioBuffer on non-audio segments`)
+ }
+
+ // const audioContext = new AudioContext() // initialize AudioContext
+
+ // Get an AudioBufferSourceNode.
+ // This is the AudioNode to use when we want to play an AudioBuffer
+ // and yes, we must create a new one each time we want to play a sample
+ // https://developer.mozilla.org/en-US/docs/Web/API/AudioBufferSourceNode
+ const source = audioContext.createBufferSource()
+
+ // set the buffer in the AudioBufferSourceNode
+ source.buffer = segment.audioBuffer
+
+ const gainNode: GainNode = audioContext.createGain()
+
+ if (isFinite(segment.outputGain)) {
+ gainNode.gain.value = segment.outputGain
+ } else {
+ console.log(`segment.outputGain isn't finite for some reason? (got value ${segment.outputGain})`)
+ gainNode.gain.value = 1.0
+ }
+
+ // connect the AudioBufferSourceNode to the gain node so that we can control the volume
+ source.connect(gainNode)
+
+ // connect the gain node to the destination
+ gainNode.connect(audioContext.destination)
+
+ // make sure we play the segment at a specific time
+ const startTimeInMs = elapsedTimeInMs - segment.startTimeInMs
+
+ // convert milliseconds to seconds by dividing by 1000
+ source.start(audioContext.currentTime, startTimeInMs >= 1000 ? (startTimeInMs / 1000) : 0)
+
+ const currentlyPlaying: CurrentlyPlayingAudioSource = {
+ sourceId: UUID(),
+ segmentId: segment.id,
+ sourceNode: source,
+ gainNode: gainNode,
+ }
+
+ // before dispatching the node we still need to attach a listener to it,
+ // to detect when the sample (audio source node) stops playing
+ source.onended = () => onEnded(currentlyPlaying.sourceId)
+
+ return currentlyPlaying
+}
diff --git a/src/controllers/audio/types.ts b/src/controllers/audio/types.ts
new file mode 100644
index 0000000000000000000000000000000000000000..584a5b784eca3d031b4d4225086a9d41a45099fa
--- /dev/null
+++ b/src/controllers/audio/types.ts
@@ -0,0 +1,50 @@
+
/**
 * Result of decoding and analyzing an audio file (see analyzeAudio).
 */
export type AudioAnalysis = {
  audioBuffer: AudioBuffer
  // beats per minute (the analyzer falls back to 120 when detection fails)
  bpm: number
  durationInMs: number
  // duration expressed in timeline steps (ceil of durationInMs / step duration)
  durationInSteps: number
}

// global audio playback state, shared through the useAudio store
export type AudioState = {
  isMuted: boolean
  audioContext: AudioContext // we keep a single audio context
  currentlyPlaying: CurrentlyPlayingAudioSource[]
}

// placeholder: playback control actions will be added here later
export type AudioControls = {
}

export type AudioStore = AudioState & AudioControls

/**
 * This is the data structure used to keep track of currently played audio nodes
 */
export type CurrentlyPlayingAudioSource = {
  /**
   * The unique ID associated with this audio source
   *
   * This exists because the same segment might be present multiple times
   *
   * Eg. the same "cymbal crash" sound might be triggered multiple times
   */
  sourceId: string

  /**
   * The segment being played (this is for identification so we only need the ID)
   */
  segmentId: string

  /**
   * The actual source node (this allows us to call .stop() on it)
   */
  sourceNode: AudioScheduledSourceNode

  /**
   * The gain node, to control the volume
   *
   * Note that for the moment, we do not persist the changes to the gain,
   * but this is something we should do since it is part of the project data
   */

  gainNode: GainNode
}
\ No newline at end of file
diff --git a/src/controllers/audio/useAudio.ts b/src/controllers/audio/useAudio.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2fd7f775788254eea942d78cd15d4ebdf313f4fd
--- /dev/null
+++ b/src/controllers/audio/useAudio.ts
@@ -0,0 +1,10 @@
+"use client"
+
+import { create } from "zustand"
+
+import { AudioStore } from "./types"
+import { getDefaultAudioState } from "./getDefaultAudioState"
+
+export const useAudio = create((set, get) => ({
+ ...getDefaultAudioState(),
+}))
\ No newline at end of file
diff --git a/src/controllers/io/getDefaultIOState.ts b/src/controllers/io/getDefaultIOState.ts
new file mode 100644
index 0000000000000000000000000000000000000000..6600adfaaac889b9311326c525a997863270a79a
--- /dev/null
+++ b/src/controllers/io/getDefaultIOState.ts
@@ -0,0 +1,5 @@
+import { IOState } from "./types"
+
+export function getDefaultIOState(): IOState {
+ return {}
+}
\ No newline at end of file
diff --git a/src/controllers/io/parseFileIntoSegments.ts b/src/controllers/io/parseFileIntoSegments.ts
new file mode 100644
index 0000000000000000000000000000000000000000..bb0e1dbde9be13dc096f5349b6665245565c3554
--- /dev/null
+++ b/src/controllers/io/parseFileIntoSegments.ts
@@ -0,0 +1,121 @@
+"use client"
+
+import { ClapOutputType, ClapSegment, ClapSegmentCategory, generateSeed, newSegment, UUID } from "@aitube/clap"
+import { findFreeTrack } from "@aitube/timeline"
+
+import { RuntimeSegment } from "@/types"
+
+import { analyzeAudio } from "../audio/analyzeAudio"
+import { ResourceCategory, ResourceType } from "./types"
+
+export async function parseFileIntoSegments({ file, segments }: {
+ /**
+ * The file to import
+ */
+ file: File
+
+
+ /**
+ * all existing segments
+ */
+ segments: ClapSegment[]
+}): Promise {
+ console.log(`filename: ${file.name}`)
+ console.log(`file size: ${file.size} bytes`)
+ console.log(`file type: ${file.type}`)
+
+ const extension = file.name.split(".").pop()?.toLowerCase()
+
+ console.log("TODO: open a popup to ask if this is a voice character sample, dialogue, music etc")
+
+ let type: ResourceType = "misc"
+ let category: ResourceCategory = "misc"
+
+ const newSegments: ClapSegment[] = []
+
+ switch (file.type) {
+ case "image/webp":
+ type = "image"
+ category = "control_image"
+ break;
+
+ case "audio/mpeg":
+ case "audio/wav":
+ case "audio/mp4":
+ case "audio/m4a": // shouldn't exist
+ case "audio/x-m4a": // should be rare, normallyt is is audio/mp4
+ case "audio/webm":
+ // for background track, or as an inspiration track, or a voice etc
+ type = "audio"
+ category = "background_music"
+
+ // TODO: add caption analysis
+ const { durationInMs,durationInSteps, bpm, audioBuffer } = await analyzeAudio(file)
+ console.log("User dropped an audio sample:", {
+ bpm, durationInMs, durationInSteps
+ })
+
+ // TODO: use the correct drop time
+ const startTimeInMs = 0
+ const startTimeInSteps = 1
+
+ const endTimeInSteps = durationInSteps
+ const endTimeInMs = startTimeInMs + durationInMs
+
+ const clapSegment = newSegment({
+ prompt: "audio track",
+ startTimeInMs, // start time of the segment
+ endTimeInMs, // end time of the segment (startTimeInMs + durationInMs)
+ track: findFreeTrack({ segments, startTimeInMs, endTimeInMs }), // track row index
+ label: `${file.name} (${Math.round(durationInMs / 1000)}s @ ${Math.round(bpm * 100) / 100} BPM)`, // a short label to name the segment (optional, can be human or LLM-defined)
+ category: ClapSegmentCategory.MUSIC,
+ })
+
+ const audioSegment: RuntimeSegment = {
+ ...clapSegment,
+ outputType: ClapOutputType.AUDIO,
+ outputGain: 1,
+ audioBuffer,
+ }
+
+ // console.log("newSegment:", audioSegment)
+
+ // poof! type disappears.. it's magic
+ newSegments.push(audioSegment as ClapSegment)
+ break;
+
+ case "text/plain":
+ // for dialogue, prompts..
+ type = "text"
+ category = "text_prompt"
+ break;
+
+ default:
+ console.log(`unrecognized file type "${file.type}"`)
+ break;
+ }
+
+ // note: we always upload the files, because even if it is an unhandled format (eg. a PDF)
+ // this can still be part of the project as a resource for humans (inspiration, guidelines etc)
+
+ const id = UUID()
+ const fileName = `${id}.${extension}`
+
+ const storage = `resources`
+ const filePath = `${type}/${fileName}`
+
+ /*
+ const { data, error } = await supabase
+ .storage
+ .from('avatars')
+ .upload(filePath, file, {
+ cacheControl: '3600',
+ upsert: false
+ })
+ */
+
+ // Note: uploading is optional, some file type don't need it (eg. text prompt)
+
+ return [...segments, ...newSegments]
+}
+
diff --git a/src/controllers/io/parseFilesIntoSegments.ts b/src/controllers/io/parseFilesIntoSegments.ts
new file mode 100644
index 0000000000000000000000000000000000000000..a62fe816e515e3c4eb0328e9291df8e7e1583a88
--- /dev/null
+++ b/src/controllers/io/parseFilesIntoSegments.ts
@@ -0,0 +1,23 @@
+
+import { ClapSegment } from "@aitube/clap"
+import { parseFileIntoSegments } from "./parseFileIntoSegments"
+
+export async function parseFilesIntoSegments({ files, segments, }: {
+ /**
+ * The files to import
+ */
+ files: File[]
+
+ /**
+ * all existing segments
+ */
+ segments: ClapSegment[]
+}): Promise {
+ return (
+ await Promise.all(files.map(file => parseFileIntoSegments({
+ file,
+ segments,
+ })))
+ ).reduce((acc, segments) => [...acc, ...segments], [])
+}
+
diff --git a/src/controllers/io/types.ts b/src/controllers/io/types.ts
new file mode 100644
index 0000000000000000000000000000000000000000..b92b59f8ff0f8b9883f69a554f1de57de1bb61fa
--- /dev/null
+++ b/src/controllers/io/types.ts
@@ -0,0 +1,52 @@
+
+/**
+ * Describe a resource file type that has been uploaded and attached to a project
+ *
+ */
+export type ResourceType =
+| "audio"
+| "video"
+| "image"
+| "text"
+| "misc"
+
+/**
+ * Describe a resource file category that has been uploaded and attached to a project
+ */
+export type ResourceCategory =
+ | "control_image"
+ | "control_mask"
+ | "character_face"
+ | "character_voice"
+ | "background_music"
+ | "character_dialogue"
+ | "text_prompt"
+ | "sound"
+ | "misc"
+
+
+export type IOState = {
+
+}
+
+export type IOControls = {
+ openFiles: (files: File[]) => Promise
+
+ saveAnyFile: (blob: Blob, fileName: string) => void
+ openClapUrl: (url: string) => Promise
+ saveClap: () => Promise
+ saveVideoFile: () => Promise
+
+ openMLT: (file: File) => Promise
+ saveMLT: () => Promise
+
+ openKdenline: (file: File) => Promise
+ saveKdenline: () => Promise
+
+ openOpenTimelineIO: (file: File) => Promise
+ saveOpenTimelineIO: () => Promise
+}
+
+export type IOStore =
+ IOState &
+ IOControls
\ No newline at end of file
diff --git a/src/controllers/io/useIO.ts b/src/controllers/io/useIO.ts
new file mode 100644
index 0000000000000000000000000000000000000000..9ed3ce91999f1b84af781766400529ca734894a7
--- /dev/null
+++ b/src/controllers/io/useIO.ts
@@ -0,0 +1,331 @@
+"use client"
+
+import { ClapProject, ClapSegment, ClapSegmentCategory, ClapSegmentFilteringMode, ClapSegmentStatus, filterSegments, getClapAssetSourceType, newSegment, parseClap, serializeClap } from "@aitube/clap"
+import { Track, Tracks, useTimeline } from "@aitube/timeline"
+import { create } from "zustand"
+import { mltToXml } from "mlt-xml"
+
+import { getDefaultIOState } from "./getDefaultIOState"
+import { IOStore } from "./types"
+
+import { blobToBase64DataUri } from "@/lib/utils/blobToBase64DataUri"
+import { parseFileIntoSegments } from "./parseFileIntoSegments"
+// import { Entry, Project } from "@/lib/kdenlive"
+// import { formatDuration } from "@/lib/utils/formatDuration"
+
+
+export const useIO = create<IOStore>((set, get) => ({
+ ...getDefaultIOState(),
+
+ openFiles: async (files: File[]) => {
+ const segments: ClapSegment[] = useTimeline.getState().segments
+
+ console.log("files", files)
+ if (Array.isArray(files)) {
+ console.log("user tried to drop some files:", files)
+
+ // for now let's simplify things, and only import the first file
+ const file = files.at(0)
+ if (!file) { return }
+
+ console.log(`file type: ${file.type}`)
+
+ const isClapFile = file.name.endsWith(".clap")
+ const isAudioFile = file.type.startsWith("audio/")
+ const isVideoFile = file.type.startsWith("video/")
+ const isTextFile = file.type.startsWith("text/")
+
+ // TODO: detect the type of file, and do a different treatment based on this
+ // screenplay files: -> analyze (if there is existing data, show a modal asking to save or not)
+ // mp3 file: ->
+ if (isAudioFile) {
+ const newSegments = await parseFileIntoSegments({
+ file,
+ segments,
+ })
+ }
+
+ // for the moment let's not care of the coordinates at all
+ /*
+ parseFilesIntoSegments({
+ files,
+ columnIndex,
+ rowIndex,
+ segments,
+ segment
+ })
+ */
+ }
+ },
+ saveAnyFile: (blob: Blob, fileName: string) => {
+ // Create an object URL for the compressed clap blob
+ // object urls are short-lived urls, with the benefit of having a short id too
+
+ const objectUrl = URL.createObjectURL(blob)
+
+ // Create an anchor element and force browser download
+ const anchor = document.createElement("a")
+ anchor.href = objectUrl
+
+ anchor.download = fileName
+
+ document.body.appendChild(anchor) // Append to the body (could be removed once clicked)
+ anchor.click() // Trigger the download
+
+ // Cleanup: revoke the object URL and remove the anchor element
+ URL.revokeObjectURL(objectUrl)
+ document.body.removeChild(anchor)
+ },
+ openClapUrl: async (url: string) => {
+ const { setClap } = useTimeline.getState()
+ const res = await fetch(url)
+ const blob = await res.blob()
+ const clap = await parseClap(blob)
+ await setClap(clap)
+ },
+ saveClap: async () => {
+ const { saveAnyFile } = get()
+ const { clap } = useTimeline.getState()
+
+ if (!clap) { throw new Error(`cannot save a clap.. if there is no clap`) }
+
+ // make sure we update the total duration
+ for (const s of clap.segments) {
+ if (s.endTimeInMs > clap.meta.durationInMs) {
+ clap.meta.durationInMs = s.endTimeInMs
+ }
+ }
+
+ const blob: Blob = await serializeClap(clap)
+ saveAnyFile(blob, `my_project.clap`)
+ },
+
+ saveVideoFile: async () => {
+ const { saveAnyFile } = get()
+ console.log(`rendering project using the free community server..`)
+
+ const clap: ClapProject = useTimeline.getState().clap
+
+ const segments: ClapSegment[] = useTimeline.getState().segments
+
+ // note: I didn't put it inside the clapper's own API,
+ // because this is something a bit fragile
+ // (it uses ffmpeg and puppeteer, sometimes it crashes etc)
+ const result = await fetch(
+ // TODO: put this into a variable
+ // also rename this to so cool-sounding module
+ `https://jbilcke-hf-ai-tube-clap-exporter.hf.space?f=mp4`,
+ {
+ method: "POST",
+ body: await serializeClap(clap)
+ }
+ )
+
+ const videoBlob = await result.blob()
+
+ const videoDataUrl = await blobToBase64DataUri(videoBlob)
+
+ const alreadyAnEmbeddedFinalVideo = segments.filter(s =>
+ s.category === ClapSegmentCategory.VIDEO &&
+ s.status === ClapSegmentStatus.COMPLETED &&
+ s.startTimeInMs === 0 &&
+ s.endTimeInMs === clap.meta.durationInMs &&
+ s.assetUrl).at(0)
+
+ // inject the final mp4 video file into the .clap
+ if (alreadyAnEmbeddedFinalVideo) {
+ console.log(`editing the clap to update the final video`)
+ alreadyAnEmbeddedFinalVideo.assetUrl = videoDataUrl
+ } else {
+ console.log(`editing the clap to add a new final video`)
+ clap.segments.push(newSegment({
+ category: ClapSegmentCategory.VIDEO,
+ status: ClapSegmentStatus.COMPLETED,
+ startTimeInMs: 0,
+ endTimeInMs: clap.meta.durationInMs,
+ assetUrl: videoDataUrl,
+ assetDurationInMs: clap.meta.durationInMs,
+ assetSourceType: getClapAssetSourceType(videoDataUrl),
+ outputGain: 1.0,
+ }))
+ }
+
+ console.log(`The free community server responded: ${result.status} ${result.statusText}`)
+
+ saveAnyFile(videoBlob, "my_project.mp4")
+
+ },
+
+ openMLT: async (file: File) => {
+
+ },
+ saveMLT: async () => {
+ const { clap } = useTimeline.getState()
+
+ const xml = mltToXml({
+ title: 'watermarkOnVideo',
+ elements: [
+ {
+ name: 'producer',
+ attributes: {
+ id: 'video',
+ in: '0',
+ out: '1000',
+ resource: 'clip.mpeg',
+ },
+ },
+ {
+ name: 'producer',
+ attributes: {
+ id: 'watermark',
+ in: '0',
+ out: '1000',
+ resource: 'watermark.png',
+ mlt_service: 'qimage',
+ length: '1000',
+ },
+ },
+ {
+ name: 'tractor',
+ attributes: {
+ id: 'tractor0',
+ },
+ elements: [
+ {
+ name: 'multitrack',
+ attributes: {
+ id: 'multitrack0',
+ },
+ elements: [
+ {
+ name: 'playlist',
+ attributes: {
+ id: 'video_track',
+ in: '0',
+ out: '1000',
+ },
+ elements: [
+ {
+ name: 'entry',
+ attributes: {
+ producer: 'video',
+ in: '0',
+ out: '1000',
+ },
+ },
+ ],
+ },
+ {
+ name: 'playlist',
+ attributes: {
+ id: 'watermark_track',
+ in: '0',
+ out: '1000',
+ },
+ elements: [
+ {
+ name: 'entry',
+ attributes: {
+ producer: 'watermark',
+ in: '0',
+ out: '1000',
+ },
+ },
+ ],
+ },
+ ],
+ },
+ {
+ name: 'transition',
+ attributes: {
+ id: 'transition0',
+ a_track: 0,
+ b_track: 1,
+ geometry: '85%/5%:10%x10%',
+ factory: 'loader',
+ progressive: 1,
+ mlt_service: 'composite',
+ fill: 1,
+ sliced_composite: 1,
+ },
+ },
+ ],
+ },
+ ],
+ })
+
+ console.log(`MLT output: `, xml)
+ },
+
+ openKdenline: async (file: File) => {
+
+ },
+
+ saveKdenline: async () => {
+ const { saveAnyFile } = get()
+ const clap: ClapProject = useTimeline.getState().clap
+ // const tracks: Tracks = useTimeline.getState().tracks
+
+ throw new Error(`cannot run in a browser, unfortunately`)
+
+ /*
+
+ // hum.. we should add FPS to the ClapProject metadata
+ const fps = 30 // clap.meta
+
+ // for documentation, look at the example test file in:
+ // https://www.npmjs.com/package/kdenlive?activeTab=code
+ const project = new Project(fps)
+
+ const cameraSegments = clap.segments.filter(s => s.category === ClapSegmentCategory.CAMERA)
+
+ const segmentsWithNonEmptyAssets = clap.segments.filter(s => s.assetUrl)
+
+ // const videoSegments = clap.segments.filter(s => s.category === ClapSegmentCategory.VIDEO && s.assetUrl)
+ const videoTractor = project.addVideoTractor()
+
+ // const soundSegments = clap.segments.filter(s => s.category === ClapSegmentCategory.SOUND && s.assetUrl)
+ const soundTractor = project.addAudioTractor()
+
+ // const voiceSegments = clap.segments.filter(s => s.category === ClapSegmentCategory.DIALOGUE && s.assetUrl)
+ const voiceTractor = project.addAudioTractor()
+
+ // const musicSegments = clap.segments.filter(s => s.category === ClapSegmentCategory.MUSIC && s.assetUrl)
+ const musicTractor = project.addAudioTractor()
+
+ for (const shot of cameraSegments) {
+ const videoSegments = filterSegments(
+ ClapSegmentFilteringMode.ANY,
+ shot,
+ segmentsWithNonEmptyAssets,
+ ClapSegmentCategory.VIDEO
+ )
+ const videoSegment = videoSegments.at(0)
+ if (!videoSegment) { continue }
+
+ const producer = project.addProducer(`${videoSegment.id}.mp4`)
+
+ const entry = new Entry(
+ producer,
+ formatDuration(shot.startTimeInMs),
+ formatDuration(shot.endTimeInMs)
+ )
+ videoTractor.addEntry(entry)
+ // audio_track.addEntry(entry)
+ }
+
+ const xml = await project.toXML()
+ const blob = new Blob([xml], { type: "text/xml" })
+ saveAnyFile(blob, `my_project.kdenlive`)
+
+ */
+ },
+
+ openOpenTimelineIO: async (file: File) => {
+
+ },
+
+ saveOpenTimelineIO: async () => {
+
+ }
+}))
\ No newline at end of file
diff --git a/src/controllers/monitor/README.md b/src/controllers/monitor/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..4b5e5c7079c72e4e9f69fd7ff49f3394cc85f9c0
--- /dev/null
+++ b/src/controllers/monitor/README.md
@@ -0,0 +1 @@
+TODO: put those into an aitube/monitor package
\ No newline at end of file
diff --git a/src/controllers/monitor/getDefaultMonitorState.ts b/src/controllers/monitor/getDefaultMonitorState.ts
new file mode 100644
index 0000000000000000000000000000000000000000..24fbe9d6a061bc718ebd278f6d58edf7ebf8e517
--- /dev/null
+++ b/src/controllers/monitor/getDefaultMonitorState.ts
@@ -0,0 +1,11 @@
+import { MonitoringMode, MonitorState } from "./types"
+
+export function getDefaultMonitorState(): MonitorState {
+ const state: MonitorState = {
+ mode: MonitoringMode.NONE,
+ isPlaying: false,
+ staticVideoRef: undefined
+ }
+
+ return state
+}
diff --git a/src/controllers/monitor/types.ts b/src/controllers/monitor/types.ts
new file mode 100644
index 0000000000000000000000000000000000000000..5de43acd41892dce73ba6c535e398e22e8fc0d09
--- /dev/null
+++ b/src/controllers/monitor/types.ts
@@ -0,0 +1,38 @@
+export enum MonitoringMode {
+ NONE = "NONE",
+ STATIC = "STATIC",
+ DYNAMIC = "DYNAMIC"
+}
+
+export type MonitorState = {
+ mode: MonitoringMode
+ isPlaying: boolean
+ staticVideoRef?: HTMLVideoElement
+}
+
+export type MonitorControls = {
+ setMonitoringMode: (mode: MonitoringMode) => void
+
+ setStaticVideoRef: (staticVideoRef: HTMLVideoElement) => void
+
+ checkIfPlaying: () => boolean
+ /**
+ * Play/pause the project timeline (video and audio)
+ *
+ * @param forcePlaying
+ * @returns
+ */
+ togglePlayback: (forcePlaying?: boolean) => {
+ wasPlaying: boolean
+ isPlaying: boolean
+ }
+
+ /**
+ * Seek to a specific timestamp
+ *
+ * @param timeInMs
+ * @returns
+ */
+ jumpAt: (timeInMs?: number) => void
+}
+export type MonitorStore = MonitorState & MonitorControls
\ No newline at end of file
diff --git a/src/controllers/monitor/useMonitor.ts b/src/controllers/monitor/useMonitor.ts
new file mode 100644
index 0000000000000000000000000000000000000000..be5f7651d82b54c806021503afd56b12d6aca8aa
--- /dev/null
+++ b/src/controllers/monitor/useMonitor.ts
@@ -0,0 +1,86 @@
+"use client"
+
+import { create } from "zustand"
+import { useTimeline } from "@aitube/timeline"
+
+import { MonitoringMode, MonitorStore } from "./types"
+import { getDefaultMonitorState } from "./getDefaultMonitorState"
+
+export const useMonitor = create<MonitorStore>((set, get) => ({
+ ...getDefaultMonitorState(),
+
+ setMonitoringMode: (mode: MonitoringMode) => {
+ set({ mode })
+ },
+
+ setStaticVideoRef: (staticVideoRef: HTMLVideoElement) => {
+ set({
+ mode: MonitoringMode.STATIC,
+ staticVideoRef,
+ })
+ },
+
+ checkIfPlaying: (): boolean => {
+ return get().isPlaying
+ },
+
+ /**
+ * Used to play/pause the project timeline (video and audio)
+ * @param forceValue
+ * @returns
+ */
+ togglePlayback: (forcePlaying?: boolean): {
+ wasPlaying: boolean
+ isPlaying: boolean
+ } => {
+ const { isPlaying: wasPlaying, mode, staticVideoRef } = get()
+
+ if (mode === MonitoringMode.NONE) {
+ return {
+ wasPlaying: false,
+ isPlaying: false
+ }
+ }
+
+ const isPlaying = typeof forcePlaying === "boolean" ? forcePlaying : !wasPlaying
+
+ set({
+ isPlaying
+ })
+
+ if (mode === MonitoringMode.STATIC && staticVideoRef) {
+ if (isPlaying) {
+ console.log(`previous value = ` + staticVideoRef.currentTime)
+ staticVideoRef.play()
+ } else {
+ staticVideoRef.pause()
+ }
+ } else if (mode === MonitoringMode.DYNAMIC) {
+ console.log(`TODO Julian: implement dynamic mode`)
+ }
+
+ return {
+ wasPlaying,
+ isPlaying
+ }
+ },
+ jumpAt: (timeInMs: number = 0) => {
+ const { isPlaying, mode, staticVideoRef } = get()
+
+ const { setCursorTimestampAtInMs } = useTimeline.getState()
+
+ setCursorTimestampAtInMs(timeInMs)
+
+ if (mode === MonitoringMode.NONE || !staticVideoRef) {
+ return
+ }
+
+ if (mode === MonitoringMode.STATIC) {
+ // console.log("resetting static video current time")
+ staticVideoRef.currentTime = timeInMs / 1000
+ } else if (mode === MonitoringMode.DYNAMIC) {
+ console.log(`TODO Julian: implement dynamic mode`)
+ }
+ },
+
+}))
\ No newline at end of file
diff --git a/src/lib/hooks/README.md b/src/lib/hooks/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..e4cc4c9bc7c01561fda811f7be1ec85968a2976f
--- /dev/null
+++ b/src/lib/hooks/README.md
@@ -0,0 +1,5 @@
+You will notice that we have useAnimationFrame.ts and useRequestAnimationFrame.ts
+
+That's accidental, this is following a merge of different codebases
+
+They work a bit differently from each other, it would be cool if someone could investigate this
\ No newline at end of file
diff --git a/src/lib/hooks/useAnimationFrame.ts b/src/lib/hooks/useAnimationFrame.ts
new file mode 100644
index 0000000000000000000000000000000000000000..6ce441a252321867752ed388c5da80fe4ee7c7f4
--- /dev/null
+++ b/src/lib/hooks/useAnimationFrame.ts
@@ -0,0 +1,22 @@
+import { DependencyList, useEffect, useRef } from "react"
+
+export function useAnimationFrame(callback: (time: number) => void, deps: DependencyList | undefined = []) {
+ // Use useRef for mutable variables that we want to persist
+ // without triggering a re-render on their change
+ const requestRef = useRef<number>()
+ const previousTimeRef = useRef<number>()
+
+ const animate = (time: number) => {
+ if (previousTimeRef.current != undefined) {
+ const deltaTime = time - previousTimeRef.current
+ callback(deltaTime)
+ }
+ previousTimeRef.current = time;
+ requestRef.current = requestAnimationFrame(animate);
+ }
+
+ useEffect(() => {
+ requestRef.current = requestAnimationFrame(animate);
+ return () => cancelAnimationFrame(requestRef.current as any);
+ }, deps); // Make sure the effect runs only once
+}
\ No newline at end of file
diff --git a/src/lib/hooks/useClapFilePicker.ts b/src/lib/hooks/useClapFilePicker.ts
index e1ba454cf5438a079a74c49cc20e7b726ef50928..8933f47453fa79132b65e5fee109317eb298da3b 100644
--- a/src/lib/hooks/useClapFilePicker.ts
+++ b/src/lib/hooks/useClapFilePicker.ts
@@ -1,11 +1,11 @@
import { useEffect, useState } from "react"
import { useFilePicker } from "use-file-picker"
import { parseClap } from "@aitube/clap"
-import { useTimelineState } from "@aitube/timeline"
+import { useTimeline } from "@aitube/timeline"
export function useClapFilePicker() {
- const setClap = useTimelineState(s => s.setClap)
+ const setClap = useTimeline(s => s.setClap)
const [isLoading, setIsLoading] = useState(false)
const { openFilePicker, filesContent, loading } = useFilePicker({
diff --git a/src/lib/hooks/useScreenplayFilePicker.ts b/src/lib/hooks/useScreenplayFilePicker.ts
index f77dbeab953eb3ae1d2d2d18148f080877790a46..c9e82cb9df0cc6cf40218aa54662cce4e10836cc 100644
--- a/src/lib/hooks/useScreenplayFilePicker.ts
+++ b/src/lib/hooks/useScreenplayFilePicker.ts
@@ -1,10 +1,10 @@
import { useEffect, useState } from "react"
-import { useTimelineState } from "@aitube/timeline"
+import { useTimeline } from "@aitube/timeline"
import { parseClap } from "@aitube/clap"
import { useFilePicker } from "use-file-picker"
export function useScreenplayFilePicker() {
- const setClap = useTimelineState(s => s.setClap)
+ const setClap = useTimeline(s => s.setClap)
const [isLoading, setIsLoading] = useState(false)
const { openFilePicker, filesContent, loading } = useFilePicker({
diff --git a/src/lib/kdenlive/README.md b/src/lib/kdenlive/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..19dab9d124f7631fda020d96530dacd72614cc57
--- /dev/null
+++ b/src/lib/kdenlive/README.md
@@ -0,0 +1,3 @@
+This code has been cloned from:
+
+https://www.npmjs.com/package/kdenlive?activeTab=code
\ No newline at end of file
diff --git a/src/lib/kdenlive/entry.ts b/src/lib/kdenlive/entry.ts
new file mode 100644
index 0000000000000000000000000000000000000000..29280b578cb252f8bdc6a60a54cb5924caf24b4d
--- /dev/null
+++ b/src/lib/kdenlive/entry.ts
@@ -0,0 +1,19 @@
+"use server"
+
+import { Producer } from "./producer"
+
+export class Entry {
+ constructor(
+ public producer: Producer,
+ public in_point: string,
+ public out_point: string
+ ) {}
+
+ toXML(): string {
+ return /* HTML */ `<entry producer="${this.producer.id}" in="${this.in_point}" out="${this.out_point}" />`;
+ }
+}
\ No newline at end of file
diff --git a/src/lib/kdenlive/index.ts b/src/lib/kdenlive/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..942eb89910610ec366e0b094e1f40d2a9782870a
--- /dev/null
+++ b/src/lib/kdenlive/index.ts
@@ -0,0 +1,8 @@
+"use server"
+
+export { Entry } from "./entry"
+export { Project } from "./kdenlive"
+export { Playlist, playlistIndexGen } from "./playlist"
+export { Producer, producerIndexGen } from "./producer"
+export { Tractor, trackIndexGen } from "./tractor"
+export { makeIDGen } from "./makeIDGen"
diff --git a/src/lib/kdenlive/kdenlive.ts b/src/lib/kdenlive/kdenlive.ts
new file mode 100644
index 0000000000000000000000000000000000000000..278816dd7a659b673d8523e94b124e725d6e3296
--- /dev/null
+++ b/src/lib/kdenlive/kdenlive.ts
@@ -0,0 +1,73 @@
+"use server"
+
+import { BlackTrack, ConcreteProducer, Producer } from "./producer"
+import { AudioTractor, trackIndexGen, Tractor, VideoTractor } from "./tractor"
+
+export class Project {
+ producers: Producer[] = [];
+ tractors: Tractor[] = [];
+ constructor(public fps: number) {
+ this.producers.push(new BlackTrack());
+ }
+
+ addProducer(file: string): ConcreteProducer {
+ const producer = new ConcreteProducer(file);
+ this.producers.push(producer);
+ return producer;
+ }
+
+ addAudioTractor(): AudioTractor {
+ const tractor = new AudioTractor();
+ this.tractors.push(tractor);
+ return tractor;
+ }
+
+ addVideoTractor(): VideoTractor {
+ const tractor = new VideoTractor();
+ this.tractors.push(tractor);
+ return tractor;
+ }
+
+ async toXML() {
+ return `
+
+
+${(
+ await Promise.all(
+ this.producers.map((producer) => producer.toXML(this.fps))
+ )
+).join("\n")}
+
+ 2
+ 2
+ 1
+ 0
+ 1633881496938
+ [
+]
+
+ 21.08.1
+ 1.02
+
+
+ 1
+${this.producers
+ .filter((e) => !(e instanceof BlackTrack))
+ .map((producer) => ``)
+ .join("\n")}
+
+${this.tractors.map((tractor) => tractor.toXML()).join("\n")}
+
+
+${this.tractors
+ .map((tractor) => ``)
+ .join("\n")}
+
+
+`;
+ }
+}
\ No newline at end of file
diff --git a/src/lib/kdenlive/makeIDGen.ts b/src/lib/kdenlive/makeIDGen.ts
new file mode 100644
index 0000000000000000000000000000000000000000..e644f4e7bf613f2b842cc367f2549d877ee87b1a
--- /dev/null
+++ b/src/lib/kdenlive/makeIDGen.ts
@@ -0,0 +1,9 @@
+"use server"
+
+export function* makeIDGen(first = 1): Generator<number> {
+ let i = first;
+ while (true) {
+ yield i;
+ i++;
+ }
+}
diff --git a/src/lib/kdenlive/playlist.ts b/src/lib/kdenlive/playlist.ts
new file mode 100644
index 0000000000000000000000000000000000000000..40332664f94498b80daa9e8af12ffbac8d53c7fd
--- /dev/null
+++ b/src/lib/kdenlive/playlist.ts
@@ -0,0 +1,38 @@
+"use server"
+
+import { Entry } from "./entry"
+import { makeIDGen } from "./makeIDGen"
+
+export const playlistIndexGen = makeIDGen(0);
+
+export abstract class Playlist {
+ public entries: Entry[] = [];
+ constructor(public index = playlistIndexGen.next().value) {}
+
+ abstract toXML(): string;
+
+ addEntry(entry: Entry) {
+ this.entries.push(entry);
+ }
+
+ renderEntries() {
+ return this.entries.map((e) => e.toXML()).join("\n");
+ }
+}
+
+export class AudioPlaylist extends Playlist {
+ toXML() {
+ return /* HTML */ `
+ 1
+ ${this.renderEntries()}
+ `;
+ }
+}
+
+export class VideoPlaylist extends Playlist {
+ toXML() {
+ return /* HTML */ `
+ ${this.renderEntries()}
+ `;
+ }
+}
\ No newline at end of file
diff --git a/src/lib/kdenlive/producer.ts b/src/lib/kdenlive/producer.ts
new file mode 100644
index 0000000000000000000000000000000000000000..37ea36802e019575fa97ce9472548834006b0d75
--- /dev/null
+++ b/src/lib/kdenlive/producer.ts
@@ -0,0 +1,101 @@
+"use server"
+
+import { $ } from "zx"
+
+import { makeIDGen } from "./makeIDGen"
+
+export const producerIndexGen = makeIDGen(0);
+
+export abstract class Producer {
+ index: number;
+ constructor(public path: string) {
+ this.index = producerIndexGen.next().value;
+ }
+
+ async getNativeMltXml(fps: number): Promise<string> {
+ const xml = (
+ await $`melt ${
+ this.path
+ } -consumer xml ${`frame_rate_num=${fps}`} | htmlq producer`
+ ).stdout;
+ return xml.replace("producer0", this.id);
+ }
+
+ async toXML(fps: number): Promise<string> {
+ return await this.getNativeMltXml(fps);
+ }
+
+ get id() {
+ return "producer" + this.index;
+ }
+}
+
+export class ConcreteProducer extends Producer {
+ video_only: VideoOnlyProducer;
+ audio_only: AudioOnlyProducer;
+ constructor(path: string) {
+ super(path);
+ this.video_only = new VideoOnlyProducer(path);
+ this.audio_only = new AudioOnlyProducer(path);
+ }
+
+ async toXML(fps: number) {
+ return (
+ await Promise.all([
+ super.toXML(fps),
+ this.video_only.toXML(fps),
+ this.audio_only.toXML(fps),
+ ])
+ ).join("\n");
+ }
+}
+
+class VideoOnlyProducer extends Producer {
+ async toXML(fps: number) {
+ const xml = await super.toXML(fps);
+ return xml.replace(
+ "",
+ ` 0
+ 1
+ `
+ );
+ }
+}
+
+class AudioOnlyProducer extends Producer {
+ async toXML(fps: number) {
+ const xml = await super.toXML(fps);
+ return xml.replace(
+ "",
+ ` 1
+ 0
+ `
+ );
+ }
+}
+
+export class BlackTrack extends Producer {
+ constructor() {
+ super("");
+ }
+
+ async toXML() {
+ return /* HTML */ `
+ 2147483647
+ continue
+ black
+ 1
+ color
+ rgb24a
+ 0
+ `;
+ }
+
+ get id() {
+ return "black_track";
+ }
+}
\ No newline at end of file
diff --git a/src/lib/kdenlive/tractor.ts b/src/lib/kdenlive/tractor.ts
new file mode 100644
index 0000000000000000000000000000000000000000..c9522097b033084ca8622f9932314201314344e6
--- /dev/null
+++ b/src/lib/kdenlive/tractor.ts
@@ -0,0 +1,72 @@
+"use server"
+
+import { Entry } from "./entry"
+import { AudioPlaylist, Playlist, VideoPlaylist } from "./playlist"
+import { makeIDGen } from "./makeIDGen"
+
+export const trackIndexGen = makeIDGen(0);
+
+export abstract class Tractor {
+ main_playlist!: Playlist;
+ secondary_playlist!: Playlist; // not sure what these are for, but Kdenlive generates them, sooo
+ public index = trackIndexGen.next().value;
+
+ abstract toXML(): string;
+
+ addEntry(entry: Entry): this {
+ this.main_playlist.addEntry(entry);
+ return this;
+ }
+}
+
+export class AudioTractor extends Tractor {
+ constructor() {
+ super();
+ this.main_playlist = new AudioPlaylist();
+ this.secondary_playlist = new AudioPlaylist();
+ }
+
+ toXML() {
+ return [
+ this.main_playlist.toXML(),
+ this.secondary_playlist.toXML(),
+ /* HTML */ `
+ 1
+ 67
+ 1
+ 0
+
+
+
+
+ `,
+ ].join("\n");
+ }
+}
+
+export class VideoTractor extends Tractor {
+ constructor() {
+ super();
+ this.main_playlist = new VideoPlaylist();
+ this.secondary_playlist = new VideoPlaylist();
+ }
+
+ toXML() {
+ return [
+ this.main_playlist.toXML(),
+ this.secondary_playlist.toXML(),
+ /* HTML */ `
+ 67
+ 1
+
+
+ `,
+ ].join("\n");
+ }
+}
\ No newline at end of file
diff --git a/src/lib/utils/findClosest.ts b/src/lib/utils/findClosest.ts
new file mode 100644
index 0000000000000000000000000000000000000000..33b344ea5874c20114f7e8e26080de0669c76856
--- /dev/null
+++ b/src/lib/utils/findClosest.ts
@@ -0,0 +1,13 @@
+export function findClosest(target: number, numbers: number[]): number | undefined {
+ // Check if the numbers array is empty
+ if (numbers.length === 0) {
+ // throw new Error("The numbers array is empty.");
+ return undefined
+ }
+
+ // Sort a copy by distance to the target, so the caller's array is not mutated
+ const sorted = [...numbers].sort((a, b) => Math.abs(target - a) - Math.abs(target - b));
+
+ // Return the number closest to the target
+ return sorted[0];
+}
\ No newline at end of file
diff --git a/src/lib/utils/formatDuration.ts b/src/lib/utils/formatDuration.ts
new file mode 100644
index 0000000000000000000000000000000000000000..1c8ecad87a05f6ba6ef9c4e6fb63b0a664a94587
--- /dev/null
+++ b/src/lib/utils/formatDuration.ts
@@ -0,0 +1,20 @@
+export const HOUR = 60 * 60;
+export const MINUTE = 60;
+
+export function formatDuration(float_s: number) {
+ const hours = Math.floor(float_s / HOUR);
+ float_s = float_s - hours * HOUR;
+ const minutes = Math.floor(float_s / MINUTE);
+ const seconds = float_s - minutes * MINUTE;
+ return `${twoDigits(hours)}:${twoDigits(minutes)}:${twoDigits(seconds, 5)}`;
+}
+
+export function twoDigits(number: number, decimal = 0) {
+ let [int, dec] = number.toFixed(decimal).split(".");
+ dec = dec || "";
+ if (dec == "") {
+ return int.padStart(2, "0");
+ } else {
+ return int.padStart(2, "0") + "." + dec;
+ }
+}
\ No newline at end of file
diff --git a/src/types.ts b/src/types.ts
index f20f67737aa82e448436bcdb1c55ced59d01b70c..73bc3a267c8c5f1a29b739891606e3d3f0bd2916 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -1,4 +1,4 @@
-import { ClapEntity, ClapMeta, ClapSegment, ClapSegmentCategory } from "@aitube/clap"
+import { ClapEntity, ClapMeta, ClapSegment, ClapSegmentCategory, ClapSegmentStatus } from "@aitube/clap"
import { SettingsState } from "./controllers/settings"
export enum SettingsCategory {
@@ -118,4 +118,21 @@ export enum FalAiImageSize {
PORTRAIT_16_9 = "portrait_16_9",
LANDSCAPE_4_3 = "landscape_4_3",
LANDSCAPE_16_9 = "landscape_16_9"
-}
\ No newline at end of file
+}
+
+export interface ImageSegment {
+ id: number;
+ box: number[];
+ color: number[];
+ label: string;
+ score: number;
+}
+
+// some data can only exist inside a browser session (eg. AudioBuffer)
+// or at least data that only make sense on client side
+// we could put things like a mouse hover or selected state in here
+export type BrowserOnlySegmentData = {
+ audioBuffer?: AudioBuffer
+}
+
+export type RuntimeSegment = ClapSegment & BrowserOnlySegmentData
\ No newline at end of file
diff --git a/tailwind.config.js b/tailwind.config.js
index a8fb7bfd0f0f8fefad62c8f2a2730b2b3113786e..285c671c7083971fe6d2e65e87a1549593607041 100644
--- a/tailwind.config.js
+++ b/tailwind.config.js
@@ -18,6 +18,7 @@ module.exports = {
extend: {
fontFamily: {
salsa: ['var(--font-salsa)'],
+ clock: ["var(--font-clock)"],
},
fontSize: {
"7xs": "5px",
@@ -54,6 +55,7 @@ module.exports = {
},
},
plugins: [
- require("tailwindcss-animate")
+ require("tailwindcss-animate"),
+ require("@tailwindcss/container-queries"),
],
}
\ No newline at end of file