Commit: 9d89208
1 Parent(s): 3411c0f

update the API client

Files changed:
- next.config.js (+2 -1)
- package-lock.json (+16 -13)
- package.json (+1 -1)
- src/app/main.tsx (+70 -6)
- src/app/server/aitube/editClapEntities.ts (+23 -0)
- src/app/store.ts (+4 -0)
next.config.js
CHANGED

@@ -4,7 +4,8 @@ const nextConfig = {
 
   experimental: {
     serverActions: {
-
+      // a clap file can be quite large - but that's OK
+      bodySizeLimit: '32mb'
    }
  }
 }
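The `bodySizeLimit` option raises the maximum request body size accepted by Next.js Server Actions, so a large serialized .clap project can be posted to the actions under src/app/server/aitube/. A minimal sketch of the resulting file; only the serverActions block is confirmed by the diff above, the surrounding wrapper is assumed to follow the stock Next.js template:

/** @type {import('next').NextConfig} */
const nextConfig = {
  experimental: {
    serverActions: {
      // a clap file can be quite large - but that's OK
      bodySizeLimit: '32mb'
    }
  }
}

module.exports = nextConfig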
package-lock.json
CHANGED

@@ -9,7 +9,7 @@
       "version": "0.0.0",
       "dependencies": {
         "@aitube/clap": "0.0.10",
-        "@aitube/client": "0.0.
+        "@aitube/client": "0.0.15",
         "@radix-ui/react-accordion": "^1.1.2",
         "@radix-ui/react-avatar": "^1.0.4",
         "@radix-ui/react-checkbox": "^1.0.4",
@@ -77,9 +77,12 @@
       }
     },
     "node_modules/@aitube/client": {
-      "version": "0.0.
-      "resolved": "https://registry.npmjs.org/@aitube/client/-/client-0.0.
-      "integrity": "sha512-
+      "version": "0.0.15",
+      "resolved": "https://registry.npmjs.org/@aitube/client/-/client-0.0.15.tgz",
+      "integrity": "sha512-lGmdsBqjNVStBxZSH+Iig/nOyPdSpqpqU6M0OvOBMTwR4rohSvIQ7TnFJGvoc4WEFciNoCc6Vg6Q5W99ovG+fg==",
+      "dependencies": {
+        "query-string": "^9.0.0"
+      },
       "peerDependencies": {
         "@aitube/clap": "0.0.10"
       }
@@ -196,9 +199,9 @@
       }
     },
     "node_modules/@floating-ui/dom": {
-      "version": "1.6.
-      "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.
-      "integrity": "sha512-
+      "version": "1.6.5",
+      "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.5.tgz",
+      "integrity": "sha512-Nsdud2X65Dz+1RHjAIP0t8z5e2ff/IRbei6BqFrl1urT8sDVzM1HMQ+R0XcU5ceRfyO3I6ayeqIfh+6Wb8LGTw==",
       "dependencies": {
         "@floating-ui/core": "^1.0.0",
         "@floating-ui/utils": "^0.2.0"
@@ -3125,9 +3128,9 @@
       }
     },
     "node_modules/caniuse-lite": {
-      "version": "1.0.
-      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.
-      "integrity": "sha512-
+      "version": "1.0.30001616",
+      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001616.tgz",
+      "integrity": "sha512-RHVYKov7IcdNjVHJFNY/78RdG4oGVjbayxv8u5IO74Wv7Hlq4PnJE6mo/OjFijjVFNy5ijnCt6H3IIo4t+wfEw==",
       "funding": [
         {
           "type": "opencollective",
@@ -5585,9 +5588,9 @@
       }
     },
     "node_modules/minipass": {
-      "version": "7.0
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.
-      "integrity": "sha512-
+      "version": "7.1.0",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.0.tgz",
+      "integrity": "sha512-oGZRv2OT1lO2UF1zUcwdTb3wqUwI0kBGTgt/T7OdSj6M6N5m3o5uPf0AIW6lVxGGoiWUR7e2AwTE+xiwK8WQig==",
       "engines": {
         "node": ">=16 || 14 >=14.17"
       }
package.json
CHANGED

@@ -10,7 +10,7 @@
   },
   "dependencies": {
     "@aitube/clap": "0.0.10",
-    "@aitube/client": "0.0.
+    "@aitube/client": "0.0.15",
     "@radix-ui/react-accordion": "^1.1.2",
     "@radix-ui/react-avatar": "^1.0.4",
     "@radix-ui/react-checkbox": "^1.0.4",
src/app/main.tsx
CHANGED

@@ -1,7 +1,7 @@
 "use client"
 
 import React, { useEffect, useRef, useTransition } from 'react'
-import {
+import { IoMdPhonePortrait } from 'react-icons/io'
 import { ClapProject } from '@aitube/clap'
 import Image from 'next/image'
 import { useFilePicker } from 'use-file-picker'
@@ -15,6 +15,7 @@ import { TextareaField } from '@/components/form/textarea-field'
 import { cn } from '@/lib/utils/cn'
 
 import { createClap } from './server/aitube/createClap'
+import { editClapEntities } from './server/aitube/editClapEntities'
 import { editClapDialogues } from './server/aitube/editClapDialogues'
 import { editClapStoryboards } from './server/aitube/editClapStoryboards'
 import { exportClapToVideo } from './server/aitube/exportClapToVideo'
@@ -34,9 +35,12 @@ export function Main() {
   const promptDraft = useRef("")
   promptDraft.current = storyPromptDraft
   const storyPrompt = useStore(s => s.storyPrompt)
+  const mainCharacterImage = useStore(s => s.mainCharacterImage)
+  const mainCharacterVoice = useStore(s => s.mainCharacterVoice)
   const orientation = useStore(s => s.orientation)
   const status = useStore(s => s.status)
   const storyGenerationStatus = useStore(s => s.storyGenerationStatus)
+  const assetGenerationStatus = useStore(s => s.assetGenerationStatus)
   const voiceGenerationStatus = useStore(s => s.voiceGenerationStatus)
   const imageGenerationStatus = useStore(s => s.imageGenerationStatus)
   const videoGenerationStatus = useStore(s => s.videoGenerationStatus)
@@ -45,11 +49,14 @@ export function Main() {
   const currentVideoOrientation = useStore(s => s.currentVideoOrientation)
   const setStoryPromptDraft = useStore(s => s.setStoryPromptDraft)
   const setStoryPrompt = useStore(s => s.setStoryPrompt)
+  const setMainCharacterImage = useStore(s => s.setMainCharacterImage)
+  const setMainCharacterVoice = useStore(s => s.setMainCharacterVoice)
   const setStatus = useStore(s => s.setStatus)
   const toggleOrientation = useStore(s => s.toggleOrientation)
   const error = useStore(s => s.error)
   const setError = useStore(s => s.setError)
   const setStoryGenerationStatus = useStore(s => s.setStoryGenerationStatus)
+  const setAssetGenerationStatus = useStore(s => s.setAssetGenerationStatus)
   const setVoiceGenerationStatus = useStore(s => s.setVoiceGenerationStatus)
   const setImageGenerationStatus = useStore(s => s.setImageGenerationStatus)
   const setVideoGenerationStatus = useStore(s => s.setVideoGenerationStatus)
@@ -64,6 +71,7 @@ export function Main() {
 
   const hasPendingTasks =
     storyGenerationStatus === "generating" ||
+    assetGenerationStatus === "generating" ||
     voiceGenerationStatus === "generating" ||
     imageGenerationStatus === "generating" ||
     videoGenerationStatus === "generating"
@@ -99,7 +107,7 @@ export function Main() {
 
     let clap: ClapProject | undefined = undefined
     try {
-      setProgress(
+      setProgress(1)
 
       setStatus("generating")
       setStoryGenerationStatus("generating")
@@ -127,12 +135,53 @@ export function Main() {
       return
     }
 
+    console.log("-------- GENERATED STORY --------")
+    console.table(clap.segments, [
+      // 'startTimeInMs',
+      'endTimeInMs',
+      // 'track',
+      'category',
+      'prompt'
+    ])
+
+
+    try {
+      setProgress(10)
+      setAssetGenerationStatus("generating")
+      clap = await editClapEntities({ clap })
+
+      if (!clap) { throw new Error(`failed to edit the entities`) }
+
+      console.log(`handleSubmit(): received a clap with entities = `, clap)
+      setCurrentClap(clap)
+      setAssetGenerationStatus("finished")
+    } catch (err) {
+      setAssetGenerationStatus("error")
+      setStatus("error")
+      setError(`${err}`)
+      return
+    }
+    if (!clap) {
+      return
+    }
+
+
+    /*
+    if (mainCharacterImage) {
+      console.log("handleSubmit(): User specified a main character image")
+      // various strategies here, for instance we can assume that the first character is the main character,
+      // or maybe a more reliable way is to count the number of occurrences.
+      // there is a risk of misgendering, so ideally we should add some kind of UI to do this,
+      // such as a list of characters.
+    }
+    */
+
     // TODO Julian
     console.log("handleSubmit(): TODO Julian: generate images in parallel of the dialogue using Promise.all()")
     // this is not trivial to do btw, since we will have to merge the clap together
     // (this could be a helper function inside @aitube/clap)
     try {
-      setProgress(
+      setProgress(40)
       setImageGenerationStatus("generating")
       clap = await editClapStoryboards({ clap })
 
@@ -153,7 +202,7 @@ export function Main() {
 
 
     try {
-      setProgress(
+      setProgress(60)
       setVoiceGenerationStatus("generating")
       clap = await editClapDialogues({ clap })
 
@@ -174,7 +223,7 @@ export function Main() {
 
     let assetUrl = ""
     try {
-      setProgress(
+      setProgress(80)
       setVideoGenerationStatus("generating")
       assetUrl = await exportClapToVideo({ clap })
 
@@ -328,6 +377,9 @@ export function Main() {
 
 
       {/*
+
+      TODO: To finish by Julian a bit later
+
       <div className="
         flex flex-col
 
@@ -335,7 +387,18 @@ export function Main() {
         transition-all duration-200 ease-in-out
         space-y-2 md:space-y-4
       ">
-
+        <Input
+          type="file"
+          className=""
+          onChange={async (e: React.ChangeEvent<HTMLInputElement>) => {
+            if (e.target.files && e.target.files.length > 0) {
+              const file = e.target.files[0];
+              const newImageBase64 = await fileToBase64(file)
+              setMainCharacterImage(newImageBase64)
+            }
+          }}
+          accept="image/*"
+        />
       </div>
       */}
 
@@ -504,6 +567,7 @@ export function Main() {
       <p className="text-base text-white/70">{isBusy
         ? (
           storyGenerationStatus === "generating" ? "Enhancing the story.."
+          : assetGenerationStatus === "generating" ? "Creating characters.."
          : imageGenerationStatus === "generating" ? "Generating storyboards.."
          : voiceGenerationStatus === "generating" ? "Generating voices.."
          : videoGenerationStatus === "generating" ? "Assembling final video.."
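The commented-out file input above relies on a fileToBase64 helper that is not part of this diff. A minimal sketch of what such a helper could look like, assuming it resolves to a data-URL string suitable for storing as mainCharacterImage (the project's actual implementation may differ):

// Hypothetical helper, not part of this commit: reads a File into a
// base64 data URL so it can be stored in the client-side store.
export async function fileToBase64(file: File): Promise<string> {
  return new Promise((resolve, reject) => {
    const reader = new FileReader()
    // reader.result is a data URL such as "data:image/png;base64,...."
    reader.onload = () => resolve(reader.result as string)
    reader.onerror = () => reject(reader.error)
    reader.readAsDataURL(file)
  })
}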
src/app/server/aitube/editClapEntities.ts
ADDED

@@ -0,0 +1,23 @@
+"use server"
+
+import { ClapProject } from "@aitube/clap"
+import { editClapEntities as apiEditClapEntities, ClapCompletionMode, ClapEntityPrompt } from "@aitube/client"
+
+import { getToken } from "./getToken"
+
+export async function editClapEntities({
+  clap,
+  entityPrompts = []
+}: {
+  clap: ClapProject
+  entityPrompts?: ClapEntityPrompt[]
+}): Promise<ClapProject> {
+  const newClap: ClapProject = await apiEditClapEntities({
+    clap,
+    entityPrompts,
+    completionMode: ClapCompletionMode.FULL,
+    token: await getToken()
+  })
+
+  return newClap
+}
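This server action follows the same shape as the other ./server/aitube/* wrappers: it forwards the clap to the aitube API with the caller's token and returns the full, updated project. A minimal sketch of how a client component might call it, following the try/catch + status pattern used in main.tsx (the function name and error handling here are illustrative, not part of the commit):

// Sketch only: wiring the new entity stage into a client-side pipeline step.
import { ClapProject } from "@aitube/clap"
import { editClapEntities } from "./server/aitube/editClapEntities"

async function generateEntities(clap: ClapProject): Promise<ClapProject | undefined> {
  try {
    // optional entityPrompts could be passed here to steer character creation
    const clapWithEntities = await editClapEntities({ clap })
    if (!clapWithEntities) { throw new Error("failed to edit the entities") }
    return clapWithEntities
  } catch (err) {
    console.error(`entity generation failed: ${err}`)
    return undefined
  }
}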
src/app/store.ts
CHANGED

@@ -20,6 +20,7 @@ export const useStore = create<{
 
   status: GlobalStatus
   storyGenerationStatus: TaskStatus
+  assetGenerationStatus: TaskStatus
   voiceGenerationStatus: TaskStatus
   imageGenerationStatus: TaskStatus
   videoGenerationStatus: TaskStatus
@@ -39,6 +40,7 @@ export const useStore = create<{
   setStoryPrompt: (storyPrompt: string) => void
   setStatus: (status: GlobalStatus) => void
   setStoryGenerationStatus: (storyGenerationStatus: TaskStatus) => void
+  setAssetGenerationStatus: (assetGenerationStatus: TaskStatus) => void
   setVoiceGenerationStatus: (voiceGenerationStatus: TaskStatus) => void
   setImageGenerationStatus: (imageGenerationStatus: TaskStatus) => void
   setVideoGenerationStatus: (videoGenerationStatus: TaskStatus) => void
@@ -59,6 +61,7 @@ export const useStore = create<{
   orientation: VideoOrientation.PORTRAIT,
   status: "idle",
   storyGenerationStatus: "idle",
+  assetGenerationStatus: "idle",
   voiceGenerationStatus: "idle",
   imageGenerationStatus: "idle",
   videoGenerationStatus: "idle",
@@ -91,6 +94,7 @@ export const useStore = create<{
   setStoryPrompt: (storyPrompt: string) => { set({ storyPrompt }) },
   setStatus: (status: GlobalStatus) => { set({ status }) },
   setStoryGenerationStatus: (storyGenerationStatus: TaskStatus) => { set({ storyGenerationStatus }) },
+  setAssetGenerationStatus: (assetGenerationStatus: TaskStatus) => { set({ assetGenerationStatus }) },
   setVoiceGenerationStatus: (voiceGenerationStatus: TaskStatus) => { set({ voiceGenerationStatus }) },
   setImageGenerationStatus: (imageGenerationStatus: TaskStatus) => { set({ imageGenerationStatus }) },
   setVideoGenerationStatus: (videoGenerationStatus: TaskStatus) => { set({ videoGenerationStatus }) },
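Adding a pipeline stage therefore touches four places in the store: the state field, the setter signature, the initial value, and the setter implementation. A self-contained sketch of that pattern with zustand, reduced to the single new stage (the TaskStatus union below is assumed from the values used elsewhere in the commit, not taken from the project's type definitions):

// Sketch only: the four-part pattern above, reduced to one stage.
import { create } from "zustand"

// assumed shape; the real TaskStatus type in the project may differ
type TaskStatus = "idle" | "generating" | "finished" | "error"

interface DemoStore {
  assetGenerationStatus: TaskStatus                          // 1. state field
  setAssetGenerationStatus: (s: TaskStatus) => void          // 2. setter signature
}

export const useDemoStore = create<DemoStore>((set) => ({
  assetGenerationStatus: "idle",                             // 3. initial value
  setAssetGenerationStatus: (assetGenerationStatus: TaskStatus) =>
    set({ assetGenerationStatus }),                          // 4. setter implementation
}))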