@@ -3,12 +3,12 @@ import { registerFont } from "canvas";
 import flatMap from "lodash-es/flatMap.js";
 import pMap from "p-map";
 import { basename } from "path";
+import { Configuration } from "./configuration.js";
 import { readDuration, readVideoFileInfo } from "./ffmpeg.js";
 import { Transition } from "./transition.js";
 import type {
   AudioTrack,
   CanvasLayer,
-  Clip,
   FabricLayer,
   ImageLayer,
   ImageOverlayLayer,
@@ -49,13 +49,12 @@ async function validateArbitraryAudio(
 }
 
 type ParseConfigOptions = {
-  clips: Clip[];
-  backgroundAudioVolume?: string | number;
   backgroundAudioPath?: string;
-  loopAudio?: boolean;
-  allowRemoteRequests?: boolean;
   arbitraryAudio: AudioTrack[];
-};
+} & Pick<
+  Configuration,
+  "clips" | "backgroundAudioVolume" | "loopAudio" | "allowRemoteRequests" | "defaults"
+>;
 
 export default async function parseConfig({
   clips,
@@ -64,6 +63,7 @@ export default async function parseConfig({
   backgroundAudioVolume,
   loopAudio,
   allowRemoteRequests,
+  defaults,
 }: ParseConfigOptions) {
   async function handleLayer(layer: Layer): Promise<Layer | Layer[]> {
     // https://github.com/mifi/editly/issues/39
@@ -122,14 +122,8 @@ export default async function parseConfig({
   let clipsOut: ProcessedClip[] = await pMap(
     clips,
     async (clip, clipIndex) => {
-      const { transition: userTransition, duration, layers } = clip;
-
-      const videoLayers = layers.filter((layer) => layer.type === "video");
-
-      if (videoLayers.length === 0)
-        assert(duration, `Duration parameter is required for videoless clip ${clipIndex}`);
-
-      const transition = new Transition(userTransition, clipIndex === clips.length - 1);
+      const { layers } = clip;
+      const transition = new Transition(clip.transition, clipIndex === clips.length - 1);
 
       let layersOut = flatMap(
         await pMap(
@@ -179,13 +173,14 @@ export default async function parseConfig({
         ),
       );
 
-      let clipDuration = duration;
+      let clipDuration = clip.duration;
 
-      const firstVideoLayer = layersOut.find(
-        (layer): layer is VideoLayer => layer.type === "video",
-      );
-      if (firstVideoLayer && !duration) clipDuration = firstVideoLayer.layerDuration!;
-      assert(clipDuration);
+      if (!clipDuration) {
+        const video = layersOut.find((layer): layer is VideoLayer => layer.type === "video");
+        clipDuration = video?.layerDuration ?? defaults.duration;
+      }
+
+      assert(clipDuration, `Duration parameter is required for videoless clip ${clipIndex}`);
 
       // We need to map again, because for audio, we need to know the correct clipDuration
       layersOut = (
@@ -229,9 +224,9 @@ export default async function parseConfig({
       let speedFactor;
 
       // If user explicitly specified duration for clip, it means that should be the output duration of the video
-      if (duration) {
+      if (clipDuration) {
         // Later we will speed up or slow down video using this factor
-        speedFactor = duration / layerDuration;
+        speedFactor = clipDuration / layerDuration;
       } else {
         speedFactor = 1;
       }
0 commit comments