diff --git a/apps/desktop/src-tauri/src/audio.rs b/apps/desktop/src-tauri/src/audio.rs index b8319f8e55f..7cca0218f40 100644 --- a/apps/desktop/src-tauri/src/audio.rs +++ b/apps/desktop/src-tauri/src/audio.rs @@ -18,6 +18,7 @@ fn play_audio(bytes: &'static [u8]) { pub enum AppSounds { StartRecording, StopRecording, + Screenshot, Notification, } @@ -31,6 +32,7 @@ impl AppSounds { match self { AppSounds::StartRecording => include_bytes!("../sounds/start-recording.ogg"), AppSounds::StopRecording => include_bytes!("../sounds/stop-recording.ogg"), + AppSounds::Screenshot => include_bytes!("../sounds/screenshot.ogg"), AppSounds::Notification => include_bytes!("../sounds/action.ogg"), } } diff --git a/apps/desktop/src-tauri/src/notifications.rs b/apps/desktop/src-tauri/src/notifications.rs index c61e24b9745..d872fd8380f 100644 --- a/apps/desktop/src-tauri/src/notifications.rs +++ b/apps/desktop/src-tauri/src/notifications.rs @@ -89,7 +89,6 @@ impl NotificationType { } pub fn send_notification(app: &tauri::AppHandle, notification_type: NotificationType) { - // Check if notifications are enabled in settings let enable_notifications = GeneralSettingsStore::get(app) .map(|settings| settings.is_some_and(|s| s.enable_notifications)) .unwrap_or(false); @@ -107,5 +106,15 @@ pub fn send_notification(app: &tauri::AppHandle, notification_type: Notification .show() .ok(); - AppSounds::Notification.play(); + let skip_sound = matches!( + notification_type, + NotificationType::ScreenshotSaved + | NotificationType::ScreenshotCopiedToClipboard + | NotificationType::ScreenshotSaveFailed + | NotificationType::ScreenshotCopyFailed + ); + + if !skip_sound { + AppSounds::Notification.play(); + } } diff --git a/apps/desktop/src-tauri/src/recording.rs b/apps/desktop/src-tauri/src/recording.rs index ecdab8f7592..d277e5b13e0 100644 --- a/apps/desktop/src-tauri/src/recording.rs +++ b/apps/desktop/src-tauri/src/recording.rs @@ -1205,6 +1205,8 @@ pub async fn take_screenshot( .await .map_err(|e| 
format!("Failed to capture screenshot: {e}"))?; + AppSounds::Screenshot.play(); + let image_width = image.width(); let image_height = image.height(); let image_data = image.into_raw(); @@ -1316,8 +1318,6 @@ pub async fn take_screenshot( &app_handle, notifications::NotificationType::ScreenshotSaved, ); - - AppSounds::StopRecording.play(); } Ok(Err(e)) => { error!("Failed to encode PNG: {e}"); diff --git a/apps/desktop/src-tauri/src/target_select_overlay.rs b/apps/desktop/src-tauri/src/target_select_overlay.rs index 8416efe7e3b..19e670b06e8 100644 --- a/apps/desktop/src-tauri/src/target_select_overlay.rs +++ b/apps/desktop/src-tauri/src/target_select_overlay.rs @@ -78,6 +78,8 @@ pub async fn prewarm_target_select_overlays( .await { Ok(window) => { + let _ = window.hide(); + let _ = window.set_ignore_cursor_events(true); prewarmed.store(display_id, window); } Err(e) => { @@ -152,6 +154,7 @@ pub async fn open_target_select_overlays( for display_id in &display_ids { let mut used_prewarmed = false; if let Some(window) = prewarmed.take(display_id) { + let _ = window.set_ignore_cursor_events(false); window.show().ok(); if display_id == &focus_display_id { window.set_focus().ok(); diff --git a/apps/desktop/src-tauri/src/windows.rs b/apps/desktop/src-tauri/src/windows.rs index bfe173c7e77..aaf523c7f4f 100644 --- a/apps/desktop/src-tauri/src/windows.rs +++ b/apps/desktop/src-tauri/src/windows.rs @@ -231,6 +231,7 @@ impl CapWindowId { Self::ModeSelect => "Cap Mode Selection".to_string(), Self::Camera => "Cap Camera".to_string(), Self::RecordingsOverlay => "Cap Recordings Overlay".to_string(), + Self::TargetSelectOverlay { .. 
} => "Cap Target Select".to_string(), _ => "Cap".to_string(), } } diff --git a/apps/desktop/src/routes/target-select-overlay.tsx b/apps/desktop/src/routes/target-select-overlay.tsx index fcbca33a741..1d095145893 100644 --- a/apps/desktop/src/routes/target-select-overlay.tsx +++ b/apps/desktop/src/routes/target-select-overlay.tsx @@ -788,6 +788,14 @@ function Inner() { ); try { + const allWindows = await WebviewWindow.getAll(); + for (const win of allWindows) { + if (win.label.startsWith("target-select-overlay-")) { + await win.hide(); + } + } + await new Promise((resolve) => setTimeout(resolve, 50)); + const path = await invoke("take_screenshot", { target, }); diff --git a/crates/editor/examples/decode-benchmark.rs b/crates/editor/examples/decode-benchmark.rs index c71c18b6bed..d29ab2dda07 100644 --- a/crates/editor/examples/decode-benchmark.rs +++ b/crates/editor/examples/decode-benchmark.rs @@ -191,7 +191,7 @@ async fn benchmark_decoder_creation(path: &Path, fps: u32, iterations: usize) -> for i in 0..iterations { let start = Instant::now(); - let decoder = spawn_decoder("benchmark", path.to_path_buf(), fps, 0.0).await; + let decoder = spawn_decoder("benchmark", path.to_path_buf(), fps, 0.0, false).await; let elapsed = start.elapsed(); match decoder { @@ -320,7 +320,14 @@ async fn run_full_benchmark(config: BenchmarkConfig) -> BenchmarkResults { println!(" Done: {:.2}ms avg", results.decoder_creation_ms); println!("[2/5] Creating decoder for remaining tests..."); - let decoder = match spawn_decoder("benchmark", config.video_path.clone(), config.fps, 0.0).await + let decoder = match spawn_decoder( + "benchmark", + config.video_path.clone(), + config.fps, + 0.0, + false, + ) + .await { Ok(d) => d, Err(e) => { diff --git a/crates/enc-avfoundation/src/mp4.rs b/crates/enc-avfoundation/src/mp4.rs index ab7ee8f1779..fce29a2de6d 100644 --- a/crates/enc-avfoundation/src/mp4.rs +++ b/crates/enc-avfoundation/src/mp4.rs @@ -25,6 +25,7 @@ pub struct MP4Encoder { 
timestamp_offset: Duration, is_writing: bool, is_paused: bool, + writer_failed: bool, video_frames_appended: usize, audio_frames_appended: usize, last_timestamp: Option, @@ -272,6 +273,7 @@ impl MP4Encoder { timestamp_offset: Duration::ZERO, is_writing: false, is_paused: false, + writer_failed: false, video_frames_appended: 0, audio_frames_appended: 0, last_timestamp: None, @@ -285,6 +287,10 @@ impl MP4Encoder { frame: arc::R, timestamp: Duration, ) -> Result<(), QueueFrameError> { + if self.writer_failed { + return Err(QueueFrameError::Failed); + } + if self.is_paused { return Ok(()); }; @@ -344,7 +350,14 @@ impl MP4Encoder { let new_frame = frame.copy_with_new_timing(&[timing]).unwrap(); drop(frame); - append_sample_buf(&mut self.video_input, &self.asset_writer, &new_frame)?; + match append_sample_buf(&mut self.video_input, &self.asset_writer, &new_frame) { + Ok(()) => {} + Err(QueueFrameError::WriterFailed(err)) => { + self.writer_failed = true; + return Err(QueueFrameError::WriterFailed(err)); + } + Err(e) => return Err(e), + } self.video_frames_appended += 1; self.last_timestamp = Some(timestamp); @@ -357,6 +370,10 @@ impl MP4Encoder { frame: &frame::Audio, timestamp: Duration, ) -> Result<(), QueueFrameError> { + if self.writer_failed { + return Err(QueueFrameError::Failed); + } + if self.is_paused || !self.is_writing { return Ok(()); } @@ -498,7 +515,14 @@ impl MP4Encoder { ) .map_err(QueueFrameError::Construct)?; - append_sample_buf(audio_input, &self.asset_writer, &buffer)?; + match append_sample_buf(audio_input, &self.asset_writer, &buffer) { + Ok(()) => {} + Err(QueueFrameError::WriterFailed(err)) => { + self.writer_failed = true; + return Err(QueueFrameError::WriterFailed(err)); + } + Err(e) => return Err(e), + } self.audio_frames_appended += 1; self.last_timestamp = Some(timestamp); @@ -756,18 +780,31 @@ fn append_sample_buf( writer: &av::AssetWriter, frame: &cm::SampleBuf, ) -> Result<(), QueueFrameError> { + let status = writer.status(); + if status 
== av::asset::writer::Status::Failed { + return Err(match writer.error() { + Some(err) => QueueFrameError::WriterFailed(err), + None => QueueFrameError::Failed, + }); + } + if status != av::asset::writer::Status::Writing { + return Err(QueueFrameError::Failed); + } + match input.append_sample_buf(frame) { Ok(true) => {} Ok(false) => { - if writer.status() == av::asset::writer::Status::Failed { + let status = writer.status(); + if status == av::asset::writer::Status::Failed { return Err(match writer.error() { Some(err) => QueueFrameError::WriterFailed(err), None => QueueFrameError::Failed, }); } - if writer.status() == av::asset::writer::Status::Writing { + if status == av::asset::writer::Status::Writing { return Err(QueueFrameError::NotReadyForMore); } + return Err(QueueFrameError::Failed); } Err(e) => return Err(QueueFrameError::AppendError(e.retained())), } diff --git a/crates/recording/src/output_pipeline/core.rs b/crates/recording/src/output_pipeline/core.rs index cdc7e0b5444..5e440969711 100644 --- a/crates/recording/src/output_pipeline/core.rs +++ b/crates/recording/src/output_pipeline/core.rs @@ -35,6 +35,7 @@ struct AudioTimestampGenerator { } const VIDEO_WALL_CLOCK_TOLERANCE_SECS: f64 = 0.1; +const AUDIO_WALL_CLOCK_TOLERANCE_SECS: f64 = 0.05; impl AudioTimestampGenerator { fn new(sample_rate: u32) -> Self { @@ -51,6 +52,84 @@ impl AudioTimestampGenerator { } } +struct AudioDriftTracker { + baseline_offset_secs: Option, + drift_warning_logged: bool, + last_corrected_secs: f64, +} + +impl AudioDriftTracker { + fn new() -> Self { + Self { + baseline_offset_secs: None, + drift_warning_logged: false, + last_corrected_secs: 0.0, + } + } + + fn calculate_timestamp( + &mut self, + sample_based_duration: Duration, + wall_clock_elapsed: Duration, + ) -> Duration { + let sample_secs = sample_based_duration.as_secs_f64(); + let wall_clock_secs = wall_clock_elapsed.as_secs_f64(); + let max_allowed_secs = wall_clock_secs + AUDIO_WALL_CLOCK_TOLERANCE_SECS; + + if 
wall_clock_secs < 2.0 || sample_secs < 2.0 { + let result_secs = sample_secs + .min(max_allowed_secs) + .max(self.last_corrected_secs); + self.last_corrected_secs = result_secs; + return Duration::from_secs_f64(result_secs); + } + + if self.baseline_offset_secs.is_none() { + let offset = sample_secs - wall_clock_secs; + debug!( + wall_clock_secs, + sample_secs, + baseline_offset_secs = offset, + "AudioDriftTracker: Capturing baseline offset after warmup" + ); + self.baseline_offset_secs = Some(offset); + } + + let baseline = self.baseline_offset_secs.unwrap_or(0.0); + let adjusted_sample_secs = (sample_secs - baseline).max(0.0); + + let drift_ratio = if adjusted_sample_secs > 0.0 { + wall_clock_secs / adjusted_sample_secs + } else { + 1.0 + }; + + let corrected_secs = if !(0.95..=1.05).contains(&drift_ratio) { + if !self.drift_warning_logged { + warn!( + drift_ratio, + wall_clock_secs, + adjusted_sample_secs, + baseline, + "AudioDriftTracker: Significant audio clock drift detected, clamping" + ); + self.drift_warning_logged = true; + } + let clamped_ratio = drift_ratio.clamp(0.98, 1.02); + adjusted_sample_secs * clamped_ratio + } else { + adjusted_sample_secs * drift_ratio + }; + + let final_secs = corrected_secs + .min(max_allowed_secs) + .max(self.last_corrected_secs); + self.last_corrected_secs = final_secs; + + Duration::from_secs_f64(final_secs) + } +} + struct VideoDriftTracker { baseline_offset_secs: Option, capped_frame_count: u64, @@ -1082,6 +1161,7 @@ impl PreparedAudioSources { let muxer = muxer.clone(); async move { let mut timestamp_generator = AudioTimestampGenerator::new(sample_rate); + let mut drift_tracker = AudioDriftTracker::new(); let mut dropped_during_pause: u64 = 0; let mut frame_count: u64 = 0; @@ -1102,19 +1182,32 @@ impl PreparedAudioSources { let frame_samples = frame.inner.samples() as u64; frame_count += 1; - let timestamp = timestamp_generator.next_timestamp(frame_samples); + let raw_wall_clock = timestamps.instant().elapsed(); + let 
wall_clock_elapsed = + raw_wall_clock.saturating_sub(total_pause_duration); + + let sample_based_timestamp = + timestamp_generator.next_timestamp(frame_samples); + let timestamp = drift_tracker + .calculate_timestamp(sample_based_timestamp, wall_clock_elapsed); if frame_count.is_multiple_of(500) { - let raw_wall_clock = timestamps.instant().elapsed(); - let effective_wall_clock = - raw_wall_clock.saturating_sub(total_pause_duration); + let drift_ratio = if sample_based_timestamp.as_secs_f64() > 0.0 { + wall_clock_elapsed.as_secs_f64() + / sample_based_timestamp.as_secs_f64() + } else { + 1.0 + }; debug!( - wall_clock_secs = effective_wall_clock.as_secs_f64(), - sample_based_secs = timestamp.as_secs_f64(), - total_samples = timestamp_generator.total_samples, frame_count, + wall_clock_secs = wall_clock_elapsed.as_secs_f64(), + sample_based_secs = sample_based_timestamp.as_secs_f64(), + corrected_secs = timestamp.as_secs_f64(), + drift_ratio, + baseline_offset = drift_tracker.baseline_offset_secs, + total_samples = timestamp_generator.total_samples, total_pause_ms = total_pause_duration.as_millis(), - "Audio timestamp status" + "Audio drift correction status" ); } diff --git a/crates/rendering/src/decoder/mod.rs b/crates/rendering/src/decoder/mod.rs index 21acd5939a4..ec7728fa528 100644 --- a/crates/rendering/src/decoder/mod.rs +++ b/crates/rendering/src/decoder/mod.rs @@ -456,7 +456,20 @@ pub struct AsyncVideoDecoderHandle { } impl AsyncVideoDecoderHandle { + const NORMAL_TIMEOUT_MS: u64 = 2000; + const INITIAL_SEEK_TIMEOUT_MS: u64 = 10000; + pub async fn get_frame(&self, time: f32) -> Option { + self.get_frame_with_timeout(time, Self::NORMAL_TIMEOUT_MS) + .await + } + + pub async fn get_frame_initial(&self, time: f32) -> Option { + self.get_frame_with_timeout(time, Self::INITIAL_SEEK_TIMEOUT_MS) + .await + } + + async fn get_frame_with_timeout(&self, time: f32, timeout_ms: u64) -> Option { let (tx, rx) = tokio::sync::oneshot::channel(); let adjusted_time = 
self.get_time(time); @@ -468,12 +481,13 @@ impl AsyncVideoDecoderHandle { return None; } - match tokio::time::timeout(std::time::Duration::from_millis(2000), rx).await { + match tokio::time::timeout(std::time::Duration::from_millis(timeout_ms), rx).await { Ok(result) => result.ok(), Err(_) => { tracing::warn!( time = adjusted_time, - "Frame decode request timed out after 2000ms" + timeout_ms = timeout_ms, + "Frame decode request timed out" ); None } diff --git a/crates/rendering/src/frame_pipeline.rs b/crates/rendering/src/frame_pipeline.rs index 2462e066046..5bb70be57e3 100644 --- a/crates/rendering/src/frame_pipeline.rs +++ b/crates/rendering/src/frame_pipeline.rs @@ -88,6 +88,8 @@ pub struct PipelinedGpuReadback { buffer_size: u64, current_index: usize, pending: Option, + needs_resize: bool, + pending_resize_size: u64, } impl PipelinedGpuReadback { @@ -106,11 +108,26 @@ impl PipelinedGpuReadback { buffer_size: initial_size, current_index: 0, pending: None, + needs_resize: false, + pending_resize_size: 0, } } - pub fn ensure_size(&mut self, device: &wgpu::Device, required_size: u64) { + pub fn mark_for_resize(&mut self, required_size: u64) { if self.buffer_size < required_size { + self.needs_resize = true; + self.pending_resize_size = required_size; + } + } + + pub fn perform_resize_if_needed(&mut self, device: &wgpu::Device) { + if self.needs_resize && self.pending.is_none() { + let required_size = self.pending_resize_size; + tracing::info!( + old_size = self.buffer_size, + new_size = required_size, + "Resizing GPU readback buffers" + ); let make_buffer = || { Arc::new(device.create_buffer(&wgpu::BufferDescriptor { label: Some("Pipelined Readback Buffer"), @@ -123,6 +140,29 @@ impl PipelinedGpuReadback { self.buffers = [make_buffer(), make_buffer(), make_buffer()]; self.buffer_size = required_size; self.current_index = 0; + self.needs_resize = false; + self.pending_resize_size = 0; + } + } + + pub fn ensure_size(&mut self, device: &wgpu::Device, required_size: 
u64) { + if self.buffer_size < required_size { + if self.pending.is_some() { + self.mark_for_resize(required_size); + } else { + let make_buffer = || { + Arc::new(device.create_buffer(&wgpu::BufferDescriptor { + label: Some("Pipelined Readback Buffer"), + size: required_size, + usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::MAP_READ, + mapped_at_creation: false, + })) + }; + + self.buffers = [make_buffer(), make_buffer(), make_buffer()]; + self.buffer_size = required_size; + self.current_index = 0; + } } } @@ -208,6 +248,8 @@ pub struct RenderSession { texture_views: (wgpu::TextureView, wgpu::TextureView), pub current_is_left: bool, pub pipelined_readback: PipelinedGpuReadback, + texture_width: u32, + texture_height: u32, } impl RenderSession { @@ -243,10 +285,24 @@ impl RenderSession { ), textures, pipelined_readback: PipelinedGpuReadback::new(device, initial_buffer_size), + texture_width: width, + texture_height: height, } } pub fn update_texture_size(&mut self, device: &wgpu::Device, width: u32, height: u32) { + if self.texture_width == width && self.texture_height == height { + return; + } + + tracing::info!( + old_width = self.texture_width, + old_height = self.texture_height, + new_width = width, + new_height = height, + "Resizing render session textures" + ); + let make_texture = || { device.create_texture(&wgpu::TextureDescriptor { size: wgpu::Extent3d { @@ -271,6 +327,8 @@ impl RenderSession { self.textures.0.create_view(&Default::default()), self.textures.1.create_view(&Default::default()), ); + self.texture_width = width; + self.texture_height = height; } pub fn current_texture(&self) -> &wgpu::Texture { @@ -372,6 +430,8 @@ pub async fn finish_encoder( ) -> Result { let previous_pending = session.pipelined_readback.take_pending(); + session.pipelined_readback.perform_resize_if_needed(device); + let texture = if session.current_is_left { &session.textures.0 } else { diff --git a/crates/rendering/src/lib.rs b/crates/rendering/src/lib.rs index 
c110af62dd7..1c12264bfc4 100644 --- a/crates/rendering/src/lib.rs +++ b/crates/rendering/src/lib.rs @@ -218,6 +218,35 @@ impl RecordingSegmentDecoders { recording_time: segment_time + self.segment_offset as f32, }) } + + pub async fn get_frames_initial( + &self, + segment_time: f32, + needs_camera: bool, + offsets: ClipOffsets, + ) -> Option { + let camera_request_time = segment_time + offsets.camera; + let (screen, camera) = tokio::join!( + self.screen.get_frame_initial(segment_time), + OptionFuture::from( + needs_camera + .then(|| self + .camera + .as_ref() + .map(|d| d.get_frame_initial(camera_request_time))) + .flatten() + ) + ); + + let camera_frame = camera.flatten(); + + Some(DecodedSegmentFrames { + screen_frame: screen?, + camera_frame, + segment_time, + recording_time: segment_time + self.segment_offset as f32, + }) + } } #[derive(thiserror::Error, Debug)] @@ -332,21 +361,38 @@ pub async fn render_video_to_channel( let mut segment_frames = None; let mut retry_count = 0; - const MAX_RETRIES: u32 = 3; + const MAX_RETRIES: u32 = 5; + let is_initial_frame = current_frame_number == 0 || last_successful_frame.is_none(); while segment_frames.is_none() && retry_count < MAX_RETRIES { if retry_count > 0 { - tokio::time::sleep(std::time::Duration::from_millis(50 * retry_count as u64)).await; + let delay = if is_initial_frame { + 500 * (retry_count as u64 + 1) + } else { + 50 * retry_count as u64 + }; + tokio::time::sleep(std::time::Duration::from_millis(delay)).await; } - segment_frames = render_segment - .decoders - .get_frames( - segment_time as f32, - !project.camera.hide, - clip_config.map(|v| v.offsets).unwrap_or_default(), - ) - .await; + segment_frames = if is_initial_frame && retry_count == 0 { + render_segment + .decoders + .get_frames_initial( + segment_time as f32, + !project.camera.hide, + clip_config.map(|v| v.offsets).unwrap_or_default(), + ) + .await + } else { + render_segment + .decoders + .get_frames( + segment_time as f32, + !project.camera.hide, 
+ clip_config.map(|v| v.offsets).unwrap_or_default(), + ) + .await + }; if segment_frames.is_none() { retry_count += 1; @@ -355,6 +401,7 @@ pub async fn render_video_to_channel( frame_number = current_frame_number, segment_time = segment_time, retry_count = retry_count, + is_initial = is_initial_frame, "Frame decode failed, retrying..." ); } @@ -1726,6 +1773,8 @@ pub struct FrameRenderer<'a> { } impl<'a> FrameRenderer<'a> { + const MAX_RENDER_RETRIES: u32 = 3; + pub fn new(constants: &'a RenderVideoConstants) -> Self { Self { constants, @@ -1733,6 +1782,10 @@ impl<'a> FrameRenderer<'a> { } } + pub fn reset_session(&mut self) { + self.session = None; + } + pub async fn render( &mut self, segment_frames: DecodedSegmentFrames, @@ -1740,29 +1793,67 @@ impl<'a> FrameRenderer<'a> { cursor: &CursorEvents, layers: &mut RendererLayers, ) -> Result { - let session = self.session.get_or_insert_with(|| { - RenderSession::new( + let mut last_error = None; + + for attempt in 0..Self::MAX_RENDER_RETRIES { + if attempt > 0 { + tracing::warn!( + frame_number = uniforms.frame_number, + attempt = attempt + 1, + "Retrying frame render after GPU error" + ); + self.reset_session(); + tokio::time::sleep(std::time::Duration::from_millis(100 * (attempt as u64 + 1))) + .await; + } + + let session = self.session.get_or_insert_with(|| { + RenderSession::new( + &self.constants.device, + uniforms.output_size.0, + uniforms.output_size.1, + ) + }); + + session.update_texture_size( &self.constants.device, uniforms.output_size.0, uniforms.output_size.1, - ) - }); + ); - session.update_texture_size( - &self.constants.device, - uniforms.output_size.0, - uniforms.output_size.1, - ); + match produce_frame( + self.constants, + segment_frames.clone(), + uniforms.clone(), + cursor, + layers, + session, + ) + .await + { + Ok(frame) => return Ok(frame), + Err(RenderingError::BufferMapWaitingFailed) => { + tracing::warn!( + frame_number = uniforms.frame_number, + attempt = attempt + 1, + "GPU buffer mapping 
failed, will retry" + ); + last_error = Some(RenderingError::BufferMapWaitingFailed); + } + Err(RenderingError::BufferMapFailed(e)) => { + tracing::warn!( + frame_number = uniforms.frame_number, + attempt = attempt + 1, + error = %e, + "GPU buffer async error, will retry" + ); + last_error = Some(RenderingError::BufferMapFailed(e)); + } + Err(e) => return Err(e), + } + } - produce_frame( - self.constants, - segment_frames, - uniforms, - cursor, - layers, - session, - ) - .await + Err(last_error.unwrap_or(RenderingError::BufferMapWaitingFailed)) } }