diff --git a/crates/eframe/src/native/wgpu_integration.rs b/crates/eframe/src/native/wgpu_integration.rs
index d13bed0bf41..acbd8c2f830 100644
--- a/crates/eframe/src/native/wgpu_integration.rs
+++ b/crates/eframe/src/native/wgpu_integration.rs
@@ -185,6 +185,7 @@ impl<'app> WgpuWinitApp<'app> {
 
         #[allow(unsafe_code, unused_mut, unused_unsafe)]
         let mut painter = egui_wgpu::winit::Painter::new(
+            egui_ctx.clone(),
             self.native_options.wgpu_options.clone(),
             self.native_options.multisampling.max(1) as _,
             egui_wgpu::depth_format_from_bits(
@@ -593,6 +594,8 @@ impl<'app> WgpuWinitRunning<'app> {
                 .map(|(id, viewport)| (*id, viewport.info.clone()))
                 .collect();
 
+            painter.handle_screenshots(&mut raw_input.events);
+
             (viewport_ui_cb, raw_input)
         };
 
@@ -652,37 +655,14 @@ impl<'app> WgpuWinitRunning<'app> {
                 true
             }
         });
-        let screenshot_requested = !screenshot_commands.is_empty();
-        let (vsync_secs, screenshot) = painter.paint_and_update_textures(
+        let vsync_secs = painter.paint_and_update_textures(
             viewport_id,
             pixels_per_point,
             app.clear_color(&egui_ctx.style().visuals),
             &clipped_primitives,
             &textures_delta,
-            screenshot_requested,
+            screenshot_commands,
         );
-        match (screenshot_requested, screenshot) {
-            (false, None) => {}
-            (true, Some(screenshot)) => {
-                let screenshot = Arc::new(screenshot);
-                for user_data in screenshot_commands {
-                    egui_winit
-                        .egui_input_mut()
-                        .events
-                        .push(egui::Event::Screenshot {
-                            viewport_id,
-                            user_data,
-                            image: screenshot.clone(),
-                        });
-                }
-            }
-            (true, None) => {
-                log::error!("Bug in egui_wgpu: screenshot requested, but no screenshot was taken");
-            }
-            (false, Some(_)) => {
-                log::warn!("Bug in egui_wgpu: Got screenshot without requesting it");
-            }
-        }
 
         for action in viewport.actions_requested.drain() {
             match action {
@@ -1024,7 +1004,7 @@ fn render_immediate_viewport(
         [0.0, 0.0, 0.0, 0.0],
         &clipped_primitives,
         &textures_delta,
-        false,
+        vec![],
     );
 
     egui_winit.handle_platform_output(window, platform_output);
diff --git a/crates/eframe/src/web/app_runner.rs b/crates/eframe/src/web/app_runner.rs
index 2f8f78d0818..d8abd3d4bd7 100644
--- a/crates/eframe/src/web/app_runner.rs
+++ b/crates/eframe/src/web/app_runner.rs
@@ -1,4 +1,5 @@
-use egui::TexturesDelta;
+use egui::{TexturesDelta, UserData, ViewportCommand};
+use std::mem;
 
 use crate::{epi, App};
 
@@ -16,6 +17,9 @@ pub struct AppRunner {
     last_save_time: f64,
     pub(crate) text_agent: TextAgent,
 
+    // If non-empty, the painter should capture the next painted frame.
+    screenshot_commands: Vec<UserData>,
+
     // Output for the last run:
     textures_delta: TexturesDelta,
     clipped_primitives: Option<Vec<egui::ClippedPrimitive>>,
@@ -36,7 +40,8 @@ impl AppRunner {
         app_creator: epi::AppCreator<'static>,
         text_agent: TextAgent,
     ) -> Result<Self, String> {
-        let painter = super::ActiveWebPainter::new(canvas, &web_options).await?;
+        let egui_ctx = egui::Context::default();
+        let painter = super::ActiveWebPainter::new(egui_ctx.clone(), canvas, &web_options).await?;
 
         let info = epi::IntegrationInfo {
             web_info: epi::WebInfo {
@@ -47,7 +52,6 @@ impl AppRunner {
         };
         let storage = LocalStorage::default();
 
-        let egui_ctx = egui::Context::default();
         egui_ctx.set_os(egui::os::OperatingSystem::from_user_agent(
             &super::user_agent().unwrap_or_default(),
         ));
@@ -110,6 +114,7 @@ impl AppRunner {
             needs_repaint,
             last_save_time: now_sec(),
             text_agent,
+            screenshot_commands: vec![],
             textures_delta: Default::default(),
             clipped_primitives: None,
         };
@@ -205,6 +210,8 @@ impl AppRunner {
     pub fn logic(&mut self) {
         // We sometimes miss blur/focus events due to the text agent, so let's just poll each frame:
         self.update_focus();
+        // Collect any screenshots the painter has finished since the last frame.
+        self.painter.handle_screenshots(&mut self.input.raw.events);
 
         let canvas_size = super::canvas_size_in_points(self.canvas(), self.egui_ctx());
         let mut raw_input = self.input.new_frame(canvas_size);
@@ -225,12 +232,19 @@ impl AppRunner {
         if viewport_output.len() > 1 {
             log::warn!("Multiple viewports not yet supported on the web");
         }
-        for viewport_output in viewport_output.values() {
-            for command in &viewport_output.commands {
-                // TODO(emilk): handle some of the commands
-                log::warn!(
-                    "Unhandled egui viewport command: {command:?} - not implemented in web backend"
-                );
+        for (_viewport_id, viewport_output) in viewport_output {
+            for command in viewport_output.commands {
+                match command {
+                    ViewportCommand::Screenshot(user_data) => {
+                        self.screenshot_commands.push(user_data);
+                    }
+                    _ => {
+                        // TODO(emilk): handle some of the commands
+                        log::warn!(
+                            "Unhandled egui viewport command: {command:?} - not implemented in web backend"
+                        );
+                    }
+                }
             }
         }
 
@@ -250,6 +264,7 @@ impl AppRunner {
                 &clipped_primitives,
                 self.egui_ctx.pixels_per_point(),
                 &textures_delta,
+                mem::take(&mut self.screenshot_commands),
             ) {
                 log::error!("Failed to paint: {}", super::string_from_js_value(&err));
             }
diff --git a/crates/eframe/src/web/web_painter.rs b/crates/eframe/src/web/web_painter.rs
index b5164b915c0..fe751bf169f 100644
--- a/crates/eframe/src/web/web_painter.rs
+++ b/crates/eframe/src/web/web_painter.rs
@@ -1,3 +1,4 @@
+use egui::{Event, UserData};
 use wasm_bindgen::JsValue;
 
 /// Renderer for a browser canvas.
@@ -16,14 +17,19 @@ pub(crate) trait WebPainter {
     fn max_texture_side(&self) -> usize;
 
     /// Update all internal textures and paint gui.
+    /// When `capture` isn't empty, the rendered screen should be captured.
+    /// Once the screenshot is ready, the screenshot should be returned via [`Self::handle_screenshots`].
     fn paint_and_update_textures(
         &mut self,
         clear_color: [f32; 4],
         clipped_primitives: &[egui::ClippedPrimitive],
         pixels_per_point: f32,
         textures_delta: &egui::TexturesDelta,
+        capture: Vec<UserData>,
     ) -> Result<(), JsValue>;
 
+    fn handle_screenshots(&mut self, events: &mut Vec<Event>);
+
     /// Destroy all resources.
     fn destroy(&mut self);
 }
diff --git a/crates/eframe/src/web/web_painter_glow.rs b/crates/eframe/src/web/web_painter_glow.rs
index e13cb0018cd..876a6d78e2d 100644
--- a/crates/eframe/src/web/web_painter_glow.rs
+++ b/crates/eframe/src/web/web_painter_glow.rs
@@ -1,9 +1,10 @@
+use egui::{Event, UserData, ViewportId};
+use egui_glow::glow;
+use std::sync::Arc;
 use wasm_bindgen::JsCast;
 use wasm_bindgen::JsValue;
 use web_sys::HtmlCanvasElement;
 
-use egui_glow::glow;
-
 use crate::{WebGlContextOption, WebOptions};
 
 use super::web_painter::WebPainter;
@@ -11,6 +12,7 @@ use super::web_painter::WebPainter;
 pub(crate) struct WebPainterGlow {
     canvas: HtmlCanvasElement,
     painter: egui_glow::Painter,
+    screenshots: Vec<(egui::ColorImage, Vec<UserData>)>,
 }
 
 impl WebPainterGlow {
@@ -18,7 +20,11 @@ impl WebPainterGlow {
         self.painter.gl()
     }
 
-    pub async fn new(canvas: HtmlCanvasElement, options: &WebOptions) -> Result<Self, String> {
+    pub async fn new(
+        _ctx: egui::Context,
+        canvas: HtmlCanvasElement,
+        options: &WebOptions,
+    ) -> Result<Self, String> {
         let (gl, shader_prefix) =
             init_glow_context_from_canvas(&canvas, options.webgl_context_option)?;
         #[allow(clippy::arc_with_non_send_sync)]
@@ -27,7 +33,11 @@ impl WebPainterGlow {
         let painter = egui_glow::Painter::new(gl, shader_prefix, None, options.dithering)
             .map_err(|err| format!("Error starting glow painter: {err}"))?;
 
-        Ok(Self { canvas, painter })
+        Ok(Self {
+            canvas,
+            painter,
+            screenshots: Vec::new(),
+        })
     }
 }
 
@@ -46,6 +56,7 @@ impl WebPainter for WebPainterGlow {
         clipped_primitives: &[egui::ClippedPrimitive],
         pixels_per_point: f32,
         textures_delta: &egui::TexturesDelta,
+        capture: Vec<UserData>,
     ) -> Result<(), JsValue> {
         let canvas_dimension = [self.canvas.width(), self.canvas.height()];
 
@@ -57,6 +68,11 @@ impl WebPainter for WebPainterGlow {
         self.painter
             .paint_primitives(canvas_dimension, pixels_per_point, clipped_primitives);
 
+        if !capture.is_empty() {
+            let image = self.painter.read_screen_rgba(canvas_dimension);
+            self.screenshots.push((image, capture));
+        }
+
         for &id in &textures_delta.free {
             self.painter.free_texture(id);
         }
@@ -67,6 +83,19 @@ impl WebPainter for WebPainterGlow {
     fn destroy(&mut self) {
         self.painter.destroy();
     }
+
+    fn handle_screenshots(&mut self, events: &mut Vec<Event>) {
+        for (image, data) in self.screenshots.drain(..) {
+            let image = Arc::new(image);
+            for data in data {
+                events.push(Event::Screenshot {
+                    viewport_id: ViewportId::default(),
+                    image: image.clone(),
+                    user_data: data,
+                });
+            }
+        }
+    }
 }
 
 /// Returns glow context and shader prefix.
diff --git a/crates/eframe/src/web/web_painter_wgpu.rs b/crates/eframe/src/web/web_painter_wgpu.rs
index ec487622e38..591d4224d3b 100644
--- a/crates/eframe/src/web/web_painter_wgpu.rs
+++ b/crates/eframe/src/web/web_painter_wgpu.rs
@@ -1,14 +1,13 @@
 use std::sync::Arc;
 
+use super::web_painter::WebPainter;
+use crate::WebOptions;
+use egui::{Event, UserData, ViewportId};
+use egui_wgpu::capture::{capture_channel, CaptureReceiver, CaptureSender, CaptureState};
+use egui_wgpu::{RenderState, SurfaceErrorAction, WgpuSetup};
 use wasm_bindgen::JsValue;
 use web_sys::HtmlCanvasElement;
 
-use egui_wgpu::{RenderState, SurfaceErrorAction, WgpuSetup};
-
-use crate::WebOptions;
-
-use super::web_painter::WebPainter;
-
 pub(crate) struct WebPainterWgpu {
     canvas: HtmlCanvasElement,
     surface: wgpu::Surface<'static>,
@@ -17,6 +16,10 @@ pub(crate) struct WebPainterWgpu {
     on_surface_error: Arc<dyn Fn(wgpu::SurfaceError) -> SurfaceErrorAction>,
     depth_format: Option<wgpu::TextureFormat>,
     depth_texture_view: Option<wgpu::TextureView>,
+    screen_capture_state: Option<CaptureState>,
+    capture_tx: CaptureSender,
+    capture_rx: CaptureReceiver,
+    ctx: egui::Context,
 }
 
 impl WebPainterWgpu {
@@ -54,6 +57,7 @@ impl WebPainterWgpu {
 
     #[allow(unused)] // only used if `wgpu` is the only active feature.
     pub async fn new(
+        ctx: egui::Context,
         canvas: web_sys::HtmlCanvasElement,
         options: &WebOptions,
     ) -> Result<Self, String> {
@@ -119,17 +123,21 @@ impl WebPainterWgpu {
         .await
         .map_err(|err| err.to_string())?;
 
+        let default_configuration = surface
+            .get_default_config(&render_state.adapter, 0, 0) // Width/height is set later.
+            .ok_or("The surface isn't supported by this adapter")?;
+
         let surface_configuration = wgpu::SurfaceConfiguration {
             format: render_state.target_format,
             present_mode: options.wgpu_options.present_mode,
             view_formats: vec![render_state.target_format],
-            ..surface
-                .get_default_config(&render_state.adapter, 0, 0) // Width/height is set later.
-                .ok_or("The surface isn't supported by this adapter")?
+            ..default_configuration
         };
 
         log::debug!("wgpu painter initialized.");
 
+        let (capture_tx, capture_rx) = capture_channel();
+
         Ok(Self {
             canvas,
             render_state: Some(render_state),
@@ -138,6 +146,10 @@ impl WebPainterWgpu {
             depth_format,
             depth_texture_view: None,
             on_surface_error: options.wgpu_options.on_surface_error.clone(),
+            screen_capture_state: None,
+            capture_tx,
+            capture_rx,
+            ctx,
         })
     }
 }
@@ -159,7 +171,10 @@ impl WebPainter for WebPainterWgpu {
         clipped_primitives: &[egui::ClippedPrimitive],
         pixels_per_point: f32,
         textures_delta: &egui::TexturesDelta,
+        capture_data: Vec<UserData>,
     ) -> Result<(), JsValue> {
+        let capture = !capture_data.is_empty();
+
         let size_in_pixels = [self.canvas.width(), self.canvas.height()];
 
         let Some(render_state) = &self.render_state else {
@@ -203,7 +218,7 @@ impl WebPainter for WebPainterWgpu {
 
         // Resize surface if needed
         let is_zero_sized_surface = size_in_pixels[0] == 0 || size_in_pixels[1] == 0;
-        let frame = if is_zero_sized_surface {
+        let frame_and_capture_buffer = if is_zero_sized_surface {
             None
         } else {
             if size_in_pixels[0] != self.surface_configuration.width
@@ -220,7 +235,7 @@ impl WebPainter for WebPainterWgpu {
                 );
             }
 
-            let frame = match self.surface.get_current_texture() {
+            let output_frame = match self.surface.get_current_texture() {
                 Ok(frame) => frame,
                 Err(err) => match (*self.on_surface_error)(err) {
                     SurfaceErrorAction::RecreateSurface => {
@@ -236,12 +251,23 @@ impl WebPainter for WebPainterWgpu {
 
             {
                 let renderer = render_state.renderer.read();
-                let frame_view = frame
-                    .texture
-                    .create_view(&wgpu::TextureViewDescriptor::default());
+
+                let target_texture = if capture {
+                    let capture_state = self.screen_capture_state.get_or_insert_with(|| {
+                        CaptureState::new(&render_state.device, &output_frame.texture)
+                    });
+                    capture_state.update(&render_state.device, &output_frame.texture);
+
+                    &capture_state.texture
+                } else {
+                    &output_frame.texture
+                };
+                let target_view =
+                    target_texture.create_view(&wgpu::TextureViewDescriptor::default());
+
                 let render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                     color_attachments: &[Some(wgpu::RenderPassColorAttachment {
-                        view: &frame_view,
+                        view: &target_view,
                         resolve_target: None,
                         ops: wgpu::Operations {
                             load: wgpu::LoadOp::Clear(wgpu::Color {
@@ -280,7 +306,19 @@ impl WebPainter for WebPainterWgpu {
                 );
             }
 
-            Some(frame)
+            let mut capture_buffer = None;
+
+            if capture {
+                if let Some(capture_state) = &mut self.screen_capture_state {
+                    capture_buffer = Some(capture_state.copy_textures(
+                        &render_state.device,
+                        &output_frame,
+                        &mut encoder,
+                    ));
+                }
+            };
+
+            Some((output_frame, capture_buffer))
         };
 
         {
@@ -295,13 +333,38 @@ impl WebPainter for WebPainterWgpu {
             .queue
             .submit(user_cmd_bufs.into_iter().chain([encoder.finish()]));
 
-        if let Some(frame) = frame {
+        if let Some((frame, capture_buffer)) = frame_and_capture_buffer {
+            if let Some(capture_buffer) = capture_buffer {
+                if let Some(capture_state) = &self.screen_capture_state {
+                    capture_state.read_screen_rgba(
+                        self.ctx.clone(),
+                        capture_buffer,
+                        capture_data,
+                        self.capture_tx.clone(),
+                        ViewportId::ROOT,
+                    );
+                }
+            }
+
             frame.present();
         }
 
         Ok(())
     }
 
+    fn handle_screenshots(&mut self, events: &mut Vec<Event>) {
+        for (viewport_id, user_data, screenshot) in self.capture_rx.try_iter() {
+            let screenshot = Arc::new(screenshot);
+            for data in user_data {
+                events.push(Event::Screenshot {
+                    viewport_id,
+                    user_data: data,
+                    image: screenshot.clone(),
+                });
+            }
+        }
+    }
+
     fn destroy(&mut self) {
         self.render_state = None;
     }
diff --git a/crates/egui-wgpu/src/capture.rs b/crates/egui-wgpu/src/capture.rs
new file mode 100644
index 00000000000..1ce780d054f
--- /dev/null
+++ b/crates/egui-wgpu/src/capture.rs
@@ -0,0 +1,257 @@
+use egui::{UserData, ViewportId};
+use epaint::ColorImage;
+use std::sync::{mpsc, Arc};
+use wgpu::{BindGroupLayout, MultisampleState, StoreOp};
+
+/// A texture and a buffer for reading the rendered frame back to the cpu.
+/// The texture is required since [`wgpu::TextureUsages::COPY_SRC`] is not an allowed
+/// flag for the surface texture on all platforms. This means that anytime we want to
+/// capture the frame, we first render it to this texture, and then we can copy it to
+/// both the surface texture (via a render pass) and the buffer (via a texture-to-buffer copy),
+/// from which we can then read the pixel data
+/// back to the CPU.
+pub struct CaptureState {
+    padding: BufferPadding,
+    pub texture: wgpu::Texture,
+    pipeline: wgpu::RenderPipeline,
+    bind_group: wgpu::BindGroup,
+}
+
+pub type CaptureReceiver = mpsc::Receiver<(ViewportId, Vec<UserData>, ColorImage)>;
+pub type CaptureSender = mpsc::Sender<(ViewportId, Vec<UserData>, ColorImage)>;
+pub use mpsc::channel as capture_channel;
+
+impl CaptureState {
+    pub fn new(device: &wgpu::Device, surface_texture: &wgpu::Texture) -> Self {
+        let shader = device.create_shader_module(wgpu::include_wgsl!("texture_copy.wgsl"));
+
+        let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
+            label: Some("texture_copy"),
+            layout: None,
+            vertex: wgpu::VertexState {
+                module: &shader,
+                entry_point: Some("vs_main"),
+                compilation_options: Default::default(),
+                buffers: &[],
+            },
+            fragment: Some(wgpu::FragmentState {
+                module: &shader,
+                entry_point: Some("fs_main"),
+                compilation_options: Default::default(),
+                targets: &[Some(surface_texture.format().into())],
+            }),
+            primitive: wgpu::PrimitiveState {
+                topology: wgpu::PrimitiveTopology::TriangleList,
+                ..Default::default()
+            },
+            depth_stencil: None,
+            multisample: MultisampleState::default(),
+            multiview: None,
+            cache: None,
+        });
+
+        let bind_group_layout = pipeline.get_bind_group_layout(0);
+
+        let (texture, padding, bind_group) =
+            Self::create_texture(device, surface_texture, &bind_group_layout);
+
+        Self {
+            padding,
+            texture,
+            pipeline,
+            bind_group,
+        }
+    }
+
+    fn create_texture(
+        device: &wgpu::Device,
+        surface_texture: &wgpu::Texture,
+        layout: &BindGroupLayout,
+    ) -> (wgpu::Texture, BufferPadding, wgpu::BindGroup) {
+        let texture = device.create_texture(&wgpu::TextureDescriptor {
+            label: Some("egui_screen_capture_texture"),
+            size: surface_texture.size(),
+            mip_level_count: surface_texture.mip_level_count(),
+            sample_count: surface_texture.sample_count(),
+            dimension: surface_texture.dimension(),
+            format: surface_texture.format(),
+            usage: wgpu::TextureUsages::RENDER_ATTACHMENT
+                | wgpu::TextureUsages::TEXTURE_BINDING
+                | wgpu::TextureUsages::COPY_SRC,
+            view_formats: &[],
+        });
+
+        let padding = BufferPadding::new(surface_texture.width());
+
+        let view = texture.create_view(&Default::default());
+
+        let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
+            layout,
+            entries: &[wgpu::BindGroupEntry {
+                binding: 0,
+                resource: wgpu::BindingResource::TextureView(&view),
+            }],
+            label: None,
+        });
+
+        (texture, padding, bind_group)
+    }
+
+    /// Updates the [`CaptureState`] if the size of the surface texture has changed
+    pub fn update(&mut self, device: &wgpu::Device, texture: &wgpu::Texture) {
+        if self.texture.size() != texture.size() {
+            let (new_texture, padding, bind_group) =
+                Self::create_texture(device, texture, &self.pipeline.get_bind_group_layout(0));
+            self.texture = new_texture;
+            self.padding = padding;
+            self.bind_group = bind_group;
+        }
+    }
+
+    /// Handles copying from the [`CaptureState`] texture to the surface texture and the buffer.
+    /// Pass the returned buffer to [`CaptureState::read_screen_rgba`] to read the data back to the cpu.
+    pub fn copy_textures(
+        &mut self,
+        device: &wgpu::Device,
+        output_frame: &wgpu::SurfaceTexture,
+        encoder: &mut wgpu::CommandEncoder,
+    ) -> wgpu::Buffer {
+        debug_assert_eq!(
+            self.texture.size(),
+            output_frame.texture.size(),
+            "Texture sizes must match, `CaptureState::update` was probably not called"
+        );
+
+        // It would be more efficient to reuse the Buffer, e.g. via some kind of ring buffer, but
+        // for most screenshot use cases this should be fine. When taking many screenshots (e.g. for a video)
+        // it might make sense to revisit this and implement a more efficient solution.
+        #[allow(clippy::arc_with_non_send_sync)]
+        let buffer = device.create_buffer(&wgpu::BufferDescriptor {
+            label: Some("egui_screen_capture_buffer"),
+            size: (self.padding.padded_bytes_per_row * self.texture.height()) as u64,
+            usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::MAP_READ,
+            mapped_at_creation: false,
+        });
+        let padding = self.padding;
+        let tex = &mut self.texture;
+
+        let tex_extent = tex.size();
+
+        encoder.copy_texture_to_buffer(
+            tex.as_image_copy(),
+            wgpu::ImageCopyBuffer {
+                buffer: &buffer,
+                layout: wgpu::ImageDataLayout {
+                    offset: 0,
+                    bytes_per_row: Some(padding.padded_bytes_per_row),
+                    rows_per_image: None,
+                },
+            },
+            tex_extent,
+        );
+
+        let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
+            label: Some("texture_copy"),
+            color_attachments: &[Some(wgpu::RenderPassColorAttachment {
+                view: &output_frame.texture.create_view(&Default::default()),
+                resolve_target: None,
+                ops: wgpu::Operations {
+                    load: wgpu::LoadOp::Clear(wgpu::Color::TRANSPARENT),
+                    store: StoreOp::Store,
+                },
+            })],
+            depth_stencil_attachment: None,
+            occlusion_query_set: None,
+            timestamp_writes: None,
+        });
+
+        pass.set_pipeline(&self.pipeline);
+        pass.set_bind_group(0, &self.bind_group, &[]);
+        pass.draw(0..3, 0..1);
+
+        buffer
+    }
+
+    /// Asynchronously reads the captured frame from the given buffer back to the CPU.
+    /// This function is non-blocking and will send the data to the given sender when it's ready.
+    /// Pass in the buffer returned from [`CaptureState::copy_textures`].
+    /// Make sure to call this after the encoder has been submitted.
+    pub fn read_screen_rgba(
+        &self,
+        ctx: egui::Context,
+        buffer: wgpu::Buffer,
+        data: Vec<UserData>,
+        tx: CaptureSender,
+        viewport_id: ViewportId,
+    ) {
+        #[allow(clippy::arc_with_non_send_sync)]
+        let buffer = Arc::new(buffer);
+        let buffer_clone = buffer.clone();
+        let buffer_slice = buffer_clone.slice(..);
+        let format = self.texture.format();
+        let tex_extent = self.texture.size();
+        let padding = self.padding;
+        let to_rgba = match format {
+            wgpu::TextureFormat::Rgba8Unorm => [0, 1, 2, 3],
+            wgpu::TextureFormat::Bgra8Unorm => [2, 1, 0, 3],
+            _ => {
+                log::error!("Screen can't be captured unless the surface format is Rgba8Unorm or Bgra8Unorm. Current surface format is {:?}", format);
+                return;
+            }
+        };
+        buffer_slice.map_async(wgpu::MapMode::Read, move |result| {
+            if let Err(err) = result {
+                log::error!("Failed to map buffer for reading: {:?}", err);
+                return;
+            }
+            let buffer_slice = buffer.slice(..);
+
+            let mut pixels = Vec::with_capacity((tex_extent.width * tex_extent.height) as usize);
+            for padded_row in buffer_slice
+                .get_mapped_range()
+                .chunks(padding.padded_bytes_per_row as usize)
+            {
+                let row = &padded_row[..padding.unpadded_bytes_per_row as usize];
+                for color in row.chunks(4) {
+                    pixels.push(epaint::Color32::from_rgba_premultiplied(
+                        color[to_rgba[0]],
+                        color[to_rgba[1]],
+                        color[to_rgba[2]],
+                        color[to_rgba[3]],
+                    ));
+                }
+            }
+            buffer.unmap();
+
+            tx.send((
+                viewport_id,
+                data,
+                ColorImage {
+                    size: [tex_extent.width as usize, tex_extent.height as usize],
+                    pixels,
+                },
+            ))
+            .ok();
+            ctx.request_repaint();
+        });
+    }
+}
+
+#[derive(Copy, Clone)]
+struct BufferPadding {
+    unpadded_bytes_per_row: u32,
+    padded_bytes_per_row: u32,
+}
+
+impl BufferPadding {
+    fn new(width: u32) -> Self {
+        let bytes_per_pixel = std::mem::size_of::<u32>() as u32;
+        let unpadded_bytes_per_row = width * bytes_per_pixel;
+        let padded_bytes_per_row =
+            wgpu::util::align_to(unpadded_bytes_per_row, wgpu::COPY_BYTES_PER_ROW_ALIGNMENT);
+        Self {
+            unpadded_bytes_per_row,
+            padded_bytes_per_row,
+        }
+    }
+}
diff --git a/crates/egui-wgpu/src/lib.rs b/crates/egui-wgpu/src/lib.rs
index 29dc8d8f5c2..54c76f05461 100644
--- a/crates/egui-wgpu/src/lib.rs
+++ b/crates/egui-wgpu/src/lib.rs
@@ -26,6 +26,9 @@ mod renderer;
 pub use renderer::*;
 use wgpu::{Adapter, Device, Instance, Queue};
 
+/// Helpers for capturing screenshots of the UI.
+pub mod capture;
+
 /// Module for painting [`egui`](https://github.com/emilk/egui) with [`wgpu`] on [`winit`].
 #[cfg(feature = "winit")]
 pub mod winit;
diff --git a/crates/egui-wgpu/src/texture_copy.wgsl b/crates/egui-wgpu/src/texture_copy.wgsl
new file mode 100644
index 00000000000..4096d164cc3
--- /dev/null
+++ b/crates/egui-wgpu/src/texture_copy.wgsl
@@ -0,0 +1,43 @@
+struct VertexOutput {
+    @builtin(position) position: vec4<f32>,
+};
+
+var<private> positions: array<vec2f, 3> = array<vec2f, 3>(
+    vec2f(-1.0, -3.0),
+    vec2f(-1.0, 1.0),
+    vec2f(3.0, 1.0)
+);
+
+// meant to be called with 3 vertex indices: 0, 1, 2
+// draws one large triangle over the clip space like this:
+// (the asterisks represent the clip space bounds)
+//-1,1           1,1
+// ---------------------------------
+// |              *              .
+// |              *           .
+// |              *        .
+// |              *      .
+// |              *    .
+// |              * .
+// |***************
+// |            . 1,-1
+// |          .
+// |       .
+// |     .
+// |   .
+// |.
+@vertex
+fn vs_main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
+    var result: VertexOutput;
+    result.position = vec4f(positions[vertex_index], 0.0, 1.0);
+    return result;
+}
+
+@group(0)
+@binding(0)
+var r_color: texture_2d<f32>;
+
+@fragment
+fn fs_main(vertex: VertexOutput) -> @location(0) vec4<f32> {
+    return textureLoad(r_color, vec2i(vertex.position.xy), 0);
+}
diff --git a/crates/egui-wgpu/src/winit.rs b/crates/egui-wgpu/src/winit.rs
index 161773117b3..cf7c041f002 100644
--- a/crates/egui-wgpu/src/winit.rs
+++ b/crates/egui-wgpu/src/winit.rs
@@ -1,77 +1,16 @@
 #![allow(clippy::missing_errors_doc)]
 #![allow(clippy::undocumented_unsafe_blocks)]
 
-use std::{num::NonZeroU32, sync::Arc};
-
-use egui::{ViewportId, ViewportIdMap, ViewportIdSet};
-
+use crate::capture::{capture_channel, CaptureReceiver, CaptureSender, CaptureState};
 use crate::{renderer, RenderState, SurfaceErrorAction, WgpuConfiguration};
+use egui::{Context, Event, UserData, ViewportId, ViewportIdMap, ViewportIdSet};
+use std::{num::NonZeroU32, sync::Arc};
 
 struct SurfaceState {
     surface: wgpu::Surface<'static>,
     alpha_mode: wgpu::CompositeAlphaMode,
     width: u32,
     height: u32,
-    supports_screenshot: bool,
-}
-
-/// A texture and a buffer for reading the rendered frame back to the cpu.
-/// The texture is required since [`wgpu::TextureUsages::COPY_DST`] is not an allowed
-/// flag for the surface texture on all platforms. This means that anytime we want to
-/// capture the frame, we first render it to this texture, and then we can copy it to
-/// both the surface texture and the buffer, from where we can pull it back to the cpu.
-struct CaptureState {
-    texture: wgpu::Texture,
-    buffer: wgpu::Buffer,
-    padding: BufferPadding,
-}
-
-impl CaptureState {
-    fn new(device: &Arc<wgpu::Device>, surface_texture: &wgpu::Texture) -> Self {
-        let texture = device.create_texture(&wgpu::TextureDescriptor {
-            label: Some("egui_screen_capture_texture"),
-            size: surface_texture.size(),
-            mip_level_count: surface_texture.mip_level_count(),
-            sample_count: surface_texture.sample_count(),
-            dimension: surface_texture.dimension(),
-            format: surface_texture.format(),
-            usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC,
-            view_formats: &[],
-        });
-
-        let padding = BufferPadding::new(surface_texture.width());
-
-        let buffer = device.create_buffer(&wgpu::BufferDescriptor {
-            label: Some("egui_screen_capture_buffer"),
-            size: (padding.padded_bytes_per_row * texture.height()) as u64,
-            usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::MAP_READ,
-            mapped_at_creation: false,
-        });
-
-        Self {
-            texture,
-            buffer,
-            padding,
-        }
-    }
-}
-
-struct BufferPadding {
-    unpadded_bytes_per_row: u32,
-    padded_bytes_per_row: u32,
-}
-
-impl BufferPadding {
-    fn new(width: u32) -> Self {
-        let bytes_per_pixel = std::mem::size_of::<u32>() as u32;
-        let unpadded_bytes_per_row = width * bytes_per_pixel;
-        let padded_bytes_per_row =
-            wgpu::util::align_to(unpadded_bytes_per_row, wgpu::COPY_BYTES_PER_ROW_ALIGNMENT);
-        Self {
-            unpadded_bytes_per_row,
-            padded_bytes_per_row,
-        }
-    }
 }
 
 /// Everything you need to paint egui with [`wgpu`] on [`winit`].
@@ -80,6 +19,7 @@ impl BufferPadding {
 ///
 /// NOTE: all egui viewports share the same painter.
 pub struct Painter {
+    context: Context,
     configuration: WgpuConfiguration,
     msaa_samples: u32,
     support_transparent_backbuffer: bool,
@@ -94,6 +34,8 @@ pub struct Painter {
     depth_texture_view: ViewportIdMap<wgpu::TextureView>,
     msaa_texture_view: ViewportIdMap<wgpu::TextureView>,
     surfaces: ViewportIdMap<SurfaceState>,
+    capture_tx: CaptureSender,
+    capture_rx: CaptureReceiver,
 }
 
 impl Painter {
@@ -110,6 +52,7 @@ impl Painter {
     /// a [`winit::window::Window`] with a valid `.raw_window_handle()`
     /// associated.
     pub fn new(
+        context: Context,
         configuration: WgpuConfiguration,
         msaa_samples: u32,
         depth_format: Option<wgpu::TextureFormat>,
@@ -126,7 +69,10 @@ impl Painter {
             crate::WgpuSetup::Existing { instance, .. } => instance.clone(),
         };
 
+        let (capture_tx, capture_rx) = capture_channel();
+
         Self {
+            context,
             configuration,
             msaa_samples,
             support_transparent_backbuffer,
@@ -140,6 +86,9 @@ impl Painter {
             depth_texture_view: Default::default(),
             surfaces: Default::default(),
             msaa_texture_view: Default::default(),
+
+            capture_tx,
+            capture_rx,
         }
     }
 
@@ -157,17 +106,11 @@ impl Painter {
     ) {
         crate::profile_function!();
 
-        let usage = if surface_state.supports_screenshot {
-            wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_DST
-        } else {
-            wgpu::TextureUsages::RENDER_ATTACHMENT
-        };
-
         let width = surface_state.width;
         let height = surface_state.height;
 
         let mut surf_config = wgpu::SurfaceConfiguration {
-            usage,
+            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
             format: render_state.target_format,
             present_mode: config.present_mode,
             alpha_mode: surface_state.alpha_mode,
@@ -292,8 +235,6 @@ impl Painter {
         } else {
             wgpu::CompositeAlphaMode::Auto
         };
-        let supports_screenshot =
-            !matches!(render_state.adapter.get_info().backend, wgpu::Backend::Gl);
         self.surfaces.insert(
             viewport_id,
             SurfaceState {
@@ -301,7 +242,6 @@ impl Painter {
                 width: size.width,
                 height: size.height,
                 alpha_mode,
-                supports_screenshot,
             },
         );
         let Some(width) = NonZeroU32::new(size.width) else {
@@ -417,109 +357,12 @@ impl Painter {
         }
     }
 
-    // CaptureState only needs to be updated when the size of the two textures don't match and we want to
-    // capture a frame
-    fn update_capture_state(
-        screen_capture_state: &mut Option<CaptureState>,
-        surface_texture: &wgpu::SurfaceTexture,
-        render_state: &RenderState,
-    ) {
-        let surface_texture = &surface_texture.texture;
-        match screen_capture_state {
-            Some(capture_state) => {
-                if capture_state.texture.size() != surface_texture.size() {
-                    *capture_state = CaptureState::new(&render_state.device, surface_texture);
-                }
-            }
-            None => {
-                *screen_capture_state =
-                    Some(CaptureState::new(&render_state.device, surface_texture));
-            }
-        }
-    }
-
-    // Handles copying from the CaptureState texture to the surface texture and the cpu
-    fn read_screen_rgba(
-        screen_capture_state: &CaptureState,
-        render_state: &RenderState,
-        output_frame: &wgpu::SurfaceTexture,
-    ) -> Option<epaint::ColorImage> {
-        let CaptureState {
-            texture: tex,
-            buffer,
-            padding,
-        } = screen_capture_state;
-
-        let device = &render_state.device;
-        let queue = &render_state.queue;
-
-        let tex_extent = tex.size();
-
-        let mut encoder = device.create_command_encoder(&Default::default());
-        encoder.copy_texture_to_buffer(
-            tex.as_image_copy(),
-            wgpu::ImageCopyBuffer {
-                buffer,
-                layout: wgpu::ImageDataLayout {
-                    offset: 0,
-                    bytes_per_row: Some(padding.padded_bytes_per_row),
-                    rows_per_image: None,
-                },
-            },
-            tex_extent,
-        );
-
-        encoder.copy_texture_to_texture(
-            tex.as_image_copy(),
-            output_frame.texture.as_image_copy(),
-            tex.size(),
-        );
-
-        let id = queue.submit(Some(encoder.finish()));
-        let buffer_slice = buffer.slice(..);
-        let (sender, receiver) = std::sync::mpsc::channel();
-        buffer_slice.map_async(wgpu::MapMode::Read, move |v| {
-            drop(sender.send(v));
-        });
-        device.poll(wgpu::Maintain::WaitForSubmissionIndex(id));
-        receiver.recv().ok()?.ok()?;
-
-        let to_rgba = match tex.format() {
-            wgpu::TextureFormat::Rgba8Unorm => [0, 1, 2, 3],
-            wgpu::TextureFormat::Bgra8Unorm => [2, 1, 0, 3],
-            _ => {
-                log::error!("Screen can't be captured unless the surface format is Rgba8Unorm or Bgra8Unorm. Current surface format is {:?}", tex.format());
-                return None;
-            }
-        };
-
-        let mut pixels = Vec::with_capacity((tex.width() * tex.height()) as usize);
-        for padded_row in buffer_slice
-            .get_mapped_range()
-            .chunks(padding.padded_bytes_per_row as usize)
-        {
-            let row = &padded_row[..padding.unpadded_bytes_per_row as usize];
-            for color in row.chunks(4) {
-                pixels.push(epaint::Color32::from_rgba_premultiplied(
-                    color[to_rgba[0]],
-                    color[to_rgba[1]],
-                    color[to_rgba[2]],
-                    color[to_rgba[3]],
-                ));
-            }
-        }
-        buffer.unmap();
-
-        Some(epaint::ColorImage {
-            size: [tex.width() as usize, tex.height() as usize],
-            pixels,
-        })
-    }
-
-    /// Returns two things:
-    ///
-    /// The approximate number of seconds spent on vsync-waiting (if any),
-    /// and the captures captured screenshot if it was requested.
+    /// Returns the approximate number of seconds spent on
+    /// vsync-waiting (if any).
+    ///
+    /// Captured screenshots are delivered asynchronously via [`Self::handle_screenshots`].
+    ///
+    /// If `capture_data` isn't empty, a screenshot will be captured.
     pub fn paint_and_update_textures(
         &mut self,
         viewport_id: ViewportId,
@@ -527,17 +370,18 @@ impl Painter {
         clear_color: [f32; 4],
         clipped_primitives: &[epaint::ClippedPrimitive],
         textures_delta: &epaint::textures::TexturesDelta,
-        capture: bool,
-    ) -> (f32, Option<epaint::ColorImage>) {
+        capture_data: Vec<UserData>,
+    ) -> f32 {
         crate::profile_function!();
 
+        let capture = !capture_data.is_empty();
         let mut vsync_sec = 0.0;
 
         let Some(render_state) = self.render_state.as_mut() else {
-            return (vsync_sec, None);
+            return vsync_sec;
         };
         let Some(surface_state) = self.surfaces.get(&viewport_id) else {
-            return (vsync_sec, None);
+            return vsync_sec;
         };
 
         let mut encoder =
@@ -573,15 +417,6 @@ impl Painter {
             )
         };
 
-        let capture = match (capture, surface_state.supports_screenshot) {
-            (false, _) => false,
-            (true, true) => true,
-            (true, false) => {
-                log::error!("The active render surface doesn't support taking screenshots.");
-                false
-            }
-        };
-
         let output_frame = {
             crate::profile_scope!("get_current_texture");
             // This is what vsync-waiting happens on my Mac.
@@ -596,40 +431,35 @@ impl Painter {
             Err(err) => match (*self.configuration.on_surface_error)(err) {
                 SurfaceErrorAction::RecreateSurface => {
                     Self::configure_surface(surface_state, render_state, &self.configuration);
-                    return (vsync_sec, None);
+                    return vsync_sec;
                 }
                 SurfaceErrorAction::SkipFrame => {
-                    return (vsync_sec, None);
+                    return vsync_sec;
                 }
             },
         };
 
+        let mut capture_buffer = None;
         {
             let renderer = render_state.renderer.read();
-            let frame_view = if capture {
-                Self::update_capture_state(
-                    &mut self.screen_capture_state,
-                    &output_frame,
-                    render_state,
-                );
-                self.screen_capture_state
-                    .as_ref()
-                    .map_or_else(
-                        || &output_frame.texture,
-                        |capture_state| &capture_state.texture,
-                    )
-                    .create_view(&wgpu::TextureViewDescriptor::default())
+
+            let target_texture = if capture {
+                let capture_state = self.screen_capture_state.get_or_insert_with(|| {
+                    CaptureState::new(&render_state.device, &output_frame.texture)
+                });
+                capture_state.update(&render_state.device, &output_frame.texture);
+
+                &capture_state.texture
             } else {
-                output_frame
-                    .texture
-                    .create_view(&wgpu::TextureViewDescriptor::default())
+                &output_frame.texture
             };
+            let target_view = target_texture.create_view(&wgpu::TextureViewDescriptor::default());
 
             let (view, resolve_target) = (self.msaa_samples > 1)
                 .then_some(self.msaa_texture_view.get(&viewport_id))
                 .flatten()
-                .map_or((&frame_view, None), |texture_view| {
-                    (texture_view, Some(&frame_view))
+                .map_or((&target_view, None), |texture_view| {
+                    (texture_view, Some(&target_view))
                 });
 
             let render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
@@ -671,6 +501,16 @@ impl Painter {
                 clipped_primitives,
                 &screen_descriptor,
             );
+
+            if capture {
+                if let Some(capture_state) = &mut self.screen_capture_state {
+                    capture_buffer = Some(capture_state.copy_textures(
+                        &render_state.device,
+                        &output_frame,
+                        &mut encoder,
+                    ));
+                }
+            }
         }
 
         let encoded = {
@@ -699,15 +539,17 @@ impl Painter {
             }
         }
 
-        let screenshot = if capture {
-            self.screen_capture_state
-                .as_ref()
-                .and_then(|screen_capture_state| {
-                    Self::read_screen_rgba(screen_capture_state, render_state, &output_frame)
-                })
-        } else {
-            None
-        };
+        if let Some(capture_buffer) = capture_buffer {
+            if let Some(screen_capture_state) = &mut self.screen_capture_state {
+                screen_capture_state.read_screen_rgba(
+                    self.context.clone(),
+                    capture_buffer,
+                    capture_data,
+                    self.capture_tx.clone(),
+                    viewport_id,
+                );
+            }
+        }
 
         {
             crate::profile_scope!("present");
@@ -717,7 +559,21 @@ impl Painter {
             vsync_sec += start.elapsed().as_secs_f32();
         }
 
-        (vsync_sec, screenshot)
+        vsync_sec
+    }
+
+    /// Call this at the beginning of each frame to receive the requested screenshots.
+    pub fn handle_screenshots(&self, events: &mut Vec<Event>) {
+        for (viewport_id, user_data, screenshot) in self.capture_rx.try_iter() {
+            let screenshot = Arc::new(screenshot);
+            for data in user_data {
+                events.push(Event::Screenshot {
+                    viewport_id,
+                    user_data: data,
+                    image: screenshot.clone(),
+                });
+            }
+        }
     }
 
     pub fn gc_viewports(&mut self, active_viewports: &ViewportIdSet) {
diff --git a/crates/egui_demo_lib/src/demo/demo_app_windows.rs b/crates/egui_demo_lib/src/demo/demo_app_windows.rs
index 5b57c675b5a..2cfcdfaeeba 100644
--- a/crates/egui_demo_lib/src/demo/demo_app_windows.rs
+++ b/crates/egui_demo_lib/src/demo/demo_app_windows.rs
@@ -38,6 +38,7 @@ impl Default for Demos {
             Box::<super::painting::Painting>::default(),
             Box::<super::pan_zoom::PanZoom>::default(),
             Box::<super::panels::Panels>::default(),
+            Box::<super::screenshot::Screenshot>::default(),
             Box::<super::scrolling::Scrolling>::default(),
             Box::<super::sliders::Sliders>::default(),
             Box::<super::strip_demo::StripDemo>::default(),
diff --git a/crates/egui_demo_lib/src/demo/mod.rs b/crates/egui_demo_lib/src/demo/mod.rs
index 8c9034868e4..c00725fbd59 100644
--- a/crates/egui_demo_lib/src/demo/mod.rs
+++ b/crates/egui_demo_lib/src/demo/mod.rs
@@ -24,6 +24,7 @@ pub mod painting;
 pub mod pan_zoom;
 pub mod panels;
 pub mod password;
+pub mod screenshot;
 pub mod scrolling;
 pub mod sliders;
 pub mod strip_demo;
diff --git a/crates/egui_demo_lib/src/demo/screenshot.rs b/crates/egui_demo_lib/src/demo/screenshot.rs
new file mode 100644
index 00000000000..eb62611c863
--- /dev/null
+++ b/crates/egui_demo_lib/src/demo/screenshot.rs
@@ -0,0 +1,84 @@
+use egui::{Image, UserData, ViewportCommand, Widget};
+use std::sync::Arc;
+
+/// Showcase [`ViewportCommand::Screenshot`].
+#[derive(PartialEq, Eq, Default)]
+pub struct Screenshot {
+    image: Option<(Arc<egui::ColorImage>, egui::TextureHandle)>,
+    continuous: bool,
+}
+
+impl crate::Demo for Screenshot {
+    fn name(&self) -> &'static str {
+        "📷 Screenshot"
+    }
+
+    fn show(&mut self, ctx: &egui::Context, open: &mut bool) {
+        egui::Window::new(self.name())
+            .open(open)
+            .resizable(false)
+            .default_width(250.0)
+            .show(ctx, |ui| {
+                use crate::View as _;
+                self.ui(ui);
+            });
+    }
+}
+
+impl crate::View for Screenshot {
+    fn ui(&mut self, ui: &mut egui::Ui) {
+        ui.set_width(300.0);
+        ui.vertical_centered(|ui| {
+            ui.add(crate::egui_github_link_file!());
+        });
+
+        ui.horizontal_wrapped(|ui| {
+            ui.spacing_mut().item_spacing.x = 0.0;
+            ui.label("This demo showcases how to take screenshots via ");
+            ui.code("ViewportCommand::Screenshot");
+            ui.label(".");
+        });
+
+        ui.horizontal_top(|ui| {
+            let capture = ui.button("📷 Take Screenshot").clicked();
+            ui.checkbox(&mut self.continuous, "Capture continuously");
+            if capture || self.continuous {
+                ui.ctx()
+                    .send_viewport_cmd(ViewportCommand::Screenshot(UserData::default()));
+            }
+        });
+
+        let image = ui.ctx().input(|i| {
+            i.events
+                .iter()
+                .filter_map(|e| {
+                    if let egui::Event::Screenshot { image, .. } = e {
+                        Some(image.clone())
+                    } else {
+                        None
+                    }
+                })
+                .last()
+        });
+
+        if let Some(image) = image {
+            self.image = Some((
+                image.clone(),
+                ui.ctx()
+                    .load_texture("screenshot_demo", image, Default::default()),
+            ));
+        }
+
+        if let Some((_, texture)) = &self.image {
+            Image::new(texture).shrink_to_fit().ui(ui);
+        } else {
+            ui.group(|ui| {
+                ui.set_width(ui.available_width());
+                ui.set_height(100.0);
+                ui.centered_and_justified(|ui| {
+                    ui.label("No screenshot taken yet.");
+                });
+            });
+        }
+    }
+}
diff --git a/crates/egui_demo_lib/tests/snapshots/demos/Screenshot.png b/crates/egui_demo_lib/tests/snapshots/demos/Screenshot.png
new file mode 100644
index 00000000000..56978de2001
--- /dev/null
+++ b/crates/egui_demo_lib/tests/snapshots/demos/Screenshot.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:579a7a66f86ade628e9f469b0014e9010aa56312ad5bd1e8de2faaae7e0d1af6
+size 23770