// A demonstration of drawing to a very large texture, capturing the texture in its original size
// as a PNG and displaying a down-scaled version of the image within the window each frame.

use nannou::prelude::*;

fn main() {
    nannou::app(model).update(update).exit(exit).run();
}

struct Model {
    // The texture that we will draw to.
    texture: wgpu::Texture,
    // A `Draw` instance for drawing to our texture.
    draw: nannou::Draw,
    // The type used to render the `Draw` vertices to our texture.
    renderer: nannou::draw::Renderer,
    // The type used to capture the texture.
    texture_capturer: wgpu::TextureCapturer,
    // The type used to resize our texture to the window texture.
    texture_reshaper: wgpu::TextureReshaper,
}

fn model(app: &App) -> Model {
    // Let's write to a 4K UHD texture.
    let texture_size = [3_840, 2_160];

    // Create the window at a quarter of the texture size so it fits comfortably on screen.
    let [win_w, win_h] = [texture_size[0] / 4, texture_size[1] / 4];
    let w_id = app
        .new_window()
        .size(win_w, win_h)
        .title("nannou")
        .view(view)
        .build()
        .unwrap();
    let window = app.window(w_id).unwrap();

    // Retrieve the wgpu device.
    let device = window.swap_chain_device();

    // Create our custom texture.
    let sample_count = window.msaa_samples();
    let texture = wgpu::TextureBuilder::new()
        .size(texture_size)
        // Our texture will be used as the OUTPUT_ATTACHMENT for our `Draw` render pass.
        // It will also be SAMPLED by the `TextureCapturer` and `TextureReshaper`.
        .usage(wgpu::TextureUsage::OUTPUT_ATTACHMENT | wgpu::TextureUsage::SAMPLED)
        // Use nannou's default multisampling sample count.
        .sample_count(sample_count)
        // Use a spacious 16-bit linear sRGBA format suitable for high quality drawing.
        .format(wgpu::TextureFormat::Rgba16Unorm)
        // Build it!
        .build(device);
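    // NOTE: 16 bits per channel gives us extra precision while drawing; the capturer
    // converts down to non-linear 8-bit sRGBA when we take a snapshot (see `update`).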

    // Create our `Draw` instance and a renderer for it.
    let draw = nannou::Draw::new();
    let descriptor = texture.descriptor();
    let renderer = nannou::draw::Renderer::from_texture_descriptor(device, descriptor);
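    // Building the renderer from the texture's descriptor keeps its size, sample count and
    // format in sync with our custom texture.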

    // Create the texture capturer.
    let texture_capturer = wgpu::TextureCapturer::with_num_threads(4);
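    // The capturer keeps a small pool of worker threads (four here) so snapshots can be
    // mapped and written to disk without blocking the main thread.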

    // Create the texture reshaper.
    let texture_view = texture.create_default_view();
    let src_multisampled = texture.sample_count() > 1;
    let dst_format = Frame::TEXTURE_FORMAT;
    let texture_reshaper = wgpu::TextureReshaper::new(
        device,
        &texture_view,
        src_multisampled,
        sample_count,
        dst_format,
    );
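    // Each time `view` runs, the reshaper resolves multisampling where necessary and
    // renders our large texture down to the window-sized frame.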

    // Make sure the directory where we will save images exists.
    std::fs::create_dir_all(&capture_directory(app)).unwrap();

    Model {
        texture,
        draw,
        renderer,
        texture_capturer,
        texture_reshaper,
    }
}

fn update(app: &App, model: &mut Model, _update: Update) {
    // First, reset the `draw` state.
    let draw = &model.draw;
    draw.reset();

    // Create a `Rect` for our texture to help with drawing.
    let [w, h] = model.texture.size();
    let r = geom::Rect::from_w_h(w as f32, h as f32);

    // Use the frame number to animate, ensuring we get a constant update time.
    let elapsed_frames = app.main_window().elapsed_frames();
    let t = elapsed_frames as f32 / 60.0;
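    // Deriving `t` from the frame count rather than wall-clock time keeps the animation
    // deterministic, so the captured PNG sequence plays back smoothly at 60 fps.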

    // Draw like we normally would in the `view`.
    draw.background().color(BLACK);
    let n_points = 10;
    let weight = 8.0;
    let hz = 6.0;
    let vertices = (0..n_points)
        .map(|i| {
            // Spread the points across the texture's width along an animated sine wave.
            let x = map_range(i, 0, n_points - 1, r.left(), r.right());
            let fract = i as f32 / n_points as f32;
            let amp = (t + fract * hz * TAU).sin();
            let y = map_range(amp, -1.0, 1.0, r.bottom() * 0.75, r.top() * 0.75);
            pt2(x, y)
        })
        .enumerate()
        .map(|(i, p)| {
            // Cycle each color channel at a slightly different phase for a shifting rainbow.
            let fract = i as f32 / n_points as f32;
            let r = (t + fract) % 1.0;
            let g = (t + 1.0 - fract) % 1.0;
            let b = (t + 0.5 + fract) % 1.0;
            let rgba = srgba(r, g, b, 1.0);
            (p, rgba)
        });
    draw.polyline()
        .weight(weight)
        .join_round()
        .colored_points(vertices);

    // Draw the frame number and texture size in the bottom left.
    let string = format!("Frame {} - {:?}", elapsed_frames, [w, h]);
    let text = text(&string)
        .font_size(48)
        .left_justify()
        .align_bottom()
        .build(r.pad(r.h() * 0.05));
    draw.path().fill().color(WHITE).events(text.path_events());
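    // `text` lays out the glyphs; we then draw their outlines as a single filled path.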

    // Render our drawing to the texture.
    let window = app.main_window();
    let device = window.swap_chain_device();
    let ce_desc = wgpu::CommandEncoderDescriptor::default();
    let mut encoder = device.create_command_encoder(&ce_desc);
    model
        .renderer
        .render_to_texture(device, &mut encoder, draw, &model.texture);

    // Take a snapshot of the texture. The capturer will do the following:
    //
    // 1. Resolve the texture to a non-multisampled texture if necessary.
    // 2. Convert the format to non-linear 8-bit sRGBA ready for image storage.
    // 3. Copy the result to a buffer ready to be mapped for reading.
    let snapshot = model
        .texture_capturer
        .capture(device, &mut encoder, &model.texture);

    // Submit the commands for our drawing and texture capture to the GPU.
    window
        .swap_chain_queue()
        .lock()
        .unwrap()
        .submit(&[encoder.finish()]);
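    // Because the render pass and the capture copy are recorded into the same encoder, this
    // single submission guarantees the drawing completes before the copy reads the texture.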

    // Submit a function for writing our snapshot to a PNG.
    //
    // NOTE: It is essential that the commands for capturing the snapshot are `submit`ted
    // before we attempt to read the snapshot - otherwise we will read a blank texture!
    //
    // NOTE: You can also use `read` instead of `read_threaded` to read the texture on the
    // current thread. This slows down the main loop, but ensures that PNG writing never
    // falls behind the rate at which frames are produced.
    let path = capture_directory(app)
        .join(elapsed_frames.to_string())
        .with_extension("png");
    snapshot.read_threaded(move |result| {
        let image = result.expect("failed to map texture memory");
        image
            .save(&path)
            .expect("failed to save texture to png image");
    });
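
    // For reference, the synchronous variant (assuming `read` takes the same callback
    // signature as `read_threaded`) would look like this:
    //
    // snapshot.read(move |result| {
    //     let image = result.expect("failed to map texture memory");
    //     image.save(&path).expect("failed to save texture to png image");
    // });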
}

// Draw the state of your `Model` into the given `Frame` here.
fn view(_app: &App, model: &Model, frame: Frame) {
    // Sample the texture and write it to the frame.
    let mut encoder = frame.command_encoder();
    model
        .texture_reshaper
        .encode_render_pass(frame.texture_view(), &mut *encoder);
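    // The render pass above samples the full 4K texture and scales it down to fill the
    // window's frame.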
}

// Wait for capture to finish.
fn exit(_app: &App, model: Model) {
    println!("Waiting for PNG writing to complete...");
    // `finish` blocks until the capturer's worker threads have written all pending snapshots.
    model.texture_capturer.finish();
    println!("Done!");
}

// The directory where we'll save the frames, e.g. `<project>/capture_hi_res/`.
fn capture_directory(app: &App) -> std::path::PathBuf {
    app.project_path()
        .expect("could not locate project directory")
        .join(app.exe_name().unwrap())
}