Commit e7b3573

wip

IceSentry committed Mar 15, 2023
1 parent 83cc5dd commit e7b3573
Showing 2 changed files with 135 additions and 99 deletions.
23 changes: 10 additions & 13 deletions assets/shaders/post_process_pass.wgsl
@@ -1,5 +1,7 @@
// This shader computes the chromatic aberration effect

#import bevy_pbr::utils

// Since post processing is a fullscreen effect, we use the fullscreen vertex shader provided by bevy
// This will render a single fullscreen triangle.
#import bevy_core_pipeline::fullscreen_vertex_shader
@@ -8,27 +10,22 @@
var screen_texture: texture_2d<f32>;
@group(0) @binding(1)
var texture_sampler: sampler;

// This function will give you the tex_coord of the screen texture for the current fragment position
fn get_screen_coord(in: FullscreenVertexOutput) -> vec2<f32> {
let resolution = vec2<f32>(textureDimensions(screen_texture));
let frag_coord = in.position.xy;
let inverse_screen_size = 1.0 / resolution.xy;
return in.position.xy * inverse_screen_size;
struct PostProcessSettings {
intensity: f32,
}
@group(0) @binding(2)
var<uniform> settings: PostProcessSettings;

@fragment
fn fragment(in: FullscreenVertexOutput) -> @location(0) vec4<f32> {
let uv = get_screen_coord(in);

// Chromatic aberration strength
let offset_strength = 0.005;
let offset_strength = settings.intensity;

// Sample each color channel with an arbitrary shift
return vec4<f32>(
textureSample(screen_texture, texture_sampler, uv + vec2<f32>(offset_strength, -offset_strength)).r,
textureSample(screen_texture, texture_sampler, uv + vec2<f32>(-offset_strength, 0.0)).g,
textureSample(screen_texture, texture_sampler, uv + vec2<f32>(0.0, offset_strength)).b,
textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(offset_strength, -offset_strength)).r,
textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(-offset_strength, 0.0)).g,
textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(0.0, offset_strength)).b,
1.0
);
}
211 changes: 125 additions & 86 deletions examples/shader/post_process_pass.rs
@@ -10,10 +10,11 @@ use bevy::{
clear_color::ClearColorConfig, core_3d,
fullscreen_vertex_shader::fullscreen_shader_vertex_state,
},
ecs::query::QueryItem,
prelude::*,
render::{
extract_component::{ExtractComponent, ExtractComponentPlugin},
extract_component::{
ComponentUniforms, ExtractComponent, ExtractComponentPlugin, UniformComponentPlugin,
},
render_graph::{Node, NodeRunError, RenderGraph, RenderGraphContext, SlotInfo, SlotType},
render_resource::{
BindGroupDescriptor, BindGroupEntry, BindGroupLayout, BindGroupLayoutDescriptor,
@@ -26,7 +27,7 @@ use bevy::{
renderer::{RenderContext, RenderDevice},
texture::BevyDefault,
view::{ExtractedView, ViewTarget},
Extract, RenderApp, RenderStage,
Extract, RenderApp,
},
};

@@ -40,24 +41,32 @@ fn main() {
.add_plugin(PostProcessPlugin)
.add_startup_system(setup)
.add_system(rotate)
.add_system(update_settings)
.run();
}

/// The plugin that sets up everything required to run a custom render pass
/// It's generally encouraged to set up post processing effects as a plugin
struct PostProcessPlugin;
impl Plugin for PostProcessPlugin {
fn build(&self, app: &mut App) {
app.add_plugin(ExtractComponentPlugin::<PostProcessSettings>::default());

app
// The settings will be a component that lives in the main world but will be extracted to the render world every frame.
// This makes it possible to control the effect from the main world.
// This plugin will take care of extracting it automatically.
.add_plugin(ExtractComponentPlugin::<PostProcessSettings>::default())
// The settings will use a uniform buffer, so we need this plugin to let bevy manage the buffer automatically.
.add_plugin(UniformComponentPlugin::<PostProcessSettings>::default());

// We need to get the render app from the main app
let Ok(render_app) = app.get_sub_app_mut(RenderApp) else {
return;
};

// The renderer has multiple stages. For more details of each stage see the docs for RenderStage
// Extract -> Prepare -> Queue -> PhaseSort -> Render -> CleanUp
// The renderer has multiple stages.
// For more details of each stage see the docs for `RenderSet`
render_app
.init_resource::<PostProcessPipeline>()
.add_system_to_stage(RenderStage::Extract, extract_post_process_settings);
.add_system(extract_post_process_settings.in_schedule(ExtractSchedule));

// Create our node with the render world
let node = PostProcessNode::new(&mut render_app.world);
@@ -71,7 +80,7 @@ impl Plugin for PostProcessPlugin {
// Register the post process node in the 3d render graph
core_3d_graph.add_node(PostProcessNode::NAME, node);

// TODO explain slot edges
// A slot edge tells the render graph which input/output value should be passed to the node.
core_3d_graph.add_slot_edge(
core_3d_graph.input_node().id,
core_3d::graph::input::VIEW_ENTITY,
@@ -114,16 +123,19 @@ impl Node for PostProcessNode {
// we will need when running the node.
fn input(&self) -> Vec<SlotInfo> {
// In this case we tell the graph that our node will use the view entity.
// Currently, every node in bevy uses this pattern, so it's safe to just copy it.
vec![SlotInfo::new(PostProcessNode::IN_VIEW, SlotType::Entity)]
}

// This will run every frame before the run() method
// The important difference is that `self` is `mut` here
fn update(&mut self, world: &mut World) {
// Since this is not a system we need to update the query manually.
self.query.update_archetypes(world);
}

// Runs the node logic, this is where you issue draw calls.
// Runs the node logic
// This is where you issue draw calls.
fn run(
&self,
graph_context: &mut RenderGraphContext,
@@ -149,51 +161,55 @@ impl Node for PostProcessNode {
return Ok(());
};

// Get the settings uniform binding
let settings_uniforms = world.resource::<ComponentUniforms<PostProcessSettings>>();
let Some(settings_binding) = settings_uniforms.uniforms().binding() else {
return Ok(());
};

// Get the TextureView used for post processing effects in bevy
let post_process = view_target.post_process_write();
let source = post_process.source;
let destination = post_process.destination;

let bind_group_descriptor = BindGroupDescriptor {
label: Some("post_process_bind_group"),
layout: &post_process_pipeline.layout,
entries: &[
BindGroupEntry {
binding: 0,
// Make sure to use the source view
resource: BindingResource::TextureView(source),
},
BindGroupEntry {
binding: 1,
// Use the sampler created for the pipeline
resource: BindingResource::Sampler(&post_process_pipeline.sampler),
},
],
};

// The bind_group gets created each frame. This isn't the most efficient, implementing a cache on top is recommended.
// The bind_group gets created each frame.
// It's important for this to match the BindGroupLayout defined in the PostProcessPipeline
let bind_group = render_context
.render_device
.create_bind_group(&bind_group_descriptor);
.render_device()
.create_bind_group(&BindGroupDescriptor {
label: Some("post_process_bind_group"),
layout: &post_process_pipeline.layout,
entries: &[
BindGroupEntry {
binding: 0,
// Make sure to use the source view
resource: BindingResource::TextureView(post_process.source),
},
BindGroupEntry {
binding: 1,
// Use the sampler created for the pipeline
resource: BindingResource::Sampler(&post_process_pipeline.sampler),
},
BindGroupEntry {
binding: 2,
// Set the settings binding
resource: settings_binding.clone(),
},
],
});

let descriptor = RenderPassDescriptor {
// Begin the render pass
let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor {
label: Some("post_process_pass"),
color_attachments: &[Some(RenderPassColorAttachment {
// We need to specify the post process destination view here to make sure we write to the appropriate texture.
view: destination,
view: post_process.destination,
resolve_target: None,
ops: Operations::default(),
})],
depth_stencil_attachment: None,
};

// Begin the render pass
let mut render_pass = render_context
.command_encoder
.begin_render_pass(&descriptor);
});

// Set the render pipeline for this render pass
render_pass.set_pipeline(pipeline);
render_pass.set_render_pipeline(pipeline);

// Set the bind group
render_pass.set_bind_group(0, &bind_group, &[]);
@@ -221,7 +237,7 @@ impl FromWorld for PostProcessPipeline {
let layout = render_device.create_bind_group_layout(&BindGroupLayoutDescriptor {
label: Some("post_process_bind_group_layout"),
entries: &[
// This will be the screen texture
// The screen texture
BindGroupLayoutEntry {
binding: 0,
visibility: ShaderStages::FRAGMENT,
@@ -239,6 +255,17 @@
ty: BindingType::Sampler(SamplerBindingType::Filtering),
count: None,
},
// The settings uniform that will control the effect
BindGroupLayoutEntry {
binding: 2,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Buffer {
ty: bevy::render::render_resource::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
},
],
});

@@ -250,28 +277,33 @@ impl FromWorld for PostProcessPipeline {
.resource::<AssetServer>()
.load("shaders/post_process_pass.wgsl");

let pipeline_id =
world
.resource_mut::<PipelineCache>()
.queue_render_pipeline(RenderPipelineDescriptor {
label: Some("post_process_pipeline".into()),
layout: Some(vec![layout.clone()]),
// This will setup a fullscreen triangle for the vertex state
vertex: fullscreen_shader_vertex_state(),
fragment: Some(FragmentState {
shader,
shader_defs: vec![],
entry_point: "fragment".into(),
targets: vec![Some(ColorTargetState {
format: TextureFormat::bevy_default(),
blend: None,
write_mask: ColorWrites::ALL,
})],
}),
primitive: PrimitiveState::default(),
depth_stencil: None,
multisample: MultisampleState::default(),
});
let pipeline_id = world
.resource_mut::<PipelineCache>()
// This will add the pipeline to the cache and queue its creation
.queue_render_pipeline(RenderPipelineDescriptor {
label: Some("post_process_pipeline".into()),
layout: vec![layout.clone()],
// This will setup a fullscreen triangle for the vertex state
vertex: fullscreen_shader_vertex_state(),
fragment: Some(FragmentState {
shader,
shader_defs: vec![],
// Make sure this matches the entry point of your shader.
// It can be anything as long as it matches here and in the shader.
entry_point: "fragment".into(),
targets: vec![Some(ColorTargetState {
format: TextureFormat::bevy_default(),
blend: None,
write_mask: ColorWrites::ALL,
})],
}),
// All of the following properties are not important for this effect, so just use the default values.
// This struct doesn't have the Default trait implemented because not all fields can have a default value.
primitive: PrimitiveState::default(),
depth_stencil: None,
multisample: MultisampleState::default(),
push_constant_ranges: vec![],
});

Self {
layout,
@@ -281,41 +313,30 @@
}
}

#[derive(Component, Default, Clone)]
// This is the component that will get passed to the shader.
// Since it's going to be a uniform, don't forget to use the UniformComponentPlugin.
#[derive(Component, Default, Clone, Copy, ExtractComponent, ShaderType)]
struct PostProcessSettings {
intensity: f32,
}

// TODO explain extract stage
// The extract stage is the only sync point between the main world and the render world.
// This is where you can get data from the main app to the render app. Like in this case for the settings of the effect.
//
// It's recommended to keep this stage as simple as possible because it blocks the render thread.
fn extract_post_process_settings(
mut commands: Commands,
cameras: Extract<Query<(Entity, &Camera, &PostProcessSettings), With<Camera3d>>>,
) {
for (entity, camera, settings) in &cameras {
if camera.is_active {
commands.get_or_spawn(entity).insert(PostProcessUniform {
intensity: settings.intensity,
});
// Add the PostProcessSettings component to the camera so it can be queried in the render node.
commands.get_or_spawn(entity).insert(*settings);
}
}
}

impl ExtractComponent for PostProcessSettings {
type Query = &'static Self;
type Filter = ();
type Out = Self;

fn extract_component(item: QueryItem<'_, Self::Query>) -> Option<Self> {
Some(item.clone())
}
}

#[derive(Component, ShaderType, Clone)]
struct PostProcessUniform {
intensity: f32,
}

/// set up a simple 3D scene
/// Set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
@@ -347,6 +368,8 @@ fn setup(
},
..default()
},
// Add the setting to the camera.
// This component is also used to determine on which camera to run the post processing effect.
PostProcessSettings { intensity: 0.02 },
));
}
@@ -361,3 +384,19 @@ fn rotate(time: Res<Time>, mut query: Query<&mut Transform, With<Rotates>>) {
transform.rotate_z(0.15 * time.delta_seconds());
}
}

// Change the intensity over time to show that the effect is controlled from the main world
fn update_settings(mut settings: Query<&mut PostProcessSettings>, time: Res<Time>) {
for mut setting in &mut settings {
let mut intensity = time.elapsed_seconds().sin();
// Make it loop periodically
intensity = intensity.sin();
// Remap it to 0..1 because the intensity can't be negative
intensity = intensity * 0.5 + 0.5;
// Scale it to a more reasonable level
intensity *= 0.015;

// Set the intensity. This will then be extracted to the render world and uploaded to the gpu automatically.
setting.intensity = intensity;
}
}
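
As a usage sketch (not part of this commit), any main-world system that mutates `PostProcessSettings` drives the effect the same way, since the component is re-extracted and re-uploaded every frame. The snippet below assumes the Bevy 0.10-era `Input<KeyCode>` resource and the `PostProcessSettings` component defined above, and would need to be registered with `.add_system(adjust_intensity)`:

// Hypothetical helper, not in this commit: adjust the intensity with the
// Up/Down arrow keys instead of animating it over time.
fn adjust_intensity(
    keys: Res<Input<KeyCode>>,
    time: Res<Time>,
    mut settings: Query<&mut PostProcessSettings>,
) {
    let step = 0.05 * time.delta_seconds();
    for mut setting in &mut settings {
        if keys.pressed(KeyCode::Up) {
            setting.intensity += step;
        }
        if keys.pressed(KeyCode::Down) {
            setting.intensity = (setting.intensity - step).max(0.0);
        }
    }
}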
