Remove unneeded commented-out gfx rendering code

This commit is contained in:
Imbris 2021-04-26 22:43:24 -04:00
parent 1647c9d607
commit 81939b4e4e
4 changed files with 6 additions and 685 deletions

View File

@ -9,8 +9,8 @@ pub struct Instances<T: Copy + Pod> {
impl<T: Copy + Pod> Instances<T> {
pub fn new(device: &wgpu::Device, len: usize) -> Self {
Self {
// TODO: examine if we have Instances that are not updated and if there would be any
// gains from separating those out
// TODO: examine if we have Instances that are not updated (e.g. sprites) and if there
// would be any gains from separating those out
buf: DynamicBuffer::new(device, len, wgpu::BufferUsage::VERTEX),
}
}
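The TODO above suggests splitting out instances that are written once (e.g. sprites) from ones rewritten every frame. As a rough sketch of the write-once half, a hypothetical helper (not part of this commit) could upload the data at creation time with wgpu's `create_buffer_init` and plain VERTEX usage, skipping COPY_DST entirely:

use wgpu::util::DeviceExt;

// Hypothetical helper, not part of this commit: instances that never change
// after creation are uploaded once and need no COPY_DST usage.
fn create_static_instance_buffer<T: Copy + bytemuck::Pod>(
    device: &wgpu::Device,
    instances: &[T],
) -> wgpu::Buffer {
    device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
        label: Some("static instance buffer"),
        contents: bytemuck::cast_slice(instances),
        usage: wgpu::BufferUsage::VERTEX,
    })
}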

View File

@ -197,8 +197,8 @@ impl Renderer {
features: wgpu::Features::DEPTH_CLAMPING
| wgpu::Features::ADDRESS_MODE_CLAMP_TO_BORDER
| wgpu::Features::PUSH_CONSTANTS
// TODO: make optional based on enabling profiling
// NOTE: requires recreating the device/queue if this setting changes
// TODO: make optional based on enabling profiling setting?
// would require recreating the device/queue if this setting changes
// alternatively it could be a compile time feature toggle
| (adapter.features() & wgpu_profiler::GpuProfiler::REQUIRED_WGPU_FEATURES),
limits,
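The new TODO above asks whether the profiler features should only be requested when profiling is enabled. A minimal sketch of such a toggle, assuming a hypothetical `profiling_enabled` flag (not part of this codebase):

fn requested_features(adapter: &wgpu::Adapter, profiling_enabled: bool) -> wgpu::Features {
    let mut features = wgpu::Features::DEPTH_CLAMPING
        | wgpu::Features::ADDRESS_MODE_CLAMP_TO_BORDER
        | wgpu::Features::PUSH_CONSTANTS;
    if profiling_enabled {
        // Only request the profiler features the adapter actually supports.
        features |= adapter.features() & wgpu_profiler::GpuProfiler::REQUIRED_WGPU_FEATURES;
    }
    features
}

As the comment notes, flipping the flag at runtime would still require recreating the device and queue (or using a compile-time feature gate instead).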
@ -681,23 +681,6 @@ impl Renderer {
.unwrap_or_else(|| (Vec2::new(1, 1), Vec2::new(1, 1)))
}
// /// Queue the clearing of the shadow targets ready for a new frame to be
// /// rendered.
// pub fn clear_shadows(&mut self) {
// span!(_guard, "clear_shadows", "Renderer::clear_shadows");
// if !self.mode.shadow.is_map() {
// return;
// }
// if let Some(shadow_map) = self.shadow_map.as_mut() {
// // let point_encoder = &mut shadow_map.point_encoder;
// let point_encoder = &mut self.encoder;
// point_encoder.clear_depth(&shadow_map.point_depth_view, 1.0);
// // let directed_encoder = &mut shadow_map.directed_encoder;
// let directed_encoder = &mut self.encoder;
// directed_encoder.clear_depth(&shadow_map.directed_depth_view,
// 1.0); }
// }
// TODO: @Sharp what should this look like with wgpu?
/// NOTE: Supported by Vulkan (by default), DirectX 10+ (it seems--it's hard
/// to find proof of this, but Direct3D 10 apparently does it by
@ -1104,7 +1087,7 @@ impl Renderer {
texture: &Texture, /* <T> */
offset: [u32; 2],
size: [u32; 2],
// TODO: generic over pixel type
// TODO: be generic over pixel type
data: &[[u8; 4]],
) {
texture.update(&self.queue, offset, size, bytemuck::cast_slice(data))
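The TODO above asks for this update path to be generic over the pixel type. A sketch of a pixel-generic variant (the name `update_texture_generic` and the `P: Pod` bound are hypothetical; only the inner `texture.update` call is taken from the code above):

// Hypothetical pixel-generic variant; any Pod pixel type can be cast to bytes.
pub fn update_texture_generic<P: bytemuck::Pod>(
    &mut self,
    texture: &Texture,
    offset: [u32; 2],
    size: [u32; 2],
    data: &[P],
) {
    texture.update(&self.queue, offset, size, bytemuck::cast_slice(data))
}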
@ -1138,95 +1121,6 @@ impl Renderer {
}
}
// /// Queue the rendering of the provided skybox model in the upcoming frame.
// pub fn render_skybox(
// &mut self,
// model: &Model<skybox::SkyboxPipeline>,
// global: &GlobalModel,
// locals: &Consts<skybox::Locals>,
// lod: &lod_terrain::LodData,
// ) {
// self.encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &self.skybox_pipeline.pso,
// &skybox::pipe::Data {
// vbuf: model.vbuf.clone(),
// locals: locals.buf.clone(),
// globals: global.globals.buf.clone(),
// noise: (self.noise_tex.srv.clone(),
// self.noise_tex.sampler.clone()), alt: (lod.alt.srv.clone(),
// lod.alt.sampler.clone()), horizon: (lod.horizon.srv.clone(),
// lod.horizon.sampler.clone()), tgt_color:
// self.tgt_color_view.clone(), tgt_depth:
// (self.tgt_depth_view.clone()/* , (1, 1) */), },
// );
// }
// /// Queue the rendering of the provided figure model in the upcoming frame.
// pub fn render_figure(
// &mut self,
// model: &figure::FigureModel,
// col_lights: &Texture<ColLightFmt>,
// global: &GlobalModel,
// locals: &Consts<figure::Locals>,
// bones: &Consts<figure::BoneData>,
// lod: &lod_terrain::LodData,
// ) {
// let (point_shadow_maps, directed_shadow_maps) =
// if let Some(shadow_map) = &mut self.shadow_map {
// (
// (
// shadow_map.point_res.clone(),
// shadow_map.point_sampler.clone(),
// ),
// (
// shadow_map.directed_res.clone(),
// shadow_map.directed_sampler.clone(),
// ),
// )
// } else {
// (
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// )
// };
// let model = &model.opaque;
// self.encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &self.figure_pipeline.pso,
// &figure::pipe::Data {
// vbuf: model.vbuf.clone(),
// col_lights: (col_lights.srv.clone(), col_lights.sampler.clone()),
// locals: locals.buf.clone(),
// globals: global.globals.buf.clone(),
// bones: bones.buf.clone(),
// lights: global.lights.buf.clone(),
// shadows: global.shadows.buf.clone(),
// light_shadows: global.shadow_mats.buf.clone(),
// point_shadow_maps,
// directed_shadow_maps,
// noise: (self.noise_tex.srv.clone(),
// self.noise_tex.sampler.clone()), alt: (lod.alt.srv.clone(),
// lod.alt.sampler.clone()), horizon: (lod.horizon.srv.clone(),
// lod.horizon.sampler.clone()), tgt_color:
// self.tgt_color_view.clone(), tgt_depth:
// (self.tgt_depth_view.clone()/* , (1, 1) */), },
// );
// }
// /// Queue the rendering of the player silhouette in the upcoming frame.
// pub fn render_player_shadow(
// &mut self,
@ -1286,566 +1180,6 @@ impl Renderer {
// (self.tgt_depth_view.clone()/* , (0, 0) */), },
// ); */
// }
// /// Queue the rendering of the player model in the upcoming frame.
// pub fn render_player(
// &mut self,
// model: &figure::FigureModel,
// col_lights: &Texture<ColLightFmt>,
// global: &GlobalModel,
// locals: &Consts<figure::Locals>,
// bones: &Consts<figure::BoneData>,
// lod: &lod_terrain::LodData,
// ) {
// let (point_shadow_maps, directed_shadow_maps) =
// if let Some(shadow_map) = &mut self.shadow_map {
// (
// (
// shadow_map.point_res.clone(),
// shadow_map.point_sampler.clone(),
// ),
// (
// shadow_map.directed_res.clone(),
// shadow_map.directed_sampler.clone(),
// ),
// )
// } else {
// (
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// )
// };
// let model = &model.opaque;
// self.encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &self.figure_pipeline.pso,
// &figure::pipe::Data {
// vbuf: model.vbuf.clone(),
// col_lights: (col_lights.srv.clone(), col_lights.sampler.clone()),
// locals: locals.buf.clone(),
// globals: global.globals.buf.clone(),
// bones: bones.buf.clone(),
// lights: global.lights.buf.clone(),
// shadows: global.shadows.buf.clone(),
// light_shadows: global.shadow_mats.buf.clone(),
// point_shadow_maps,
// directed_shadow_maps,
// noise: (self.noise_tex.srv.clone(),
// self.noise_tex.sampler.clone()), alt: (lod.alt.srv.clone(),
// lod.alt.sampler.clone()), horizon: (lod.horizon.srv.clone(),
// lod.horizon.sampler.clone()), tgt_color:
// self.tgt_color_view.clone(), tgt_depth:
// (self.tgt_depth_view.clone()/* , (1, 1) */), },
// );
// }
// /// Queue the rendering of the provided terrain chunk model in the upcoming
// /// frame.
// pub fn render_terrain_chunk(
// &mut self,
// model: &Model<terrain::TerrainPipeline>,
// col_lights: &Texture<ColLightFmt>,
// global: &GlobalModel,
// locals: &Consts<terrain::Locals>,
// lod: &lod_terrain::LodData,
// ) {
// let (point_shadow_maps, directed_shadow_maps) =
// if let Some(shadow_map) = &mut self.shadow_map {
// (
// (
// shadow_map.point_res.clone(),
// shadow_map.point_sampler.clone(),
// ),
// (
// shadow_map.directed_res.clone(),
// shadow_map.directed_sampler.clone(),
// ),
// )
// } else {
// (
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// )
// };
// self.encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &self.terrain_pipeline.pso,
// &terrain::pipe::Data {
// vbuf: model.vbuf.clone(),
// // TODO: Consider splitting out texture atlas data into a
// separate vertex buffer, // since we don't need it for things
// like global.shadows. col_lights: (col_lights.srv.clone(),
// col_lights.sampler.clone()), locals: locals.buf.clone(),
// globals: global.globals.buf.clone(),
// lights: global.lights.buf.clone(),
// shadows: global.shadows.buf.clone(),
// light_shadows: global.shadow_mats.buf.clone(),
// point_shadow_maps,
// directed_shadow_maps,
// noise: (self.noise_tex.srv.clone(),
// self.noise_tex.sampler.clone()), alt: (lod.alt.srv.clone(),
// lod.alt.sampler.clone()), horizon: (lod.horizon.srv.clone(),
// lod.horizon.sampler.clone()), tgt_color:
// self.tgt_color_view.clone(), tgt_depth:
// (self.tgt_depth_view.clone()/* , (1, 1) */), },
// );
// }
// /// Queue the rendering of a shadow map from a point light in the upcoming
// /// frame.
// pub fn render_shadow_point(
// &mut self,
// model: &Model<terrain::TerrainPipeline>,
// global: &GlobalModel,
// terrain_locals: &Consts<terrain::Locals>,
// locals: &Consts<shadow::Locals>,
// ) {
// if !self.mode.shadow.is_map() {
// return;
// }
// // NOTE: Don't render shadows if the shader is not supported.
// let shadow_map = if let Some(shadow_map) = &mut self.shadow_map {
// shadow_map
// } else {
// return;
// };
// // let point_encoder = &mut shadow_map.point_encoder;
// let point_encoder = &mut self.encoder;
// point_encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &shadow_map.point_pipeline.pso,
// &shadow::pipe::Data {
// // Terrain vertex stuff
// vbuf: model.vbuf.clone(),
// locals: terrain_locals.buf.clone(),
// globals: global.globals.buf.clone(),
// // Shadow stuff
// light_shadows: locals.buf.clone(),
// tgt_depth: shadow_map.point_depth_view.clone(),
// },
// );
// }
// /// Queue the rendering of terrain shadow map from all directional lights in
// /// the upcoming frame.
// pub fn render_terrain_shadow_directed(
// &mut self,
// model: &Model<terrain::TerrainPipeline>,
// global: &GlobalModel,
// terrain_locals: &Consts<terrain::Locals>,
// locals: &Consts<shadow::Locals>,
// ) {
// if !self.mode.shadow.is_map() {
// return;
// }
// // NOTE: Don't render shadows if the shader is not supported.
// let shadow_map = if let Some(shadow_map) = &mut self.shadow_map {
// shadow_map
// } else {
// return;
// };
// // let directed_encoder = &mut shadow_map.directed_encoder;
// let directed_encoder = &mut self.encoder;
// directed_encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &shadow_map.terrain_directed_pipeline.pso,
// &shadow::pipe::Data {
// // Terrain vertex stuff
// vbuf: model.vbuf.clone(),
// locals: terrain_locals.buf.clone(),
// globals: global.globals.buf.clone(),
// // Shadow stuff
// light_shadows: locals.buf.clone(),
// tgt_depth:
// shadow_map.directed_depth_view.clone(), },
// );
// }
// /// Queue the rendering of figure shadow map from all directional lights in
// /// the upcoming frame.
// pub fn render_figure_shadow_directed(
// &mut self,
// model: &figure::FigureModel,
// global: &GlobalModel,
// figure_locals: &Consts<figure::Locals>,
// bones: &Consts<figure::BoneData>,
// locals: &Consts<shadow::Locals>,
// ) {
// if !self.mode.shadow.is_map() {
// return;
// }
// // NOTE: Don't render shadows if the shader is not supported.
// let shadow_map = if let Some(shadow_map) = &mut self.shadow_map {
// shadow_map
// } else {
// return;
// };
// let model = &model.opaque;
// // let directed_encoder = &mut shadow_map.directed_encoder;
// let directed_encoder = &mut self.encoder;
// directed_encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &shadow_map.figure_directed_pipeline.pso,
// &shadow::figure_pipe::Data {
// // Terrain vertex stuff
// vbuf: model.vbuf.clone(),
// locals: figure_locals.buf.clone(),
// bones: bones.buf.clone(),
// globals: global.globals.buf.clone(),
// // Shadow stuff
// light_shadows: locals.buf.clone(),
// tgt_depth:
// shadow_map.directed_depth_view.clone(), },
// );
// }
// /// Queue the rendering of the provided terrain chunk model in the upcoming
// /// frame.
// pub fn render_fluid_chunk(
// &mut self,
// model: &Model<fluid::FluidPipeline>,
// global: &GlobalModel,
// locals: &Consts<terrain::Locals>,
// lod: &lod_terrain::LodData,
// waves: &Texture,
// ) {
// let (point_shadow_maps, directed_shadow_maps) =
// if let Some(shadow_map) = &mut self.shadow_map {
// (
// (
// shadow_map.point_res.clone(),
// shadow_map.point_sampler.clone(),
// ),
// (
// shadow_map.directed_res.clone(),
// shadow_map.directed_sampler.clone(),
// ),
// )
// } else {
// (
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// )
// };
// self.encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &self.fluid_pipeline.pso,
// &fluid::pipe::Data {
// vbuf: model.vbuf.clone(),
// locals: locals.buf.clone(),
// globals: global.globals.buf.clone(),
// lights: global.lights.buf.clone(),
// shadows: global.shadows.buf.clone(),
// light_shadows: global.shadow_mats.buf.clone(),
// point_shadow_maps,
// directed_shadow_maps,
// alt: (lod.alt.srv.clone(), lod.alt.sampler.clone()),
// horizon: (lod.horizon.srv.clone(), lod.horizon.sampler.clone()),
// noise: (self.noise_tex.srv.clone(),
// self.noise_tex.sampler.clone()), waves: (waves.srv.clone(),
// waves.sampler.clone()), tgt_color:
// self.tgt_color_view.clone(), tgt_depth:
// (self.tgt_depth_view.clone()/* , (1, 1) */), },
// );
// }
// /// Queue the rendering of the provided terrain chunk model in the upcoming
// /// frame.
// pub fn render_sprites(
// &mut self,
// model: &Model<sprite::SpritePipeline>,
// col_lights: &Texture<ColLightFmt>,
// global: &GlobalModel,
// terrain_locals: &Consts<terrain::Locals>,
// locals: &Consts<sprite::Locals>,
// instances: &Instances<sprite::Instance>,
// lod: &lod_terrain::LodData,
// ) {
// let (point_shadow_maps, directed_shadow_maps) =
// if let Some(shadow_map) = &mut self.shadow_map {
// (
// (
// shadow_map.point_res.clone(),
// shadow_map.point_sampler.clone(),
// ),
// (
// shadow_map.directed_res.clone(),
// shadow_map.directed_sampler.clone(),
// ),
// )
// } else {
// (
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// )
// };
// self.encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: Some((instances.count() as u32, 0)),
// buffer: gfx::IndexBuffer::Auto,
// },
// &self.sprite_pipeline.pso,
// &sprite::pipe::Data {
// vbuf: model.vbuf.clone(),
// ibuf: instances.ibuf.clone(),
// col_lights: (col_lights.srv.clone(), col_lights.sampler.clone()),
// terrain_locals: terrain_locals.buf.clone(),
// // NOTE: It would be nice if this wasn't needed and we could use
// a constant buffer // offset into the sprite data. Hopefully,
// when we switch to wgpu we can do this, // as it offers the
// exact API we want (the equivalent can be done in OpenGL using
// // glBindBufferOffset). locals: locals.buf.clone(),
// globals: global.globals.buf.clone(),
// lights: global.lights.buf.clone(),
// shadows: global.shadows.buf.clone(),
// light_shadows: global.shadow_mats.buf.clone(),
// point_shadow_maps,
// directed_shadow_maps,
// noise: (self.noise_tex.srv.clone(),
// self.noise_tex.sampler.clone()), alt: (lod.alt.srv.clone(),
// lod.alt.sampler.clone()), horizon: (lod.horizon.srv.clone(),
// lod.horizon.sampler.clone()), tgt_color:
// self.tgt_color_view.clone(), tgt_depth:
// (self.tgt_depth_view.clone()/* , (1, 1) */), },
// );
// }
// /// Queue the rendering of the provided LoD terrain model in the upcoming
// /// frame.
// pub fn render_lod_terrain(
// &mut self,
// model: &Model<lod_terrain::LodTerrainPipeline>,
// global: &GlobalModel,
// locals: &Consts<lod_terrain::Locals>,
// lod: &lod_terrain::LodData,
// ) {
// self.encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &self.lod_terrain_pipeline.pso,
// &lod_terrain::pipe::Data {
// vbuf: model.vbuf.clone(),
// locals: locals.buf.clone(),
// globals: global.globals.buf.clone(),
// noise: (self.noise_tex.srv.clone(),
// self.noise_tex.sampler.clone()), map: (lod.map.srv.clone(),
// lod.map.sampler.clone()), alt: (lod.alt.srv.clone(),
// lod.alt.sampler.clone()), horizon: (lod.horizon.srv.clone(),
// lod.horizon.sampler.clone()), tgt_color:
// self.tgt_color_view.clone(), tgt_depth:
// (self.tgt_depth_view.clone()/* , (1, 1) */), },
// );
// }
// /// Queue the rendering of the provided particle in the upcoming frame.
// pub fn render_particles(
// &mut self,
// model: &Model<particle::ParticlePipeline>,
// global: &GlobalModel,
// instances: &Instances<particle::Instance>,
// lod: &lod_terrain::LodData,
// ) {
// let (point_shadow_maps, directed_shadow_maps) =
// if let Some(shadow_map) = &mut self.shadow_map {
// (
// (
// shadow_map.point_res.clone(),
// shadow_map.point_sampler.clone(),
// ),
// (
// shadow_map.directed_res.clone(),
// shadow_map.directed_sampler.clone(),
// ),
// )
// } else {
// (
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// )
// };
// self.encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: Some((instances.count() as u32, 0)),
// buffer: gfx::IndexBuffer::Auto,
// },
// &self.particle_pipeline.pso,
// &particle::pipe::Data {
// vbuf: model.vbuf.clone(),
// ibuf: instances.ibuf.clone(),
// globals: global.globals.buf.clone(),
// lights: global.lights.buf.clone(),
// shadows: global.shadows.buf.clone(),
// light_shadows: global.shadow_mats.buf.clone(),
// point_shadow_maps,
// directed_shadow_maps,
// noise: (self.noise_tex.srv.clone(),
// self.noise_tex.sampler.clone()), alt: (lod.alt.srv.clone(),
// lod.alt.sampler.clone()), horizon: (lod.horizon.srv.clone(),
// lod.horizon.sampler.clone()), tgt_color:
// self.tgt_color_view.clone(), tgt_depth:
// (self.tgt_depth_view.clone()/* , (1, 1) */), },
// );
// }
// /// Queue the rendering of the provided UI element in the upcoming frame.
// pub fn render_ui_element<F: gfx::format::Formatted<View = [f32; 4]>>(
// &mut self,
// model: Model<ui::UiPipeline>,
// tex: &Texture<F>,
// scissor: Aabr<u16>,
// globals: &Consts<Globals>,
// locals: &Consts<ui::Locals>,
// ) where
// F::Surface: gfx::format::TextureSurface,
// F::Channel: gfx::format::TextureChannel,
// <F::Surface as gfx::format::SurfaceTyped>::DataType: Copy,
// {
// let Aabr { min, max } = scissor;
// self.encoder.draw(
// &gfx::Slice {
// start: model.vertex_range.start,
// end: model.vertex_range.end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &self.ui_pipeline.pso,
// &ui::pipe::Data {
// vbuf: model.vbuf,
// scissor: gfx::Rect {
// x: min.x,
// y: min.y,
// w: max.x - min.x,
// h: max.y - min.y,
// },
// tex: (tex.srv.clone(), tex.sampler.clone()),
// locals: locals.buf.clone(),
// globals: globals.buf.clone(),
// tgt_color: self.win_color_view.clone(),
// tgt_depth: self.win_depth_view.clone(),
// },
// );
// }
// pub fn render_clouds(
// &mut self,
// model: &Model<clouds::CloudsPipeline>,
// globals: &Consts<Globals>,
// locals: &Consts<clouds::Locals>,
// lod: &lod_terrain::LodData,
// ) {
// self.encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &self.clouds_pipeline.pso,
// &clouds::pipe::Data {
// vbuf: model.vbuf.clone(),
// locals: locals.buf.clone(),
// globals: globals.buf.clone(),
// map: (lod.map.srv.clone(), lod.map.sampler.clone()),
// alt: (lod.alt.srv.clone(), lod.alt.sampler.clone()),
// horizon: (lod.horizon.srv.clone(), lod.horizon.sampler.clone()),
// color_sampler: (self.tgt_color_res.clone(),
// self.sampler.clone()), depth_sampler:
// (self.tgt_depth_res.clone(), self.sampler.clone()), noise:
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// tgt_color: self.tgt_color_pp_view.clone(), },
// )
// }
// pub fn render_post_process(
// &mut self,
// model: &Model<postprocess::PostProcessPipeline>,
// globals: &Consts<Globals>,
// locals: &Consts<postprocess::Locals>,
// lod: &lod_terrain::LodData,
// ) {
// self.encoder.draw(
// &gfx::Slice {
// start: model.vertex_range().start,
// end: model.vertex_range().end,
// base_vertex: 0,
// instances: None,
// buffer: gfx::IndexBuffer::Auto,
// },
// &self.postprocess_pipeline.pso,
// &postprocess::pipe::Data {
// vbuf: model.vbuf.clone(),
// locals: locals.buf.clone(),
// globals: globals.buf.clone(),
// map: (lod.map.srv.clone(), lod.map.sampler.clone()),
// alt: (lod.alt.srv.clone(), lod.alt.sampler.clone()),
// horizon: (lod.horizon.srv.clone(), lod.horizon.sampler.clone()),
// color_sampler: (self.tgt_color_res_pp.clone(),
// self.sampler.clone()), depth_sampler:
// (self.tgt_depth_res.clone(), self.sampler.clone()), noise:
// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
// tgt_color: self.win_color_view.clone(), },
// )
// }
}
fn create_quad_index_buffer_u16(device: &wgpu::Device, vert_length: usize) -> Buffer<u16> {

View File

@ -16,7 +16,6 @@ use vek::Aabr;
use wgpu_profiler::scope::{ManualOwningScope, OwningScope, Scope};
// Currently available pipelines
// #[derive(Clone, Copy)]
enum Pipelines<'frame> {
Interface(&'frame super::InterfacePipelines),
All(&'frame super::Pipelines),
@ -395,8 +394,6 @@ impl<'frame> Drawer<'frame> {
impl<'frame> Drop for Drawer<'frame> {
fn drop(&mut self) {
// TODO: submitting things to the queue can let the gpu start on them sooner
// maybe we should submit each render pass to the queue as it is produced?
let mut encoder = self.encoder.take().unwrap();
// If taking a screenshot and the blit pipeline is available
@ -436,6 +433,7 @@ impl<'frame> Drop for Drawer<'frame> {
let (mut encoder, profiler) = encoder.end_scope();
profiler.resolve_queries(&mut encoder);
// It is recommended to only do one submit per frame
self.borrow.queue.submit(std::iter::once(encoder.finish()));
profiler
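The two comments above pull in opposite directions: submitting each pass as it is recorded lets the GPU start on it sooner, while a single submit per frame keeps submission overhead down. A rough sketch of the per-pass variant (hypothetical helper, not what the drawer currently does):

fn submit_pass(
    device: &wgpu::Device,
    queue: &wgpu::Queue,
    record: impl FnOnce(&mut wgpu::CommandEncoder),
) {
    let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
        label: Some("per-pass encoder"),
    });
    record(&mut encoder);
    // Submitting here lets the GPU begin this pass while the CPU records the
    // next one, at the cost of one submit (and its overhead) per pass.
    queue.submit(std::iter::once(encoder.finish()));
}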
@ -783,7 +781,6 @@ impl<'pass_ref, 'pass: 'pass_ref> UiDrawer<'pass_ref, 'pass> {
pub fn prepare<'data: 'pass>(
&mut self,
locals: &'data ui::BoundLocals,
//texture: &'data ui::TextureBindGroup,
buf: &'data DynamicModel<ui::Vertex>,
scissor: Aabr<u16>,
) -> PreparedUiDrawer<'_, 'pass> {
@ -794,7 +791,6 @@ impl<'pass_ref, 'pass: 'pass_ref> UiDrawer<'pass_ref, 'pass> {
};
// Prepare
prepared.set_locals(locals);
//prepared.set_texture(texture);
prepared.set_model(buf);
prepared.set_scissor(scissor);
@ -807,11 +803,6 @@ impl<'pass_ref, 'pass: 'pass_ref> PreparedUiDrawer<'pass_ref, 'pass> {
self.render_pass.set_bind_group(1, &locals.bind_group, &[]);
}
//pub fn set_texture<'data: 'pass>(&mut self, texture: &'data
// ui::TextureBindGroup) { self.render_pass.set_bind_group(1,
// &texture.bind_group, &[]);
//}
pub fn set_model<'data: 'pass>(&mut self, model: &'data DynamicModel<ui::Vertex>) {
self.render_pass.set_vertex_buffer(0, model.buf().slice(..))
}
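For reference, a hypothetical call site for the prepare path above; only `prepare` and the setters appear in this diff, so the `draw` call and its arguments are assumptions about how the per-draw texture binding (which replaced the removed `set_texture`) is used:

// Hypothetical usage sketch: bind locals, model, and scissor once, then draw.
let mut prepared = ui_drawer.prepare(&bound_locals, &dynamic_model, scissor);
// Assumed: the UI texture is bound per draw call rather than via set_texture.
prepared.draw(&texture_bind_group, vert_range);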

View File

@ -7,8 +7,6 @@ use vek::*;
/// A type that holds shadow map data. Since shadow mapping may not be
/// supported on all platforms, we try to keep it separate.
pub struct ShadowMapRenderer {
// directed_encoder: gfx::Encoder<gfx_backend::Resources, gfx_backend::CommandBuffer>,
// point_encoder: gfx::Encoder<gfx_backend::Resources, gfx_backend::CommandBuffer>,
pub directed_depth: Texture,
pub point_depth: Texture,
@ -48,8 +46,6 @@ impl ShadowMap {
let layout = shadow::ShadowLayout::new(&device);
Self::Enabled(ShadowMapRenderer {
// point_encoder: factory.create_command_buffer().into(),
// directed_encoder: factory.create_command_buffer().into(),
directed_depth,
point_depth,