diff --git a/wgpu-core/src/command/bundle.rs b/wgpu-core/src/command/bundle.rs index 4de6ecf16c..1476f3e713 100644 --- a/wgpu-core/src/command/bundle.rs +++ b/wgpu-core/src/command/bundle.rs @@ -118,6 +118,7 @@ use crate::{ }; use super::{ + pass, render_command::{ArcRenderCommand, RenderCommand}, DrawKind, }; @@ -527,11 +528,13 @@ fn set_bind_group( let max_bind_groups = state.device.limits.max_bind_groups; if index >= max_bind_groups { - return Err(RenderCommandError::BindGroupIndexOutOfRange { - index, - max: max_bind_groups, - } - .into()); + return Err( + RenderCommandError::BindGroupIndexOutOfRange(pass::BindGroupIndexOutOfRange { + index, + max: max_bind_groups, + }) + .into(), + ); } // Identify the next `num_dynamic_offsets` entries from `dynamic_offsets`. @@ -1312,7 +1315,7 @@ impl State { fn pipeline(&self) -> Result<&PipelineState, RenderBundleErrorInner> { self.pipeline .as_ref() - .ok_or(DrawError::MissingPipeline.into()) + .ok_or(DrawError::MissingPipeline(pass::MissingPipeline).into()) } /// Mark all non-empty bind group table entries from `index` onwards as dirty. diff --git a/wgpu-core/src/command/compute.rs b/wgpu-core/src/command/compute.rs index 678b9f6c6a..095793920c 100644 --- a/wgpu-core/src/command/compute.rs +++ b/wgpu-core/src/command/compute.rs @@ -4,34 +4,29 @@ use wgt::{BufferAddress, DynamicOffset}; use alloc::{borrow::Cow, boxed::Box, sync::Arc, vec::Vec}; use core::{fmt, str}; -use crate::command::{EncoderStateError, PassStateError, TimestampWritesError}; -use crate::ray_tracing::AsAction; +use crate::binding_model::BindError; +use crate::command::{pass, EncoderStateError, PassStateError, TimestampWritesError}; +use crate::resource::DestroyedResourceError; use crate::{ - binding_model::{ - BindError, BindGroup, LateMinBufferBindingSizeMismatch, PushConstantUploadError, - }, + binding_model::{LateMinBufferBindingSizeMismatch, PushConstantUploadError}, command::{ bind::{Binder, BinderError}, compute_command::ArcComputeCommand, end_pipeline_statistics_query, - memory_init::{ - fixup_discarded_surfaces, CommandBufferTextureMemoryActions, SurfacesInDiscardState, - }, + memory_init::{fixup_discarded_surfaces, SurfacesInDiscardState}, pass_base, pass_try, validate_and_begin_pipeline_statistics_query, ArcPassTimestampWrites, BasePass, BindGroupStateChange, CommandBuffer, CommandEncoderError, MapPassErr, PassErrorScope, PassTimestampWrites, QueryUseError, StateChange, }, - device::{Device, DeviceError, MissingDownlevelFlags, MissingFeatures}, + device::{DeviceError, MissingDownlevelFlags, MissingFeatures}, global::Global, hal_label, id, - init_tracker::{BufferInitTrackerAction, MemoryInitKind}, + init_tracker::MemoryInitKind, pipeline::ComputePipeline, resource::{ - self, Buffer, DestroyedResourceError, InvalidResourceError, Labeled, - MissingBufferUsageError, ParentDevice, + self, Buffer, InvalidResourceError, Labeled, MissingBufferUsageError, ParentDevice, }, - snatch::SnatchGuard, - track::{ResourceUsageCompatibilityError, Tracker, TrackerIndex, UsageScope}, + track::{ResourceUsageCompatibilityError, Tracker, TrackerIndex}, Label, }; @@ -119,7 +114,7 @@ type ArcComputePassDescriptor<'a> = ComputePassDescriptor<'a, ArcPassTimestampWr #[non_exhaustive] pub enum DispatchError { #[error("Compute pipeline must be set")] - MissingPipeline, + MissingPipeline(pass::MissingPipeline), #[error(transparent)] IncompatibleBindGroup(#[from] Box), #[error( @@ -139,8 +134,8 @@ pub enum ComputePassErrorInner { EncoderState(#[from] EncoderStateError), #[error("Parent encoder is 
invalid")] InvalidParentEncoder, - #[error("Bind group index {index} is greater than the device's requested `max_bind_group` limit {max}")] - BindGroupIndexOutOfRange { index: u32, max: u32 }, + #[error(transparent)] + BindGroupIndexOutOfRange(#[from] pass::BindGroupIndexOutOfRange), #[error(transparent)] DestroyedResource(#[from] DestroyedResourceError), #[error("Indirect buffer offset {0:?} is not a multiple of 4")] @@ -155,8 +150,8 @@ pub enum ComputePassErrorInner { ResourceUsageCompatibility(#[from] ResourceUsageCompatibilityError), #[error(transparent)] MissingBufferUsage(#[from] MissingBufferUsageError), - #[error("Cannot pop debug group, because number of pushed debug groups is zero")] - InvalidPopDebugGroup, + #[error(transparent)] + InvalidPopDebugGroup(#[from] pass::InvalidPopDebugGroup), #[error(transparent)] Dispatch(#[from] DispatchError), #[error(transparent)] @@ -181,6 +176,9 @@ pub enum ComputePassErrorInner { InvalidResource(#[from] InvalidResourceError), #[error(transparent)] TimestampWrites(#[from] TimestampWritesError), + // This one is unreachable, but required for generic pass support + #[error(transparent)] + InvalidValuesOffset(#[from] pass::InvalidValuesOffset), } /// Error encountered when performing a compute pass, stored for later reporting @@ -193,6 +191,12 @@ pub struct ComputePassError { pub(super) inner: ComputePassErrorInner, } +impl From for ComputePassErrorInner { + fn from(value: pass::MissingPipeline) -> Self { + Self::Dispatch(DispatchError::MissingPipeline(value)) + } +} + impl MapPassErr for E where E: Into, @@ -206,34 +210,15 @@ where } struct State<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> { - binder: Binder, pipeline: Option>, - scope: UsageScope<'scope>, - debug_scope_depth: u32, - snatch_guard: SnatchGuard<'snatch_guard>, + general: pass::BaseState<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder>, - device: &'cmd_buf Arc, - - raw_encoder: &'raw_encoder mut dyn hal::DynCommandEncoder, - - tracker: &'cmd_buf mut Tracker, - buffer_memory_init_actions: &'cmd_buf mut Vec, - texture_memory_actions: &'cmd_buf mut CommandBufferTextureMemoryActions, - as_actions: &'cmd_buf mut Vec, - - temp_offsets: Vec, - dynamic_offset_count: usize, - string_offset: usize, active_query: Option<(Arc, u32)>, push_constants: Vec, intermediate_trackers: Tracker, - - /// Immediate texture inits required because of prior discards. Need to - /// be inserted before texture reads. - pending_discard_init_fixups: SurfacesInDiscardState, } impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> @@ -241,11 +226,11 @@ impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> { fn is_ready(&self) -> Result<(), DispatchError> { if let Some(pipeline) = self.pipeline.as_ref() { - self.binder.check_compatibility(pipeline.as_ref())?; - self.binder.check_late_buffer_bindings()?; + self.general.binder.check_compatibility(pipeline.as_ref())?; + self.general.binder.check_late_buffer_bindings()?; Ok(()) } else { - Err(DispatchError::MissingPipeline) + Err(DispatchError::MissingPipeline(pass::MissingPipeline)) } } @@ -255,16 +240,19 @@ impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> &mut self, indirect_buffer: Option, ) -> Result<(), ResourceUsageCompatibilityError> { - for bind_group in self.binder.list_active() { - unsafe { self.scope.merge_bind_group(&bind_group.used)? }; + for bind_group in self.general.binder.list_active() { + unsafe { self.general.scope.merge_bind_group(&bind_group.used)? 
}; // Note: stateless trackers are not merged: the lifetime reference // is held to the bind group itself. } - for bind_group in self.binder.list_active() { + for bind_group in self.general.binder.list_active() { unsafe { self.intermediate_trackers - .set_and_remove_from_usage_scope_sparse(&mut self.scope, &bind_group.used) + .set_and_remove_from_usage_scope_sparse( + &mut self.general.scope, + &bind_group.used, + ) } } @@ -272,13 +260,16 @@ impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> unsafe { self.intermediate_trackers .buffers - .set_and_remove_from_usage_scope_sparse(&mut self.scope.buffers, indirect_buffer); + .set_and_remove_from_usage_scope_sparse( + &mut self.general.scope.buffers, + indirect_buffer, + ); } CommandBuffer::drain_barriers( - self.raw_encoder, + self.general.raw_encoder, &mut self.intermediate_trackers, - &self.snatch_guard, + self.general.snatch_guard, ); Ok(()) } @@ -496,36 +487,48 @@ impl Global { .open_pass(base.label.as_deref()) .map_pass_err(pass_scope)?; + let snatch_guard = device.snatchable_lock.read(); + let mut state = State { - binder: Binder::new(), pipeline: None, - scope: device.new_usage_scope(), - debug_scope_depth: 0, - snatch_guard: device.snatchable_lock.read(), + general: pass::BaseState { + device, + raw_encoder, + tracker: &mut cmd_buf_data.trackers, + buffer_memory_init_actions: &mut cmd_buf_data.buffer_memory_init_actions, + texture_memory_actions: &mut cmd_buf_data.texture_memory_actions, + as_actions: &mut cmd_buf_data.as_actions, + binder: Binder::new(), + temp_offsets: Vec::new(), + dynamic_offset_count: 0, - device, - raw_encoder, - tracker: &mut cmd_buf_data.trackers, - buffer_memory_init_actions: &mut cmd_buf_data.buffer_memory_init_actions, - texture_memory_actions: &mut cmd_buf_data.texture_memory_actions, - as_actions: &mut cmd_buf_data.as_actions, - - temp_offsets: Vec::new(), - dynamic_offset_count: 0, - string_offset: 0, + pending_discard_init_fixups: SurfacesInDiscardState::new(), + + snatch_guard: &snatch_guard, + scope: device.new_usage_scope(), + + debug_scope_depth: 0, + string_offset: 0, + }, active_query: None, push_constants: Vec::new(), intermediate_trackers: Tracker::new(), - - pending_discard_init_fixups: SurfacesInDiscardState::new(), }; - let indices = &state.device.tracker_indices; - state.tracker.buffers.set_size(indices.buffers.size()); - state.tracker.textures.set_size(indices.textures.size()); + let indices = &state.general.device.tracker_indices; + state + .general + .tracker + .buffers + .set_size(indices.buffers.size()); + state + .general + .tracker + .textures + .set_size(indices.textures.size()); let timestamp_writes: Option> = if let Some(tw) = pass.timestamp_writes.take() { @@ -533,7 +536,7 @@ impl Global { .same_device_as(cmd_buf.as_ref()) .map_pass_err(pass_scope)?; - let query_set = state.tracker.query_sets.insert_single(tw.query_set); + let query_set = state.general.tracker.query_sets.insert_single(tw.query_set); // Unlike in render passes we can't delay resetting the query sets since // there is no auxiliary pass. @@ -550,7 +553,10 @@ impl Global { // But no point in erroring over that nuance here! if let Some(range) = range { unsafe { - state.raw_encoder.reset_queries(query_set.raw(), range); + state + .general + .raw_encoder + .reset_queries(query_set.raw(), range); } } @@ -569,7 +575,7 @@ impl Global { }; unsafe { - state.raw_encoder.begin_compute_pass(&hal_desc); + state.general.raw_encoder.begin_compute_pass(&hal_desc); } for command in base.commands.drain(..) 
{ @@ -580,13 +586,14 @@ impl Global { bind_group, } => { let scope = PassErrorScope::SetBindGroup; - set_bind_group( - &mut state, + pass::set_bind_group::( + &mut state.general, cmd_buf.as_ref(), &base.dynamic_offsets, index, num_dynamic_offsets, bind_group, + false, ) .map_pass_err(scope)?; } @@ -600,12 +607,21 @@ impl Global { values_offset, } => { let scope = PassErrorScope::SetPushConstant; - set_push_constant( - &mut state, + pass::set_push_constant::( + &mut state.general, &base.push_constant_data, + wgt::ShaderStages::COMPUTE, offset, size_bytes, - values_offset, + Some(values_offset), + |data_slice| { + let offset_in_elements = + (offset / wgt::PUSH_CONSTANT_ALIGNMENT) as usize; + let size_in_elements = + (size_bytes / wgt::PUSH_CONSTANT_ALIGNMENT) as usize; + state.push_constants[offset_in_elements..][..size_in_elements] + .copy_from_slice(data_slice); + }, ) .map_pass_err(scope)?; } @@ -619,22 +635,29 @@ impl Global { .map_pass_err(scope)?; } ArcComputeCommand::PushDebugGroup { color: _, len } => { - push_debug_group(&mut state, &base.string_data, len); + pass::push_debug_group(&mut state.general, &base.string_data, len); } ArcComputeCommand::PopDebugGroup => { let scope = PassErrorScope::PopDebugGroup; - pop_debug_group(&mut state).map_pass_err(scope)?; + pass::pop_debug_group::(&mut state.general) + .map_pass_err(scope)?; } ArcComputeCommand::InsertDebugMarker { color: _, len } => { - insert_debug_marker(&mut state, &base.string_data, len); + pass::insert_debug_marker(&mut state.general, &base.string_data, len); } ArcComputeCommand::WriteTimestamp { query_set, query_index, } => { let scope = PassErrorScope::WriteTimestamp; - write_timestamp(&mut state, cmd_buf.as_ref(), query_set, query_index) - .map_pass_err(scope)?; + pass::write_timestamp::( + &mut state.general, + cmd_buf.as_ref(), + None, + query_set, + query_index, + ) + .map_pass_err(scope)?; } ArcComputeCommand::BeginPipelineStatisticsQuery { query_set, @@ -643,8 +666,8 @@ impl Global { let scope = PassErrorScope::BeginPipelineStatisticsQuery; validate_and_begin_pipeline_statistics_query( query_set, - state.raw_encoder, - &mut state.tracker.query_sets, + state.general.raw_encoder, + &mut state.general.tracker.query_sets, cmd_buf.as_ref(), query_index, None, @@ -654,21 +677,27 @@ impl Global { } ArcComputeCommand::EndPipelineStatisticsQuery => { let scope = PassErrorScope::EndPipelineStatisticsQuery; - end_pipeline_statistics_query(state.raw_encoder, &mut state.active_query) - .map_pass_err(scope)?; + end_pipeline_statistics_query( + state.general.raw_encoder, + &mut state.active_query, + ) + .map_pass_err(scope)?; } } } unsafe { - state.raw_encoder.end_compute_pass(); + state.general.raw_encoder.end_compute_pass(); } let State { - snatch_guard, - tracker, + general: + pass::BaseState { + tracker, + pending_discard_init_fixups, + .. + }, intermediate_trackers, - pending_discard_init_fixups, .. 
} = state; @@ -702,88 +731,6 @@ impl Global { } } -fn set_bind_group( - state: &mut State, - cmd_buf: &CommandBuffer, - dynamic_offsets: &[DynamicOffset], - index: u32, - num_dynamic_offsets: usize, - bind_group: Option>, -) -> Result<(), ComputePassErrorInner> { - let max_bind_groups = state.device.limits.max_bind_groups; - if index >= max_bind_groups { - return Err(ComputePassErrorInner::BindGroupIndexOutOfRange { - index, - max: max_bind_groups, - }); - } - - state.temp_offsets.clear(); - state.temp_offsets.extend_from_slice( - &dynamic_offsets - [state.dynamic_offset_count..state.dynamic_offset_count + num_dynamic_offsets], - ); - state.dynamic_offset_count += num_dynamic_offsets; - - if bind_group.is_none() { - // TODO: Handle bind_group None. - return Ok(()); - } - - let bind_group = bind_group.unwrap(); - let bind_group = state.tracker.bind_groups.insert_single(bind_group); - - bind_group.same_device_as(cmd_buf)?; - - bind_group.validate_dynamic_bindings(index, &state.temp_offsets)?; - - state - .buffer_memory_init_actions - .extend(bind_group.used_buffer_ranges.iter().filter_map(|action| { - action - .buffer - .initialization_status - .read() - .check_action(action) - })); - - for action in bind_group.used_texture_ranges.iter() { - state - .pending_discard_init_fixups - .extend(state.texture_memory_actions.register_init_action(action)); - } - - let used_resource = bind_group - .used - .acceleration_structures - .into_iter() - .map(|tlas| AsAction::UseTlas(tlas.clone())); - - state.as_actions.extend(used_resource); - - let pipeline_layout = state.binder.pipeline_layout.clone(); - let entries = state - .binder - .assign_group(index as usize, bind_group, &state.temp_offsets); - if !entries.is_empty() && pipeline_layout.is_some() { - let pipeline_layout = pipeline_layout.as_ref().unwrap().raw(); - for (i, e) in entries.iter().enumerate() { - if let Some(group) = e.group.as_ref() { - let raw_bg = group.try_raw(&state.snatch_guard)?; - unsafe { - state.raw_encoder.set_bind_group( - pipeline_layout, - index + i as u32, - Some(raw_bg), - &e.dynamic_offsets, - ); - } - } - } - } - Ok(()) -} - fn set_pipeline( state: &mut State, cmd_buf: &CommandBuffer, @@ -793,113 +740,43 @@ fn set_pipeline( state.pipeline = Some(pipeline.clone()); - let pipeline = state.tracker.compute_pipelines.insert_single(pipeline); + let pipeline = state + .general + .tracker + .compute_pipelines + .insert_single(pipeline) + .clone(); unsafe { - state.raw_encoder.set_compute_pipeline(pipeline.raw()); + state + .general + .raw_encoder + .set_compute_pipeline(pipeline.raw()); } // Rebind resources - if state.binder.pipeline_layout.is_none() - || !state - .binder - .pipeline_layout - .as_ref() - .unwrap() - .is_equal(&pipeline.layout) - { - let (start_index, entries) = state - .binder - .change_pipeline_layout(&pipeline.layout, &pipeline.late_sized_buffer_groups); - if !entries.is_empty() { - for (i, e) in entries.iter().enumerate() { - if let Some(group) = e.group.as_ref() { - let raw_bg = group.try_raw(&state.snatch_guard)?; - unsafe { - state.raw_encoder.set_bind_group( - pipeline.layout.raw(), - start_index as u32 + i as u32, - Some(raw_bg), - &e.dynamic_offsets, - ); - } - } + pass::rebind_resources::( + &mut state.general, + &pipeline.layout, + &pipeline.late_sized_buffer_groups, + || { + // This only needs to be here for compute pipelines because they use push constants for + // validating indirect draws. + state.push_constants.clear(); + // Note that can only be one range for each stage. 
See the `MoreThanOnePushConstantRangePerStage` error. + if let Some(push_constant_range) = + pipeline.layout.push_constant_ranges.iter().find_map(|pcr| { + pcr.stages + .contains(wgt::ShaderStages::COMPUTE) + .then_some(pcr.range.clone()) + }) + { + // Note that non-0 range start doesn't work anyway https://github.com/gfx-rs/wgpu/issues/4502 + let len = push_constant_range.len() / wgt::PUSH_CONSTANT_ALIGNMENT as usize; + state.push_constants.extend(core::iter::repeat_n(0, len)); } - } - - // TODO: integrate this in the code below once we simplify push constants - state.push_constants.clear(); - // Note that can only be one range for each stage. See the `MoreThanOnePushConstantRangePerStage` error. - if let Some(push_constant_range) = - pipeline.layout.push_constant_ranges.iter().find_map(|pcr| { - pcr.stages - .contains(wgt::ShaderStages::COMPUTE) - .then_some(pcr.range.clone()) - }) - { - // Note that non-0 range start doesn't work anyway https://github.com/gfx-rs/wgpu/issues/4502 - let len = push_constant_range.len() / wgt::PUSH_CONSTANT_ALIGNMENT as usize; - state.push_constants.extend(core::iter::repeat_n(0, len)); - } - - // Clear push constant ranges - let non_overlapping = - super::bind::compute_nonoverlapping_ranges(&pipeline.layout.push_constant_ranges); - for range in non_overlapping { - let offset = range.range.start; - let size_bytes = range.range.end - offset; - super::push_constant_clear(offset, size_bytes, |clear_offset, clear_data| unsafe { - state.raw_encoder.set_push_constants( - pipeline.layout.raw(), - wgt::ShaderStages::COMPUTE, - clear_offset, - clear_data, - ); - }); - } - } - Ok(()) -} - -fn set_push_constant( - state: &mut State, - push_constant_data: &[u32], - offset: u32, - size_bytes: u32, - values_offset: u32, -) -> Result<(), ComputePassErrorInner> { - let end_offset_bytes = offset + size_bytes; - let values_end_offset = (values_offset + size_bytes / wgt::PUSH_CONSTANT_ALIGNMENT) as usize; - let data_slice = &push_constant_data[(values_offset as usize)..values_end_offset]; - - let pipeline_layout = state - .binder - .pipeline_layout - .as_ref() - // TODO: don't error here, lazily update the push constants using `state.push_constants` - .ok_or(ComputePassErrorInner::Dispatch( - DispatchError::MissingPipeline, - ))?; - - pipeline_layout.validate_push_constant_ranges( - wgt::ShaderStages::COMPUTE, - offset, - end_offset_bytes, - )?; - - let offset_in_elements = (offset / wgt::PUSH_CONSTANT_ALIGNMENT) as usize; - let size_in_elements = (size_bytes / wgt::PUSH_CONSTANT_ALIGNMENT) as usize; - state.push_constants[offset_in_elements..][..size_in_elements].copy_from_slice(data_slice); - - unsafe { - state.raw_encoder.set_push_constants( - pipeline_layout.raw(), - wgt::ShaderStages::COMPUTE, - offset, - data_slice, - ); - } - Ok(()) + }, + ) } fn dispatch(state: &mut State, groups: [u32; 3]) -> Result<(), ComputePassErrorInner> { @@ -907,7 +784,11 @@ fn dispatch(state: &mut State, groups: [u32; 3]) -> Result<(), ComputePassErrorI state.flush_states(None)?; - let groups_size_limit = state.device.limits.max_compute_workgroups_per_dimension; + let groups_size_limit = state + .general + .device + .limits + .max_compute_workgroups_per_dimension; if groups[0] > groups_size_limit || groups[1] > groups_size_limit @@ -922,7 +803,7 @@ fn dispatch(state: &mut State, groups: [u32; 3]) -> Result<(), ComputePassErrorI } unsafe { - state.raw_encoder.dispatch(groups); + state.general.raw_encoder.dispatch(groups); } Ok(()) } @@ -938,11 +819,12 @@ fn 
dispatch_indirect( state.is_ready()?; state + .general .device .require_downlevel_flags(wgt::DownlevelFlags::INDIRECT_EXECUTION)?; buffer.check_usage(wgt::BufferUsages::INDIRECT)?; - buffer.check_destroyed(&state.snatch_guard)?; + buffer.check_destroyed(state.general.snatch_guard)?; if offset % 4 != 0 { return Err(ComputePassErrorInner::UnalignedIndirectBufferOffset(offset)); @@ -958,25 +840,29 @@ fn dispatch_indirect( } let stride = 3 * 4; // 3 integers, x/y/z group size - state - .buffer_memory_init_actions - .extend(buffer.initialization_status.read().create_action( + state.general.buffer_memory_init_actions.extend( + buffer.initialization_status.read().create_action( &buffer, offset..(offset + stride), MemoryInitKind::NeedsInitializedMemory, - )); + ), + ); - if let Some(ref indirect_validation) = state.device.indirect_validation { - let params = indirect_validation - .dispatch - .params(&state.device.limits, offset, buffer.size); + if let Some(ref indirect_validation) = state.general.device.indirect_validation { + let params = + indirect_validation + .dispatch + .params(&state.general.device.limits, offset, buffer.size); unsafe { - state.raw_encoder.set_compute_pipeline(params.pipeline); + state + .general + .raw_encoder + .set_compute_pipeline(params.pipeline); } unsafe { - state.raw_encoder.set_push_constants( + state.general.raw_encoder.set_push_constants( params.pipeline_layout, wgt::ShaderStages::COMPUTE, 0, @@ -985,7 +871,7 @@ fn dispatch_indirect( } unsafe { - state.raw_encoder.set_bind_group( + state.general.raw_encoder.set_bind_group( params.pipeline_layout, 0, Some(params.dst_bind_group), @@ -993,13 +879,13 @@ fn dispatch_indirect( ); } unsafe { - state.raw_encoder.set_bind_group( + state.general.raw_encoder.set_bind_group( params.pipeline_layout, 1, Some( buffer .indirect_validation_bind_groups - .get(&state.snatch_guard) + .get(state.general.snatch_guard) .unwrap() .dispatch .as_ref(), @@ -1012,24 +898,30 @@ fn dispatch_indirect( .intermediate_trackers .buffers .set_single(&buffer, wgt::BufferUses::STORAGE_READ_ONLY); - let src_barrier = - src_transition.map(|transition| transition.into_hal(&buffer, &state.snatch_guard)); + let src_barrier = src_transition + .map(|transition| transition.into_hal(&buffer, state.general.snatch_guard)); unsafe { - state.raw_encoder.transition_buffers(src_barrier.as_slice()); + state + .general + .raw_encoder + .transition_buffers(src_barrier.as_slice()); } unsafe { - state.raw_encoder.transition_buffers(&[hal::BufferBarrier { - buffer: params.dst_buffer, - usage: hal::StateTransition { - from: wgt::BufferUses::INDIRECT, - to: wgt::BufferUses::STORAGE_READ_WRITE, - }, - }]); + state + .general + .raw_encoder + .transition_buffers(&[hal::BufferBarrier { + buffer: params.dst_buffer, + usage: hal::StateTransition { + from: wgt::BufferUses::INDIRECT, + to: wgt::BufferUses::STORAGE_READ_WRITE, + }, + }]); } unsafe { - state.raw_encoder.dispatch([1, 1, 1]); + state.general.raw_encoder.dispatch([1, 1, 1]); } // reset state @@ -1037,12 +929,15 @@ fn dispatch_indirect( let pipeline = state.pipeline.as_ref().unwrap(); unsafe { - state.raw_encoder.set_compute_pipeline(pipeline.raw()); + state + .general + .raw_encoder + .set_compute_pipeline(pipeline.raw()); } if !state.push_constants.is_empty() { unsafe { - state.raw_encoder.set_push_constants( + state.general.raw_encoder.set_push_constants( pipeline.layout.raw(), wgt::ShaderStages::COMPUTE, 0, @@ -1051,11 +946,11 @@ fn dispatch_indirect( } } - for (i, e) in state.binder.list_valid() { + for (i, e) in 
state.general.binder.list_valid() { let group = e.group.as_ref().unwrap(); - let raw_bg = group.try_raw(&state.snatch_guard)?; + let raw_bg = group.try_raw(state.general.snatch_guard)?; unsafe { - state.raw_encoder.set_bind_group( + state.general.raw_encoder.set_bind_group( pipeline.layout.raw(), i as u32, Some(raw_bg), @@ -1066,21 +961,28 @@ fn dispatch_indirect( } unsafe { - state.raw_encoder.transition_buffers(&[hal::BufferBarrier { - buffer: params.dst_buffer, - usage: hal::StateTransition { - from: wgt::BufferUses::STORAGE_READ_WRITE, - to: wgt::BufferUses::INDIRECT, - }, - }]); + state + .general + .raw_encoder + .transition_buffers(&[hal::BufferBarrier { + buffer: params.dst_buffer, + usage: hal::StateTransition { + from: wgt::BufferUses::STORAGE_READ_WRITE, + to: wgt::BufferUses::INDIRECT, + }, + }]); } state.flush_states(None)?; unsafe { - state.raw_encoder.dispatch_indirect(params.dst_buffer, 0); + state + .general + .raw_encoder + .dispatch_indirect(params.dst_buffer, 0); } } else { state + .general .scope .buffers .merge_single(&buffer, wgt::BufferUses::INDIRECT)?; @@ -1088,79 +990,15 @@ fn dispatch_indirect( use crate::resource::Trackable; state.flush_states(Some(buffer.tracker_index()))?; - let buf_raw = buffer.try_raw(&state.snatch_guard)?; + let buf_raw = buffer.try_raw(state.general.snatch_guard)?; unsafe { - state.raw_encoder.dispatch_indirect(buf_raw, offset); + state.general.raw_encoder.dispatch_indirect(buf_raw, offset); } } Ok(()) } -fn push_debug_group(state: &mut State, string_data: &[u8], len: usize) { - state.debug_scope_depth += 1; - if !state - .device - .instance_flags - .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS) - { - let label = - str::from_utf8(&string_data[state.string_offset..state.string_offset + len]).unwrap(); - unsafe { - state.raw_encoder.begin_debug_marker(label); - } - } - state.string_offset += len; -} - -fn pop_debug_group(state: &mut State) -> Result<(), ComputePassErrorInner> { - if state.debug_scope_depth == 0 { - return Err(ComputePassErrorInner::InvalidPopDebugGroup); - } - state.debug_scope_depth -= 1; - if !state - .device - .instance_flags - .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS) - { - unsafe { - state.raw_encoder.end_debug_marker(); - } - } - Ok(()) -} - -fn insert_debug_marker(state: &mut State, string_data: &[u8], len: usize) { - if !state - .device - .instance_flags - .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS) - { - let label = - str::from_utf8(&string_data[state.string_offset..state.string_offset + len]).unwrap(); - unsafe { state.raw_encoder.insert_debug_marker(label) } - } - state.string_offset += len; -} - -fn write_timestamp( - state: &mut State, - cmd_buf: &CommandBuffer, - query_set: Arc, - query_index: u32, -) -> Result<(), ComputePassErrorInner> { - query_set.same_device_as(cmd_buf)?; - - state - .device - .require_features(wgt::Features::TIMESTAMP_QUERY_INSIDE_PASSES)?; - - let query_set = state.tracker.query_sets.insert_single(query_set); - - query_set.validate_and_write_timestamp(state.raw_encoder, query_index, None)?; - Ok(()) -} - // Recording a compute pass. 
// // The only error that should be returned from these methods is diff --git a/wgpu-core/src/command/draw.rs b/wgpu-core/src/command/draw.rs index 3b32b21e0e..2f04f1f3dd 100644 --- a/wgpu-core/src/command/draw.rs +++ b/wgpu-core/src/command/draw.rs @@ -2,6 +2,8 @@ use alloc::boxed::Box; use thiserror::Error; +use super::bind::BinderError; +use crate::command::pass; use crate::{ binding_model::{LateMinBufferBindingSizeMismatch, PushConstantUploadError}, resource::{ @@ -11,8 +13,6 @@ use crate::{ track::ResourceUsageCompatibilityError, }; -use super::bind::BinderError; - /// Error validating a draw call. #[derive(Clone, Debug, Error)] #[non_exhaustive] @@ -20,7 +20,7 @@ pub enum DrawError { #[error("Blend constant needs to be set")] MissingBlendConstant, #[error("Render pipeline must be set")] - MissingPipeline, + MissingPipeline(#[from] pass::MissingPipeline), #[error("Currently set {pipeline} requires vertex buffer {index} to be set")] MissingVertexBuffer { pipeline: ResourceErrorIdent, @@ -61,8 +61,8 @@ pub enum DrawError { #[derive(Clone, Debug, Error)] #[non_exhaustive] pub enum RenderCommandError { - #[error("Bind group index {index} is greater than the device's requested `max_bind_group` limit {max}")] - BindGroupIndexOutOfRange { index: u32, max: u32 }, + #[error(transparent)] + BindGroupIndexOutOfRange(#[from] pass::BindGroupIndexOutOfRange), #[error("Vertex buffer index {index} is greater than the device's requested `max_vertex_buffers` limit {max}")] VertexBufferIndexOutOfRange { index: u32, max: u32 }, #[error("Render pipeline targets are incompatible with render pass")] diff --git a/wgpu-core/src/command/mod.rs b/wgpu-core/src/command/mod.rs index 2b7a3fb56a..47ca3cc2c3 100644 --- a/wgpu-core/src/command/mod.rs +++ b/wgpu-core/src/command/mod.rs @@ -6,6 +6,7 @@ mod compute; mod compute_command; mod draw; mod memory_init; +mod pass; mod query; mod ray_tracing; mod render; diff --git a/wgpu-core/src/command/pass.rs b/wgpu-core/src/command/pass.rs new file mode 100644 index 0000000000..a03757e3e9 --- /dev/null +++ b/wgpu-core/src/command/pass.rs @@ -0,0 +1,356 @@ +//! Generic pass functions that both compute and render passes need. 
+ +use crate::binding_model::{BindError, BindGroup, PushConstantUploadError}; +use crate::command::bind::Binder; +use crate::command::memory_init::{CommandBufferTextureMemoryActions, SurfacesInDiscardState}; +use crate::command::{CommandBuffer, QueryResetMap, QueryUseError}; +use crate::device::{Device, DeviceError, MissingFeatures}; +use crate::init_tracker::BufferInitTrackerAction; +use crate::pipeline::LateSizedBufferGroup; +use crate::ray_tracing::AsAction; +use crate::resource::{DestroyedResourceError, Labeled, ParentDevice, QuerySet}; +use crate::snatch::SnatchGuard; +use crate::track::{ResourceUsageCompatibilityError, Tracker, UsageScope}; +use crate::{api_log, binding_model}; +use alloc::sync::Arc; +use alloc::vec::Vec; +use core::str; +use thiserror::Error; +use wgt::DynamicOffset; + +#[derive(Clone, Debug, Error)] +#[error( + "Bind group index {index} is greater than the device's requested `max_bind_group` limit {max}" +)] +pub struct BindGroupIndexOutOfRange { + pub index: u32, + pub max: u32, +} + +#[derive(Clone, Debug, Error)] +#[error("Pipeline must be set")] +pub struct MissingPipeline; + +#[derive(Clone, Debug, Error)] +#[error("Setting `values_offset` to be `None` is only for internal use in render bundles")] +pub struct InvalidValuesOffset; + +#[derive(Clone, Debug, Error)] +#[error("Cannot pop debug group, because number of pushed debug groups is zero")] +pub struct InvalidPopDebugGroup; + +pub(crate) struct BaseState<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> { + pub(crate) device: &'cmd_buf Arc, + + pub(crate) raw_encoder: &'raw_encoder mut dyn hal::DynCommandEncoder, + + pub(crate) tracker: &'cmd_buf mut Tracker, + pub(crate) buffer_memory_init_actions: &'cmd_buf mut Vec, + pub(crate) texture_memory_actions: &'cmd_buf mut CommandBufferTextureMemoryActions, + pub(crate) as_actions: &'cmd_buf mut Vec, + + /// Immediate texture inits required because of prior discards. Need to + /// be inserted before texture reads. + pub(crate) pending_discard_init_fixups: SurfacesInDiscardState, + + pub(crate) scope: UsageScope<'scope>, + + pub(crate) binder: Binder, + + pub(crate) temp_offsets: Vec, + + pub(crate) dynamic_offset_count: usize, + + pub(crate) snatch_guard: &'snatch_guard SnatchGuard<'snatch_guard>, + + pub(crate) debug_scope_depth: u32, + pub(crate) string_offset: usize, +} + +pub(crate) fn set_bind_group( + state: &mut BaseState, + cmd_buf: &CommandBuffer, + dynamic_offsets: &[DynamicOffset], + index: u32, + num_dynamic_offsets: usize, + bind_group: Option>, + merge_bind_groups: bool, +) -> Result<(), E> +where + E: From + + From + + From + + From + + From, +{ + if bind_group.is_none() { + api_log!("Pass::set_bind_group {index} None"); + } else { + api_log!( + "Pass::set_bind_group {index} {}", + bind_group.as_ref().unwrap().error_ident() + ); + } + + let max_bind_groups = state.device.limits.max_bind_groups; + if index >= max_bind_groups { + return Err(BindGroupIndexOutOfRange { + index, + max: max_bind_groups, + } + .into()); + } + + state.temp_offsets.clear(); + state.temp_offsets.extend_from_slice( + &dynamic_offsets + [state.dynamic_offset_count..state.dynamic_offset_count + num_dynamic_offsets], + ); + state.dynamic_offset_count += num_dynamic_offsets; + + if bind_group.is_none() { + // TODO: Handle bind_group None. 
+ return Ok(()); + } + + let bind_group = bind_group.unwrap(); + let bind_group = state.tracker.bind_groups.insert_single(bind_group); + + bind_group.same_device_as(cmd_buf)?; + + bind_group.validate_dynamic_bindings(index, &state.temp_offsets)?; + + if merge_bind_groups { + // merge the resource tracker in + unsafe { + state.scope.merge_bind_group(&bind_group.used)?; + } + } + //Note: stateless trackers are not merged: the lifetime reference + // is held to the bind group itself. + + state + .buffer_memory_init_actions + .extend(bind_group.used_buffer_ranges.iter().filter_map(|action| { + action + .buffer + .initialization_status + .read() + .check_action(action) + })); + for action in bind_group.used_texture_ranges.iter() { + state + .pending_discard_init_fixups + .extend(state.texture_memory_actions.register_init_action(action)); + } + + let used_resource = bind_group + .used + .acceleration_structures + .into_iter() + .map(|tlas| AsAction::UseTlas(tlas.clone())); + + state.as_actions.extend(used_resource); + + let pipeline_layout = state.binder.pipeline_layout.clone(); + let entries = state + .binder + .assign_group(index as usize, bind_group, &state.temp_offsets); + if !entries.is_empty() && pipeline_layout.is_some() { + let pipeline_layout = pipeline_layout.as_ref().unwrap().raw(); + for (i, e) in entries.iter().enumerate() { + if let Some(group) = e.group.as_ref() { + let raw_bg = group.try_raw(state.snatch_guard)?; + unsafe { + state.raw_encoder.set_bind_group( + pipeline_layout, + index + i as u32, + Some(raw_bg), + &e.dynamic_offsets, + ); + } + } + } + } + Ok(()) +} + +/// After a pipeline has been changed, resources must be rebound +pub(crate) fn rebind_resources( + state: &mut BaseState, + pipeline_layout: &Arc, + late_sized_buffer_groups: &[LateSizedBufferGroup], + f: F, +) -> Result<(), E> +where + E: From, +{ + if state.binder.pipeline_layout.is_none() + || !state + .binder + .pipeline_layout + .as_ref() + .unwrap() + .is_equal(pipeline_layout) + { + let (start_index, entries) = state + .binder + .change_pipeline_layout(pipeline_layout, late_sized_buffer_groups); + if !entries.is_empty() { + for (i, e) in entries.iter().enumerate() { + if let Some(group) = e.group.as_ref() { + let raw_bg = group.try_raw(state.snatch_guard)?; + unsafe { + state.raw_encoder.set_bind_group( + pipeline_layout.raw(), + start_index as u32 + i as u32, + Some(raw_bg), + &e.dynamic_offsets, + ); + } + } + } + } + + f(); + + let non_overlapping = + super::bind::compute_nonoverlapping_ranges(&pipeline_layout.push_constant_ranges); + + // Clear push constant ranges + for range in non_overlapping { + let offset = range.range.start; + let size_bytes = range.range.end - offset; + super::push_constant_clear(offset, size_bytes, |clear_offset, clear_data| unsafe { + state.raw_encoder.set_push_constants( + pipeline_layout.raw(), + range.stages, + clear_offset, + clear_data, + ); + }); + } + } + Ok(()) +} + +pub(crate) fn set_push_constant( + state: &mut BaseState, + push_constant_data: &[u32], + stages: wgt::ShaderStages, + offset: u32, + size_bytes: u32, + values_offset: Option, + f: F, +) -> Result<(), E> +where + E: From + From + From, +{ + api_log!("Pass::set_push_constants"); + + let values_offset = values_offset.ok_or(InvalidValuesOffset)?; + + let end_offset_bytes = offset + size_bytes; + let values_end_offset = (values_offset + size_bytes / wgt::PUSH_CONSTANT_ALIGNMENT) as usize; + let data_slice = &push_constant_data[(values_offset as usize)..values_end_offset]; + + let pipeline_layout = state + 
.binder + .pipeline_layout + .as_ref() + .ok_or(MissingPipeline)?; + + pipeline_layout.validate_push_constant_ranges(stages, offset, end_offset_bytes)?; + + f(data_slice); + + unsafe { + state + .raw_encoder + .set_push_constants(pipeline_layout.raw(), stages, offset, data_slice) + } + Ok(()) +} + +pub(crate) fn write_timestamp( + state: &mut BaseState, + cmd_buf: &CommandBuffer, + pending_query_resets: Option<&mut QueryResetMap>, + query_set: Arc, + query_index: u32, +) -> Result<(), E> +where + E: From + From + From, +{ + api_log!( + "Pass::write_timestamps {query_index} {}", + query_set.error_ident() + ); + + query_set.same_device_as(cmd_buf)?; + + state + .device + .require_features(wgt::Features::TIMESTAMP_QUERY_INSIDE_PASSES)?; + + let query_set = state.tracker.query_sets.insert_single(query_set); + + query_set.validate_and_write_timestamp(state.raw_encoder, query_index, pending_query_resets)?; + Ok(()) +} + +pub(crate) fn push_debug_group(state: &mut BaseState, string_data: &[u8], len: usize) { + state.debug_scope_depth += 1; + if !state + .device + .instance_flags + .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS) + { + let label = + str::from_utf8(&string_data[state.string_offset..state.string_offset + len]).unwrap(); + + api_log!("Pass::push_debug_group {label:?}"); + unsafe { + state.raw_encoder.begin_debug_marker(label); + } + } + state.string_offset += len; +} + +pub(crate) fn pop_debug_group(state: &mut BaseState) -> Result<(), E> +where + E: From, +{ + api_log!("Pass::pop_debug_group"); + + if state.debug_scope_depth == 0 { + return Err(InvalidPopDebugGroup.into()); + } + state.debug_scope_depth -= 1; + if !state + .device + .instance_flags + .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS) + { + unsafe { + state.raw_encoder.end_debug_marker(); + } + } + Ok(()) +} + +pub(crate) fn insert_debug_marker(state: &mut BaseState, string_data: &[u8], len: usize) { + if !state + .device + .instance_flags + .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS) + { + let label = + str::from_utf8(&string_data[state.string_offset..state.string_offset + len]).unwrap(); + api_log!("Pass::insert_debug_marker {label:?}"); + unsafe { + state.raw_encoder.insert_debug_marker(label); + } + } + state.string_offset += len; +} diff --git a/wgpu-core/src/command/render.rs b/wgpu-core/src/command/render.rs index b3e660d39e..c3bdc84e3a 100644 --- a/wgpu-core/src/command/render.rs +++ b/wgpu-core/src/command/render.rs @@ -8,19 +8,16 @@ use wgt::{ TextureSelector, TextureUsages, TextureViewDimension, VertexStepMode, }; -use crate::binding_model::BindGroup; use crate::command::{ - pass_base, pass_try, validate_and_begin_occlusion_query, + pass, pass_base, pass_try, validate_and_begin_occlusion_query, validate_and_begin_pipeline_statistics_query, EncoderStateError, PassStateError, TimestampWritesError, }; -use crate::init_tracker::BufferInitTrackerAction; use crate::pipeline::{RenderPipeline, VertexStep}; use crate::resource::{InvalidResourceError, ResourceErrorIdent}; use crate::snatch::SnatchGuard; use crate::{ api_log, - binding_model::BindError, command::{ bind::Binder, end_occlusion_query, end_pipeline_statistics_query, @@ -57,6 +54,7 @@ use super::{ }; use super::{DrawKind, Rect}; +use crate::binding_model::{BindError, PushConstantUploadError}; pub use wgt::{LoadOp, StoreOp}; fn load_hal_ops(load: LoadOp) -> hal::AttachmentOps { @@ -496,29 +494,15 @@ impl VertexState { struct State<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> { pipeline_flags: PipelineFlags, - binder: Binder, blend_constant: 
OptionalState, stencil_reference: u32, pipeline: Option>, index: IndexState, vertex: VertexState, - debug_scope_depth: u32, - info: RenderPassInfo<'scope>, + info: RenderPassInfo, - snatch_guard: &'snatch_guard SnatchGuard<'snatch_guard>, - - device: &'cmd_buf Arc, - - raw_encoder: &'raw_encoder mut dyn hal::DynCommandEncoder, - - tracker: &'cmd_buf mut Tracker, - buffer_memory_init_actions: &'cmd_buf mut Vec, - texture_memory_actions: &'cmd_buf mut CommandBufferTextureMemoryActions, - - temp_offsets: Vec, - dynamic_offset_count: usize, - string_offset: usize, + general: pass::BaseState<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder>, active_occlusion_query: Option<(Arc, u32)>, active_pipeline_statistics_query: Option<(Arc, u32)>, @@ -529,8 +513,8 @@ impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> { fn is_ready(&self, indexed: bool) -> Result<(), DrawError> { if let Some(pipeline) = self.pipeline.as_ref() { - self.binder.check_compatibility(pipeline.as_ref())?; - self.binder.check_late_buffer_bindings()?; + self.general.binder.check_compatibility(pipeline.as_ref())?; + self.general.binder.check_late_buffer_bindings()?; if self.blend_constant == OptionalState::Required { return Err(DrawError::MissingBlendConstant); @@ -572,13 +556,13 @@ impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> } Ok(()) } else { - Err(DrawError::MissingPipeline) + Err(DrawError::MissingPipeline(pass::MissingPipeline)) } } /// Reset the `RenderBundle`-related states. fn reset_bundle(&mut self) { - self.binder.reset(); + self.general.binder.reset(); self.pipeline = None; self.index.reset(); self.vertex = Default::default(); @@ -713,8 +697,8 @@ pub enum RenderPassErrorInner { InvalidDepthOps, #[error("Unable to clear non-present/read-only stencil")] InvalidStencilOps, - #[error("Setting `values_offset` to be `None` is only for internal use in render bundles")] - InvalidValuesOffset, + #[error(transparent)] + InvalidValuesOffset(#[from] pass::InvalidValuesOffset), #[error(transparent)] MissingFeatures(#[from] MissingFeatures), #[error(transparent)] @@ -734,8 +718,8 @@ pub enum RenderPassErrorInner { end_count_offset: u64, count_buffer_size: u64, }, - #[error("Cannot pop debug group, because number of pushed debug groups is zero")] - InvalidPopDebugGroup, + #[error(transparent)] + InvalidPopDebugGroup(#[from] pass::InvalidPopDebugGroup), #[error(transparent)] ResourceUsageCompatibility(#[from] ResourceUsageCompatibilityError), #[error("Render bundle has incompatible targets, {0}")] @@ -796,6 +780,24 @@ impl From for RenderPassErrorInner { } } +impl From for RenderPassErrorInner { + fn from(error: pass::BindGroupIndexOutOfRange) -> Self { + Self::RenderCommand(RenderCommandError::BindGroupIndexOutOfRange(error)) + } +} + +impl From for RenderPassErrorInner { + fn from(error: pass::MissingPipeline) -> Self { + Self::Draw(DrawError::MissingPipeline(error)) + } +} + +impl From for RenderPassErrorInner { + fn from(error: PushConstantUploadError) -> Self { + Self::RenderCommand(error.into()) + } +} + /// Error encountered when performing a render pass. 
#[derive(Clone, Debug, Error)] #[error("{scope}")] @@ -833,21 +835,19 @@ impl TextureView { const MAX_TOTAL_ATTACHMENTS: usize = hal::MAX_COLOR_ATTACHMENTS + hal::MAX_COLOR_ATTACHMENTS + 1; type AttachmentDataVec = ArrayVec; -struct RenderPassInfo<'d> { +struct RenderPassInfo { context: RenderPassContext, - usage_scope: UsageScope<'d>, /// All render attachments, including depth/stencil render_attachments: AttachmentDataVec, is_depth_read_only: bool, is_stencil_read_only: bool, extent: wgt::Extent3d, - pending_discard_init_fixups: SurfacesInDiscardState, divergent_discarded_depth_stencil_aspect: Option<(wgt::TextureAspect, Arc)>, multiview: Option, } -impl<'d> RenderPassInfo<'d> { +impl RenderPassInfo { fn add_pass_texture_init_actions( load_op: LoadOp, store_op: StoreOp, @@ -884,7 +884,7 @@ impl<'d> RenderPassInfo<'d> { } fn start( - device: &'d Arc, + device: &Arc, hal_label: Option<&str>, color_attachments: ArrayVec< Option, @@ -897,6 +897,7 @@ impl<'d> RenderPassInfo<'d> { trackers: &mut Tracker, texture_memory_actions: &mut CommandBufferTextureMemoryActions, pending_query_resets: &mut QueryResetMap, + pending_discard_init_fixups: &mut SurfacesInDiscardState, snatch_guard: &SnatchGuard<'_>, ) -> Result { profiling::scope!("RenderPassInfo::start"); @@ -909,7 +910,6 @@ impl<'d> RenderPassInfo<'d> { let mut render_attachments = AttachmentDataVec::::new(); let mut discarded_surfaces = AttachmentDataVec::new(); - let mut pending_discard_init_fixups = SurfacesInDiscardState::new(); let mut divergent_discarded_depth_stencil_aspect = None; let mut attachment_location = AttachmentErrorLocation::Color { @@ -1000,7 +1000,7 @@ impl<'d> RenderPassInfo<'d> { at.depth.store_op(), texture_memory_actions, view, - &mut pending_discard_init_fixups, + pending_discard_init_fixups, ); } else if !ds_aspects.contains(hal::FormatAspects::DEPTH) { Self::add_pass_texture_init_actions( @@ -1008,7 +1008,7 @@ impl<'d> RenderPassInfo<'d> { at.stencil.store_op(), texture_memory_actions, view, - &mut pending_discard_init_fixups, + pending_discard_init_fixups, ); } else { // This is the only place (anywhere in wgpu) where Stencil & @@ -1206,7 +1206,7 @@ impl<'d> RenderPassInfo<'d> { at.store_op, texture_memory_actions, color_view, - &mut pending_discard_init_fixups, + pending_discard_init_fixups, ); render_attachments .push(color_view.to_render_attachment(wgt::TextureUses::COLOR_TARGET)); @@ -1380,23 +1380,22 @@ impl<'d> RenderPassInfo<'d> { Ok(Self { context, - usage_scope: device.new_usage_scope(), render_attachments, is_depth_read_only, is_stencil_read_only, extent, - pending_discard_init_fixups, divergent_discarded_depth_stencil_aspect, multiview, }) } fn finish( - mut self, + self, device: &Device, raw: &mut dyn hal::DynCommandEncoder, snatch_guard: &SnatchGuard, - ) -> Result<(UsageScope<'d>, SurfacesInDiscardState), RenderPassErrorInner> { + scope: &mut UsageScope<'_>, + ) -> Result<(), RenderPassErrorInner> { profiling::scope!("RenderPassInfo::finish"); unsafe { raw.end_render_pass(); @@ -1408,11 +1407,9 @@ impl<'d> RenderPassInfo<'d> { // the tracker set of the pass is always in "extend" mode unsafe { - self.usage_scope.textures.merge_single( - texture, - Some(ra.selector.clone()), - ra.usage, - )? + scope + .textures + .merge_single(texture, Some(ra.selector.clone()), ra.usage)? 
}; } @@ -1462,7 +1459,7 @@ impl<'d> RenderPassInfo<'d> { } } - Ok((self.usage_scope, self.pending_discard_init_fixups)) + Ok(()) } } @@ -1798,6 +1795,8 @@ impl Global { .open_pass(base.label.as_deref()) .map_pass_err(pass_scope)?; + let mut pending_discard_init_fixups = SurfacesInDiscardState::new(); + let info = RenderPassInfo::start( device, hal_label(base.label.as_deref(), device.instance_flags), @@ -1811,6 +1810,7 @@ impl Global { tracker, texture_memory_actions, pending_query_resets, + &mut pending_discard_init_fixups, snatch_guard, ) .map_pass_err(pass_scope)?; @@ -1821,27 +1821,33 @@ impl Global { let mut state = State { pipeline_flags: PipelineFlags::empty(), - binder: Binder::new(), blend_constant: OptionalState::Unused, stencil_reference: 0, pipeline: None, index: IndexState::default(), vertex: VertexState::default(), - debug_scope_depth: 0, info, - snatch_guard, - - device, - raw_encoder: encoder.raw.as_mut(), - tracker, - buffer_memory_init_actions, - texture_memory_actions, - - temp_offsets: Vec::new(), - dynamic_offset_count: 0, - string_offset: 0, + general: pass::BaseState { + device, + raw_encoder: encoder.raw.as_mut(), + tracker, + buffer_memory_init_actions, + texture_memory_actions, + as_actions: &mut cmd_buf_data.as_actions, + pending_discard_init_fixups, + scope: device.new_usage_scope(), + binder: Binder::new(), + + snatch_guard, + + temp_offsets: Vec::new(), + dynamic_offset_count: 0, + + debug_scope_depth: 0, + string_offset: 0, + }, active_occlusion_query: None, active_pipeline_statistics_query: None, @@ -1855,13 +1861,14 @@ impl Global { bind_group, } => { let scope = PassErrorScope::SetBindGroup; - set_bind_group( - &mut state, - &cmd_buf, + pass::set_bind_group::( + &mut state.general, + cmd_buf.as_ref(), &base.dynamic_offsets, index, num_dynamic_offsets, bind_group, + true, ) .map_pass_err(scope)?; } @@ -1918,13 +1925,14 @@ impl Global { values_offset, } => { let scope = PassErrorScope::SetPushConstant; - set_push_constant( - &mut state, + pass::set_push_constant::( + &mut state.general, &base.push_constant_data, stages, offset, size_bytes, values_offset, + |_| {}, ) .map_pass_err(scope)?; } @@ -2026,24 +2034,25 @@ impl Global { .map_pass_err(scope)?; } ArcRenderCommand::PushDebugGroup { color: _, len } => { - push_debug_group(&mut state, &base.string_data, len); + pass::push_debug_group(&mut state.general, &base.string_data, len); } ArcRenderCommand::PopDebugGroup => { let scope = PassErrorScope::PopDebugGroup; - pop_debug_group(&mut state).map_pass_err(scope)?; + pass::pop_debug_group::(&mut state.general) + .map_pass_err(scope)?; } ArcRenderCommand::InsertDebugMarker { color: _, len } => { - insert_debug_marker(&mut state, &base.string_data, len); + pass::insert_debug_marker(&mut state.general, &base.string_data, len); } ArcRenderCommand::WriteTimestamp { query_set, query_index, } => { let scope = PassErrorScope::WriteTimestamp; - write_timestamp( - &mut state, + pass::write_timestamp::( + &mut state.general, cmd_buf.as_ref(), - &mut cmd_buf_data.pending_query_resets, + Some(&mut cmd_buf_data.pending_query_resets), query_set, query_index, ) @@ -2061,8 +2070,8 @@ impl Global { validate_and_begin_occlusion_query( query_set, - state.raw_encoder, - &mut state.tracker.query_sets, + state.general.raw_encoder, + &mut state.general.tracker.query_sets, query_index, Some(&mut cmd_buf_data.pending_query_resets), &mut state.active_occlusion_query, @@ -2074,7 +2083,7 @@ impl Global { let scope = PassErrorScope::EndOcclusionQuery; end_occlusion_query( - 
state.raw_encoder, + state.general.raw_encoder, &mut state.active_occlusion_query, ) .map_pass_err(scope)?; @@ -2091,8 +2100,8 @@ impl Global { validate_and_begin_pipeline_statistics_query( query_set, - state.raw_encoder, - &mut state.tracker.query_sets, + state.general.raw_encoder, + &mut state.general.tracker.query_sets, cmd_buf.as_ref(), query_index, Some(&mut cmd_buf_data.pending_query_resets), @@ -2105,7 +2114,7 @@ impl Global { let scope = PassErrorScope::EndPipelineStatisticsQuery; end_pipeline_statistics_query( - state.raw_encoder, + state.general.raw_encoder, &mut state.active_pipeline_statistics_query, ) .map_pass_err(scope)?; @@ -2124,11 +2133,20 @@ impl Global { } } - let (trackers, pending_discard_init_fixups) = state + state .info - .finish(device, state.raw_encoder, state.snatch_guard) + .finish( + device, + state.general.raw_encoder, + state.general.snatch_guard, + &mut state.general.scope, + ) .map_pass_err(pass_scope)?; + let trackers = state.general.scope; + + let pending_discard_init_fixups = state.general.pending_discard_init_fixups; + encoder.close().map_pass_err(pass_scope)?; (trackers, pending_discard_init_fixups) }; @@ -2175,97 +2193,6 @@ impl Global { } } -fn set_bind_group( - state: &mut State, - cmd_buf: &Arc, - dynamic_offsets: &[DynamicOffset], - index: u32, - num_dynamic_offsets: usize, - bind_group: Option>, -) -> Result<(), RenderPassErrorInner> { - if bind_group.is_none() { - api_log!("RenderPass::set_bind_group {index} None"); - } else { - api_log!( - "RenderPass::set_bind_group {index} {}", - bind_group.as_ref().unwrap().error_ident() - ); - } - - let max_bind_groups = state.device.limits.max_bind_groups; - if index >= max_bind_groups { - return Err(RenderCommandError::BindGroupIndexOutOfRange { - index, - max: max_bind_groups, - } - .into()); - } - - state.temp_offsets.clear(); - state.temp_offsets.extend_from_slice( - &dynamic_offsets - [state.dynamic_offset_count..state.dynamic_offset_count + num_dynamic_offsets], - ); - state.dynamic_offset_count += num_dynamic_offsets; - - if bind_group.is_none() { - // TODO: Handle bind_group None. - return Ok(()); - } - - let bind_group = bind_group.unwrap(); - let bind_group = state.tracker.bind_groups.insert_single(bind_group); - - bind_group.same_device_as(cmd_buf.as_ref())?; - - bind_group.validate_dynamic_bindings(index, &state.temp_offsets)?; - - // merge the resource tracker in - unsafe { - state.info.usage_scope.merge_bind_group(&bind_group.used)?; - } - //Note: stateless trackers are not merged: the lifetime reference - // is held to the bind group itself. 
- - state - .buffer_memory_init_actions - .extend(bind_group.used_buffer_ranges.iter().filter_map(|action| { - action - .buffer - .initialization_status - .read() - .check_action(action) - })); - for action in bind_group.used_texture_ranges.iter() { - state - .info - .pending_discard_init_fixups - .extend(state.texture_memory_actions.register_init_action(action)); - } - - let pipeline_layout = state.binder.pipeline_layout.clone(); - let entries = state - .binder - .assign_group(index as usize, bind_group, &state.temp_offsets); - if !entries.is_empty() && pipeline_layout.is_some() { - let pipeline_layout = pipeline_layout.as_ref().unwrap().raw(); - for (i, e) in entries.iter().enumerate() { - if let Some(group) = e.group.as_ref() { - let raw_bg = group.try_raw(state.snatch_guard)?; - unsafe { - state.raw_encoder.set_bind_group( - pipeline_layout, - index + i as u32, - Some(raw_bg), - &e.dynamic_offsets, - ); - } - } - } - } - Ok(()) -} - fn set_pipeline( state: &mut State, cmd_buf: &Arc, @@ -2275,7 +2202,12 @@ fn set_pipeline( state.pipeline = Some(pipeline.clone()); - let pipeline = state.tracker.render_pipelines.insert_single(pipeline); + let pipeline = state + .general + .tracker + .render_pipelines + .insert_single(pipeline) + .clone(); pipeline.same_device_as(cmd_buf.as_ref())?; @@ -2299,61 +2231,28 @@ fn set_pipeline( .require(pipeline.flags.contains(PipelineFlags::BLEND_CONSTANT)); unsafe { - state.raw_encoder.set_render_pipeline(pipeline.raw()); + state + .general + .raw_encoder + .set_render_pipeline(pipeline.raw()); } if pipeline.flags.contains(PipelineFlags::STENCIL_REFERENCE) { unsafe { state + .general .raw_encoder .set_stencil_reference(state.stencil_reference); } } // Rebind resource - if state.binder.pipeline_layout.is_none() - || !state - .binder - .pipeline_layout - .as_ref() - .unwrap() - .is_equal(&pipeline.layout) - { - let (start_index, entries) = state - .binder - .change_pipeline_layout(&pipeline.layout, &pipeline.late_sized_buffer_groups); - if !entries.is_empty() { - for (i, e) in entries.iter().enumerate() { - if let Some(group) = e.group.as_ref() { - let raw_bg = group.try_raw(state.snatch_guard)?; - unsafe { - state.raw_encoder.set_bind_group( - pipeline.layout.raw(), - start_index as u32 + i as u32, - Some(raw_bg), - &e.dynamic_offsets, - ); - } - } - } - } - - // Clear push constant ranges - let non_overlapping = - super::bind::compute_nonoverlapping_ranges(&pipeline.layout.push_constant_ranges); - for range in non_overlapping { - let offset = range.range.start; - let size_bytes = range.range.end - offset; - super::push_constant_clear(offset, size_bytes, |clear_offset, clear_data| unsafe { - state.raw_encoder.set_push_constants( - pipeline.layout.raw(), - range.stages, - clear_offset, - clear_data, - ); - }); - } - } + pass::rebind_resources::( + &mut state.general, + &pipeline.layout, + &pipeline.late_sized_buffer_groups, + || {}, + )?; // Update vertex buffer limits. 
state.vertex.update_limits(&pipeline.vertex_steps); @@ -2371,15 +2270,15 @@ fn set_index_buffer( api_log!("RenderPass::set_index_buffer {}", buffer.error_ident()); state - .info - .usage_scope + .general + .scope .buffers .merge_single(&buffer, wgt::BufferUses::INDEX)?; buffer.same_device_as(cmd_buf.as_ref())?; buffer.check_usage(BufferUsages::INDEX)?; - let buf_raw = buffer.try_raw(state.snatch_guard)?; + let buf_raw = buffer.try_raw(state.general.snatch_guard)?; let end = match size { Some(s) => offset + s.get(), @@ -2387,13 +2286,13 @@ fn set_index_buffer( }; state.index.update_buffer(offset..end, index_format); - state - .buffer_memory_init_actions - .extend(buffer.initialization_status.read().create_action( + state.general.buffer_memory_init_actions.extend( + buffer.initialization_status.read().create_action( &buffer, offset..end, MemoryInitKind::NeedsInitializedMemory, - )); + ), + ); let bb = hal::BufferBinding { buffer: buf_raw, @@ -2401,7 +2300,7 @@ fn set_index_buffer( size, }; unsafe { - hal::DynCommandEncoder::set_index_buffer(state.raw_encoder, bb, index_format); + hal::DynCommandEncoder::set_index_buffer(state.general.raw_encoder, bb, index_format); } Ok(()) } @@ -2420,14 +2319,14 @@ fn set_vertex_buffer( ); state - .info - .usage_scope + .general + .scope .buffers .merge_single(&buffer, wgt::BufferUses::VERTEX)?; buffer.same_device_as(cmd_buf.as_ref())?; - let max_vertex_buffers = state.device.limits.max_vertex_buffers; + let max_vertex_buffers = state.general.device.limits.max_vertex_buffers; if slot >= max_vertex_buffers { return Err(RenderCommandError::VertexBufferIndexOutOfRange { index: slot, @@ -2437,7 +2336,7 @@ fn set_vertex_buffer( } buffer.check_usage(BufferUsages::VERTEX)?; - let buf_raw = buffer.try_raw(state.snatch_guard)?; + let buf_raw = buffer.try_raw(state.general.snatch_guard)?; //TODO: where are we checking that the offset is in bound? 
     let buffer_size = match size {
@@ -2446,13 +2345,13 @@
     };
     state.vertex.buffer_sizes[slot as usize] = Some(buffer_size);
 
-    state
-        .buffer_memory_init_actions
-        .extend(buffer.initialization_status.read().create_action(
+    state.general.buffer_memory_init_actions.extend(
+        buffer.initialization_status.read().create_action(
             &buffer,
             offset..(offset + buffer_size),
             MemoryInitKind::NeedsInitializedMemory,
-        ));
+        ),
+    );
 
     let bb = hal::BufferBinding {
         buffer: buf_raw,
@@ -2460,7 +2359,7 @@
         size,
     };
     unsafe {
-        hal::DynCommandEncoder::set_vertex_buffer(state.raw_encoder, slot, bb);
+        hal::DynCommandEncoder::set_vertex_buffer(state.general.raw_encoder, slot, bb);
     }
     if let Some(pipeline) = state.pipeline.as_ref() {
         state.vertex.update_limits(&pipeline.vertex_steps);
@@ -2479,7 +2378,7 @@ fn set_blend_constant(state: &mut State, color: &Color) {
         color.a as f32,
     ];
     unsafe {
-        state.raw_encoder.set_blend_constants(&array);
+        state.general.raw_encoder.set_blend_constants(&array);
    }
 }
 
@@ -2492,7 +2391,7 @@ fn set_stencil_reference(state: &mut State, value: u32) {
         .contains(PipelineFlags::STENCIL_REFERENCE)
     {
         unsafe {
-            state.raw_encoder.set_stencil_reference(value);
+            state.general.raw_encoder.set_stencil_reference(value);
         }
     }
 }
 
@@ -2507,18 +2406,18 @@ fn set_viewport(
     if rect.w < 0.0
         || rect.h < 0.0
-        || rect.w > state.device.limits.max_texture_dimension_2d as f32
-        || rect.h > state.device.limits.max_texture_dimension_2d as f32
+        || rect.w > state.general.device.limits.max_texture_dimension_2d as f32
+        || rect.h > state.general.device.limits.max_texture_dimension_2d as f32
     {
         return Err(RenderCommandError::InvalidViewportRectSize {
             w: rect.w,
             h: rect.h,
-            max: state.device.limits.max_texture_dimension_2d,
+            max: state.general.device.limits.max_texture_dimension_2d,
         }
         .into());
     }
 
-    let max_viewport_range = state.device.limits.max_texture_dimension_2d as f32 * 2.0;
+    let max_viewport_range = state.general.device.limits.max_texture_dimension_2d as f32 * 2.0;
 
     if rect.x < -max_viewport_range
         || rect.y < -max_viewport_range
@@ -2541,42 +2440,11 @@
         w: rect.w,
         h: rect.h,
     };
-    unsafe {
-        state.raw_encoder.set_viewport(&r, depth_min..depth_max);
-    }
-    Ok(())
-}
-
-fn set_push_constant(
-    state: &mut State,
-    push_constant_data: &[u32],
-    stages: ShaderStages,
-    offset: u32,
-    size_bytes: u32,
-    values_offset: Option<u32>,
-) -> Result<(), RenderPassErrorInner> {
-    api_log!("RenderPass::set_push_constants");
-
-    let values_offset = values_offset.ok_or(RenderPassErrorInner::InvalidValuesOffset)?;
-
-    let end_offset_bytes = offset + size_bytes;
-    let values_end_offset = (values_offset + size_bytes / wgt::PUSH_CONSTANT_ALIGNMENT) as usize;
-    let data_slice = &push_constant_data[(values_offset as usize)..values_end_offset];
-
-    let pipeline_layout = state
-        .binder
-        .pipeline_layout
-        .as_ref()
-        .ok_or(DrawError::MissingPipeline)?;
-
-    pipeline_layout
-        .validate_push_constant_ranges(stages, offset, end_offset_bytes)
-        .map_err(RenderCommandError::from)?;
-
     unsafe {
         state
+            .general
             .raw_encoder
-            .set_push_constants(pipeline_layout.raw(), stages, offset, data_slice)
+            .set_viewport(&r, depth_min..depth_max);
     }
     Ok(())
 }
@@ -2596,7 +2464,7 @@ fn set_scissor(state: &mut State, rect: Rect<u32>) -> Result<(), RenderPassError
         h: rect.h,
     };
     unsafe {
-        state.raw_encoder.set_scissor_rect(&r);
+        state.general.raw_encoder.set_scissor_rect(&r);
     }
     Ok(())
 }
@@ -2623,9 +2491,12 @@ fn draw(
 
     unsafe {
         if instance_count > 0 && vertex_count > 0 {
-            state
-                .raw_encoder
-                .draw(first_vertex, vertex_count, first_instance, instance_count);
+            state.general.raw_encoder.draw(
+                first_vertex,
+                vertex_count,
+                first_instance,
+                instance_count,
+            );
         }
     }
     Ok(())
@@ -2658,7 +2529,7 @@ fn draw_indexed(
     unsafe {
         if instance_count > 0 && index_count > 0 {
-            state.raw_encoder.draw_indexed(
+            state.general.raw_encoder.draw_indexed(
                 first_index,
                 index_count,
                 base_vertex,
@@ -2689,17 +2560,19 @@ fn multi_draw_indirect(
     if count != 1 {
         state
+            .general
             .device
             .require_features(wgt::Features::MULTI_DRAW_INDIRECT)?;
     }
 
     state
+        .general
         .device
         .require_downlevel_flags(wgt::DownlevelFlags::INDIRECT_EXECUTION)?;
 
     indirect_buffer.same_device_as(cmd_buf.as_ref())?;
 
     indirect_buffer.check_usage(BufferUsages::INDIRECT)?;
-    indirect_buffer.check_destroyed(state.snatch_guard)?;
+    indirect_buffer.check_destroyed(state.general.snatch_guard)?;
 
     if offset % 4 != 0 {
         return Err(RenderPassErrorInner::UnalignedIndirectBufferOffset(offset));
@@ -2717,7 +2590,7 @@ fn multi_draw_indirect(
         });
     }
 
-    state.buffer_memory_init_actions.extend(
+    state.general.buffer_memory_init_actions.extend(
         indirect_buffer.initialization_status.read().create_action(
             &indirect_buffer,
             offset..end_offset,
@@ -2742,10 +2615,10 @@ fn multi_draw_indirect(
         }
     }
 
-    if state.device.indirect_validation.is_some() {
+    if state.general.device.indirect_validation.is_some() {
         state
-            .info
-            .usage_scope
+            .general
+            .scope
             .buffers
             .merge_single(&indirect_buffer, wgt::BufferUses::STORAGE_READ_ONLY)?;
 
@@ -2800,8 +2673,8 @@ fn multi_draw_indirect(
     }
 
     let mut draw_ctx = DrawContext {
-        raw_encoder: state.raw_encoder,
-        device: state.device,
+        raw_encoder: state.general.raw_encoder,
+        device: state.general.device,
         indirect_draw_validation_resources,
         indirect_draw_validation_batcher,
         indirect_buffer,
@@ -2834,15 +2707,15 @@ fn multi_draw_indirect(
         draw_ctx.draw(current_draw_data);
     } else {
         state
-            .info
-            .usage_scope
+            .general
+            .scope
             .buffers
             .merge_single(&indirect_buffer, wgt::BufferUses::INDIRECT)?;
 
         draw(
-            state.raw_encoder,
+            state.general.raw_encoder,
             indexed,
-            indirect_buffer.try_raw(state.snatch_guard)?,
+            indirect_buffer.try_raw(state.general.snatch_guard)?,
             offset,
             count,
         );
@@ -2872,9 +2745,11 @@ fn multi_draw_indirect_count(
     let stride = get_stride_of_indirect_args(indexed);
 
     state
+        .general
        .device
         .require_features(wgt::Features::MULTI_DRAW_INDIRECT_COUNT)?;
     state
+        .general
         .device
         .require_downlevel_flags(wgt::DownlevelFlags::INDIRECT_EXECUTION)?;
 
@@ -2882,22 +2757,22 @@ fn multi_draw_indirect_count(
     count_buffer.same_device_as(cmd_buf.as_ref())?;
 
     state
-        .info
-        .usage_scope
+        .general
+        .scope
         .buffers
         .merge_single(&indirect_buffer, wgt::BufferUses::INDIRECT)?;
 
     indirect_buffer.check_usage(BufferUsages::INDIRECT)?;
-    let indirect_raw = indirect_buffer.try_raw(state.snatch_guard)?;
+    let indirect_raw = indirect_buffer.try_raw(state.general.snatch_guard)?;
 
     state
-        .info
-        .usage_scope
+        .general
+        .scope
         .buffers
         .merge_single(&count_buffer, wgt::BufferUses::INDIRECT)?;
 
     count_buffer.check_usage(BufferUsages::INDIRECT)?;
-    let count_raw = count_buffer.try_raw(state.snatch_guard)?;
+    let count_raw = count_buffer.try_raw(state.general.snatch_guard)?;
 
     if offset % 4 != 0 {
         return Err(RenderPassErrorInner::UnalignedIndirectBufferOffset(offset));
@@ -2912,7 +2787,7 @@ fn multi_draw_indirect_count(
             buffer_size: indirect_buffer.size,
         });
     }
-    state.buffer_memory_init_actions.extend(
+    state.general.buffer_memory_init_actions.extend(
         indirect_buffer.initialization_status.read().create_action(
             &indirect_buffer,
             offset..end_offset,
@@ -2929,7 +2804,7 @@ fn multi_draw_indirect_count(
             count_buffer_size: count_buffer.size,
         });
     }
-    state.buffer_memory_init_actions.extend(
+    state.general.buffer_memory_init_actions.extend(
         count_buffer.initialization_status.read().create_action(
             &count_buffer,
             count_buffer_offset..end_count_offset,
@@ -2939,7 +2814,7 @@ fn multi_draw_indirect_count(
 
     match indexed {
         false => unsafe {
-            state.raw_encoder.draw_indirect_count(
+            state.general.raw_encoder.draw_indirect_count(
                 indirect_raw,
                 offset,
                 count_raw,
@@ -2948,7 +2823,7 @@ fn multi_draw_indirect_count(
             );
         },
         true => unsafe {
-            state.raw_encoder.draw_indexed_indirect_count(
+            state.general.raw_encoder.draw_indexed_indirect_count(
                 indirect_raw,
                 offset,
                 count_raw,
@@ -2960,87 +2835,6 @@ fn multi_draw_indirect_count(
     Ok(())
 }
 
-fn push_debug_group(state: &mut State, string_data: &[u8], len: usize) {
-    state.debug_scope_depth += 1;
-    if !state
-        .device
-        .instance_flags
-        .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
-    {
-        let label =
-            str::from_utf8(&string_data[state.string_offset..state.string_offset + len]).unwrap();
-
-        api_log!("RenderPass::push_debug_group {label:?}");
-        unsafe {
-            state.raw_encoder.begin_debug_marker(label);
-        }
-    }
-    state.string_offset += len;
-}
-
-fn pop_debug_group(state: &mut State) -> Result<(), RenderPassErrorInner> {
-    api_log!("RenderPass::pop_debug_group");
-
-    if state.debug_scope_depth == 0 {
-        return Err(RenderPassErrorInner::InvalidPopDebugGroup);
-    }
-    state.debug_scope_depth -= 1;
-    if !state
-        .device
-        .instance_flags
-        .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
-    {
-        unsafe {
-            state.raw_encoder.end_debug_marker();
-        }
-    }
-    Ok(())
-}
-
-fn insert_debug_marker(state: &mut State, string_data: &[u8], len: usize) {
-    if !state
-        .device
-        .instance_flags
-        .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
-    {
-        let label =
-            str::from_utf8(&string_data[state.string_offset..state.string_offset + len]).unwrap();
-        api_log!("RenderPass::insert_debug_marker {label:?}");
-        unsafe {
-            state.raw_encoder.insert_debug_marker(label);
-        }
-    }
-    state.string_offset += len;
-}
-
-fn write_timestamp(
-    state: &mut State,
-    cmd_buf: &CommandBuffer,
-    pending_query_resets: &mut QueryResetMap,
-    query_set: Arc<QuerySet>,
-    query_index: u32,
-) -> Result<(), RenderPassErrorInner> {
-    api_log!(
-        "RenderPass::write_timestamps {query_index} {}",
-        query_set.error_ident()
-    );
-
-    query_set.same_device_as(cmd_buf)?;
-
-    state
-        .device
-        .require_features(wgt::Features::TIMESTAMP_QUERY_INSIDE_PASSES)?;
-
-    let query_set = state.tracker.query_sets.insert_single(query_set);
-
-    query_set.validate_and_write_timestamp(
-        state.raw_encoder,
-        query_index,
-        Some(pending_query_resets),
-    )?;
-    Ok(())
-}
-
 fn execute_bundle(
     state: &mut State,
     indirect_draw_validation_resources: &mut crate::indirect_validation::DrawResources,
@@ -3050,7 +2844,7 @@
 ) -> Result<(), RenderPassErrorInner> {
     api_log!("RenderPass::execute_bundle {}", bundle.error_ident());
 
-    let bundle = state.tracker.bundles.insert_single(bundle);
+    let bundle = state.general.tracker.bundles.insert_single(bundle);
 
     bundle.same_device_as(cmd_buf.as_ref())?;
 
@@ -3073,33 +2867,33 @@
         );
     }
 
-    state
-        .buffer_memory_init_actions
-        .extend(
-            bundle
-                .buffer_memory_init_actions
-                .iter()
-                .filter_map(|action| {
-                    action
-                        .buffer
-                        .initialization_status
-                        .read()
-                        .check_action(action)
-                }),
-        );
+    state.general.buffer_memory_init_actions.extend(
+        bundle
+            .buffer_memory_init_actions
+            .iter()
+            .filter_map(|action| {
+                action
+                    .buffer
+                    .initialization_status
+                    .read()
+                    .check_action(action)
+            }),
+    );
     for action in bundle.texture_memory_init_actions.iter() {
-        state
-            .info
-            .pending_discard_init_fixups
-            .extend(state.texture_memory_actions.register_init_action(action));
+        state.general.pending_discard_init_fixups.extend(
+            state
+                .general
+                .texture_memory_actions
+                .register_init_action(action),
+        );
     }
 
     unsafe {
         bundle.execute(
-            state.raw_encoder,
+            state.general.raw_encoder,
             indirect_draw_validation_resources,
             indirect_draw_validation_batcher,
-            state.snatch_guard,
+            state.general.snatch_guard,
         )
     }
     .map_err(|e| match e {
@@ -3113,7 +2907,7 @@
     })?;
 
     unsafe {
-        state.info.usage_scope.merge_render_bundle(&bundle.used)?;
+        state.general.scope.merge_render_bundle(&bundle.used)?;
     };
     state.reset_bundle();
     Ok(())