2021-02-27 22:43:49 +01:00
# include "vk_rtx.h"
# include "vk_core.h"
# include "vk_common.h"
# include "vk_buffer.h"
2021-03-13 21:29:17 +01:00
# include "vk_pipeline.h"
2021-03-15 17:44:15 +01:00
# include "vk_cvar.h"
2021-03-17 17:15:33 +01:00
# include "vk_textures.h"
2021-03-28 22:52:25 +02:00
# include "vk_light.h"
2021-05-01 20:48:24 +02:00
# include "vk_descriptor.h"
2021-07-05 19:59:14 +02:00
# include "vk_ray_internal.h"
2021-02-27 22:43:49 +01:00
# include "eiface.h"
2021-03-17 17:15:33 +01:00
# include "xash3d_mathlib.h"
2021-02-27 22:43:49 +01:00
2021-03-21 00:21:26 +01:00
# include <string.h>
2021-02-27 22:43:49 +01:00
# define MAX_SCRATCH_BUFFER (16*1024*1024)
2021-02-28 00:40:57 +01:00
# define MAX_ACCELS_BUFFER (64*1024*1024)
2021-07-05 19:59:14 +02:00
2021-04-12 20:07:41 +02:00
# define MAX_LIGHT_LEAVES 8192
2021-02-27 22:43:49 +01:00
2021-09-04 19:57:29 +02:00
// Shader binding table (SBT) record indices. The order here defines the
// record layout inside sbt_buffer and must match the shader_groups array
// filled in createPipeline().
enum {
	ShaderBindingTable_RayGen,
	ShaderBindingTable_Miss,
	ShaderBindingTable_Miss_Shadow,
	ShaderBindingTable_Hit,
	ShaderBindingTable_HitWithAlphaMask,

	ShaderBindingTable_COUNT
};
2021-07-05 22:24:57 +02:00
2021-03-20 19:15:57 +01:00
// TODO settings/realtime modifiable/adaptive
# define FRAME_WIDTH 1280
# define FRAME_HEIGHT 720
2021-03-01 19:52:08 +01:00
// TODO sync with shaders
2021-03-20 19:15:57 +01:00
// TODO optimal values
2021-03-01 19:52:08 +01:00
# define WG_W 16
# define WG_H 8
2021-03-07 01:40:35 +01:00
// GPU-visible point light record.
// NOTE(review): layout looks like it mirrors a shader-side struct
// (vec3 + float pairs, padded to 32 bytes) -- confirm against the shaders.
typedef struct {
	vec3_t pos;
	float radius;
	vec3_t color;
	float padding_; // keeps the struct size a multiple of 16 bytes
} vk_light_t;
2021-03-15 17:44:15 +01:00
typedef struct {
2021-07-14 18:43:36 +02:00
uint32_t random_seed ;
2021-03-15 17:44:15 +01:00
int bounces ;
2021-04-12 20:07:41 +02:00
float prev_frame_blend_factor ;
2021-08-05 03:36:53 +02:00
float pixel_cone_spread_angle ;
2021-03-15 17:44:15 +01:00
} vk_rtx_push_constants_t ;
2021-04-16 20:43:37 +02:00
typedef struct {
int min_cell [ 4 ] , size [ 3 ] ; // 4th element is padding
2021-06-05 21:48:16 +02:00
vk_lights_cell_t cells [ MAX_LIGHT_CLUSTERS ] ;
2021-04-16 20:43:37 +02:00
} vk_ray_shader_light_grid ;
2021-05-01 20:48:24 +02:00
// Descriptor set binding slots; numbers must match the binding indices
// declared in the ray tracing shaders.
enum {
	RayDescBinding_DestImage = 0,
	RayDescBinding_TLAS = 1,
	RayDescBinding_UBOMatrices = 2,

	RayDescBinding_Kusochki = 3,
	RayDescBinding_Indices = 4,
	RayDescBinding_Vertices = 5,
	RayDescBinding_Textures = 6,

	RayDescBinding_UBOLights = 7,
	RayDescBinding_EmissiveKusochki = 8,
	RayDescBinding_LightClusters = 9,

	RayDescBinding_PrevFrame = 10,

	RayDescBinding_COUNT
};
2021-02-27 22:43:49 +01:00
static struct {
2021-05-01 20:48:24 +02:00
vk_descriptors_t descriptors ;
VkDescriptorSetLayoutBinding desc_bindings [ RayDescBinding_COUNT ] ;
vk_descriptor_value_t desc_values [ RayDescBinding_COUNT ] ;
VkDescriptorSet desc_sets [ 1 ] ;
2021-03-01 19:52:08 +01:00
VkPipeline pipeline ;
2021-02-27 22:43:49 +01:00
2021-07-05 22:24:57 +02:00
// Shader binding table buffer
vk_buffer_t sbt_buffer ;
uint32_t sbt_record_size ;
2021-05-01 20:48:24 +02:00
// Stores AS built data. Lifetime similar to render buffer:
// - some portion lives for entire map lifetime
// - some portion lives only for a single frame (may have several frames in flight)
// TODO: unify this with render buffer
// Needs: AS_STORAGE_BIT, SHADER_DEVICE_ADDRESS_BIT
2021-02-27 22:43:49 +01:00
vk_buffer_t accels_buffer ;
2021-05-03 20:17:01 +02:00
vk_ring_buffer_t accels_buffer_alloc ;
2021-05-01 20:48:24 +02:00
// Temp: lives only during a single frame (may have many in flight)
// Used for building ASes;
// Needs: AS_STORAGE_BIT, SHADER_DEVICE_ADDRESS_BIT
2021-02-27 22:43:49 +01:00
vk_buffer_t scratch_buffer ;
VkDeviceAddress accels_buffer_addr , scratch_buffer_addr ;
2021-05-01 20:48:24 +02:00
// Temp-ish: used for making TLAS, contains addressed to all used BLASes
// Lifetime and nature of usage similar to scratch_buffer
// TODO: unify them
// Needs: SHADER_DEVICE_ADDRESS, STORAGE_BUFFER, AS_BUILD_INPUT_READ_ONLY
2021-03-01 19:52:08 +01:00
vk_buffer_t tlas_geom_buffer ;
2021-05-01 20:48:24 +02:00
// Planned to contain seveal types of data:
// - grid structure itself
// - lights data:
// - dlights (fully dynamic)
// - entity lights (can be dynamic with light styles)
// - surface lights (map geometry is static, however brush models can have them too and move around (e.g. wagonchik and elevators))
// Therefore, this is also dynamic and lifetime is per-frame
// TODO: unify with scratch buffer
// Needs: STORAGE_BUFFER
// Can be potentially crated using compute shader (would need shader write bit)
2021-04-16 20:43:37 +02:00
vk_buffer_t light_grid_buffer ;
2021-03-20 20:34:34 +01:00
2021-05-01 20:48:24 +02:00
// TODO need several TLASes for N frames in flight
2021-03-01 19:52:08 +01:00
VkAccelerationStructureKHR tlas ;
2021-03-13 21:29:17 +01:00
2021-04-09 23:59:04 +02:00
// Per-frame data that is accumulated between RayFrameBegin and End calls
struct {
uint32_t scratch_offset ; // for building dynamic blases
} frame ;
2021-03-20 21:42:15 +01:00
unsigned frame_number ;
2021-03-20 19:15:57 +01:00
vk_image_t frames [ 2 ] ;
2021-03-13 21:29:17 +01:00
qboolean reload_pipeline ;
2021-05-24 20:14:03 +02:00
} g_rtx = { 0 } ;
2021-02-27 22:43:49 +01:00
2021-07-05 19:59:14 +02:00
// Returns the GPU device address of the given buffer.
VkDeviceAddress getBufferDeviceAddress(VkBuffer buffer) {
	const VkBufferDeviceAddressInfo address_info = {
		.sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
		.buffer = buffer,
	};
	return vkGetBufferDeviceAddress(vk_core.device, &address_info);
}
2021-03-01 19:52:08 +01:00
// Returns the GPU device address of the given acceleration structure.
static VkDeviceAddress getASAddress(VkAccelerationStructureKHR as) {
	const VkAccelerationStructureDeviceAddressInfoKHR address_info = {
		.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR,
		.accelerationStructure = as,
	};
	return vkGetAccelerationStructureDeviceAddressKHR(vk_core.device, &address_info);
}
2021-07-05 19:59:14 +02:00
// TODO split this into smaller building blocks in a separate module
//
// Creates (if *args->p_accel is VK_NULL_HANDLE) and/or builds an acceleration
// structure described by args. The AS memory is sub-allocated from
// g_rtx.accels_buffer via its ring allocator; build scratch space comes from
// g_rtx.scratch_buffer at the per-frame scratch_offset.
//
// cmdbuf may be VK_NULL_HANDLE (or args->build_ranges NULL) to only create
// the AS object without recording a build.
// model, when non-NULL, receives the AS size/offset for bookkeeping.
// Returns false on scratch/accel buffer exhaustion.
qboolean createOrUpdateAccelerationStructure(VkCommandBuffer cmdbuf, const as_build_args_t *args, vk_ray_model_t *model) {
	const qboolean should_create = *args->p_accel == VK_NULL_HANDLE;
#if 1 // update does not work at all on AMD gpus
	const qboolean is_update = false; // FIXME this crashes for some reason !should_create && args->dynamic;
#else
	const qboolean is_update = !should_create && args->dynamic;
#endif

	VkAccelerationStructureBuildGeometryInfoKHR build_info = {
		.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR,
		.type = args->type,
		.flags = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR | (args->dynamic ? VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR : 0),
		.mode = is_update ? VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR : VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR,
		.geometryCount = args->n_geoms,
		.pGeometries = args->geoms,
		.srcAccelerationStructure = is_update ? *args->p_accel : VK_NULL_HANDLE,
	};
	VkAccelerationStructureBuildSizesInfoKHR build_size = {
		.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR
	};

	uint32_t scratch_buffer_size = 0;

	ASSERT(args->geoms);
	ASSERT(args->n_geoms > 0);
	ASSERT(args->p_accel);

	vkGetAccelerationStructureBuildSizesKHR(
		vk_core.device, VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, &build_info, args->max_prim_counts, &build_size);
	scratch_buffer_size = is_update ? build_size.updateScratchSize : build_size.buildScratchSize;

#if 0
	{
		uint32_t max_prims = 0;
		for (int i = 0; i < args->n_geoms; ++i)
			max_prims += args->max_prim_counts[i];
		gEngine.Con_Reportf(
			"AS max_prims=%u, n_geoms=%u, build size: %d, scratch size: %d\n", max_prims, args->n_geoms, build_size.accelerationStructureSize, build_size.buildScratchSize);
	}
#endif

	if (MAX_SCRATCH_BUFFER < g_rtx.frame.scratch_offset + scratch_buffer_size) {
		gEngine.Con_Printf(S_ERROR "Scratch buffer overflow: left %u bytes, but need %u\n",
			MAX_SCRATCH_BUFFER - g_rtx.frame.scratch_offset,
			scratch_buffer_size);
		return false;
	}

	if (should_create) {
		const uint32_t as_size = build_size.accelerationStructureSize;
		const uint32_t buffer_offset = VK_RingBuffer_Alloc(&g_rtx.accels_buffer_alloc, as_size, 256);

		// Check the allocation result before using the offset anywhere
		if (buffer_offset == AllocFailed) {
			gEngine.Con_Printf(S_ERROR "Failed to allocate %u bytes for accel buffer\n", as_size);
			return false;
		}

		{
			const VkAccelerationStructureCreateInfoKHR asci = {
				.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR,
				.buffer = g_rtx.accels_buffer.buffer,
				.offset = buffer_offset,
				.type = args->type,
				.size = as_size,
			};
			XVK_CHECK(vkCreateAccelerationStructureKHR(vk_core.device, &asci, NULL, args->p_accel));
			SET_DEBUG_NAME(*args->p_accel, VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, args->debug_name);
		}

		if (model) {
			model->size = as_size;
			model->debug.as_offset = buffer_offset;
		}

		// gEngine.Con_Reportf("AS=%p, n_geoms=%u, build: %#x %d %#x\n", *args->p_accel, args->n_geoms, buffer_offset, as_size, buffer_offset + as_size);
	}

	// If not enough data for building, just create
	if (!cmdbuf || !args->build_ranges)
		return true;

	if (model) {
		ASSERT(model->size >= build_size.accelerationStructureSize);
	}

	build_info.dstAccelerationStructure = *args->p_accel;
	build_info.scratchData.deviceAddress = g_rtx.scratch_buffer_addr + g_rtx.frame.scratch_offset;
	//uint32_t scratch_offset_initial = g_rtx.frame.scratch_offset;
	g_rtx.frame.scratch_offset += scratch_buffer_size;
	g_rtx.frame.scratch_offset = ALIGN_UP(g_rtx.frame.scratch_offset, vk_core.physical_device.properties_accel.minAccelerationStructureScratchOffsetAlignment);

	//gEngine.Con_Reportf("AS=%p, n_geoms=%u, scratch: %#x %d %#x\n", *args->p_accel, args->n_geoms, scratch_offset_initial, scratch_buffer_size, scratch_offset_initial + scratch_buffer_size);

	vkCmdBuildAccelerationStructuresKHR(cmdbuf, 1, &build_info, &args->build_ranges);
	return true;
}
2021-05-26 18:35:36 +02:00
// Creates (and, when cmdbuf is given, builds) the top-level acceleration
// structure over all BLAS instances in tlas_geom_buffer.
// Pass VK_NULL_HANDLE for cmdbuf to only create the TLAS object.
static void createTlas(VkCommandBuffer cmdbuf) {
	const VkAccelerationStructureGeometryKHR tl_geom[] = {
		{
			.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR,
			//.flags = VK_GEOMETRY_OPAQUE_BIT,
			.geometryType = VK_GEOMETRY_TYPE_INSTANCES_KHR,
			.geometry.instances =
				(VkAccelerationStructureGeometryInstancesDataKHR){
					.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR,
					.data.deviceAddress = getBufferDeviceAddress(g_rtx.tlas_geom_buffer.buffer),
					.arrayOfPointers = VK_FALSE,
				},
		},
	};
	// Size the TLAS for the worst case so it doesn't need recreation per frame
	const uint32_t tl_max_prim_counts[ARRAYSIZE(tl_geom)] = { MAX_ACCELS }; //cmdbuf == VK_NULL_HANDLE ? MAX_ACCELS : g_ray_model_state.frame.num_models };
	const VkAccelerationStructureBuildRangeInfoKHR tl_build_range = {
		.primitiveCount = g_ray_model_state.frame.num_models,
	};
	const as_build_args_t asrgs = {
		.geoms = tl_geom,
		.max_prim_counts = tl_max_prim_counts,
		.build_ranges = cmdbuf == VK_NULL_HANDLE ? NULL : &tl_build_range,
		.n_geoms = ARRAYSIZE(tl_geom),
		.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR,
		// we can't really rebuild TLAS because instance count changes are not allowed .dynamic = true,
		.dynamic = false,
		.p_accel = &g_rtx.tlas,
		.debug_name = "TLAS",
	};
	if (!createOrUpdateAccelerationStructure(cmdbuf, &asrgs, NULL)) {
		gEngine.Host_Error("Could not create/update TLAS\n");
		return;
	}
}
2021-04-09 23:59:04 +02:00
void VK_RayNewMap ( void ) {
ASSERT ( vk_core . rtx ) ;
2021-03-01 19:52:08 +01:00
2021-05-03 20:17:01 +02:00
VK_RingBuffer_Clear ( & g_rtx . accels_buffer_alloc ) ;
2021-07-05 19:59:14 +02:00
VK_RingBuffer_Clear ( & g_ray_model_state . kusochki_alloc ) ;
2021-04-12 20:07:41 +02:00
2021-05-24 20:14:03 +02:00
// Clear model cache
2021-07-05 19:59:14 +02:00
for ( int i = 0 ; i < ARRAYSIZE ( g_ray_model_state . models_cache ) ; + + i ) {
vk_ray_model_t * model = g_ray_model_state . models_cache + i ;
2021-05-26 19:55:46 +02:00
VK_RayModelDestroy ( model ) ;
2021-05-24 20:14:03 +02:00
}
2021-05-10 18:17:03 +02:00
// Recreate tlas
// Why here and not in init: to make sure that its memory is preserved. Map init will clear all memory regions.
{
if ( g_rtx . tlas ! = VK_NULL_HANDLE ) {
vkDestroyAccelerationStructureKHR ( vk_core . device , g_rtx . tlas , NULL ) ;
g_rtx . tlas = VK_NULL_HANDLE ;
}
2021-05-26 18:35:36 +02:00
createTlas ( VK_NULL_HANDLE ) ;
2021-05-10 18:17:03 +02:00
}
2021-02-27 22:43:49 +01:00
}
2021-05-03 20:17:01 +02:00
void VK_RayMapLoadEnd ( void ) {
VK_RingBuffer_Fix ( & g_rtx . accels_buffer_alloc ) ;
2021-07-05 19:59:14 +02:00
VK_RingBuffer_Fix ( & g_ray_model_state . kusochki_alloc ) ;
2021-05-03 20:17:01 +02:00
}
2021-04-09 23:59:04 +02:00
void VK_RayFrameBegin ( void )
2021-03-01 20:22:58 +01:00
{
ASSERT ( vk_core . rtx ) ;
2021-07-05 19:59:14 +02:00
g_rtx . frame . scratch_offset = 0 ;
2021-05-08 23:34:42 +02:00
2021-07-05 19:59:14 +02:00
if ( g_ray_model_state . freeze_models )
return ;
2021-04-24 21:53:42 +02:00
2021-07-05 19:59:14 +02:00
XVK_RayModel_ClearForNextFrame ( ) ;
2021-05-03 20:17:01 +02:00
2021-07-05 19:59:14 +02:00
// TODO shouldn't we do this in freeze models mode anyway?
2021-06-05 21:48:16 +02:00
VK_LightsFrameInit ( ) ;
2021-03-01 20:22:58 +01:00
}
2021-03-13 21:29:17 +01:00
static void createPipeline ( void )
{
2021-06-23 19:40:37 +02:00
struct RayShaderSpec {
int max_dlights ;
int max_emissive_kusochki ;
uint32_t max_visible_dlights ;
uint32_t max_visible_surface_lights ;
float light_grid_cell_size ;
int max_light_clusters ;
} spec_data = {
. max_dlights = MAX_DLIGHTS ,
. max_emissive_kusochki = MAX_EMISSIVE_KUSOCHKI ,
. max_visible_dlights = MAX_VISIBLE_DLIGHTS ,
. max_visible_surface_lights = MAX_VISIBLE_SURFACE_LIGHTS ,
. light_grid_cell_size = LIGHT_GRID_CELL_SIZE ,
. max_light_clusters = MAX_LIGHT_CLUSTERS ,
} ;
const VkSpecializationMapEntry spec_map [ ] = {
{ . constantID = 0 , . offset = offsetof ( struct RayShaderSpec , max_dlights ) , . size = sizeof ( int ) } ,
{ . constantID = 1 , . offset = offsetof ( struct RayShaderSpec , max_emissive_kusochki ) , . size = sizeof ( int ) } ,
{ . constantID = 2 , . offset = offsetof ( struct RayShaderSpec , max_visible_dlights ) , . size = sizeof ( uint32_t ) } ,
{ . constantID = 3 , . offset = offsetof ( struct RayShaderSpec , max_visible_surface_lights ) , . size = sizeof ( uint32_t ) } ,
{ . constantID = 4 , . offset = offsetof ( struct RayShaderSpec , light_grid_cell_size ) , . size = sizeof ( float ) } ,
{ . constantID = 5 , . offset = offsetof ( struct RayShaderSpec , max_light_clusters ) , . size = sizeof ( int ) } ,
} ;
VkSpecializationInfo spec = {
. mapEntryCount = ARRAYSIZE ( spec_map ) ,
. pMapEntries = spec_map ,
. dataSize = sizeof ( spec_data ) ,
. pData = & spec_data ,
} ;
2021-07-05 22:24:57 +02:00
2021-09-04 19:57:29 +02:00
enum {
ShaderStageIndex_RayGen ,
ShaderStageIndex_Miss ,
ShaderStageIndex_Miss_Shadow ,
ShaderStageIndex_ClosestHit ,
ShaderStageIndex_AnyHit_AlphaMask ,
ShaderStageIndex_COUNT ,
2021-07-05 22:24:57 +02:00
} ;
2021-09-04 19:57:29 +02:00
# define DEFINE_SHADER(filename, bit, index) \
shaders [ ShaderStageIndex_ # # index ] = ( VkPipelineShaderStageCreateInfo ) { \
. sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO , \
. stage = VK_SHADER_STAGE_ # # bit # # _BIT_KHR , \
. module = loadShader ( filename ) , \
. pName = " main " , \
}
VkPipelineShaderStageCreateInfo shaders [ ShaderStageIndex_COUNT ] ;
2021-09-04 22:36:18 +02:00
VkRayTracingShaderGroupCreateInfoKHR shader_groups [ ShaderBindingTable_COUNT ] ;
const VkRayTracingPipelineCreateInfoKHR rtpci = {
. sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR ,
//TODO .flags = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR ....
. stageCount = ARRAYSIZE ( shaders ) ,
. pStages = shaders ,
. groupCount = ARRAYSIZE ( shader_groups ) ,
. pGroups = shader_groups ,
. maxPipelineRayRecursionDepth = 1 ,
. layout = g_rtx . descriptors . pipeline_layout ,
} ;
2021-09-04 19:57:29 +02:00
DEFINE_SHADER ( " ray.rgen.spv " , RAYGEN , RayGen ) ;
DEFINE_SHADER ( " ray.rmiss.spv " , MISS , Miss ) ;
DEFINE_SHADER ( " shadow.rmiss.spv " , MISS , Miss_Shadow ) ;
DEFINE_SHADER ( " ray.rchit.spv " , CLOSEST_HIT , ClosestHit ) ;
DEFINE_SHADER ( " alphamask.rahit.spv " , ANY_HIT , AnyHit_AlphaMask ) ;
shader_groups [ ShaderBindingTable_RayGen ] = ( VkRayTracingShaderGroupCreateInfoKHR ) {
. sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR ,
. type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR ,
. anyHitShader = VK_SHADER_UNUSED_KHR ,
. closestHitShader = VK_SHADER_UNUSED_KHR ,
. generalShader = ShaderStageIndex_RayGen ,
. intersectionShader = VK_SHADER_UNUSED_KHR ,
} ;
shader_groups [ ShaderBindingTable_Miss ] = ( VkRayTracingShaderGroupCreateInfoKHR ) {
. sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR ,
. type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR ,
. anyHitShader = VK_SHADER_UNUSED_KHR ,
. closestHitShader = VK_SHADER_UNUSED_KHR ,
. generalShader = ShaderStageIndex_Miss ,
. intersectionShader = VK_SHADER_UNUSED_KHR ,
} ;
shader_groups [ ShaderBindingTable_Miss_Shadow ] = ( VkRayTracingShaderGroupCreateInfoKHR ) {
. sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR ,
. type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR ,
. anyHitShader = VK_SHADER_UNUSED_KHR ,
. closestHitShader = VK_SHADER_UNUSED_KHR ,
. generalShader = ShaderStageIndex_Miss_Shadow ,
. intersectionShader = VK_SHADER_UNUSED_KHR ,
} ;
shader_groups [ ShaderBindingTable_Hit ] = ( VkRayTracingShaderGroupCreateInfoKHR ) {
. sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR ,
. type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR ,
. anyHitShader = VK_SHADER_UNUSED_KHR ,
. closestHitShader = ShaderStageIndex_ClosestHit ,
. generalShader = VK_SHADER_UNUSED_KHR ,
. intersectionShader = VK_SHADER_UNUSED_KHR ,
} ;
shader_groups [ ShaderBindingTable_HitWithAlphaMask ] = ( VkRayTracingShaderGroupCreateInfoKHR ) {
. sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR ,
. type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR ,
. anyHitShader = ShaderStageIndex_AnyHit_AlphaMask ,
. closestHitShader = ShaderStageIndex_ClosestHit ,
. generalShader = VK_SHADER_UNUSED_KHR ,
. intersectionShader = VK_SHADER_UNUSED_KHR ,
2021-07-05 22:24:57 +02:00
} ;
XVK_CHECK ( vkCreateRayTracingPipelinesKHR ( vk_core . device , VK_NULL_HANDLE , g_pipeline_cache , 1 , & rtpci , NULL , & g_rtx . pipeline ) ) ;
2021-07-06 20:28:24 +02:00
ASSERT ( g_rtx . pipeline ! = VK_NULL_HANDLE ) ;
2021-07-05 22:24:57 +02:00
{
const uint32_t sbt_handle_size = vk_core . physical_device . properties_ray_tracing_pipeline . shaderGroupHandleSize ;
2021-07-10 20:06:55 +02:00
const uint32_t sbt_handles_buffer_size = ARRAYSIZE ( shader_groups ) * sbt_handle_size ;
2021-07-05 22:24:57 +02:00
uint8_t * sbt_handles = Mem_Malloc ( vk_core . pool , sbt_handles_buffer_size ) ;
2021-07-10 20:06:55 +02:00
XVK_CHECK ( vkGetRayTracingShaderGroupHandlesKHR ( vk_core . device , g_rtx . pipeline , 0 , ARRAYSIZE ( shader_groups ) , sbt_handles_buffer_size , sbt_handles ) ) ;
for ( int i = 0 ; i < ARRAYSIZE ( shader_groups ) ; + + i )
2021-07-05 22:24:57 +02:00
{
uint8_t * sbt_dst = g_rtx . sbt_buffer . mapped ;
2021-07-06 20:28:24 +02:00
memcpy ( sbt_dst + g_rtx . sbt_record_size * i , sbt_handles + sbt_handle_size * i , sbt_handle_size ) ;
2021-07-05 22:24:57 +02:00
}
Mem_Free ( sbt_handles ) ;
}
2021-07-10 20:06:55 +02:00
for ( int i = 0 ; i < ARRAYSIZE ( shaders ) ; + + i )
vkDestroyShaderModule ( vk_core . device , shaders [ i ] . module , NULL ) ;
2021-03-13 21:29:17 +01:00
}
2021-07-17 20:45:28 +02:00
// Uploads per-frame BLAS instance descriptors, inserts a barrier so all BLAS
// builds recorded earlier in cmdbuf complete, then builds the TLAS.
static void prepareTlas(VkCommandBuffer cmdbuf) {
	ASSERT(g_ray_model_state.frame.num_models > 0);

	// Upload all blas instances references to GPU mem
	{
		VkAccelerationStructureInstanceKHR *const inst = g_rtx.tlas_geom_buffer.mapped;
		for (int i = 0; i < g_ray_model_state.frame.num_models; ++i) {
			const vk_ray_draw_model_t *const model = g_ray_model_state.frame.models + i;
			ASSERT(model->model);
			ASSERT(model->model->as != VK_NULL_HANDLE);
			inst[i] = (VkAccelerationStructureInstanceKHR){
				.instanceCustomIndex = model->model->kusochki_offset,
				.mask = 0xff,
				// alpha-tested models use the SBT hit group with the any-hit shader
				.instanceShaderBindingTableRecordOffset = model->alpha_test ? 1 : 0,
				.flags = model->render_mode == kRenderNormal ? VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR : VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, // TODO is render_mode a good indicator of transparency in general case?
				.accelerationStructureReference = getASAddress(model->model->as), // TODO cache this addr
			};
			memcpy(&inst[i].transform, model->transform_row, sizeof(VkTransformMatrixKHR));
		}
	}

	// Barrier for building all BLASes
	// BLAS building is now in cmdbuf, need to synchronize with results
	{
		const VkBufferMemoryBarrier bmb[] = {{
			.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
			.srcAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, // | VK_ACCESS_TRANSFER_WRITE_BIT,
			.dstAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
			.buffer = g_rtx.accels_buffer.buffer,
			.offset = 0,
			.size = VK_WHOLE_SIZE,
		}};
		vkCmdPipelineBarrier(cmdbuf,
			VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
			VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
			0, 0, NULL, ARRAYSIZE(bmb), bmb, 0, NULL);
	}

	// 2. Build TLAS
	createTlas(cmdbuf);
}
2021-03-01 19:52:08 +01:00
2021-07-17 20:45:28 +02:00
// Fills g_rtx.desc_values for every binding (dest/prev frame images, TLAS,
// UBOs, geometry and light buffers, the full texture array) and writes the
// descriptor set.
static void updateDescriptors(VkCommandBuffer cmdbuf, const vk_ray_frame_render_args_t *args, const vk_image_t *frame_src, const vk_image_t *frame_dst) {
	// 3. Update descriptor sets (bind dest image, tlas, projection matrix)
	VkDescriptorImageInfo dii_all_textures[MAX_TEXTURES];

	g_rtx.desc_values[RayDescBinding_DestImage].image = (VkDescriptorImageInfo){
		.sampler = VK_NULL_HANDLE,
		.imageView = frame_dst->view,
		.imageLayout = VK_IMAGE_LAYOUT_GENERAL,
	};

	g_rtx.desc_values[RayDescBinding_PrevFrame].image = (VkDescriptorImageInfo){
		.sampler = VK_NULL_HANDLE,
		.imageView = frame_src->view,
		.imageLayout = VK_IMAGE_LAYOUT_GENERAL,
	};

	g_rtx.desc_values[RayDescBinding_TLAS].accel = (VkWriteDescriptorSetAccelerationStructureKHR){
		.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR,
		.accelerationStructureCount = 1,
		.pAccelerationStructures = &g_rtx.tlas,
	};

	g_rtx.desc_values[RayDescBinding_UBOMatrices].buffer = (VkDescriptorBufferInfo){
		.buffer = args->ubo.buffer,
		.offset = args->ubo.offset,
		.range = args->ubo.size,
	};

	g_rtx.desc_values[RayDescBinding_Kusochki].buffer = (VkDescriptorBufferInfo){
		.buffer = g_ray_model_state.kusochki_buffer.buffer,
		.offset = 0,
		.range = VK_WHOLE_SIZE, // TODO fails validation when empty g_rtx_scene.num_models * sizeof(vk_kusok_data_t),
	};

	g_rtx.desc_values[RayDescBinding_Indices].buffer = (VkDescriptorBufferInfo){
		.buffer = args->geometry_data.buffer,
		.offset = 0,
		.range = VK_WHOLE_SIZE, // TODO fails validation when empty args->geometry_data.size,
	};

	g_rtx.desc_values[RayDescBinding_Vertices].buffer = (VkDescriptorBufferInfo){
		.buffer = args->geometry_data.buffer,
		.offset = 0,
		.range = VK_WHOLE_SIZE, // TODO fails validation when empty args->geometry_data.size,
	};

	g_rtx.desc_values[RayDescBinding_Textures].image_array = dii_all_textures;

	// TODO: move this to vk_texture.c
	// Missing textures fall back to the default texture's view
	for (int tex_index = 0; tex_index < MAX_TEXTURES; ++tex_index) {
		const vk_texture_t *const texture = findTexture(tex_index);
		const qboolean exists = texture->vk.image_view != VK_NULL_HANDLE;
		dii_all_textures[tex_index].sampler = vk_core.default_sampler; // FIXME on AMD using pImmutableSamplers leads to NEAREST filtering ??. VK_NULL_HANDLE;
		dii_all_textures[tex_index].imageView = exists ? texture->vk.image_view : findTexture(tglob.defaultTexture)->vk.image_view;
		ASSERT(dii_all_textures[tex_index].imageView != VK_NULL_HANDLE);
		dii_all_textures[tex_index].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
	}

	g_rtx.desc_values[RayDescBinding_UBOLights].buffer = (VkDescriptorBufferInfo){
		.buffer = args->dlights.buffer,
		.offset = args->dlights.offset,
		.range = args->dlights.size,
	};

	g_rtx.desc_values[RayDescBinding_EmissiveKusochki].buffer = (VkDescriptorBufferInfo){
		.buffer = g_ray_model_state.emissive_kusochki_buffer.buffer,
		.offset = 0,
		.range = VK_WHOLE_SIZE,
	};

	g_rtx.desc_values[RayDescBinding_LightClusters].buffer = (VkDescriptorBufferInfo){
		.buffer = g_rtx.light_grid_buffer.buffer,
		.offset = 0,
		.range = VK_WHOLE_SIZE,
	};

	VK_DescriptorsWrite(&g_rtx.descriptors);
}
2021-03-07 01:40:35 +01:00
2021-08-05 03:36:53 +02:00
// Records the actual ray tracing dispatch: barriers for the TLAS build and
// the destination image layout, pipeline + push constants + descriptors,
// then vkCmdTraceRaysKHR over the full frame.
static qboolean rayTrace(VkCommandBuffer cmdbuf, VkImage frame_dst, float fov_angle_y)
{
	// 4. Barrier for TLAS build and dest image layout transfer
	{
		const VkBufferMemoryBarrier bmb[] = {{
			.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
			.srcAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
			.dstAccessMask = VK_ACCESS_SHADER_READ_BIT,
			.buffer = g_rtx.accels_buffer.buffer,
			.offset = 0,
			.size = VK_WHOLE_SIZE,
		}};
		const VkImageMemoryBarrier image_barrier[] = {{
			.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
			.image = frame_dst,
			.srcAccessMask = 0,
			.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
			.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
			.newLayout = VK_IMAGE_LAYOUT_GENERAL,
			.subresourceRange = (VkImageSubresourceRange){
				.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
				.baseMipLevel = 0,
				.levelCount = 1,
				.baseArrayLayer = 0,
				.layerCount = 1,
			}}};
		vkCmdPipelineBarrier(cmdbuf, VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR | VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
			0, NULL, ARRAYSIZE(bmb), bmb, ARRAYSIZE(image_barrier), image_barrier);
	}

	// 4. dispatch ray tracing
	vkCmdBindPipeline(cmdbuf, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, g_rtx.pipeline);
	{
		const vk_rtx_push_constants_t push_constants = {
			//.t = gpGlobals->realtime,
			.random_seed = (uint32_t)gEngine.COM_RandomLong(0, INT32_MAX),
			.bounces = vk_rtx_bounces->value,
			.prev_frame_blend_factor = vk_rtx_prev_frame_blend_factor->value,
			// ray cone spread derived from vertical FOV and frame height
			.pixel_cone_spread_angle = atanf((2.0f * tanf(fov_angle_y * 0.5f)) / (float)FRAME_HEIGHT),
		};
		vkCmdPushConstants(cmdbuf, g_rtx.descriptors.pipeline_layout, VK_SHADER_STAGE_RAYGEN_BIT_KHR, 0, sizeof(push_constants), &push_constants);
	}
	vkCmdBindDescriptorSets(cmdbuf, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, g_rtx.descriptors.pipeline_layout, 0, 1, g_rtx.descriptors.desc_sets + 0, 0, NULL);

	{
		const uint32_t sbt_record_size = g_rtx.sbt_record_size;
		//const uint32_t sbt_record_size = vk_core.physical_device.properties_ray_tracing_pipeline.shaderGroupHandleSize;
#define SBT_INDEX(index, count) { \
		.deviceAddress = getBufferDeviceAddress(g_rtx.sbt_buffer.buffer) + g_rtx.sbt_record_size * (index), \
		.size = sbt_record_size * (count), \
		.stride = sbt_record_size, \
	}
		// Miss region covers Miss + Miss_Shadow; hit region covers Hit + HitWithAlphaMask
		const VkStridedDeviceAddressRegionKHR sbt_raygen = SBT_INDEX(ShaderBindingTable_RayGen, 1);
		const VkStridedDeviceAddressRegionKHR sbt_miss = SBT_INDEX(ShaderBindingTable_Miss, 2);
		const VkStridedDeviceAddressRegionKHR sbt_hit = SBT_INDEX(ShaderBindingTable_Hit, 2);
		const VkStridedDeviceAddressRegionKHR sbt_callable = { 0 };
#undef SBT_INDEX
		vkCmdTraceRaysKHR(cmdbuf, &sbt_raygen, &sbt_miss, &sbt_hit, &sbt_callable, FRAME_WIDTH, FRAME_HEIGHT, 1);
	}
	return true;
}
2021-07-05 22:24:57 +02:00
2021-07-17 21:40:26 +02:00
// Finalize and update dynamic lights
static void updateLights ( void )
2021-07-17 20:45:28 +02:00
{
VK_LightsFrameFinalize ( ) ;
// Upload light grid
{
vk_ray_shader_light_grid * grid = g_rtx . light_grid_buffer . mapped ;
ASSERT ( g_lights . map . grid_cells < = MAX_LIGHT_CLUSTERS ) ;
VectorCopy ( g_lights . map . grid_min_cell , grid - > min_cell ) ;
VectorCopy ( g_lights . map . grid_size , grid - > size ) ;
memcpy ( grid - > cells , g_lights . cells , g_lights . map . grid_cells * sizeof ( vk_lights_cell_t ) ) ;
}
// Upload dynamic emissive kusochki
{
vk_emissive_kusochki_t * ek = g_ray_model_state . emissive_kusochki_buffer . mapped ;
ASSERT ( g_lights . num_emissive_surfaces < = MAX_EMISSIVE_KUSOCHKI ) ;
ek - > num_kusochki = g_lights . num_emissive_surfaces ;
for ( int i = 0 ; i < g_lights . num_emissive_surfaces ; + + i ) {
ek - > kusochki [ i ] . kusok_index = g_lights . emissive_surfaces [ i ] . kusok_index ;
Matrix3x4_Copy ( ek - > kusochki [ i ] . transform , g_lights . emissive_surfaces [ i ] . transform ) ;
2021-04-09 23:59:04 +02:00
}
2021-03-10 22:04:11 +01:00
}
2021-07-17 20:45:28 +02:00
}
2021-09-04 19:57:29 +02:00
// Blit src (expected in GENERAL layout) onto dst (expected in
// TRANSFER_DST_OPTIMAL), scaling with nearest filtering, then transition
// dst into COLOR_ATTACHMENT_OPTIMAL so regular 2D rendering can follow.
static void blitImage( VkCommandBuffer cmdbuf, VkImage src, VkImage dst, int src_width, int src_height, int dst_width, int dst_height )
{
	// Full-image blit; offsets[0] stays at the zero origin.
	const VkImageBlit blit = {
		.srcOffsets = { {0, 0, 0}, {src_width, src_height, 1} },
		.dstOffsets = { {0, 0, 0}, {dst_width, dst_height, 1} },
		.srcSubresource = {
			.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
			.layerCount = 1,
		},
		.dstSubresource = {
			.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
			.layerCount = 1,
		},
	};

	vkCmdBlitImage( cmdbuf,
		src, VK_IMAGE_LAYOUT_GENERAL,
		dst, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
		1, &blit, VK_FILTER_NEAREST );

	// Make the transfer write visible to subsequent color-attachment access
	// and move dst into the layout the render pass expects.
	{
		const VkImageMemoryBarrier barriers[] = { {
			.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
			.image = dst,
			.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
			.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
			.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
			.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
			.subresourceRange = (VkImageSubresourceRange) {
				.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
				.baseMipLevel = 0,
				.levelCount = 1,
				.baseArrayLayer = 0,
				.layerCount = 1,
			},
		} };

		vkCmdPipelineBarrier( cmdbuf,
			VK_PIPELINE_STAGE_TRANSFER_BIT,
			VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
			0, 0, NULL, 0, NULL, ARRAYSIZE( barriers ), barriers );
	}
}
2021-03-20 19:15:57 +01:00
2021-07-17 20:45:28 +02:00
// Transition image (from UNDEFINED, discarding any contents) into GENERAL
// layout and clear it to opaque black (all-zero clear color).
static void clearVkImage( VkCommandBuffer cmdbuf, VkImage image ) {
	const VkClearColorValue black = {0};
	const VkImageMemoryBarrier to_general[] = { {
		.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
		.image = image,
		.srcAccessMask = 0,
		.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
		.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
		.newLayout = VK_IMAGE_LAYOUT_GENERAL,
		.subresourceRange = (VkImageSubresourceRange) {
			.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
			.baseMipLevel = 0,
			.levelCount = 1,
			.baseArrayLayer = 0,
			.layerCount = 1,
		},
	} };

	vkCmdPipelineBarrier( cmdbuf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
		0, NULL, 0, NULL, ARRAYSIZE( to_general ), to_general );

	// Reuse the barrier's subresource range so the clear covers the same subresources.
	vkCmdClearColorImage( cmdbuf, image, VK_IMAGE_LAYOUT_GENERAL, &black, 1, &to_general->subresourceRange );
}
void VK_RayFrameEnd ( const vk_ray_frame_render_args_t * args )
{
const VkCommandBuffer cmdbuf = args - > cmdbuf ;
const vk_image_t * frame_src = g_rtx . frames + ( ( g_rtx . frame_number + 1 ) % 2 ) ;
const vk_image_t * frame_dst = g_rtx . frames + ( g_rtx . frame_number % 2 ) ;
ASSERT ( vk_core . rtx ) ;
// ubo should contain two matrices
// FIXME pass these matrices explicitly to let RTX module handle ubo itself
ASSERT ( args - > ubo . size = = sizeof ( float ) * 16 * 2 ) ;
g_rtx . frame_number + + ;
if ( vk_core . debug )
XVK_RayModel_Validate ( ) ;
if ( g_rtx . reload_pipeline ) {
gEngine . Con_Printf ( S_WARN " Reloading RTX shaders/pipelines \n " ) ;
// TODO gracefully handle reload errors: need to change createPipeline, loadShader, VK_PipelineCreate...
vkDestroyPipeline ( vk_core . device , g_rtx . pipeline , NULL ) ;
createPipeline ( ) ;
g_rtx . reload_pipeline = false ;
2021-03-20 19:15:57 +01:00
}
2021-07-04 20:18:28 +02:00
2021-07-17 20:45:28 +02:00
updateLights ( ) ;
if ( g_ray_model_state . frame . num_models = = 0 )
2021-07-04 20:18:28 +02:00
{
2021-07-17 20:45:28 +02:00
clearVkImage ( cmdbuf , frame_dst - > image ) ;
2021-07-17 21:40:26 +02:00
{
// Prepare destination image for writing
const VkImageMemoryBarrier image_barriers [ ] = { {
. sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER ,
. image = args - > dst . image ,
. srcAccessMask = 0 ,
. dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT ,
. oldLayout = VK_IMAGE_LAYOUT_UNDEFINED ,
. newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL ,
. subresourceRange =
( VkImageSubresourceRange ) {
. aspectMask = VK_IMAGE_ASPECT_COLOR_BIT ,
. baseMipLevel = 0 ,
. levelCount = 1 ,
. baseArrayLayer = 0 ,
. layerCount = 1 ,
} ,
} } ;
vkCmdPipelineBarrier ( args - > cmdbuf ,
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT ,
VK_PIPELINE_STAGE_TRANSFER_BIT ,
0 , 0 , NULL , 0 , NULL , ARRAYSIZE ( image_barriers ) , image_barriers ) ;
}
2021-07-17 20:45:28 +02:00
} else {
prepareTlas ( cmdbuf ) ;
updateDescriptors ( cmdbuf , args , frame_src , frame_dst ) ;
2021-08-05 03:36:53 +02:00
rayTrace ( cmdbuf , frame_dst - > image , args - > fov_angle_y ) ;
2021-07-17 20:45:28 +02:00
// Barrier for frame_dst image
{
const VkImageMemoryBarrier image_barriers [ ] = {
{
. sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER ,
. image = frame_dst - > image ,
. srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT ,
. dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT ,
. oldLayout = VK_IMAGE_LAYOUT_GENERAL ,
. newLayout = VK_IMAGE_LAYOUT_GENERAL ,
. subresourceRange =
( VkImageSubresourceRange ) {
. aspectMask = VK_IMAGE_ASPECT_COLOR_BIT ,
. baseMipLevel = 0 ,
. levelCount = 1 ,
. baseArrayLayer = 0 ,
. layerCount = 1 ,
} ,
} ,
{
. sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER ,
. image = args - > dst . image ,
. srcAccessMask = 0 ,
. dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT ,
. oldLayout = VK_IMAGE_LAYOUT_UNDEFINED ,
. newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL ,
. subresourceRange =
( VkImageSubresourceRange ) {
. aspectMask = VK_IMAGE_ASPECT_COLOR_BIT ,
. baseMipLevel = 0 ,
. levelCount = 1 ,
. baseArrayLayer = 0 ,
. layerCount = 1 ,
} ,
} } ;
vkCmdPipelineBarrier ( args - > cmdbuf ,
VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR ,
VK_PIPELINE_STAGE_TRANSFER_BIT ,
0 , 0 , NULL , 0 , NULL , ARRAYSIZE ( image_barriers ) , image_barriers ) ;
}
2021-07-04 20:18:28 +02:00
}
2021-07-17 20:45:28 +02:00
// Blit RTX frame onto swapchain image
blitImage ( cmdbuf , frame_src - > image , args - > dst . image , FRAME_WIDTH , FRAME_HEIGHT , args - > dst . width , args - > dst . height ) ;
2021-03-01 19:52:08 +01:00
}
static void createLayouts ( void ) {
2021-07-10 21:37:14 +02:00
//VkSampler samplers[MAX_TEXTURES];
2021-04-22 19:45:38 +02:00
2021-05-01 20:48:24 +02:00
g_rtx . descriptors . bindings = g_rtx . desc_bindings ;
g_rtx . descriptors . num_bindings = ARRAYSIZE ( g_rtx . desc_bindings ) ;
g_rtx . descriptors . values = g_rtx . desc_values ;
g_rtx . descriptors . num_sets = 1 ;
g_rtx . descriptors . desc_sets = g_rtx . desc_sets ;
2021-07-14 18:43:36 +02:00
g_rtx . descriptors . push_constants = ( VkPushConstantRange ) {
. offset = 0 ,
. size = sizeof ( vk_rtx_push_constants_t ) ,
. stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_KHR ,
} ;
2021-05-01 20:48:24 +02:00
g_rtx . desc_bindings [ RayDescBinding_DestImage ] = ( VkDescriptorSetLayoutBinding ) {
. binding = RayDescBinding_DestImage ,
2021-03-01 19:52:08 +01:00
. descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ,
. descriptorCount = 1 ,
2021-07-05 22:24:57 +02:00
. stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_KHR ,
2021-05-01 20:48:24 +02:00
} ;
g_rtx . desc_bindings [ RayDescBinding_TLAS ] = ( VkDescriptorSetLayoutBinding ) {
. binding = RayDescBinding_TLAS ,
2021-03-01 19:52:08 +01:00
. descriptorType = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR ,
. descriptorCount = 1 ,
2021-07-05 22:24:57 +02:00
. stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_KHR ,
2021-05-01 20:48:24 +02:00
} ;
g_rtx . desc_bindings [ RayDescBinding_UBOMatrices ] = ( VkDescriptorSetLayoutBinding ) {
. binding = RayDescBinding_UBOMatrices ,
2021-03-03 20:58:40 +01:00
. descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ,
. descriptorCount = 1 ,
2021-07-05 22:24:57 +02:00
. stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_KHR ,
2021-05-01 20:48:24 +02:00
} ;
2021-07-10 21:37:14 +02:00
g_rtx . desc_bindings [ RayDescBinding_Kusochki ] = ( VkDescriptorSetLayoutBinding ) {
. binding = RayDescBinding_Kusochki ,
. descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ,
. descriptorCount = 1 ,
2021-07-14 20:54:21 +02:00
. stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_KHR | VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR | VK_SHADER_STAGE_ANY_HIT_BIT_KHR ,
2021-07-10 21:37:14 +02:00
} ;
2021-07-06 20:28:24 +02:00
2021-07-10 21:37:14 +02:00
g_rtx . desc_bindings [ RayDescBinding_Indices ] = ( VkDescriptorSetLayoutBinding ) {
. binding = RayDescBinding_Indices ,
. descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ,
. descriptorCount = 1 ,
2021-07-14 20:54:21 +02:00
. stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_KHR | VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR | VK_SHADER_STAGE_ANY_HIT_BIT_KHR ,
2021-07-10 21:37:14 +02:00
} ;
2021-07-06 20:28:24 +02:00
2021-07-10 21:37:14 +02:00
g_rtx . desc_bindings [ RayDescBinding_Vertices ] = ( VkDescriptorSetLayoutBinding ) {
. binding = RayDescBinding_Vertices ,
. descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ,
. descriptorCount = 1 ,
2021-07-14 20:54:21 +02:00
. stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_KHR | VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR | VK_SHADER_STAGE_ANY_HIT_BIT_KHR ,
2021-07-10 21:37:14 +02:00
} ;
2021-07-07 21:02:12 +02:00
2021-07-10 21:37:14 +02:00
g_rtx . desc_bindings [ RayDescBinding_Textures ] = ( VkDescriptorSetLayoutBinding ) {
. binding = RayDescBinding_Textures ,
. descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ,
. descriptorCount = MAX_TEXTURES ,
2021-07-14 20:54:21 +02:00
. stageFlags = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR | VK_SHADER_STAGE_ANY_HIT_BIT_KHR ,
2021-07-10 21:37:14 +02:00
// FIXME on AMD using immutable samplers leads to nearest filtering ???!
. pImmutableSamplers = NULL , //samplers,
} ;
2021-07-07 21:02:12 +02:00
2021-07-10 20:06:55 +02:00
// for (int i = 0; i < ARRAYSIZE(samplers); ++i)
// samplers[i] = vk_core.default_sampler;
2021-07-06 20:28:24 +02:00
2021-07-12 19:42:20 +02:00
g_rtx . desc_bindings [ RayDescBinding_UBOLights ] = ( VkDescriptorSetLayoutBinding ) {
. binding = RayDescBinding_UBOLights ,
. descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ,
. descriptorCount = 1 ,
. stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_KHR ,
} ;
2021-07-06 20:28:24 +02:00
2021-07-12 19:42:20 +02:00
g_rtx . desc_bindings [ RayDescBinding_EmissiveKusochki ] = ( VkDescriptorSetLayoutBinding ) {
. binding = RayDescBinding_EmissiveKusochki ,
. descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ,
. descriptorCount = 1 ,
. stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_KHR ,
} ;
g_rtx . desc_bindings [ RayDescBinding_LightClusters ] = ( VkDescriptorSetLayoutBinding ) {
. binding = RayDescBinding_LightClusters ,
. descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ,
. descriptorCount = 1 ,
. stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_KHR ,
} ;
2021-07-06 20:28:24 +02:00
2021-07-14 18:48:32 +02:00
g_rtx . desc_bindings [ RayDescBinding_PrevFrame ] = ( VkDescriptorSetLayoutBinding ) {
. binding = RayDescBinding_PrevFrame ,
. descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ,
. descriptorCount = 1 ,
. stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_KHR ,
} ;
2021-07-06 20:28:24 +02:00
2021-05-01 20:48:24 +02:00
VK_DescriptorsCreate ( & g_rtx . descriptors ) ;
2021-03-01 19:52:08 +01:00
}
2021-03-13 21:29:17 +01:00
// Console command handler ("vk_rtx_reload"): request that the ray tracing
// pipeline be recreated; the actual reload happens in VK_RayFrameEnd.
static void reloadPipeline( void ) {
	g_rtx.reload_pipeline = true;
}
2021-05-08 23:34:42 +02:00
static void freezeModels ( void ) {
2021-07-05 19:59:14 +02:00
g_ray_model_state . freeze_models = ! g_ray_model_state . freeze_models ;
2021-05-08 23:34:42 +02:00
}
2021-02-27 22:43:49 +01:00
qboolean VK_RayInit ( void )
{
ASSERT ( vk_core . rtx ) ;
// TODO complain and cleanup on failure
2021-07-05 22:24:57 +02:00
//g_rtx.sbt_record_size = ALIGN_UP(vk_core.physical_device.properties_ray_tracing_pipeline.shaderGroupHandleSize, vk_core.physical_device.properties_ray_tracing_pipeline.shaderGroupHandleAlignment);
g_rtx . sbt_record_size = ALIGN_UP ( vk_core . physical_device . properties_ray_tracing_pipeline . shaderGroupHandleSize , vk_core . physical_device . properties_ray_tracing_pipeline . shaderGroupBaseAlignment ) ;
2021-09-04 19:57:29 +02:00
if ( ! createBuffer ( " ray sbt_buffer " , & g_rtx . sbt_buffer , ShaderBindingTable_COUNT * g_rtx . sbt_record_size ,
2021-07-05 22:24:57 +02:00
VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT | VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR ,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT ) )
{
return false ;
}
2021-05-17 19:36:40 +02:00
if ( ! createBuffer ( " ray accels_buffer " , & g_rtx . accels_buffer , MAX_ACCELS_BUFFER ,
2021-03-15 18:23:26 +01:00
VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR | VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT ,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
2021-02-27 22:43:49 +01:00
) )
{
return false ;
}
g_rtx . accels_buffer_addr = getBufferDeviceAddress ( g_rtx . accels_buffer . buffer ) ;
2021-05-03 20:17:01 +02:00
g_rtx . accels_buffer_alloc . size = g_rtx . accels_buffer . size ;
2021-02-27 22:43:49 +01:00
2021-05-17 19:36:40 +02:00
if ( ! createBuffer ( " ray scratch_buffer " , & g_rtx . scratch_buffer , MAX_SCRATCH_BUFFER ,
2021-02-27 22:43:49 +01:00
VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR | VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT ,
2021-03-15 18:23:26 +01:00
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
2021-02-27 22:43:49 +01:00
) ) {
return false ;
}
g_rtx . scratch_buffer_addr = getBufferDeviceAddress ( g_rtx . scratch_buffer . buffer ) ;
2021-05-17 19:36:40 +02:00
if ( ! createBuffer ( " ray tlas_geom_buffer " , & g_rtx . tlas_geom_buffer , sizeof ( VkAccelerationStructureInstanceKHR ) * MAX_ACCELS ,
2021-03-15 18:23:26 +01:00
VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT |
VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR ,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT ) ) {
2021-03-01 19:52:08 +01:00
// FIXME complain, handle
return false ;
}
2021-07-05 19:59:14 +02:00
if ( ! createBuffer ( " ray kusochki_buffer " , & g_ray_model_state . kusochki_buffer , sizeof ( vk_kusok_data_t ) * MAX_KUSOCHKI ,
2021-03-08 21:09:11 +01:00
VK_BUFFER_USAGE_STORAGE_BUFFER_BIT /* | VK_BUFFER_USAGE_TRANSFER_DST_BIT */ ,
2021-03-15 18:23:26 +01:00
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT ) ) {
2021-03-08 21:09:11 +01:00
// FIXME complain, handle
return false ;
}
2021-07-05 19:59:14 +02:00
g_ray_model_state . kusochki_alloc . size = MAX_KUSOCHKI ;
2021-05-03 20:17:01 +02:00
2021-07-05 19:59:14 +02:00
if ( ! createBuffer ( " ray emissive_kusochki_buffer " , & g_ray_model_state . emissive_kusochki_buffer , sizeof ( vk_emissive_kusochki_t ) ,
2021-03-20 20:34:34 +01:00
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT /* | VK_BUFFER_USAGE_TRANSFER_DST_BIT */ ,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT ) ) {
// FIXME complain, handle
return false ;
}
2021-05-17 19:36:40 +02:00
if ( ! createBuffer ( " ray light_grid_buffer " , & g_rtx . light_grid_buffer , sizeof ( vk_ray_shader_light_grid ) ,
2021-04-12 20:07:41 +02:00
VK_BUFFER_USAGE_STORAGE_BUFFER_BIT /* | VK_BUFFER_USAGE_TRANSFER_DST_BIT */ ,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT ) ) {
// FIXME complain, handle
return false ;
}
2021-03-01 19:52:08 +01:00
createLayouts ( ) ;
createPipeline ( ) ;
2021-03-20 19:15:57 +01:00
for ( int i = 0 ; i < ARRAYSIZE ( g_rtx . frames ) ; + + i ) {
g_rtx . frames [ i ] = VK_ImageCreate ( FRAME_WIDTH , FRAME_HEIGHT , VK_FORMAT_R8G8B8A8_UNORM , VK_IMAGE_TILING_OPTIMAL ,
2021-03-20 21:42:15 +01:00
VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT ) ;
}
// Start with black previous frame
{
const VkCommandBufferBeginInfo beginfo = {
. sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO ,
. flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT ,
} ;
XVK_CHECK ( vkBeginCommandBuffer ( vk_core . cb , & beginfo ) ) ;
2021-07-17 20:45:28 +02:00
clearVkImage ( vk_core . cb , g_rtx . frames [ 1 ] . image ) ;
2021-03-20 21:42:15 +01:00
XVK_CHECK ( vkEndCommandBuffer ( vk_core . cb ) ) ;
{
const VkSubmitInfo subinfo = {
. sType = VK_STRUCTURE_TYPE_SUBMIT_INFO ,
. commandBufferCount = 1 ,
. pCommandBuffers = & vk_core . cb ,
} ;
XVK_CHECK ( vkQueueSubmit ( vk_core . queue , 1 , & subinfo , VK_NULL_HANDLE ) ) ;
XVK_CHECK ( vkQueueWaitIdle ( vk_core . queue ) ) ;
}
2021-03-20 19:15:57 +01:00
}
2021-05-08 23:34:42 +02:00
if ( vk_core . debug ) {
2021-03-13 21:29:17 +01:00
gEngine . Cmd_AddCommand ( " vk_rtx_reload " , reloadPipeline , " Reload RTX shader " ) ;
2021-05-08 23:34:42 +02:00
gEngine . Cmd_AddCommand ( " vk_rtx_freeze " , freezeModels , " Freeze models, do not update/add/delete models from to-draw list " ) ;
}
2021-03-13 21:29:17 +01:00
2021-02-27 22:43:49 +01:00
return true ;
}
void VK_RayShutdown ( void )
{
ASSERT ( vk_core . rtx ) ;
2021-02-28 00:40:57 +01:00
2021-03-20 19:15:57 +01:00
for ( int i = 0 ; i < ARRAYSIZE ( g_rtx . frames ) ; + + i )
VK_ImageDestroy ( g_rtx . frames + i ) ;
2021-03-01 19:52:08 +01:00
vkDestroyPipeline ( vk_core . device , g_rtx . pipeline , NULL ) ;
2021-05-01 20:48:24 +02:00
VK_DescriptorsDestroy ( & g_rtx . descriptors ) ;
2021-03-01 19:52:08 +01:00
2021-04-09 23:59:04 +02:00
if ( g_rtx . tlas ! = VK_NULL_HANDLE )
vkDestroyAccelerationStructureKHR ( vk_core . device , g_rtx . tlas , NULL ) ;
2021-03-01 20:22:58 +01:00
2021-07-05 19:59:14 +02:00
for ( int i = 0 ; i < ARRAYSIZE ( g_ray_model_state . models_cache ) ; + + i ) {
vk_ray_model_t * model = g_ray_model_state . models_cache + i ;
2021-05-24 20:14:03 +02:00
if ( model - > as ! = VK_NULL_HANDLE )
vkDestroyAccelerationStructureKHR ( vk_core . device , model - > as , NULL ) ;
model - > as = VK_NULL_HANDLE ;
2021-04-13 19:46:33 +02:00
}
2021-03-01 19:52:08 +01:00
destroyBuffer ( & g_rtx . scratch_buffer ) ;
destroyBuffer ( & g_rtx . accels_buffer ) ;
destroyBuffer ( & g_rtx . tlas_geom_buffer ) ;
2021-07-05 19:59:14 +02:00
destroyBuffer ( & g_ray_model_state . kusochki_buffer ) ;
destroyBuffer ( & g_ray_model_state . emissive_kusochki_buffer ) ;
2021-04-16 20:43:37 +02:00
destroyBuffer ( & g_rtx . light_grid_buffer ) ;
2021-07-05 22:24:57 +02:00
destroyBuffer ( & g_rtx . sbt_buffer ) ;
2021-02-27 22:43:49 +01:00
}