#include <cstring>

#include "dxvk_device.h"
#include "dxvk_context.h"
#include "dxvk_main.h"

namespace dxvk {

  DxvkContext::DxvkContext(
    const Rc<DxvkDevice>& device,
    const Rc<DxvkPipelineCache>& pipelineCache,
    const Rc<DxvkMetaClearObjects>& metaClearObjects)
  : m_device    (device),
    m_pipeCache (pipelineCache),
    m_pipeMgr   (new DxvkPipelineManager(device.ptr())),
    m_metaClear (metaClearObjects) { }


  DxvkContext::~DxvkContext() {
  }


  void DxvkContext::beginRecording(const Rc<DxvkCommandList>& cmdList) {
    m_cmd = cmdList;
    m_cmd->beginRecording();

    // The current state of the internal command buffer is
    // undefined, so we have to bind and set up everything
    // before any draw or dispatch command is recorded.
    m_flags.clr(
      DxvkContextFlag::GpRenderPassBound,
      DxvkContextFlag::GpClearRenderTargets);

    m_flags.set(
      DxvkContextFlag::GpDirtyPipeline,
      DxvkContextFlag::GpDirtyPipelineState,
      DxvkContextFlag::GpDirtyResources,
      DxvkContextFlag::GpDirtyVertexBuffers,
      DxvkContextFlag::GpDirtyIndexBuffer,
      DxvkContextFlag::CpDirtyPipeline,
      DxvkContextFlag::CpDirtyPipelineState,
      DxvkContextFlag::CpDirtyResources);

    // Restart queries that were active during
    // the last command buffer submission.
    this->beginActiveQueries();
  }


  Rc<DxvkCommandList> DxvkContext::endRecording() {
    this->spillRenderPass();
    this->endActiveQueries();

    this->trackQueryPool(m_queryPools[VK_QUERY_TYPE_OCCLUSION]);
    this->trackQueryPool(m_queryPools[VK_QUERY_TYPE_PIPELINE_STATISTICS]);
    this->trackQueryPool(m_queryPools[VK_QUERY_TYPE_TIMESTAMP]);

    m_cmd->endRecording();
    return std::exchange(m_cmd, nullptr);
  }
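
  // Typical lifecycle (illustrative only): a caller obtains a context and a
  // command list from the device, records work between beginRecording() and
  // endRecording(), and hands the returned command list back to the device
  // for submission. The submission call below is sketched from memory and
  // may not match the exact DxvkDevice interface:
  //
  //   Rc<DxvkContext>     ctx = device->createContext();
  //   Rc<DxvkCommandList> cmd = device->createCommandList();
  //   ctx->beginRecording(cmd);
  //   /* record draws, dispatches, copies ... */
  //   device->submitCommandList(ctx->endRecording(), /* sync objects */ ...);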


  void DxvkContext::beginQuery(const DxvkQueryRevision& query) {
    DxvkQueryHandle handle = this->allocQuery(query);

    m_cmd->cmdBeginQuery(
      handle.queryPool,
      handle.queryId,
      handle.flags);

    query.query->beginRecording(query.revision);
    this->insertActiveQuery(query);
  }


  void DxvkContext::endQuery(const DxvkQueryRevision& query) {
    DxvkQueryHandle handle = query.query->getHandle();

    m_cmd->cmdEndQuery(
      handle.queryPool,
      handle.queryId);

    query.query->endRecording(query.revision);
    this->eraseActiveQuery(query);
  }
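
  // Note: beginQuery/endQuery bracket a single query revision. The handle is
  // allocated from the per-type query pools that endRecording() tracks above,
  // so a query that remains active across a command-buffer boundary is
  // restarted by beginActiveQueries() in the next beginRecording().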


  void DxvkContext::bindRenderTargets(const DxvkRenderTargets& targets) {
    m_state.om.renderTargets = targets;

    // If necessary, perform clears on the active render targets
    if (m_flags.test(DxvkContextFlag::GpClearRenderTargets))
      this->startRenderPass();

    // Set up default render pass ops
    this->resetRenderPassOps(
      m_state.om.renderTargets,
      m_state.om.renderPassOps);

    if (m_state.om.framebuffer == nullptr || !m_state.om.framebuffer->hasTargets(targets)) {
      // Create a new framebuffer object next
      // time we start rendering something
      m_flags.set(DxvkContextFlag::GpDirtyFramebuffer);
    } else {
      // Don't redundantly spill the render pass if
      // the same render targets are bound again
      m_flags.clr(DxvkContextFlag::GpDirtyFramebuffer);
    }
  }


  void DxvkContext::bindIndexBuffer(
    const DxvkBufferSlice& buffer,
    VkIndexType indexType) {
    if (!m_state.vi.indexBuffer.matches(buffer)
     || (m_state.vi.indexType != indexType)) {
      m_state.vi.indexBuffer = buffer;
      m_state.vi.indexType   = indexType;

      m_flags.set(DxvkContextFlag::GpDirtyIndexBuffer);
    }
  }


  void DxvkContext::bindResourceBuffer(
    uint32_t slot,
    const DxvkBufferSlice& buffer) {
    if (!m_rc[slot].bufferSlice.matches(buffer)) {
      m_rc[slot].sampler     = nullptr;
      m_rc[slot].imageView   = nullptr;
      m_rc[slot].bufferView  = nullptr;
      m_rc[slot].bufferSlice = buffer;

      m_flags.set(
        DxvkContextFlag::CpDirtyResources,
        DxvkContextFlag::GpDirtyResources);
    }
  }


  void DxvkContext::bindResourceView(
    uint32_t slot,
    const Rc<DxvkImageView>& imageView,
    const Rc<DxvkBufferView>& bufferView) {
    if (m_rc[slot].imageView != imageView
     || m_rc[slot].bufferView != bufferView) {
      m_rc[slot].sampler     = nullptr;
      m_rc[slot].imageView   = imageView;
      m_rc[slot].bufferView  = bufferView;
      m_rc[slot].bufferSlice = DxvkBufferSlice();

      m_flags.set(
        DxvkContextFlag::CpDirtyResources,
        DxvkContextFlag::GpDirtyResources);
    }
  }


  void DxvkContext::bindResourceSampler(
    uint32_t slot,
    const Rc<DxvkSampler>& sampler) {
    if (m_rc[slot].sampler != sampler) {
      m_rc[slot].sampler     = sampler;
      m_rc[slot].imageView   = nullptr;
      m_rc[slot].bufferView  = nullptr;
      m_rc[slot].bufferSlice = DxvkBufferSlice();

      m_flags.set(
        DxvkContextFlag::CpDirtyResources,
        DxvkContextFlag::GpDirtyResources);
    }
  }


  void DxvkContext::bindShader(
    VkShaderStageFlagBits stage,
    const Rc<DxvkShader>& shader) {
    DxvkShaderStage* shaderStage = nullptr;

    switch (stage) {
      case VK_SHADER_STAGE_VERTEX_BIT:                  shaderStage = &m_state.gp.vs;  break;
      case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:    shaderStage = &m_state.gp.tcs; break;
      case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: shaderStage = &m_state.gp.tes; break;
      case VK_SHADER_STAGE_GEOMETRY_BIT:                shaderStage = &m_state.gp.gs;  break;
      case VK_SHADER_STAGE_FRAGMENT_BIT:                shaderStage = &m_state.gp.fs;  break;
      case VK_SHADER_STAGE_COMPUTE_BIT:                 shaderStage = &m_state.cp.cs;  break;
      default: return;
    }

    if (shaderStage->shader != shader) {
      shaderStage->shader = shader;

      if (stage == VK_SHADER_STAGE_COMPUTE_BIT) {
        m_flags.set(
          DxvkContextFlag::CpDirtyPipeline,
          DxvkContextFlag::CpDirtyPipelineState,
          DxvkContextFlag::CpDirtyResources);
      } else {
        m_flags.set(
          DxvkContextFlag::GpDirtyPipeline,
          DxvkContextFlag::GpDirtyPipelineState,
          DxvkContextFlag::GpDirtyResources);
      }
    }
  }
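
  // Illustrative only: binding a vertex and fragment shader marks the
  // graphics pipeline, its state and its resources dirty, so the next draw
  // re-resolves the pipeline before recording. vs and fs are hypothetical
  // Rc<DxvkShader> objects owned by the caller:
  //
  //   ctx->bindShader(VK_SHADER_STAGE_VERTEX_BIT,   vs);
  //   ctx->bindShader(VK_SHADER_STAGE_FRAGMENT_BIT, fs);
  //   ctx->draw(3, 1, 0, 0);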


  void DxvkContext::bindVertexBuffer(
    uint32_t binding,
    const DxvkBufferSlice& buffer,
    uint32_t stride) {
    if (!m_state.vi.vertexBuffers[binding].matches(buffer)) {
      m_state.vi.vertexBuffers[binding] = buffer;
      m_flags.set(DxvkContextFlag::GpDirtyVertexBuffers);
    }

    if (m_state.vi.vertexStrides[binding] != stride) {
      m_state.vi.vertexStrides[binding] = stride;
      m_flags.set(DxvkContextFlag::GpDirtyPipelineState);
    }
  }


  void DxvkContext::clearBuffer(
    const Rc<DxvkBuffer>& buffer,
    VkDeviceSize offset,
    VkDeviceSize length,
    uint32_t value) {
    this->spillRenderPass();

    auto slice = buffer->subSlice(offset, length);

    m_cmd->cmdFillBuffer(
      slice.handle(),
      slice.offset(),
      slice.length(),
      value);

    m_barriers.accessBuffer(slice,
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT,
      buffer->info().stages,
      buffer->info().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(slice.resource());
  }
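
  // Note: this maps onto vkCmdFillBuffer, so offset and length are byte
  // counts that must be multiples of four, and the 32-bit value is repeated
  // across the range. Illustrative call zeroing the first 256 bytes of a
  // hypothetical buffer:
  //
  //   ctx->clearBuffer(buffer, 0, 256, 0u);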


  void DxvkContext::clearBufferView(
    const Rc<DxvkBufferView>& bufferView,
    VkDeviceSize offset,
    VkDeviceSize length,
    VkClearColorValue value) {
    this->spillRenderPass();
    this->unbindComputePipeline();

    // Query pipeline objects to use for this clear operation
    DxvkMetaClearPipeline pipeInfo = m_metaClear->getClearBufferPipeline(
      imageFormatInfo(bufferView->info().format)->flags);

    // Create a descriptor set pointing to the view
    VkBufferView viewObject = bufferView->handle();

    VkDescriptorSet descriptorSet =
      m_cmd->allocateDescriptorSet(pipeInfo.dsetLayout);

    VkWriteDescriptorSet descriptorWrite;
    descriptorWrite.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptorWrite.pNext            = nullptr;
    descriptorWrite.dstSet           = descriptorSet;
    descriptorWrite.dstBinding       = 0;
    descriptorWrite.dstArrayElement  = 0;
    descriptorWrite.descriptorCount  = 1;
    descriptorWrite.descriptorType   = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
    descriptorWrite.pImageInfo       = nullptr;
    descriptorWrite.pBufferInfo      = nullptr;
    descriptorWrite.pTexelBufferView = &viewObject;
    m_cmd->updateDescriptorSets(1, &descriptorWrite);

    // Prepare shader arguments
    DxvkMetaClearArgs pushArgs;
    pushArgs.clearValue = value;
    pushArgs.offset = VkOffset3D { int32_t(offset), 0, 0 };
    pushArgs.extent = VkExtent3D { uint32_t(length), 1, 1 };

    VkExtent3D workgroups = util::computeBlockCount(
      pushArgs.extent, pipeInfo.workgroupSize);

    m_cmd->cmdBindPipeline(
      VK_PIPELINE_BIND_POINT_COMPUTE,
      pipeInfo.pipeline);
    m_cmd->cmdBindDescriptorSet(
      VK_PIPELINE_BIND_POINT_COMPUTE,
      pipeInfo.pipeLayout, descriptorSet);
    m_cmd->cmdPushConstants(
      pipeInfo.pipeLayout,
      VK_SHADER_STAGE_COMPUTE_BIT,
      0, sizeof(pushArgs), &pushArgs);
    m_cmd->cmdDispatch(
      workgroups.width,
      workgroups.height,
      workgroups.depth);

    m_barriers.accessBuffer(
      bufferView->physicalSlice(),
      VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
      VK_ACCESS_SHADER_WRITE_BIT,
      bufferView->bufferInfo().stages,
      bufferView->bufferInfo().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(bufferView->viewResource());
    m_cmd->trackResource(bufferView->bufferResource());
  }


  void DxvkContext::clearColorImage(
    const Rc<DxvkImage>& image,
    const VkClearColorValue& value,
    const VkImageSubresourceRange& subresources) {
    this->spillRenderPass();

    m_barriers.accessImage(image, subresources,
      VK_IMAGE_LAYOUT_UNDEFINED,
      image->info().stages,
      image->info().access,
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT);
    m_barriers.recordCommands(m_cmd);

    m_cmd->cmdClearColorImage(image->handle(),
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      &value, 1, &subresources);

    m_barriers.accessImage(image, subresources,
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT,
      image->info().layout,
      image->info().stages,
      image->info().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(image);
  }


  void DxvkContext::clearDepthStencilImage(
    const Rc<DxvkImage>& image,
    const VkClearDepthStencilValue& value,
    const VkImageSubresourceRange& subresources) {
    this->spillRenderPass();

    m_barriers.accessImage(
      image, subresources,
      VK_IMAGE_LAYOUT_UNDEFINED,
      image->info().stages,
      image->info().access,
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT);
    m_barriers.recordCommands(m_cmd);

    m_cmd->cmdClearDepthStencilImage(image->handle(),
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      &value, 1, &subresources);

    m_barriers.accessImage(
      image, subresources,
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT,
      image->info().layout,
      image->info().stages,
      image->info().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(image);
  }


  void DxvkContext::clearRenderTarget(
    const Rc<DxvkImageView>& imageView,
    const VkClearRect& clearRect,
    VkImageAspectFlags clearAspects,
    const VkClearValue& clearValue) {
    this->updateFramebuffer();

    // Prepare attachment ops
    DxvkColorAttachmentOps colorOp;
    colorOp.loadOp      = VK_ATTACHMENT_LOAD_OP_LOAD;
    colorOp.loadLayout  = imageView->imageInfo().layout;
    colorOp.storeOp     = VK_ATTACHMENT_STORE_OP_STORE;
    colorOp.storeLayout = imageView->imageInfo().layout;

    DxvkDepthAttachmentOps depthOp;
    depthOp.loadOpD     = VK_ATTACHMENT_LOAD_OP_LOAD;
    depthOp.loadOpS     = VK_ATTACHMENT_LOAD_OP_LOAD;
    depthOp.loadLayout  = imageView->imageInfo().layout;
    depthOp.storeOpD    = VK_ATTACHMENT_STORE_OP_STORE;
    depthOp.storeOpS    = VK_ATTACHMENT_STORE_OP_STORE;
    depthOp.storeLayout = imageView->imageInfo().layout;

    if (clearAspects & VK_IMAGE_ASPECT_COLOR_BIT)
      colorOp.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;

    if (clearAspects & VK_IMAGE_ASPECT_DEPTH_BIT)
      depthOp.loadOpD = VK_ATTACHMENT_LOAD_OP_CLEAR;

    if (clearAspects & VK_IMAGE_ASPECT_STENCIL_BIT)
      depthOp.loadOpS = VK_ATTACHMENT_LOAD_OP_CLEAR;

    if (clearAspects == imageView->info().aspect) {
      colorOp.loadLayout = VK_IMAGE_LAYOUT_UNDEFINED;
      depthOp.loadLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    }

    // Check whether the render target view is an attachment
    // of the current framebuffer. If not, we need to create
    // a temporary framebuffer.
    int32_t attachmentIndex = -1;

    if (m_state.om.framebuffer != nullptr)
      attachmentIndex = m_state.om.framebuffer->findAttachment(imageView);

    if (attachmentIndex < 0) {
      this->spillRenderPass();

      // Set up and bind a temporary framebuffer
      DxvkRenderTargets attachments;
      DxvkRenderPassOps ops;

      if (clearAspects & VK_IMAGE_ASPECT_COLOR_BIT) {
        attachments.color[0].view   = imageView;
        attachments.color[0].layout = imageView->pickLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
        ops.colorOps[0] = colorOp;
      } else {
        attachments.depth.view   = imageView;
        attachments.depth.layout = imageView->pickLayout(VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
        ops.depthOps = depthOp;
      }

      this->renderPassBindFramebuffer(
        m_device->createFramebuffer(attachments),
        ops, 1, &clearValue);
      this->renderPassUnbindFramebuffer();
    } else if (m_flags.test(DxvkContextFlag::GpRenderPassBound)) {
      // Clear the attachment in question. For color images,
      // the attachment index for the current subpass is
      // equal to the render pass attachment index.
      VkClearAttachment clearInfo;
      clearInfo.aspectMask      = clearAspects;
      clearInfo.colorAttachment = attachmentIndex;
      clearInfo.clearValue      = clearValue;

      m_cmd->cmdClearAttachments(
        1, &clearInfo, 1, &clearRect);
    } else {
      // Perform the clear when starting the render pass
      if (clearAspects & VK_IMAGE_ASPECT_COLOR_BIT)
        m_state.om.renderPassOps.colorOps[attachmentIndex] = colorOp;

      if (clearAspects & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))
        m_state.om.renderPassOps.depthOps = depthOp;

      m_state.om.clearValues[attachmentIndex] = clearValue;
      m_flags.set(DxvkContextFlag::GpClearRenderTargets);
    }
  }


  void DxvkContext::clearImageView(
    const Rc<DxvkImageView>& imageView,
    VkOffset3D offset,
    VkExtent3D extent,
    VkClearColorValue value) {
    this->spillRenderPass();
    this->unbindComputePipeline();

    // Query pipeline objects to use for this clear operation
    DxvkMetaClearPipeline pipeInfo = m_metaClear->getClearImagePipeline(
      imageView->type(), imageFormatInfo(imageView->info().format)->flags);

    // Create a descriptor set pointing to the view
    VkDescriptorSet descriptorSet =
      m_cmd->allocateDescriptorSet(pipeInfo.dsetLayout);

    VkDescriptorImageInfo viewInfo;
    viewInfo.sampler     = VK_NULL_HANDLE;
    viewInfo.imageView   = imageView->handle();
    viewInfo.imageLayout = imageView->imageInfo().layout;

    VkWriteDescriptorSet descriptorWrite;
    descriptorWrite.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptorWrite.pNext            = nullptr;
    descriptorWrite.dstSet           = descriptorSet;
    descriptorWrite.dstBinding       = 0;
    descriptorWrite.dstArrayElement  = 0;
    descriptorWrite.descriptorCount  = 1;
    descriptorWrite.descriptorType   = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
    descriptorWrite.pImageInfo       = &viewInfo;
    descriptorWrite.pBufferInfo      = nullptr;
    descriptorWrite.pTexelBufferView = nullptr;
    m_cmd->updateDescriptorSets(1, &descriptorWrite);

    // Prepare shader arguments
    DxvkMetaClearArgs pushArgs;
    pushArgs.clearValue = value;
    pushArgs.offset = offset;
    pushArgs.extent = extent;

    VkExtent3D workgroups = util::computeBlockCount(
      pushArgs.extent, pipeInfo.workgroupSize);

    if (imageView->type() == VK_IMAGE_VIEW_TYPE_1D_ARRAY)
      workgroups.height = imageView->subresources().layerCount;
    else if (imageView->type() == VK_IMAGE_VIEW_TYPE_2D_ARRAY)
      workgroups.depth = imageView->subresources().layerCount;

    m_cmd->cmdBindPipeline(
      VK_PIPELINE_BIND_POINT_COMPUTE,
      pipeInfo.pipeline);
    m_cmd->cmdBindDescriptorSet(
      VK_PIPELINE_BIND_POINT_COMPUTE,
      pipeInfo.pipeLayout, descriptorSet);
    m_cmd->cmdPushConstants(
      pipeInfo.pipeLayout,
      VK_SHADER_STAGE_COMPUTE_BIT,
      0, sizeof(pushArgs), &pushArgs);
    m_cmd->cmdDispatch(
      workgroups.width,
      workgroups.height,
      workgroups.depth);

    m_barriers.accessImage(
      imageView->image(),
      imageView->subresources(),
      imageView->imageInfo().layout,
      VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
      VK_ACCESS_SHADER_WRITE_BIT,
      imageView->imageInfo().layout,
      imageView->imageInfo().stages,
      imageView->imageInfo().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(imageView);
    m_cmd->trackResource(imageView->image());
  }
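
  // Design note: clearImageView and clearBufferView go through a compute
  // shader from DxvkMetaClearObjects rather than vkCmdClearColorImage, which
  // lets them clear an arbitrary sub-region of a storage image or a texel
  // buffer range instead of whole subresources.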


  void DxvkContext::copyBuffer(
    const Rc<DxvkBuffer>& dstBuffer,
    VkDeviceSize dstOffset,
    const Rc<DxvkBuffer>& srcBuffer,
    VkDeviceSize srcOffset,
    VkDeviceSize numBytes) {
    if (numBytes == 0)
      return;

    this->spillRenderPass();

    auto dstSlice = dstBuffer->subSlice(dstOffset, numBytes);
    auto srcSlice = srcBuffer->subSlice(srcOffset, numBytes);

    VkBufferCopy bufferRegion;
    bufferRegion.srcOffset = srcSlice.offset();
    bufferRegion.dstOffset = dstSlice.offset();
    bufferRegion.size      = dstSlice.length();

    m_cmd->cmdCopyBuffer(
      srcSlice.handle(),
      dstSlice.handle(),
      1, &bufferRegion);

    m_barriers.accessBuffer(srcSlice,
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_READ_BIT,
      srcBuffer->info().stages,
      srcBuffer->info().access);

    m_barriers.accessBuffer(dstSlice,
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT,
      dstBuffer->info().stages,
      dstBuffer->info().access);

    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(dstBuffer->resource());
    m_cmd->trackResource(srcBuffer->resource());
  }


  void DxvkContext::copyBufferToImage(
    const Rc<DxvkImage>& dstImage,
    VkImageSubresourceLayers dstSubresource,
    VkOffset3D dstOffset,
    VkExtent3D dstExtent,
    const Rc<DxvkBuffer>& srcBuffer,
    VkDeviceSize srcOffset,
    VkExtent2D srcExtent) {
    this->spillRenderPass();

    auto srcSlice = srcBuffer->subSlice(srcOffset, 0);

    VkImageSubresourceRange dstSubresourceRange = {
      dstSubresource.aspectMask,
      dstSubresource.mipLevel, 1,
      dstSubresource.baseArrayLayer,
      dstSubresource.layerCount };

    m_barriers.accessImage(
      dstImage, dstSubresourceRange,
      dstImage->mipLevelExtent(dstSubresource.mipLevel) == dstExtent
        ? VK_IMAGE_LAYOUT_UNDEFINED
        : dstImage->info().layout,
      dstImage->info().stages,
      dstImage->info().access,
      dstImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT);
    m_barriers.recordCommands(m_cmd);

    VkBufferImageCopy copyRegion;
    copyRegion.bufferOffset      = srcSlice.offset();
    copyRegion.bufferRowLength   = srcExtent.width;
    copyRegion.bufferImageHeight = srcExtent.height;
    copyRegion.imageSubresource  = dstSubresource;
    copyRegion.imageOffset       = dstOffset;
    copyRegion.imageExtent       = dstExtent;

    m_cmd->cmdCopyBufferToImage(
      srcSlice.handle(),
      dstImage->handle(),
      dstImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      1, &copyRegion);

    m_barriers.accessImage(
      dstImage, dstSubresourceRange,
      dstImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT,
      dstImage->info().layout,
      dstImage->info().stages,
      dstImage->info().access);
    m_barriers.accessBuffer(srcSlice,
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_READ_BIT,
      srcBuffer->info().stages,
      srcBuffer->info().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(dstImage);
    m_cmd->trackResource(srcSlice.resource());
  }
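
  // Note: in VkBufferImageCopy, bufferRowLength and bufferImageHeight are
  // measured in texels (or compressed blocks), not bytes, so srcExtent here
  // describes the layout of the source data in the buffer rather than a
  // byte pitch.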


  void DxvkContext::copyImage(
    const Rc<DxvkImage>& dstImage,
    VkImageSubresourceLayers dstSubresource,
    VkOffset3D dstOffset,
    const Rc<DxvkImage>& srcImage,
    VkImageSubresourceLayers srcSubresource,
    VkOffset3D srcOffset,
    VkExtent3D extent) {
    this->spillRenderPass();

    VkImageSubresourceRange dstSubresourceRange = {
      dstSubresource.aspectMask,
      dstSubresource.mipLevel, 1,
      dstSubresource.baseArrayLayer,
      dstSubresource.layerCount };

    VkImageSubresourceRange srcSubresourceRange = {
      srcSubresource.aspectMask,
      srcSubresource.mipLevel, 1,
      srcSubresource.baseArrayLayer,
      srcSubresource.layerCount };

    m_barriers.accessImage(
      dstImage, dstSubresourceRange,
      dstImage->mipLevelExtent(dstSubresource.mipLevel) == extent
        ? VK_IMAGE_LAYOUT_UNDEFINED
        : dstImage->info().layout,
      dstImage->info().stages,
      dstImage->info().access,
      dstImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT);
    m_barriers.accessImage(
      srcImage, srcSubresourceRange,
      srcImage->info().layout,
      srcImage->info().stages,
      srcImage->info().access,
      srcImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_READ_BIT);
    m_barriers.recordCommands(m_cmd);

    if (dstSubresource.aspectMask == srcSubresource.aspectMask) {
      VkImageCopy imageRegion;
      imageRegion.srcSubresource = srcSubresource;
      imageRegion.srcOffset      = srcOffset;
      imageRegion.dstSubresource = dstSubresource;
      imageRegion.dstOffset      = dstOffset;
      imageRegion.extent         = extent;

      m_cmd->cmdCopyImage(
        srcImage->handle(), srcImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
        dstImage->handle(), dstImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
        1, &imageRegion);
    } else {
      const VkDeviceSize transferBufferSize = std::max(
        util::computeImageDataSize(dstImage->info().format, extent),
        util::computeImageDataSize(srcImage->info().format, extent));

      // TODO optimize away buffer creation
      DxvkBufferCreateInfo tmpBufferInfo;
      tmpBufferInfo.size   = transferBufferSize;
      tmpBufferInfo.usage  = VK_BUFFER_USAGE_TRANSFER_SRC_BIT
                           | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
      tmpBufferInfo.stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
      tmpBufferInfo.access = VK_ACCESS_TRANSFER_READ_BIT
                           | VK_ACCESS_TRANSFER_WRITE_BIT;

      Rc<DxvkBuffer> tmpBuffer = m_device->createBuffer(
        tmpBufferInfo, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);

      DxvkPhysicalBufferSlice tmpSlice = tmpBuffer->slice();

      VkBufferImageCopy bufferImageCopy;
      bufferImageCopy.bufferOffset      = tmpSlice.offset();
      bufferImageCopy.bufferRowLength   = 0;
      bufferImageCopy.bufferImageHeight = 0;
      bufferImageCopy.imageSubresource  = srcSubresource;
      bufferImageCopy.imageOffset       = srcOffset;
      bufferImageCopy.imageExtent       = extent;

      m_cmd->cmdCopyImageToBuffer(
        srcImage->handle(),
        srcImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
        tmpSlice.handle(), 1, &bufferImageCopy);

      m_barriers.accessBuffer(tmpSlice,
        VK_PIPELINE_STAGE_TRANSFER_BIT,
        VK_ACCESS_TRANSFER_WRITE_BIT,
        VK_PIPELINE_STAGE_TRANSFER_BIT,
        VK_ACCESS_TRANSFER_READ_BIT);
      m_barriers.recordCommands(m_cmd);

      bufferImageCopy.imageSubresource = dstSubresource;
      bufferImageCopy.imageOffset      = dstOffset;

      m_cmd->cmdCopyBufferToImage(tmpSlice.handle(),
        dstImage->handle(),
        dstImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
        1, &bufferImageCopy);

      m_barriers.accessBuffer(tmpSlice,
        VK_PIPELINE_STAGE_TRANSFER_BIT,
        VK_ACCESS_TRANSFER_READ_BIT,
        tmpBuffer->info().stages,
        tmpBuffer->info().access);

      m_cmd->trackResource(tmpSlice.resource());
    }

    m_barriers.accessImage(
      dstImage, dstSubresourceRange,
      dstImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT,
      dstImage->info().layout,
      dstImage->info().stages,
      dstImage->info().access);
    m_barriers.accessImage(
      srcImage, srcSubresourceRange,
      srcImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_READ_BIT,
      srcImage->info().layout,
      srcImage->info().stages,
      srcImage->info().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(dstImage);
    m_cmd->trackResource(srcImage);
  }
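
  // Design note: when the source and destination aspect masks differ (e.g.
  // copying between a depth and a color image), vkCmdCopyImage cannot be
  // used directly, so the copy above is routed through a temporary transfer
  // buffer: image -> buffer -> image, with a barrier between the two copies.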


  void DxvkContext::copyImageToBuffer(
    const Rc<DxvkBuffer>& dstBuffer,
    VkDeviceSize dstOffset,
    VkExtent2D dstExtent,
    const Rc<DxvkImage>& srcImage,
    VkImageSubresourceLayers srcSubresource,
    VkOffset3D srcOffset,
    VkExtent3D srcExtent) {
    this->spillRenderPass();

    auto dstSlice = dstBuffer->subSlice(dstOffset, 0);

    VkImageSubresourceRange srcSubresourceRange = {
      srcSubresource.aspectMask,
      srcSubresource.mipLevel, 1,
      srcSubresource.baseArrayLayer,
      srcSubresource.layerCount };

    m_barriers.accessImage(
      srcImage, srcSubresourceRange,
      srcImage->info().layout,
      srcImage->info().stages,
      srcImage->info().access,
      srcImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_READ_BIT);
    m_barriers.recordCommands(m_cmd);

    VkBufferImageCopy copyRegion;
    copyRegion.bufferOffset      = dstSlice.offset();
    copyRegion.bufferRowLength   = dstExtent.width;
    copyRegion.bufferImageHeight = dstExtent.height;
    copyRegion.imageSubresource  = srcSubresource;
    copyRegion.imageOffset       = srcOffset;
    copyRegion.imageExtent       = srcExtent;

    m_cmd->cmdCopyImageToBuffer(
      srcImage->handle(),
      srcImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
      dstSlice.handle(),
      1, &copyRegion);

    m_barriers.accessImage(
      srcImage, srcSubresourceRange,
      srcImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_READ_BIT,
      srcImage->info().layout,
      srcImage->info().stages,
      srcImage->info().access);
    m_barriers.accessBuffer(dstSlice,
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT,
      dstBuffer->info().stages,
      dstBuffer->info().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(srcImage);
    m_cmd->trackResource(dstSlice.resource());
  }


  void DxvkContext::dispatch(
    uint32_t x,
    uint32_t y,
    uint32_t z) {
    this->commitComputeState();

    if (this->validateComputeState()) {
      m_cmd->cmdDispatch(x, y, z);

      this->commitComputeBarriers();
    }

    m_cmd->addStatCtr(DxvkStatCounter::CmdDispatchCalls, 1);
  }


  void DxvkContext::dispatchIndirect(
    const DxvkBufferSlice& buffer) {
    this->commitComputeState();

    auto physicalSlice = buffer.physicalSlice();

    if (this->validateComputeState()) {
      m_cmd->cmdDispatchIndirect(
        physicalSlice.handle(),
        physicalSlice.offset());

      this->commitComputeBarriers();
    }

    m_cmd->addStatCtr(DxvkStatCounter::CmdDispatchCalls, 1);
  }


  void DxvkContext::draw(
    uint32_t vertexCount,
    uint32_t instanceCount,
    uint32_t firstVertex,
    uint32_t firstInstance) {
    this->commitGraphicsState();

    if (this->validateGraphicsState()) {
      m_cmd->cmdDraw(
        vertexCount, instanceCount,
        firstVertex, firstInstance);
    }

    m_cmd->addStatCtr(DxvkStatCounter::CmdDrawCalls, 1);
  }


  void DxvkContext::drawIndirect(
    const DxvkBufferSlice& buffer,
    uint32_t count,
    uint32_t stride) {
    this->commitGraphicsState();

    if (this->validateGraphicsState()) {
      auto physicalSlice = buffer.physicalSlice();

      m_cmd->cmdDrawIndirect(
        physicalSlice.handle(),
        physicalSlice.offset(),
        count, stride);
    }

    m_cmd->addStatCtr(DxvkStatCounter::CmdDrawCalls, 1);
  }


  void DxvkContext::drawIndexed(
    uint32_t indexCount,
    uint32_t instanceCount,
    uint32_t firstIndex,
    uint32_t vertexOffset,
    uint32_t firstInstance) {
    this->commitGraphicsState();

    if (this->validateGraphicsState()) {
      m_cmd->cmdDrawIndexed(
        indexCount, instanceCount,
        firstIndex, vertexOffset,
        firstInstance);
    }

    m_cmd->addStatCtr(DxvkStatCounter::CmdDrawCalls, 1);
  }


  void DxvkContext::drawIndexedIndirect(
    const DxvkBufferSlice& buffer,
    uint32_t count,
    uint32_t stride) {
    this->commitGraphicsState();

    if (this->validateGraphicsState()) {
      auto physicalSlice = buffer.physicalSlice();

      m_cmd->cmdDrawIndexedIndirect(
        physicalSlice.handle(),
        physicalSlice.offset(),
        count, stride);
    }

    m_cmd->addStatCtr(DxvkStatCounter::CmdDrawCalls, 1);
  }


  void DxvkContext::initImage(
    const Rc<DxvkImage>& image,
    const VkImageSubresourceRange& subresources) {
    m_barriers.accessImage(image, subresources,
      VK_IMAGE_LAYOUT_UNDEFINED,
      VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
      image->info().layout,
      image->info().stages,
      image->info().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(image);
  }


  void DxvkContext::generateMipmaps(
    const Rc<DxvkImage>& image,
    const VkImageSubresourceRange& subresources) {
    if (subresources.levelCount <= 1)
      return;

    this->spillRenderPass();

    // The top-most level will only be read. We can
    // discard the contents of all the lower levels
    // since we're going to overwrite them anyway.
    m_barriers.accessImage(image,
      VkImageSubresourceRange {
        subresources.aspectMask,
        subresources.baseMipLevel, 1,
        subresources.baseArrayLayer,
        subresources.layerCount },
      image->info().layout,
      image->info().stages,
      image->info().access,
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_READ_BIT);

    m_barriers.accessImage(image,
      VkImageSubresourceRange {
        subresources.aspectMask,
        subresources.baseMipLevel + 1,
        subresources.levelCount - 1,
        subresources.baseArrayLayer,
        subresources.layerCount },
      VK_IMAGE_LAYOUT_UNDEFINED,
      image->info().stages,
      image->info().access,
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT);

    m_barriers.recordCommands(m_cmd);

    // Generate each individual mip level with a blit
    for (uint32_t i = 1; i < subresources.levelCount; i++) {
      const uint32_t mip = subresources.baseMipLevel + i;

      const VkExtent3D srcExtent = image->mipLevelExtent(mip - 1);
      const VkExtent3D dstExtent = image->mipLevelExtent(mip);

      VkImageBlit region;
      region.srcSubresource = VkImageSubresourceLayers {
        subresources.aspectMask, mip - 1,
        subresources.baseArrayLayer,
        subresources.layerCount };
      region.srcOffsets[0]   = VkOffset3D { 0, 0, 0 };
      region.srcOffsets[1].x = srcExtent.width;
      region.srcOffsets[1].y = srcExtent.height;
      region.srcOffsets[1].z = srcExtent.depth;

      region.dstSubresource = VkImageSubresourceLayers {
        subresources.aspectMask, mip,
        subresources.baseArrayLayer,
        subresources.layerCount };
      region.dstOffsets[0]   = VkOffset3D { 0, 0, 0 };
      region.dstOffsets[1].x = dstExtent.width;
      region.dstOffsets[1].y = dstExtent.height;
      region.dstOffsets[1].z = dstExtent.depth;

      m_cmd->cmdBlitImage(
        image->handle(), image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
        image->handle(), image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
        1, &region, VK_FILTER_LINEAR);

      if (i + 1 < subresources.levelCount) {
        m_barriers.accessImage(image,
          VkImageSubresourceRange {
            subresources.aspectMask, mip, 1,
            subresources.baseArrayLayer,
            subresources.layerCount },
          image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
          VK_PIPELINE_STAGE_TRANSFER_BIT,
          VK_ACCESS_TRANSFER_WRITE_BIT,
          image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
          VK_PIPELINE_STAGE_TRANSFER_BIT,
          VK_ACCESS_TRANSFER_READ_BIT);
        m_barriers.recordCommands(m_cmd);
      }
    }

    // Transform mip levels back into their original layout.
    // The last mip level is still in TRANSFER_DST_OPTIMAL.
    m_barriers.accessImage(image,
      VkImageSubresourceRange {
        subresources.aspectMask,
        subresources.baseMipLevel,
        subresources.levelCount - 1,
        subresources.baseArrayLayer,
        subresources.layerCount },
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_READ_BIT,
      image->info().layout,
      image->info().stages,
      image->info().access);

    m_barriers.accessImage(image,
      VkImageSubresourceRange {
        subresources.aspectMask,
        subresources.baseMipLevel
          + subresources.levelCount - 1, 1,
        subresources.baseArrayLayer,
        subresources.layerCount },
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT,
      image->info().layout,
      image->info().stages,
      image->info().access);

    m_barriers.recordCommands(m_cmd);
  }
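
  // Illustrative only: generating a full mip chain after uploading level 0
  // of a hypothetical color image (the level and layer counts below are
  // placeholders supplied by the caller):
  //
  //   VkImageSubresourceRange range = { VK_IMAGE_ASPECT_COLOR_BIT,
  //     0, mipLevelCount, 0, layerCount };
  //   ctx->generateMipmaps(image, range);
  //
  // Each level is produced by a linear blit from the previous one, so every
  // intermediate level is written in TRANSFER_DST and then read in
  // TRANSFER_SRC before the final layout transitions above.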


  void DxvkContext::invalidateBuffer(
    const Rc<DxvkBuffer>& buffer,
    const DxvkPhysicalBufferSlice& slice) {
    // Allocate new backing resource
    DxvkPhysicalBufferSlice prevSlice = buffer->rename(slice);
    m_cmd->freePhysicalBufferSlice(buffer, prevSlice);

    // We also need to update all bindings that the buffer
    // may be bound to either directly or through views.
    const VkBufferUsageFlags usage = buffer->info().usage;

    if (usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT)
      m_flags.set(DxvkContextFlag::GpDirtyIndexBuffer);

    if (usage & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT)
      m_flags.set(DxvkContextFlag::GpDirtyVertexBuffers);

    if (usage & (VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
               | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT
               | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT
               | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT))
      m_flags.set(DxvkContextFlag::GpDirtyResources,
                  DxvkContextFlag::CpDirtyResources);
  }
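
  // Design note: "invalidation" renames the buffer to a fresh backing slice
  // instead of synchronizing with the GPU, which is how D3D11-style DISCARD
  // maps can be served without a stall. Any binding that referenced the old
  // slice is flagged dirty above so the new slice is rebound before the next
  // draw or dispatch.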


  void DxvkContext::resolveImage(
    const Rc<DxvkImage>& dstImage,
    const VkImageSubresourceLayers& dstSubresources,
    const Rc<DxvkImage>& srcImage,
    const VkImageSubresourceLayers& srcSubresources,
    VkFormat format) {
    this->spillRenderPass();

    if (format == VK_FORMAT_UNDEFINED)
      format = srcImage->info().format;

    if (dstImage->info().format == format
     && srcImage->info().format == format) {
      // Use the built-in Vulkan resolve function if the image
      // formats both match the format of the resolve operation.
      VkImageSubresourceRange dstSubresourceRange = {
        dstSubresources.aspectMask,
        dstSubresources.mipLevel, 1,
        dstSubresources.baseArrayLayer,
        dstSubresources.layerCount };

      VkImageSubresourceRange srcSubresourceRange = {
        srcSubresources.aspectMask,
        srcSubresources.mipLevel, 1,
        srcSubresources.baseArrayLayer,
        srcSubresources.layerCount };

      // We only support resolving to the entire image
      // area, so we might as well discard its contents
      m_barriers.accessImage(
        dstImage, dstSubresourceRange,
        VK_IMAGE_LAYOUT_UNDEFINED,
        dstImage->info().stages,
        dstImage->info().access,
        dstImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
        VK_PIPELINE_STAGE_TRANSFER_BIT,
        VK_ACCESS_TRANSFER_WRITE_BIT);
      m_barriers.accessImage(
        srcImage, srcSubresourceRange,
        srcImage->info().layout,
        srcImage->info().stages,
        srcImage->info().access,
        srcImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
        VK_PIPELINE_STAGE_TRANSFER_BIT,
        VK_ACCESS_TRANSFER_READ_BIT);
      m_barriers.recordCommands(m_cmd);

      VkImageResolve imageRegion;
      imageRegion.srcSubresource = srcSubresources;
      imageRegion.srcOffset      = VkOffset3D { 0, 0, 0 };
      imageRegion.dstSubresource = dstSubresources;
      imageRegion.dstOffset      = VkOffset3D { 0, 0, 0 };
      imageRegion.extent         = srcImage->mipLevelExtent(srcSubresources.mipLevel);

      m_cmd->cmdResolveImage(
        srcImage->handle(), srcImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
        dstImage->handle(), dstImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
        1, &imageRegion);

      m_barriers.accessImage(
        dstImage, dstSubresourceRange,
        dstImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
        VK_PIPELINE_STAGE_TRANSFER_BIT,
        VK_ACCESS_TRANSFER_WRITE_BIT,
        dstImage->info().layout,
        dstImage->info().stages,
        dstImage->info().access);
      m_barriers.accessImage(
        srcImage, srcSubresourceRange,
        srcImage->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
        VK_PIPELINE_STAGE_TRANSFER_BIT,
        VK_ACCESS_TRANSFER_READ_BIT,
        srcImage->info().layout,
        srcImage->info().stages,
        srcImage->info().access);
      m_barriers.recordCommands(m_cmd);
    } else {
      // The trick here is to submit an empty render pass which
      // performs the resolve op on properly typed image views.
      const Rc<DxvkMetaResolveFramebuffer> fb =
        new DxvkMetaResolveFramebuffer(m_device->vkd(),
          dstImage, dstSubresources,
          srcImage, srcSubresources, format);

      VkRenderPassBeginInfo info;
      info.sType           = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
      info.pNext           = nullptr;
      info.renderPass      = fb->renderPass();
      info.framebuffer     = fb->framebuffer();
      info.renderArea      = VkRect2D { { 0, 0 }, {
        dstImage->info().extent.width,
        dstImage->info().extent.height } };
      info.clearValueCount = 0;
      info.pClearValues    = nullptr;

      m_cmd->cmdBeginRenderPass(&info, VK_SUBPASS_CONTENTS_INLINE);
      m_cmd->cmdEndRenderPass();

      m_cmd->trackResource(fb);
    }

    m_cmd->trackResource(srcImage);
    m_cmd->trackResource(dstImage);
  }


  void DxvkContext::transformImage(
    const Rc<DxvkImage>& dstImage,
    const VkImageSubresourceRange& dstSubresources,
    VkImageLayout srcLayout,
    VkImageLayout dstLayout) {
    m_barriers.accessImage(
      dstImage, dstSubresources,
      srcLayout,
      dstImage->info().stages,
      dstImage->info().access,
      dstLayout,
      dstImage->info().stages,
      dstImage->info().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(dstImage);
  }


  void DxvkContext::updateBuffer(
    const Rc<DxvkBuffer>& buffer,
    VkDeviceSize offset,
    VkDeviceSize size,
    const void* data) {
    this->spillRenderPass();

    // Vulkan specifies that small amounts of data (up to 64kB) can
    // be copied to a buffer directly if the size is a multiple of
    // four. Anything else must be copied through a staging buffer.
    // We'll limit the size to 4kB in order to keep command buffers
    // reasonably small, since we do not know how much data apps may upload.
    auto physicalSlice = buffer->subSlice(offset, size);

    if ((size <= 4096) && ((size & 0x3) == 0) && ((offset & 0x3) == 0)) {
      m_cmd->cmdUpdateBuffer(
        physicalSlice.handle(),
        physicalSlice.offset(),
        physicalSlice.length(),
        data);
    } else {
      auto slice = m_cmd->stagedAlloc(size);
      std::memcpy(slice.mapPtr, data, size);

      m_cmd->stagedBufferCopy(
        physicalSlice.handle(),
        physicalSlice.offset(),
        physicalSlice.length(),
        slice);
    }

    m_barriers.accessBuffer(
      physicalSlice,
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT,
      buffer->info().stages,
      buffer->info().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(buffer->resource());
  }
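
  // Illustrative only (hypothetical constant buffer): uploading a small
  // uniform block takes the vkCmdUpdateBuffer path as long as the size stays
  // within the 4 kB limit above and offset/size are 4-byte aligned:
  //
  //   struct Constants { float mvp[16]; };
  //   Constants consts = { /* ... */ };
  //   ctx->updateBuffer(constantBuffer, 0, sizeof(consts), &consts);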


  void DxvkContext::updateImage(
    const Rc<DxvkImage>& image,
    const VkImageSubresourceLayers& subresources,
    VkOffset3D imageOffset,
    VkExtent3D imageExtent,
    const void* data,
    VkDeviceSize pitchPerRow,
    VkDeviceSize pitchPerLayer) {
    this->spillRenderPass();

    // Upload data through a staging buffer. Special care needs to
    // be taken when dealing with compressed image formats: Rather
    // than copying pixels, we'll be copying blocks of pixels.
    const DxvkFormatInfo* formatInfo = image->formatInfo();

    // Align image extent to a full block. This is necessary in
    // case the image size is not a multiple of the block size.
    VkExtent3D elementCount = util::computeBlockCount(
      imageExtent, formatInfo->blockSize);
    elementCount.depth *= subresources.layerCount;

    // Allocate staging buffer memory for the image data. The
    // pixels or blocks will be tightly packed within the buffer.
    const DxvkStagingBufferSlice slice = m_cmd->stagedAlloc(
      formatInfo->elementSize * util::flattenImageExtent(elementCount));

    auto dstData = reinterpret_cast<char*>(slice.mapPtr);
    auto srcData = reinterpret_cast<const char*>(data);

    util::packImageData(dstData, srcData,
      elementCount, formatInfo->elementSize,
      pitchPerRow, pitchPerLayer);

    // Prepare the image layout. If the given extent covers
    // the entire image, we may discard its previous contents.
    VkImageSubresourceRange subresourceRange;
    subresourceRange.aspectMask     = subresources.aspectMask;
    subresourceRange.baseMipLevel   = subresources.mipLevel;
    subresourceRange.levelCount     = 1;
    subresourceRange.baseArrayLayer = subresources.baseArrayLayer;
    subresourceRange.layerCount     = subresources.layerCount;

    m_barriers.accessImage(
      image, subresourceRange,
      image->mipLevelExtent(subresources.mipLevel) == imageExtent
        ? VK_IMAGE_LAYOUT_UNDEFINED
        : image->info().layout,
      image->info().stages,
      image->info().access,
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT);
    m_barriers.recordCommands(m_cmd);

    // Copy contents of the staging buffer into the image.
    // Since our source data is tightly packed, we do not
    // need to specify any strides.
    VkBufferImageCopy region;
    region.bufferOffset      = slice.offset;
    region.bufferRowLength   = 0;
    region.bufferImageHeight = 0;
    region.imageSubresource  = subresources;
    region.imageOffset       = imageOffset;
    region.imageExtent       = imageExtent;

    m_cmd->stagedBufferImageCopy(image->handle(),
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      region, slice);

    // Transition image back into its optimal layout
    m_barriers.accessImage(
      image, subresourceRange,
      image->pickLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL),
      VK_PIPELINE_STAGE_TRANSFER_BIT,
      VK_ACCESS_TRANSFER_WRITE_BIT,
      image->info().layout,
      image->info().stages,
      image->info().access);
    m_barriers.recordCommands(m_cmd);

    m_cmd->trackResource(image);
  }
|
|
|
|
|
|
|
|
|
2017-11-20 15:35:29 +01:00
|
|
|
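  // Viewports and scissor rectangles are dynamic state and are written
  // to the command buffer right away; only a change in the viewport
  // count marks the graphics pipeline state as dirty.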
  void DxvkContext::setViewports(
          uint32_t          viewportCount,
    const VkViewport*       viewports,
    const VkRect2D*         scissorRects) {
    if (m_state.gp.state.rsViewportCount != viewportCount) {
      m_state.gp.state.rsViewportCount = viewportCount;
      m_flags.set(DxvkContextFlag::GpDirtyPipelineState);
    }
    
    for (uint32_t i = 0; i < viewportCount; i++) {
      m_state.vp.viewports[i]    = viewports[i];
      m_state.vp.scissorRects[i] = scissorRects[i];
      
      // Vulkan viewports are not allowed to have a width or
      // height of zero, so we fall back to a dummy viewport.
      if (viewports[i].width == 0.0f || viewports[i].height == 0.0f) {
        m_state.vp.viewports[i] = VkViewport {
          0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f };
      }
    }
    
    m_cmd->cmdSetViewport(0, viewportCount, m_state.vp.viewports.data());
    m_cmd->cmdSetScissor (0, viewportCount, m_state.vp.scissorRects.data());
  }
  
  
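  // The state setters below update the cached pipeline state and mark
  // it as dirty where the change affects pipeline compilation. Purely
  // dynamic state, such as blend constants and the stencil reference,
  // is recorded into the command buffer immediately.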
  void DxvkContext::setBlendConstants(
    const DxvkBlendConstants& blendConstants) {
    m_state.om.blendConstants = blendConstants;
    m_cmd->cmdSetBlendConstants(&blendConstants.r);
  }
  
  
  void DxvkContext::setStencilReference(
    const uint32_t reference) {
    m_state.om.stencilReference = reference;
    
    m_cmd->cmdSetStencilReference(
      VK_STENCIL_FRONT_AND_BACK,
      reference);
  }
  
  
  void DxvkContext::setInputAssemblyState(const DxvkInputAssemblyState& ia) {
    m_state.gp.state.iaPrimitiveTopology = ia.primitiveTopology;
    m_state.gp.state.iaPrimitiveRestart  = ia.primitiveRestart;
    m_state.gp.state.iaPatchVertexCount  = ia.patchVertexCount;
    
    m_flags.set(DxvkContextFlag::GpDirtyPipelineState);
  }
  
  
  void DxvkContext::setInputLayout(
          uint32_t             attributeCount,
    const DxvkVertexAttribute* attributes,
          uint32_t             bindingCount,
    const DxvkVertexBinding*   bindings) {
    m_flags.set(
      DxvkContextFlag::GpDirtyPipelineState,
      DxvkContextFlag::GpDirtyVertexBuffers);
    
    for (uint32_t i = 0; i < attributeCount; i++) {
      m_state.gp.state.ilAttributes[i].location = attributes[i].location;
      m_state.gp.state.ilAttributes[i].binding  = attributes[i].binding;
      m_state.gp.state.ilAttributes[i].format   = attributes[i].format;
      m_state.gp.state.ilAttributes[i].offset   = attributes[i].offset;
    }
    
    for (uint32_t i = attributeCount; i < m_state.gp.state.ilAttributeCount; i++)
      m_state.gp.state.ilAttributes[i] = VkVertexInputAttributeDescription();
    
    for (uint32_t i = 0; i < bindingCount; i++) {
      m_state.gp.state.ilBindings[i].binding   = bindings[i].binding;
      m_state.gp.state.ilBindings[i].inputRate = bindings[i].inputRate;
      m_state.gp.state.ilDivisors[i]           = bindings[i].fetchRate;
    }
    
    for (uint32_t i = bindingCount; i < m_state.gp.state.ilBindingCount; i++)
      m_state.gp.state.ilBindings[i] = VkVertexInputBindingDescription();
    
    m_state.gp.state.ilAttributeCount = attributeCount;
    m_state.gp.state.ilBindingCount   = bindingCount;
  }
  
  
  void DxvkContext::setRasterizerState(const DxvkRasterizerState& rs) {
    m_state.gp.state.rsEnableDepthClamp  = rs.enableDepthClamp;
    m_state.gp.state.rsEnableDiscard     = rs.enableDiscard;
    m_state.gp.state.rsPolygonMode       = rs.polygonMode;
    m_state.gp.state.rsCullMode          = rs.cullMode;
    m_state.gp.state.rsFrontFace         = rs.frontFace;
    m_state.gp.state.rsDepthBiasEnable   = rs.depthBiasEnable;
    m_state.gp.state.rsDepthBiasConstant = rs.depthBiasConstant;
    m_state.gp.state.rsDepthBiasClamp    = rs.depthBiasClamp;
    m_state.gp.state.rsDepthBiasSlope    = rs.depthBiasSlope;
    
    m_flags.set(DxvkContextFlag::GpDirtyPipelineState);
  }
  
  
  void DxvkContext::setMultisampleState(const DxvkMultisampleState& ms) {
    m_state.gp.state.msSampleMask            = ms.sampleMask;
    m_state.gp.state.msEnableAlphaToCoverage = ms.enableAlphaToCoverage;
    m_state.gp.state.msEnableAlphaToOne      = ms.enableAlphaToOne;
    
    m_flags.set(DxvkContextFlag::GpDirtyPipelineState);
  }
  
  
  void DxvkContext::setDepthStencilState(const DxvkDepthStencilState& ds) {
    m_state.gp.state.dsEnableDepthTest   = ds.enableDepthTest;
    m_state.gp.state.dsEnableDepthWrite  = ds.enableDepthWrite;
    m_state.gp.state.dsEnableDepthBounds = ds.enableDepthBounds;
    m_state.gp.state.dsEnableStencilTest = ds.enableStencilTest;
    m_state.gp.state.dsDepthCompareOp    = ds.depthCompareOp;
    m_state.gp.state.dsStencilOpFront    = ds.stencilOpFront;
    m_state.gp.state.dsStencilOpBack     = ds.stencilOpBack;
    m_state.gp.state.dsDepthBoundsMin    = ds.depthBoundsMin;
    m_state.gp.state.dsDepthBoundsMax    = ds.depthBoundsMax;
    
    m_flags.set(DxvkContextFlag::GpDirtyPipelineState);
  }
  
  
  void DxvkContext::setLogicOpState(const DxvkLogicOpState& lo) {
    m_state.gp.state.omEnableLogicOp = lo.enableLogicOp;
    m_state.gp.state.omLogicOp       = lo.logicOp;
    
    m_flags.set(DxvkContextFlag::GpDirtyPipelineState);
  }
  
  
  void DxvkContext::setBlendMode(
          uint32_t        attachment,
    const DxvkBlendMode&  blendMode) {
    m_state.gp.state.omBlendAttachments[attachment].blendEnable         = blendMode.enableBlending;
    m_state.gp.state.omBlendAttachments[attachment].srcColorBlendFactor = blendMode.colorSrcFactor;
    m_state.gp.state.omBlendAttachments[attachment].dstColorBlendFactor = blendMode.colorDstFactor;
    m_state.gp.state.omBlendAttachments[attachment].colorBlendOp        = blendMode.colorBlendOp;
    m_state.gp.state.omBlendAttachments[attachment].srcAlphaBlendFactor = blendMode.alphaSrcFactor;
    m_state.gp.state.omBlendAttachments[attachment].dstAlphaBlendFactor = blendMode.alphaDstFactor;
    m_state.gp.state.omBlendAttachments[attachment].alphaBlendOp        = blendMode.alphaBlendOp;
    m_state.gp.state.omBlendAttachments[attachment].colorWriteMask      = blendMode.writeMask;
    
    m_flags.set(DxvkContextFlag::GpDirtyPipelineState);
  }
  
  
  void DxvkContext::signalEvent(const DxvkEventRevision& event) {
    m_cmd->trackEvent(event);
  }
  
  
  void DxvkContext::writeTimestamp(const DxvkQueryRevision& query) {
    DxvkQueryHandle handle = this->allocQuery(query);
    
    m_cmd->cmdWriteTimestamp(
      VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
      handle.queryPool, handle.queryId);
    
    query.query->endRecording(query.revision);
  }
  
  
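  // Render pass instances are managed lazily: startRenderPass only
  // begins a pass once a framebuffer has been bound, and
  // spillRenderPass ends it again before work that must not be
  // recorded inside a render pass, such as transfers or dispatches.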
  void DxvkContext::startRenderPass() {
    if (!m_flags.test(DxvkContextFlag::GpRenderPassBound)
     && (m_state.om.framebuffer != nullptr)) {
      m_flags.set(DxvkContextFlag::GpRenderPassBound);
      m_flags.clr(DxvkContextFlag::GpClearRenderTargets);
      
      this->renderPassBindFramebuffer(
        m_state.om.framebuffer,
        m_state.om.renderPassOps,
        m_state.om.clearValues.size(),
        m_state.om.clearValues.data());
      
      // Don't discard image contents if we have
      // to spill the current render pass
      this->resetRenderPassOps(
        m_state.om.renderTargets,
        m_state.om.renderPassOps);
    }
  }
  
  
  void DxvkContext::spillRenderPass() {
    if (m_flags.test(DxvkContextFlag::GpClearRenderTargets))
      this->startRenderPass();
    
    if (m_flags.test(DxvkContextFlag::GpRenderPassBound)) {
      m_flags.clr(DxvkContextFlag::GpRenderPassBound);
      this->renderPassUnbindFramebuffer();
    }
  }
  
  
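  // Begins the actual Vulkan render pass instance for the given
  // framebuffer, using a render pass object compatible with the
  // requested load/store ops and the provided clear values.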
  void DxvkContext::renderPassBindFramebuffer(
    const Rc<DxvkFramebuffer>&  framebuffer,
    const DxvkRenderPassOps&    ops,
          uint32_t              clearValueCount,
    const VkClearValue*         clearValues) {
    const DxvkFramebufferSize fbSize = framebuffer->size();
    
    VkRect2D renderArea;
    renderArea.offset = VkOffset2D { 0, 0 };
    renderArea.extent = VkExtent2D { fbSize.width, fbSize.height };
    
    VkRenderPassBeginInfo info;
    info.sType           = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    info.pNext           = nullptr;
    info.renderPass      = framebuffer->getRenderPassHandle(ops);
    info.framebuffer     = framebuffer->handle();
    info.renderArea      = renderArea;
    info.clearValueCount = clearValueCount;
    info.pClearValues    = clearValues;
    
    m_cmd->cmdBeginRenderPass(&info,
      VK_SUBPASS_CONTENTS_INLINE);
    m_cmd->trackResource(framebuffer);
    m_cmd->addStatCtr(DxvkStatCounter::CmdRenderPassCount, 1);
  }
  
  
  void DxvkContext::renderPassUnbindFramebuffer() {
    m_cmd->cmdEndRenderPass();
  }
  
  
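  // Resets the attachment ops to plain LOAD/STORE so that image
  // contents are preserved if the render pass is spilled and
  // resumed later.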
  void DxvkContext::resetRenderPassOps(
    const DxvkRenderTargets&  renderTargets,
          DxvkRenderPassOps&  renderPassOps) {
    renderPassOps.depthOps = renderTargets.depth.view != nullptr
      ? DxvkDepthAttachmentOps {
          VK_ATTACHMENT_LOAD_OP_LOAD,
          VK_ATTACHMENT_LOAD_OP_LOAD,
          renderTargets.depth.view->imageInfo().layout,
          VK_ATTACHMENT_STORE_OP_STORE,
          VK_ATTACHMENT_STORE_OP_STORE,
          renderTargets.depth.view->imageInfo().layout }
      : DxvkDepthAttachmentOps { };
    
    for (uint32_t i = 0; i < MaxNumRenderTargets; i++) {
      renderPassOps.colorOps[i] = renderTargets.color[i].view != nullptr
        ? DxvkColorAttachmentOps {
            VK_ATTACHMENT_LOAD_OP_LOAD,
            renderTargets.color[i].view->imageInfo().layout,
            VK_ATTACHMENT_STORE_OP_STORE,
            renderTargets.color[i].view->imageInfo().layout }
        : DxvkColorAttachmentOps { };
    }
    
    // TODO provide a sane alternative for this
    if (renderPassOps.colorOps[0].loadLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
      renderPassOps.colorOps[0].loadOp     = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
      renderPassOps.colorOps[0].loadLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    }
  }
  
  
  void DxvkContext::unbindComputePipeline() {
    m_flags.set(
      DxvkContextFlag::CpDirtyPipeline,
      DxvkContextFlag::CpDirtyPipelineState,
      DxvkContextFlag::CpDirtyResources);
    
    m_cpActivePipeline = VK_NULL_HANDLE;
  }
  
  
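  // Pipeline updates happen in two steps: update*Pipeline looks up
  // the pipeline object for the currently bound shaders, while
  // update*PipelineState retrieves and binds the Vulkan pipeline
  // handle for the current pipeline state vector.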
  void DxvkContext::updateComputePipeline() {
    if (m_flags.test(DxvkContextFlag::CpDirtyPipeline)) {
      m_flags.clr(DxvkContextFlag::CpDirtyPipeline);
      
      m_state.cp.state.bsBindingState.clear();
      m_state.cp.pipeline = m_pipeMgr->createComputePipeline(
        m_pipeCache, m_state.cp.cs.shader);
      
      if (m_state.cp.pipeline != nullptr)
        m_cmd->trackResource(m_state.cp.pipeline);
    }
  }
  
  
  void DxvkContext::updateComputePipelineState() {
    if (m_flags.test(DxvkContextFlag::CpDirtyPipelineState)) {
      m_flags.clr(DxvkContextFlag::CpDirtyPipelineState);
      
      m_cpActivePipeline = m_state.cp.pipeline != nullptr
        ? m_state.cp.pipeline->getPipelineHandle(m_state.cp.state, m_cmd->statCounters())
        : VK_NULL_HANDLE;
      
      if (m_cpActivePipeline != VK_NULL_HANDLE) {
        m_cmd->cmdBindPipeline(
          VK_PIPELINE_BIND_POINT_COMPUTE,
          m_cpActivePipeline);
      }
    }
  }
  
  
  void DxvkContext::updateGraphicsPipeline() {
    if (m_flags.test(DxvkContextFlag::GpDirtyPipeline)) {
      m_flags.clr(DxvkContextFlag::GpDirtyPipeline);
      
      m_state.gp.state.bsBindingState.clear();
      m_state.gp.pipeline = m_pipeMgr->createGraphicsPipeline(
        m_pipeCache, m_state.gp.vs.shader,
        m_state.gp.tcs.shader, m_state.gp.tes.shader,
        m_state.gp.gs.shader, m_state.gp.fs.shader);
      
      if (m_state.gp.pipeline != nullptr)
        m_cmd->trackResource(m_state.gp.pipeline);
    }
  }
  
  
  void DxvkContext::updateGraphicsPipelineState() {
    if (m_flags.test(DxvkContextFlag::GpDirtyPipelineState)) {
      m_flags.clr(DxvkContextFlag::GpDirtyPipelineState);
      
      for (uint32_t i = 0; i < m_state.gp.state.ilBindingCount; i++) {
        const uint32_t binding = m_state.gp.state.ilBindings[i].binding;
        
        m_state.gp.state.ilBindings[i].stride
          = (m_state.vi.bindingMask & (1u << binding)) != 0
            ? m_state.vi.vertexStrides[binding]
            : 0;
      }
      
      for (uint32_t i = m_state.gp.state.ilBindingCount; i < MaxNumVertexBindings; i++)
        m_state.gp.state.ilBindings[i].stride = 0;
      
      m_gpActivePipeline = m_state.gp.pipeline != nullptr
        ? m_state.gp.pipeline->getPipelineHandle(m_state.gp.state, m_cmd->statCounters())
        : VK_NULL_HANDLE;
      
      if (m_gpActivePipeline != VK_NULL_HANDLE) {
        m_cmd->cmdBindPipeline(
          VK_PIPELINE_BIND_POINT_GRAPHICS,
          m_gpActivePipeline);
        
        m_cmd->cmdSetViewport(0, m_state.gp.state.rsViewportCount, m_state.vp.viewports.data());
        m_cmd->cmdSetScissor (0, m_state.gp.state.rsViewportCount, m_state.vp.scissorRects.data());
        
        m_cmd->cmdSetBlendConstants(
          &m_state.om.blendConstants.r);
        
        m_cmd->cmdSetStencilReference(
          VK_STENCIL_FRONT_AND_BACK,
          m_state.om.stencilReference);
      }
    }
  }
  
  
  void DxvkContext::updateComputeShaderResources() {
    if (m_flags.test(DxvkContextFlag::CpDirtyResources)) {
      if (m_state.cp.pipeline != nullptr) {
        this->updateShaderResources(
          VK_PIPELINE_BIND_POINT_COMPUTE,
          m_state.cp.pipeline->layout());
      }
    }
  }
  
  
  void DxvkContext::updateComputeShaderDescriptors() {
    if (m_flags.test(DxvkContextFlag::CpDirtyResources)) {
      m_flags.clr(DxvkContextFlag::CpDirtyResources);
      
      if (m_state.cp.pipeline != nullptr) {
        this->updateShaderDescriptors(
          VK_PIPELINE_BIND_POINT_COMPUTE,
          m_state.cp.state.bsBindingState,
          m_state.cp.pipeline->layout());
      }
    }
  }
  
  
  void DxvkContext::updateGraphicsShaderResources() {
    if (m_flags.test(DxvkContextFlag::GpDirtyResources)) {
      if (m_state.gp.pipeline != nullptr) {
        this->updateShaderResources(
          VK_PIPELINE_BIND_POINT_GRAPHICS,
          m_state.gp.pipeline->layout());
      }
    }
  }
  
  
  void DxvkContext::updateGraphicsShaderDescriptors() {
    if (m_flags.test(DxvkContextFlag::GpDirtyResources)) {
      m_flags.clr(DxvkContextFlag::GpDirtyResources);
      
      if (m_state.gp.pipeline != nullptr) {
        this->updateShaderDescriptors(
          VK_PIPELINE_BIND_POINT_GRAPHICS,
          m_state.gp.state.bsBindingState,
          m_state.gp.pipeline->layout());
      }
    }
  }
  
  
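  // Walks the pipeline layout and writes a descriptor info for every
  // binding, substituting dummy resources for unbound slots. If the
  // set of bound resources changes, the pipeline state is flagged as
  // dirty because the binding mask is part of the pipeline state.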
  void DxvkContext::updateShaderResources(
          VkPipelineBindPoint     bindPoint,
    const Rc<DxvkPipelineLayout>& layout) {
    DxvkBindingState& bindingState =
      bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS
        ? m_state.gp.state.bsBindingState
        : m_state.cp.state.bsBindingState;
    
    bool updatePipelineState = false;
    
    DxvkAttachment depthAttachment;
    
    if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS && m_state.om.framebuffer != nullptr)
      depthAttachment = m_state.om.framebuffer->getDepthTarget();
    
    for (uint32_t i = 0; i < layout->bindingCount(); i++) {
      const auto& binding = layout->binding(i);
      const auto& res     = m_rc[binding.slot];
      
      switch (binding.type) {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
          if (res.sampler != nullptr) {
            updatePipelineState |= bindingState.setBound(i);
            
            m_descInfos[i].image.sampler     = res.sampler->handle();
            m_descInfos[i].image.imageView   = VK_NULL_HANDLE;
            m_descInfos[i].image.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
            
            m_cmd->trackResource(res.sampler);
          } else {
            updatePipelineState |= bindingState.setUnbound(i);
            m_descInfos[i].image = m_device->dummySamplerDescriptor();
          } break;
        
        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
          if (res.imageView != nullptr && res.imageView->type() == binding.view) {
            updatePipelineState |= bindingState.setBound(i);
            
            m_descInfos[i].image.sampler     = VK_NULL_HANDLE;
            m_descInfos[i].image.imageView   = res.imageView->handle();
            m_descInfos[i].image.imageLayout = res.imageView->imageInfo().layout;
            
            if (depthAttachment.view != nullptr
             && depthAttachment.view->image() == res.imageView->image())
              m_descInfos[i].image.imageLayout = depthAttachment.layout;
            
            m_cmd->trackResource(res.imageView);
            m_cmd->trackResource(res.imageView->image());
          } else {
            updatePipelineState |= bindingState.setUnbound(i);
            m_descInfos[i].image = m_device->dummyImageViewDescriptor(binding.view);
          } break;
        
        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
          if (res.bufferView != nullptr) {
            updatePipelineState |= bindingState.setBound(i);
            
            res.bufferView->updateView();
            m_descInfos[i].texelBuffer = res.bufferView->handle();
            
            m_cmd->trackResource(res.bufferView->viewResource());
            m_cmd->trackResource(res.bufferView->bufferResource());
          } else {
            updatePipelineState |= bindingState.setUnbound(i);
            m_descInfos[i].texelBuffer = m_device->dummyBufferViewDescriptor();
          } break;
        
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
          if (res.bufferSlice.defined()) {
            updatePipelineState |= bindingState.setBound(i);
            
            auto physicalSlice = res.bufferSlice.physicalSlice();
            m_descInfos[i].buffer.buffer = physicalSlice.handle();
            m_descInfos[i].buffer.offset = physicalSlice.offset();
            m_descInfos[i].buffer.range  = physicalSlice.length();
            
            m_cmd->trackResource(physicalSlice.resource());
          } else {
            updatePipelineState |= bindingState.setUnbound(i);
            m_descInfos[i].buffer = m_device->dummyBufferDescriptor();
          } break;
        
        default:
          Logger::err(str::format("DxvkContext: Unhandled descriptor type: ", binding.type));
      }
    }
    
    if (updatePipelineState) {
      m_flags.set(bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS
        ? DxvkContextFlag::GpDirtyPipelineState
        : DxvkContextFlag::CpDirtyPipelineState);
    }
  }
  
  
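  // Allocates a descriptor set from the command list, fills it with
  // the previously gathered descriptor infos through an update
  // template, and binds it for the given bind point.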
  void DxvkContext::updateShaderDescriptors(
          VkPipelineBindPoint     bindPoint,
    const DxvkBindingState&       bindingState,
    const Rc<DxvkPipelineLayout>& layout) {
    if (layout->bindingCount() != 0) {
      const VkDescriptorSet dset =
        m_cmd->allocateDescriptorSet(
          layout->descriptorSetLayout());
      
      m_cmd->updateDescriptorSetWithTemplate(
        dset, layout->descriptorTemplate(),
        m_descInfos.data());
      
      m_cmd->cmdBindDescriptorSet(bindPoint,
        layout->pipelineLayout(), dset);
    }
  }
  
  
  void DxvkContext::updateFramebuffer() {
    if (m_flags.test(DxvkContextFlag::GpDirtyFramebuffer)) {
      m_flags.clr(DxvkContextFlag::GpDirtyFramebuffer);
      
      this->spillRenderPass();
      
      auto fb = m_device->createFramebuffer(m_state.om.renderTargets);
      
      m_state.gp.state.msSampleCount = fb->getSampleCount();
      m_state.gp.state.omRenderPass  = fb->getDefaultRenderPassHandle();
      m_state.om.framebuffer = fb;
      
      m_flags.set(DxvkContextFlag::GpDirtyPipelineState);
    }
  }
  
  
  void DxvkContext::updateIndexBufferBinding() {
    if (m_flags.test(DxvkContextFlag::GpDirtyIndexBuffer)) {
      m_flags.clr(DxvkContextFlag::GpDirtyIndexBuffer);
      
      if (m_state.vi.indexBuffer.defined()) {
        auto physicalSlice = m_state.vi.indexBuffer.physicalSlice();
        
        m_cmd->cmdBindIndexBuffer(
          physicalSlice.handle(),
          physicalSlice.offset(),
          m_state.vi.indexType);
        m_cmd->trackResource(
          physicalSlice.resource());
      } else {
        m_cmd->cmdBindIndexBuffer(
          m_device->dummyBufferHandle(),
          0, VK_INDEX_TYPE_UINT32);
      }
    }
  }
  
  
  void DxvkContext::updateVertexBufferBindings() {
    if (m_flags.test(DxvkContextFlag::GpDirtyVertexBuffers)) {
      m_flags.clr(DxvkContextFlag::GpDirtyVertexBuffers);
      
      uint32_t bindingMask = 0;
      
      for (uint32_t i = 0; i < m_state.gp.state.ilBindingCount; i++) {
        const uint32_t binding = m_state.gp.state.ilBindings[i].binding;
        
        if (m_state.vi.vertexBuffers[binding].defined()) {
          auto vbo = m_state.vi.vertexBuffers[binding].physicalSlice();
          
          const VkBuffer     handle = vbo.handle();
          const VkDeviceSize offset = vbo.offset();
          
          m_cmd->cmdBindVertexBuffers(binding, 1, &handle, &offset);
          m_cmd->trackResource(vbo.resource());
          
          bindingMask |= 1u << binding;
        } else {
          const VkBuffer     handle = m_device->dummyBufferHandle();
          const VkDeviceSize offset = 0;
          
          m_cmd->cmdBindVertexBuffers(binding, 1, &handle, &offset);
        }
      }
      
      if (m_state.vi.bindingMask != bindingMask) {
        m_flags.set(DxvkContextFlag::GpDirtyPipelineState);
        m_state.vi.bindingMask = bindingMask;
      }
    }
  }
  
  
  bool DxvkContext::validateComputeState() {
    return m_cpActivePipeline != VK_NULL_HANDLE;
  }
  
  
  bool DxvkContext::validateGraphicsState() {
    if (m_gpActivePipeline == VK_NULL_HANDLE)
      return false;
    
    if (!m_flags.test(DxvkContextFlag::GpRenderPassBound))
      return false;
    
    return true;
  }
  
  
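  // Brings all compute state up to date before a dispatch: the active
  // render pass is spilled first, then the pipeline, resources,
  // pipeline state and descriptors are updated in that order.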
  void DxvkContext::commitComputeState() {
    this->spillRenderPass();
    this->updateComputePipeline();
    this->updateComputeShaderResources();
    this->updateComputePipelineState();
    this->updateComputeShaderDescriptors();
  }
  
  
  void DxvkContext::commitGraphicsState() {
    this->updateFramebuffer();
    this->startRenderPass();
    this->updateGraphicsPipeline();
    this->updateIndexBufferBinding();
    this->updateVertexBufferBindings();
    this->updateGraphicsShaderResources();
    this->updateGraphicsPipelineState();
    this->updateGraphicsShaderDescriptors();
  }
  
  
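  // Emits execution and memory barriers for all storage buffers,
  // storage texel buffers and storage images bound to the compute
  // pipeline, since compute shaders may both read and write them.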
  void DxvkContext::commitComputeBarriers() {
    // TODO optimize. Each pipeline layout should
    // hold a list of resources that can be written.
    // TODO generalize so that this can be used for
    // graphics pipelines as well
    auto layout = m_state.cp.pipeline->layout();
    
    for (uint32_t i = 0; i < layout->bindingCount(); i++) {
      if (m_state.cp.state.bsBindingState.isBound(i)) {
        const DxvkDescriptorSlot binding = layout->binding(i);
        const DxvkShaderResourceSlot& slot = m_rc[binding.slot];
        
        if (binding.type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) {
          m_barriers.accessBuffer(
            slot.bufferSlice.physicalSlice(),
            VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
            VK_ACCESS_SHADER_READ_BIT |
            VK_ACCESS_SHADER_WRITE_BIT,
            slot.bufferSlice.bufferInfo().stages,
            slot.bufferSlice.bufferInfo().access);
        } else if (binding.type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER) {
          m_barriers.accessBuffer(
            slot.bufferView->physicalSlice(),
            VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
            VK_ACCESS_SHADER_READ_BIT |
            VK_ACCESS_SHADER_WRITE_BIT,
            slot.bufferView->bufferInfo().stages,
            slot.bufferView->bufferInfo().access);
        } else if (binding.type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
          m_barriers.accessImage(
            slot.imageView->image(),
            slot.imageView->subresources(),
            slot.imageView->imageInfo().layout,
            VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
            VK_ACCESS_SHADER_READ_BIT |
            VK_ACCESS_SHADER_WRITE_BIT,
            slot.imageView->imageInfo().layout,
            slot.imageView->imageInfo().stages,
            slot.imageView->imageInfo().access);
        }
      }
    }
    
    m_barriers.recordCommands(m_cmd);
  }
  
  
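  // Allocates a query from the pool matching the query type. If the
  // current pool is exhausted, its active query range is handed over
  // to the command list and a fresh pool is created and reset.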
  DxvkQueryHandle DxvkContext::allocQuery(const DxvkQueryRevision& query) {
    const VkQueryType queryType = query.query->type();
    
    DxvkQueryHandle queryHandle = DxvkQueryHandle();
    Rc<DxvkQueryPool> queryPool = m_queryPools[queryType];
    
    if (queryPool != nullptr)
      queryHandle = queryPool->allocQuery(query);
    
    if (queryHandle.queryPool == VK_NULL_HANDLE) {
      if (queryPool != nullptr)
        this->trackQueryPool(queryPool);
      
      m_queryPools[queryType] = m_device->createQueryPool(queryType, MaxNumQueryCountPerPool);
      queryPool = m_queryPools[queryType];
      
      this->resetQueryPool(queryPool);
      queryHandle = queryPool->allocQuery(query);
    }
    
    return queryHandle;
  }
  
  
  void DxvkContext::resetQueryPool(const Rc<DxvkQueryPool>& pool) {
    this->spillRenderPass();
    
    pool->reset(m_cmd);
  }
  
  
  void DxvkContext::trackQueryPool(const Rc<DxvkQueryPool>& pool) {
    if (pool != nullptr) {
      DxvkQueryRange range = pool->getActiveQueryRange();
      
      if (range.queryCount > 0)
        m_cmd->trackQueryRange(std::move(range));
    }
  }
  
  
  void DxvkContext::beginActiveQueries() {
    for (const DxvkQueryRevision& query : m_activeQueries) {
      DxvkQueryHandle handle = this->allocQuery(query);
      
      m_cmd->cmdBeginQuery(
        handle.queryPool,
        handle.queryId,
        handle.flags);
    }
  }
  
  
  void DxvkContext::endActiveQueries() {
    for (const DxvkQueryRevision& query : m_activeQueries) {
      DxvkQueryHandle handle = query.query->getHandle();
      
      m_cmd->cmdEndQuery(
        handle.queryPool,
        handle.queryId);
    }
  }
  
  
  void DxvkContext::insertActiveQuery(const DxvkQueryRevision& query) {
    m_activeQueries.push_back(query);
  }
  
  
  void DxvkContext::eraseActiveQuery(const DxvkQueryRevision& query) {
    for (auto i = m_activeQueries.begin(); i != m_activeQueries.end(); i++) {
      if (i->query == query.query && i->revision == query.revision) {
        m_activeQueries.erase(i);
        return;
      }
    }
  }
  
}