Once the consumer has finished displaying a picture, ReleaseDisplayedPicture returns the image resource to the decoder and records the two flags m_hasConsummerSignalFence = true and m_hasConsummerSignalSemaphore = true (copied from the release descriptor that the consumer filled in):
virtual int32_t ReleaseDisplayedPicture(DecodedFrameRelease** pDecodedFramesRelease, uint32_t numFramesToRelease)
{
    std::lock_guard<std::mutex> lock(m_displayQueueMutex);
    for (uint32_t i = 0; i < numFramesToRelease; i++) {
        const DecodedFrameRelease* pDecodedFrameRelease = pDecodedFramesRelease[i];
        int picId = pDecodedFrameRelease->pictureIndex;
        assert((picId >= 0) && ((uint32_t)picId < m_perFrameDecodeImageSet.size()));
        assert(m_perFrameDecodeImageSet[picId].m_decodeOrder == pDecodedFrameRelease->decodeOrder);
        assert(m_perFrameDecodeImageSet[picId].m_displayOrder == pDecodedFrameRelease->displayOrder);
        assert(m_ownedByDisplayMask & (1 << picId));
        m_ownedByDisplayMask &= ~(1 << picId);
        m_perFrameDecodeImageSet[picId].m_inDecodeQueue = false;
        m_perFrameDecodeImageSet[picId].m_ownedByConsummer = false;
        m_perFrameDecodeImageSet[picId].Release();
        m_perFrameDecodeImageSet[picId].m_hasConsummerSignalFence = pDecodedFrameRelease->hasConsummerSignalFence;
        m_perFrameDecodeImageSet[picId].m_hasConsummerSignalSemaphore = pDecodedFrameRelease->hasConsummerSignalSemaphore;
    }
    return 0;
}
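For orientation, the release descriptor consumed above can be summarized as follows. This is a minimal assumed layout, limited to the members this walkthrough touches; the struct in the sample may carry more fields:

// Minimal assumed layout of DecodedFrameRelease (only the members used above).
struct DecodedFrameRelease {
    int32_t  pictureIndex;                    // slot in m_perFrameDecodeImageSet
    uint64_t decodeOrder;                     // checked against m_decodeOrder
    uint64_t displayOrder;                    // checked against m_displayOrder
    uint32_t hasConsummerSignalFence : 1;     // consumer queued a fence signal for this picture
    uint32_t hasConsummerSignalSemaphore : 1; // consumer queued a semaphore signal for this picture
};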
ReleaseDisplayedPicture is reached through ReleaseFrame; pLastDecodedFrame is the frame that has just finished being decoded and displayed:
// ...
m_videoQueue->ReleaseFrame(pLastDecodedFrame);
pLastDecodedFrame->Reset();
bool endOfStream = false;
int32_t numVideoFrames = 0;
numVideoFrames = m_videoQueue->GetNextFrame(pLastDecodedFrame, &endOfStream);
// ...
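ReleaseFrame itself is not reproduced here; in essence it wraps the displayed frame's state in a DecodedFrameRelease and forwards it to the frame buffer. A minimal sketch under that assumption (m_videoFrameBuffer and the locals release/releasePtr are hypothetical names):

// Sketch only: hand the displayed frame back to the video frame buffer.
int32_t ReleaseFrame(VulkanDecodedFrame* pDisplayedFrame)
{
    DecodedFrameRelease release = {};
    release.pictureIndex = pDisplayedFrame->pictureIndex;
    release.decodeOrder  = pDisplayedFrame->decodeOrder;
    release.displayOrder = pDisplayedFrame->displayOrder;
    // These were set by DrawFrame when the present submit was queued:
    release.hasConsummerSignalFence     = pDisplayedFrame->hasConsummerSignalFence;
    release.hasConsummerSignalSemaphore = pDisplayedFrame->hasConsummerSignalSemaphore;

    DecodedFrameRelease* releasePtr = &release;
    return m_videoFrameBuffer->ReleaseDisplayedPicture(&releasePtr, 1);
}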
Once these two flags are true, QueuePictureForDecode copies the fence and semaphore into pFrameSynchronizationInfo->frameConsumerDoneFence and pFrameSynchronizationInfo->frameConsumerDoneSemaphore for the caller to use after it returns, and resets both flags to false:
virtual int32_t QueuePictureForDecode(int8_t picId, VkParserDecodePictureInfo* pDecodePictureInfo,
                                      ReferencedObjectsInfo* pReferencedObjectsInfo,
                                      FrameSynchronizationInfo* pFrameSynchronizationInfo)
{
    if (pFrameSynchronizationInfo->hasFrameCompleteSignalFence) {
        pFrameSynchronizationInfo->frameCompleteFence = m_perFrameDecodeImageSet[picId].m_frameCompleteFence;
        if (pFrameSynchronizationInfo->frameCompleteFence) {
            m_perFrameDecodeImageSet[picId].m_hasFrameCompleteSignalFence = true;
        }
    }
    if (m_perFrameDecodeImageSet[picId].m_hasConsummerSignalFence) {
        pFrameSynchronizationInfo->frameConsumerDoneFence = m_perFrameDecodeImageSet[picId].m_frameConsumerDoneFence;
        m_perFrameDecodeImageSet[picId].m_hasConsummerSignalFence = false;
    }
    if (pFrameSynchronizationInfo->hasFrameCompleteSignalSemaphore) {
        pFrameSynchronizationInfo->frameCompleteSemaphore = m_perFrameDecodeImageSet[picId].m_frameCompleteSemaphore;
        if (pFrameSynchronizationInfo->frameCompleteSemaphore) {
            m_perFrameDecodeImageSet[picId].m_hasFrameCompleteSignalSemaphore = true;
        }
    }
    if (m_perFrameDecodeImageSet[picId].m_hasConsummerSignalSemaphore) {
        pFrameSynchronizationInfo->frameConsumerDoneSemaphore = m_perFrameDecodeImageSet[picId].m_frameConsumerDoneSemaphore;
        m_perFrameDecodeImageSet[picId].m_hasConsummerSignalSemaphore = false;
    }
    // ...
}
Note that this hand-off is one-shot: QueuePictureForDecode clears both flags as it picks the sync objects up, so each reuse of a picture slot waits on the consumer-done objects exactly once. After it returns, the caller threads frameConsumerDoneSemaphore into the decode submission's wait list as follows (the fence counterpart is discussed after the snippet):
// (excerpt from the decode submission path; surrounding declarations are elided)
VkFence frameCompleteFence = frameSynchronizationInfo.frameCompleteFence;
VkSemaphore frameCompleteSemaphore = frameSynchronizationInfo.frameCompleteSemaphore;
VkSemaphore frameConsumerDoneSemaphore = frameSynchronizationInfo.frameConsumerDoneSemaphore;

uint32_t waitSemaphoreCount = 0;
if (frameConsumerDoneSemaphore != VK_NULL_HANDLE) {
    waitSemaphores[waitSemaphoreCount] = frameConsumerDoneSemaphore;
    waitSemaphoreCount++;
}

VkSubmitInfo submitInfo = { VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr };
const VkPipelineStageFlags videoDecodeSubmitWaitStages = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
submitInfo.pNext = (m_hwLoadBalancingTimelineSemaphore != VK_NULL_HANDLE) ? &timelineSemaphoreInfos : nullptr;
submitInfo.waitSemaphoreCount = waitSemaphoreCount;
submitInfo.pWaitSemaphores = waitSemaphores;
submitInfo.pWaitDstStageMask = &videoDecodeSubmitWaitStages;
submitInfo.commandBufferCount = 1;
submitInfo.pCommandBuffers = &frameDataSlot.commandBuffer;
submitInfo.signalSemaphoreCount = signalSemaphoreCount;
submitInfo.pSignalSemaphores = signalSemaphores;

assert(VK_NOT_READY == m_vkDevCtx->GetFenceStatus(*m_vkDevCtx, videoDecodeCompleteFence));
VkResult result = m_vkDevCtx->MultiThreadedQueueSubmit(VulkanDeviceContext::DECODE, m_currentVideoQueueIndx,
                                                       1, &submitInfo, videoDecodeCompleteFence);
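frameConsumerDoneFence does not appear in the submit above. Consistent with it being passed as the submit fence of the display submission in DrawFrame below, the decode path can wait on it on the CPU before overwriting the picture. A sketch only, reusing the device-context wrappers seen elsewhere in this code (the timeout value is arbitrary):

// Sketch: block on the CPU until the display submit that consumed this picture
// has executed, before recording a new decode that overwrites the image.
if (frameSynchronizationInfo.frameConsumerDoneFence != VK_NULL_HANDLE) {
    const uint64_t fenceTimeout = 100 * 1000 * 1000; // 100 mSec, arbitrary
    VkResult waitResult = m_vkDevCtx->WaitForFences(*m_vkDevCtx, 1,
                                                    &frameSynchronizationInfo.frameConsumerDoneFence,
                                                    true, fenceTimeout);
    assert(waitResult == VK_SUCCESS);
}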
DequeueDecodedPicture copies the frame state at pictureIndex into pDecodedFrame: the decode-complete fence/semaphore (one-shot, cleared on hand-off) that the consumer will wait on, and the consumer-done fence/semaphore that the consumer will signal once display finishes:
virtual int32_t DequeueDecodedPicture(VulkanDecodedFrame* pDecodedFrame)
{
    // ... (elided: pictureIndex is taken from the display queue here)
    if (m_perFrameDecodeImageSet[pictureIndex].m_hasFrameCompleteSignalFence) {
        pDecodedFrame->frameCompleteFence = m_perFrameDecodeImageSet[pictureIndex].m_frameCompleteFence;
        m_perFrameDecodeImageSet[pictureIndex].m_hasFrameCompleteSignalFence = false;
    } else {
        pDecodedFrame->frameCompleteFence = VkFence();
    }
    if (m_perFrameDecodeImageSet[pictureIndex].m_hasFrameCompleteSignalSemaphore) {
        pDecodedFrame->frameCompleteSemaphore = m_perFrameDecodeImageSet[pictureIndex].m_frameCompleteSemaphore;
        m_perFrameDecodeImageSet[pictureIndex].m_hasFrameCompleteSignalSemaphore = false;
    } else {
        pDecodedFrame->frameCompleteSemaphore = VkSemaphore();
    }
    pDecodedFrame->frameConsumerDoneFence = m_perFrameDecodeImageSet[pictureIndex].m_frameConsumerDoneFence;
    pDecodedFrame->frameConsumerDoneSemaphore = m_perFrameDecodeImageSet[pictureIndex].m_frameConsumerDoneSemaphore;
    pDecodedFrame->timestamp = m_perFrameDecodeImageSet[pictureIndex].m_timestamp;
    pDecodedFrame->decodeOrder = m_perFrameDecodeImageSet[pictureIndex].m_decodeOrder;
    pDecodedFrame->displayOrder = m_perFrameDecodeImageSet[pictureIndex].m_displayOrder;
    pDecodedFrame->queryPool = m_queryPool;
    pDecodedFrame->startQueryId = pictureIndex;
    pDecodedFrame->numQueries = 1;
    // ...
}
// pDecodedFrame is then handed to DrawFrame as its last argument, pLastDecodedFrame:
VkResult result = DrawFrame(renderIndex,
                            waitSemaphoreCount,
                            pWaitSemaphores,
                            signalSemaphoreCount,
                            pSignalSemaphores,
                            pLastDecodedFrame);
VkResult VulkanFrame<FrameDataType>::DrawFrame(int32_t renderIndex,
                                               uint32_t waitSemaphoreCount,
                                               const VkSemaphore* pWaitSemaphores,
                                               uint32_t signalSemaphoreCount,
                                               const VkSemaphore* pSignalSemaphores,
                                               FrameDataType* inFrame)
{
    VkResult result = VK_SUCCESS; // used by the debug waits and the submit below
    const uint32_t maxWaitSemaphores = 2;
    uint32_t numWaitSemaphores = 0;
    VkSemaphore waitSemaphores[maxWaitSemaphores] = {};
    assert(waitSemaphoreCount <= 1);
    if ((waitSemaphoreCount > 0) && (pWaitSemaphores != nullptr)) {
        // Wait for the previous present of this swapchain image to complete.
        waitSemaphores[numWaitSemaphores++] = *pWaitSemaphores;
    }
    if (inFrame && (inFrame->frameCompleteSemaphore != VkSemaphore())) {
        // Wait for the decode-complete signal before sampling the decoded image.
        waitSemaphores[numWaitSemaphores++] = inFrame->frameCompleteSemaphore;
    }
    assert(numWaitSemaphores <= maxWaitSemaphores);
    const uint32_t maxSignalSemaphores = 2;
    uint32_t numSignalSemaphores = 0;
    VkSemaphore signalSemaphores[maxSignalSemaphores] = {};
    assert(signalSemaphoreCount <= 1);
    if ((signalSemaphoreCount > 0) && (pSignalSemaphores != nullptr)) {
        signalSemaphores[numSignalSemaphores++] = *pSignalSemaphores;
    }
    if (inFrame && (inFrame->frameConsumerDoneSemaphore != VkSemaphore())) {
        // Signal "consumer done" once display has consumed the image, so the picture
        // resource can be reused to decode a new frame; the decode submit must wait
        // on frameConsumerDoneSemaphore before writing into this image resource.
        signalSemaphores[numSignalSemaphores++] = inFrame->frameConsumerDoneSemaphore;
        inFrame->hasConsummerSignalSemaphore = true;
    }
    assert(numSignalSemaphores <= maxSignalSemaphores);

    // frameConsumerDoneFence comes from the decoded frame being displayed.
    VkFence frameConsumerDoneFence = (inFrame != nullptr) ? inFrame->frameConsumerDoneFence : VkFence();
    if (frameConsumerDoneFence != VkFence()) {
        inFrame->hasConsummerSignalFence = true;
    }
    // Wait for the image to be owned and signal for render completion
    VkPipelineStageFlags primaryCmdSubmitWaitStages[2] = { VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                                                           VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT };
    VkSubmitInfo primaryCmdSubmitInfo = VkSubmitInfo();
    primaryCmdSubmitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    primaryCmdSubmitInfo.pWaitDstStageMask = primaryCmdSubmitWaitStages;
    primaryCmdSubmitInfo.commandBufferCount = 1;
    primaryCmdSubmitInfo.waitSemaphoreCount = numWaitSemaphores;
    primaryCmdSubmitInfo.pWaitSemaphores = numWaitSemaphores ? waitSemaphores : NULL;
    primaryCmdSubmitInfo.pCommandBuffers = pPerDrawContext->commandBuffer.GetCommandBuffer();
    primaryCmdSubmitInfo.signalSemaphoreCount = numSignalSemaphores;
    primaryCmdSubmitInfo.pSignalSemaphores = numSignalSemaphores ? signalSemaphores : NULL;
    // For fence/sync debugging
    if (false && inFrame && inFrame->frameCompleteFence) {
        result = m_vkDevCtx->WaitForFences(*m_vkDevCtx, 1, &inFrame->frameCompleteFence, true, 100 * 1000 * 1000);
        assert(result == VK_SUCCESS);
        if (result != VK_SUCCESS) {
            fprintf(stderr, "\nERROR: WaitForFences() result: 0x%x\n", result);
        }
        result = m_vkDevCtx->GetFenceStatus(*m_vkDevCtx, inFrame->frameCompleteFence);
        assert(result == VK_SUCCESS);
        if (result != VK_SUCCESS) {
            fprintf(stderr, "\nERROR: GetFenceStatus() result: 0x%x\n", result);
        }
    }
    // frameConsumerDoneFence is passed as the submit fence, so it signals on the
    // CPU side once this graphics (display) submission has executed.
    result = m_vkDevCtx->MultiThreadedQueueSubmit(VulkanDeviceContext::GRAPHICS, 0, 1, &primaryCmdSubmitInfo, frameConsumerDoneFence);
    if (result != VK_SUCCESS) {
        assert(result == VK_SUCCESS);
        fprintf(stderr, "\nERROR: MultiThreadedQueueSubmit() result: 0x%x\n", result);
        return result;
    }

    if (false && (frameConsumerDoneFence != VkFence())) { // For fence/sync debugging
        const uint64_t fenceTimeout = 100 * 1000 * 1000 /* 100 mSec */;
        result = m_vkDevCtx->WaitForFences(*m_vkDevCtx, 1, &frameConsumerDoneFence, true, fenceTimeout);
        assert(result == VK_SUCCESS);
        if (result != VK_SUCCESS) {
            fprintf(stderr, "\nERROR: WaitForFences() result: 0x%x\n", result);
        }
        result = m_vkDevCtx->GetFenceStatus(*m_vkDevCtx, frameConsumerDoneFence);
        assert(result == VK_SUCCESS);
        if (result != VK_SUCCESS) {
            fprintf(stderr, "\nERROR: GetFenceStatus() result: 0x%x\n", result);
        }
    }
#if 0 // for testing VK_KHR_external_fence_fd
    int fd = -1; // VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT
    const VkFenceGetFdInfoKHR getFdInfo = { VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, NULL,
                                            data.lastDecodedFrame.frameConsumerDoneFence,
                                            VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT };
    result = m_vkDevCtx->GetFenceFdKHR(*m_vkDevCtx, &getFdInfo, &fd);
    close(fd);
#endif

    m_frameDataIndex = (m_frameDataIndex + 1) % m_frameData.size();
    return result;
}
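To summarize the round trip, a rough driver loop (hypothetical, stitched together from the call sites above) looks like this: the decode submit signals frameComplete*, the draw waits on frameCompleteSemaphore and signals frameConsumerDone*, and the next decode into the same picture slot waits on frameConsumerDone* before reusing the image:

// Rough sketch of the full cycle; not the sample's exact code.
while (!endOfStream) {
    m_videoQueue->ReleaseFrame(pLastDecodedFrame);               // -> ReleaseDisplayedPicture: flags -> true
    pLastDecodedFrame->Reset();
    m_videoQueue->GetNextFrame(pLastDecodedFrame, &endOfStream); // -> DequeueDecodedPicture
    DrawFrame(renderIndex,
              waitSemaphoreCount, pWaitSemaphores,
              signalSemaphoreCount, pSignalSemaphores,
              pLastDecodedFrame);                                // waits decode-done, signals consumer-done
}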