How to create a second surface in Android NDK - opengl-es

I create surface with following code:
const EGLint attrs[] = {
EGL_LEVEL, 0,
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_NATIVE_RENDERABLE, EGL_FALSE,
EGL_DEPTH_SIZE, EGL_DONT_CARE,
EGL_NONE
};
FDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
EGLint major = 0;
EGLint minor = 0;
if (!eglInitialize(FDisplay, &major, &minor))
return;
EGLConfig eglConfig;
// Obtain the first configuration with a depth buffer
EGLint numConfig = 0;
if (!eglChooseConfig(FDisplay, attrs, &eglConfig, 1, &numConfig))
return;
FSurface = eglCreateWindowSurface(FDisplay, eglConfig,
    /* single_native_window */ FNativeHwnd, NULL);
The problem is that eglCreateWindowSurface is not successful: it returns EGL_NO_SURFACE when called for the second surface. I call it passing FDisplay, which is the same display as was used for the first surface. What am I doing wrong? Maybe there is another approach to creating a second surface?
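For reference, a minimal sketch of what the second eglCreateWindowSurface call and its error check typically look like; FNativeHwnd2 (a second, distinct ANativeWindow) and the error handling are illustrative assumptions, not code from the post:
// Sketch only: a second window surface from the same display and config.
// Note that EGL allows at most one window surface per native window, so the
// second surface needs its own ANativeWindow (FNativeHwnd2 is hypothetical).
EGLSurface FSurface2 = eglCreateWindowSurface(FDisplay, eglConfig, FNativeHwnd2, NULL);
if (FSurface2 == EGL_NO_SURFACE)
{
    // e.g. EGL_BAD_ALLOC if the native window already backs another surface
    EGLint error = eglGetError();
    // handle/log the error here
}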

Related

Performance loss with CopyResource() and then Map()/Unmap()

The problem is that if I don't use these methods, the FPS is about twice as high. For example, I had about 5000 fps in a 3D scene, and it dropped to about 2500. I know the problem is that the application stalls waiting for the copy to complete. But it's only 4 bytes... If I use the D3D11_MAP_FLAG_DO_NOT_WAIT flag, Map() always returns DXGI_ERROR_WAS_STILL_DRAWING. What can be done so that I can use this method without losing fps? Here is my code:
Init
D3D11_BUFFER_DESC outputDesc;
outputDesc.Usage = D3D11_USAGE_DEFAULT;
outputDesc.ByteWidth = sizeof(float);
outputDesc.BindFlags = D3D11_BIND_UNORDERED_ACCESS;
outputDesc.CPUAccessFlags = 0;
outputDesc.StructureByteStride = sizeof(float);
outputDesc.MiscFlags = D3D11_RESOURCE_MISC_BUFFER_STRUCTURED;
FOG_TRACE(mDevice->CreateBuffer(&outputDesc, nullptr, &outputBuffer));
outputDesc.Usage = D3D11_USAGE_STAGING;
outputDesc.BindFlags = 0;
outputDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
FOG_TRACE(mDevice->CreateBuffer(&outputDesc, nullptr, &outputResultBuffer));
D3D11_UNORDERED_ACCESS_VIEW_DESC uavDesc{};
uavDesc.Buffer.FirstElement = 0;
uavDesc.Buffer.Flags = D3D11_BUFFER_UAV_FLAG_APPEND;
uavDesc.Buffer.NumElements = 1;
uavDesc.Format = DXGI_FORMAT_UNKNOWN;
uavDesc.ViewDimension = D3D11_UAV_DIMENSION_BUFFER;
FOG_TRACE(mDevice->CreateUnorderedAccessView(outputBuffer, &uavDesc, &unorderedAccessView));
Update
const UINT offset = 0;
mDeviceContext->OMSetRenderTargetsAndUnorderedAccessViews(1, &mRenderTargetView, mDepthStencilView, 1, 1, &unorderedAccessView, &offset);
mDeviceContext->ClearDepthStencilView(mDepthStencilView, D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0);
ObjectManager::Draw();
mDeviceContext->CopyResource(outputResultBuffer, outputBuffer);
D3D11_MAPPED_SUBRESOURCE mappedBuffer;
HRESULT hr;
FOG_TRACE(hr = mDeviceContext->Map(outputResultBuffer, 0, D3D11_MAP_READ, 0/*D3D11_MAP_FLAG_DO_NOT_WAIT*/, &mappedBuffer));
if (SUCCEEDED(hr))
{
float* copy = (float*)(mappedBuffer.pData);
OutputDebugString(String::ToStr(*copy) + L"\n");
}
mDeviceContext->Unmap(outputResultBuffer, 0);
const UINT var[4]{};
mDeviceContext->ClearUnorderedAccessViewUint(unorderedAccessView, var);
I've already profiled and checked everything possible; the problem is exactly the waiting on the pending copy. I would be very grateful if someone could explain everything in detail :)
The problem was solved very simply, but it took a long time! I just don't issue the copy call until I have read the previous data. Here is a small crutch:
static bool isWait = false;
if (!isWait)
{
mDeviceContext->CopyResource(outputResultBuffer, outputBuffer);
}
D3D11_MAPPED_SUBRESOURCE mappedBuffer;
HRESULT hr;
FOG_TRACE(hr = mDeviceContext->Map(outputResultBuffer, 0, D3D11_MAP_READ, D3D11_MAP_FLAG_DO_NOT_WAIT, &mappedBuffer));
if (SUCCEEDED(hr))
{
float* copy = (float*)(mappedBuffer.pData);
OutputDebugString(String::ToStr(*copy) + L"\n");
mDeviceContext->Unmap(outputResultBuffer, 0);
const UINT var[4]{};
mDeviceContext->ClearUnorderedAccessViewUint(unorderedAccessView, var);
isWait = false;
}
else
{
isWait = true;
}
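For comparison, a common way to avoid the stall entirely is to read back through a small ring of staging buffers, so the buffer being mapped holds a copy issued a few frames earlier. This is only a hedged sketch of that idea, not code from the question or answer; the helper, the ring, and the names are assumptions:
// Hypothetical sketch: 'ring' is assumed to hold ringSize buffers created
// exactly like outputResultBuffer (D3D11_USAGE_STAGING, D3D11_CPU_ACCESS_READ).
void ReadbackLatest(ID3D11DeviceContext* ctx, ID3D11Buffer* gpuBuffer,
                    ID3D11Buffer* const* ring, UINT ringSize, UINT frameIndex,
                    float* outValue)
{
    const UINT writeIdx = frameIndex % ringSize;        // receives this frame's copy
    const UINT readIdx  = (frameIndex + 1) % ringSize;  // oldest copy, most likely finished
    ctx->CopyResource(ring[writeIdx], gpuBuffer);
    D3D11_MAPPED_SUBRESOURCE mapped;
    if (SUCCEEDED(ctx->Map(ring[readIdx], 0, D3D11_MAP_READ,
                           D3D11_MAP_FLAG_DO_NOT_WAIT, &mapped)))
    {
        *outValue = *(const float*)mapped.pData;        // value from a few frames back
        ctx->Unmap(ring[readIdx], 0);
    }
}
The trade-off is that the value read is a few frames old, which is usually acceptable for this kind of readback.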

How to support (on win7) GDI, D3D11 interoperability?

I created a D3D11 device and can perform operations such as rendering pictures smoothly, but in order to also support GDI, I tried several methods:
Through swapchain -> GetBuffer(ID3D11Texture2D) -> CreateDxgiSurfaceRenderTarget -> ID2D1GdiInteropRenderTarget -> GetDC, finally getting the DC. It runs normally on my Win10, but GetDC throws an exception on Win7: _com_error.
Via swapchain -> GetBuffer(IDXGISurface1) -> GetDC, same as 1.
I suspect that the ID3D11Texture2D/IDXGISurface1 obtained from GetBuffer on Win7 has some restrictions on GDI use, so I switched to dynamically creating a new ID3D11Texture2D myself. Using the DC alone or the D3D11 drawing interface alone now works fine, but when I interoperate, the GDI operation is drawn on the custom-created ID3D11Texture2D instead of the swapchain's back buffer:
_d3d->Clear();
_d3d->DrawImage();
HDC hdc = _d3d->GetDC();
DrawRectangleByGDI(hdc);
_d3d->ReleaseDC();
_d3d->Present();
So how can I make both the D3D drawing and the DC drawing end up on the same ID3D11Texture2D? That would also be convenient for my CopyResource.
HRESULT CGraphRender::Resize(const UINT32& width, const UINT32& height)
{
_back_texture2d = nullptr;
_back_rendertarget_view = nullptr;
_dc_texture2d = nullptr;
_dc_render_target = nullptr;
float dpi = GetDpiFromD2DFactory(_d2d_factory);
//Backbuffer
HRESULT hr = _swap_chain->ResizeBuffers(2, width, height, DXGI_FORMAT_B8G8R8A8_UNORM, _is_gdi_compatible ? DXGI_SWAP_CHAIN_FLAG_GDI_COMPATIBLE : 0);
RETURN_ON_FAIL(hr);
hr = _swap_chain->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&_back_texture2d);
RETURN_ON_FAIL(hr);
hr = CreateD3D11Texture2D(_d3d_device, width, height, &_dc_texture2d);
RETURN_ON_FAIL(hr);
D3D11_RENDER_TARGET_VIEW_DESC rtv;
rtv.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
rtv.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
rtv.Texture2D.MipSlice = 0;
hr = _d3d_device->CreateRenderTargetView(_back_texture2d, &rtv, &_back_rendertarget_view);
RETURN_ON_FAIL(hr);
...
}
HRESULT CGraphRender::Clear(float color[])
{
CComPtr<ID3D11DeviceContext> immediate_context;
_d3d_device->GetImmediateContext(&immediate_context);
if (!immediate_context)
{
return E_UNEXPECTED;
}
ID3D11RenderTargetView* ref_renderTargetView = _back_rendertarget_view;
immediate_context->OMSetRenderTargets(1, &ref_renderTargetView, nullptr);
immediate_context->ClearRenderTargetView(_back_rendertarget_view, color);
return S_OK;
}
HDC CGraphRender::GetDC()
{
if (_is_gdi_compatible)
{
CComPtr<IDXGISurface1> gdi_surface;
HRESULT hr = _dc_texture2d->QueryInterface(__uuidof(IDXGISurface1), (void**)&gdi_surface);
if (SUCCEEDED(hr))
{
HDC hdc = nullptr;
hr = gdi_surface->GetDC(TRUE, &hdc);
if (SUCCEEDED(hr))
{
return hdc;
}
}
}
return nullptr;
}
HRESULT CGraphRender::CopyTexture(ID3D11Texture2D* dst_texture, ID3D11Texture2D* src_texture, POINT* dst_topleft/* = nullptr*/, POINT* src_topleft/* = nullptr*/)
{
if (!dst_texture && !src_texture)
{
return E_INVALIDARG;
}
CComPtr<ID3D11DeviceContext> immediate_context;
_d3d_device->GetImmediateContext(&immediate_context);
if (!immediate_context)
{
return E_UNEXPECTED;
}
ID3D11Texture2D* dst_texture_real = dst_texture ? dst_texture : _dc_texture2d;
POINT dst_topleft_real = dst_topleft ? (*dst_topleft) : POINT{ 0, 0 };
ID3D11Texture2D* src_texture_real = src_texture ? src_texture : _dc_texture2d;
POINT src_topleft_real = src_topleft ? (*src_topleft) : POINT{ 0, 0 };
D3D11_TEXTURE2D_DESC src_desc = { 0 };
src_texture_real->GetDesc(&src_desc);
D3D11_TEXTURE2D_DESC dst_desc = { 0 };
dst_texture_real->GetDesc(&dst_desc);
if (!dst_topleft_real.x && !src_topleft_real.x && !dst_topleft_real.y && !src_topleft_real.y && dst_desc.Width == src_desc.Width && dst_desc.Height == src_desc.Height)
{
immediate_context->CopyResource(dst_texture_real, src_texture_real);
}
else
{
D3D11_BOX src_box;
src_box.left = min((UINT)src_topleft_real.x, (UINT)dst_topleft_real.x + dst_desc.Width);
src_box.top = min((UINT)src_topleft_real.y, (UINT)dst_topleft_real.y + dst_desc.Height);
src_box.right = min((UINT)src_box.left + src_desc.Width, (UINT)dst_topleft_real.x + dst_desc.Width);
src_box.bottom = min((UINT)src_box.top + src_desc.Height, (UINT)dst_topleft_real.y + dst_desc.Height);
src_box.front = 0;
src_box.back = 1;
ATLASSERT(src_box.left < src_box.right);
ATLASSERT(src_box.top < src_box.bottom);
immediate_context->CopySubresourceRegion(dst_texture_real, 0, dst_topleft_real.x, dst_topleft_real.y, 0, src_texture_real, 0, &src_box);
}
return S_OK;
}
I don’t think Windows 7 supports what you’re trying to do. Here are some alternatives.
Switch from GDI to something else that can render 2D graphics with D3D11. Direct2D is the most straightforward choice here. And DirectWrite if you want text in addition to rectangles.
If your 2D content is static or changes only rarely, you can use GDI+ to render into an in-memory RGBA device context, create a Direct3D 11 texture with that data, and render a full-screen triangle with that texture (see the sketch below).
You can overlay another Win32 window on top of your Direct3D 11 rendering one, and use GDI to render into that one. The GDI window on top must have the WS_EX_LAYERED extended style, and you must update it with the UpdateLayeredWindow API. This method is the most complicated and least reliable, though.
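As an illustration of the second alternative (the GDI+ route), here is a hedged sketch; the function name, the sample rectangle, and the error handling are assumptions, not part of the answer:
#include <windows.h>
#include <gdiplus.h>
#include <d3d11.h>
// Sketch: draw with GDI+ into an in-memory 32-bit ARGB bitmap, then create an
// immutable D3D11 texture from its pixels (GDI+ 32bppARGB is BGRA in memory,
// which matches DXGI_FORMAT_B8G8R8A8_UNORM). GdiplusStartup/GdiplusShutdown and
// the full-screen triangle that samples the texture are assumed to exist elsewhere.
HRESULT CreateTextureFromGdiplus(ID3D11Device* device, UINT width, UINT height,
                                 ID3D11Texture2D** outTexture)
{
    using namespace Gdiplus;
    Bitmap bmp((INT)width, (INT)height, PixelFormat32bppARGB);
    Graphics gfx(&bmp);
    gfx.Clear(Color(0, 0, 0, 0));                        // transparent background
    Pen pen(Color(255, 255, 0, 0), 3.0f);
    gfx.DrawRectangle(&pen, 10, 10, (INT)width - 20, (INT)height - 20); // sample 2D content
    Rect rect(0, 0, (INT)width, (INT)height);
    BitmapData bits = {};
    if (bmp.LockBits(&rect, ImageLockModeRead, PixelFormat32bppARGB, &bits) != Ok)
        return E_FAIL;
    D3D11_TEXTURE2D_DESC desc = {};
    desc.Width = width;
    desc.Height = height;
    desc.MipLevels = 1;
    desc.ArraySize = 1;
    desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
    desc.SampleDesc.Count = 1;
    desc.Usage = D3D11_USAGE_IMMUTABLE;                  // content is static or rarely changes
    desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
    D3D11_SUBRESOURCE_DATA init = {};
    init.pSysMem = bits.Scan0;
    init.SysMemPitch = (UINT)bits.Stride;
    HRESULT hr = device->CreateTexture2D(&desc, &init, outTexture);
    bmp.UnlockBits(&bits);
    return hr;
}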

Get GDI DC from ID3D11Texture2D for drawing

I have an implementation in DirectX 9 where I obtain a GDI DC to render drawings.
But similar code in DirectX 11 does not get a GDI DC; instead it throws an invalid-call exception.
Implementation in DirectX 9:
IF_DX9ERR_THROW_HR(m_spIDevice->CreateTexture(UINT(cSizeOverlay.cx), UINT(cSizeOverlay.cy), 1, D3DUSAGE_DYNAMIC, D3DFMT_X8R8G8B8, D3DPOOL_DEFAULT, &m_spIOverlay, nullptr));
m_spIOverlaySurface = nullptr;
IF_DX9ERR_THROW_HR(m_spIOverlay->GetSurfaceLevel(0, &m_spIOverlaySurface));
D3DSURFACE_DESC descOverlay;
::ZeroMemory(&descOverlay, sizeof(descOverlay));
IF_DX9ERR_THROW_HR(m_spIOverlaySurface->GetDesc(&descOverlay));
// fill the texture with the color key
CRect cRect(0, 0, descOverlay.Width, descOverlay.Height);
HDC hDC = nullptr;
IF_DX9ERR_THROW_HR(m_spIOverlaySurface->GetDC(&hDC));
::SetBkColor(hDC, colKey);
::ExtTextOut(hDC, 0, 0, ETO_OPAQUE, cRect, nullptr, 0, nullptr);
IF_DX9ERR_THROW_HR(m_spIOverlaySurface->ReleaseDC(hDC));
Implementation in DirectX 11:
D3D11_TEXTURE2D_DESC desc;
ZeroMemory(&desc, sizeof(desc));
desc.Width = gsl::narrow_cast<UINT>(width);
desc.Height = gsl::narrow_cast<UINT>(height);
desc.ArraySize = 1;
desc.Format = DXGI_FORMAT_B8G8R8X8_UNORM;
desc.Usage = D3D11_USAGE_DYNAMIC;
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.MipLevels = 1;
desc.SampleDesc.Count = 1;
desc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE;
ID3D11DevicePtr device = renderer->Device();
ID3D11Texture2DPtr texture2D;
IF_FAILED_THROW_HR(device->CreateTexture2D(&desc, nullptr, &texture2D));
// get texture surface
IDXGISurface1Ptr dxgiSurface1 = texture2D;
IF_FAILED_THROW_HR(dxgiSurface1->GetDC(FALSE, &m_overlayDC));
//Draw on the DC using GDI
if (!m_overlayDC) // we have lost the device
THROW_PE(IDS_ERR_NO_VIDEO_HARDWARE);
::SetBkColor(m_overlayDC, m_effectConstants.m_keyColor);
::ExtTextOut(m_overlayDC, 0, 0, ETO_OPAQUE, overlayRect, nullptr, 0, nullptr);
//When finish drawing release the DC
dxgiSurface1->ReleaseDC(nullptr);
m_overlayDC = nullptr;
Edit: I have changed the D3D11_TEXTURE2D_DESC like below:
CD3D11_TEXTURE2D_DESC texDesc(DXGI_FORMAT_B8G8R8X8_UNORM
, gsl::narrow_cast<UINT>(targetSize.width), gsl::narrow_cast<UINT>(targetSize.height), 1U, 1, D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET);
ID3D11DevicePtr device = renderer->Device();
ID3D11Texture2DPtr texture2D;
IF_FAILED_THROW_HR(device->CreateTexture2D(&texDesc, nullptr, &texture2D));
// get texture surface
IDXGISurface1Ptr dxgiSurface1 = texture2D;
IF_FAILED_THROW_HR(dxgiSurface1->GetDC(FALSE, &m_overlayDC));
//Draw on the DC using GDI
if (!m_overlayDC) // we have lost the device
THROW_PE(IDS_ERR_NO_VIDEO_HARDWARE);
::SetBkColor(m_overlayDC, m_effectConstants.m_keyColor);
::ExtTextOut(m_overlayDC, 0, 0, ETO_OPAQUE, overlayRect, nullptr, 0, nullptr);
//When finish drawing release the DC
dxgiSurface1->ReleaseDC(nullptr);
m_overlayDC = nullptr;
Now the exception thrown from GetDC(): The application made a call that is invalid. Either the parameters of the call or the state of some object was incorrect.
Enable the D3D debug layer in order to see details via debug messages.
HResult: 0x887A0001, Facility: 2170, Code: 1
The combination of format, usage, and bind flags you have picked is not compatible with D3D11_RESOURCE_MISC_GDI_COMPATIBLE.
If you had enabled the Direct3D Debug Device, you would have gotten debug output informing you of this limitation. The Direct3D Debug Device is the ideal way to figure out why you are getting E_INVALIDARG.
D3D11 ERROR: ID3D11Device::CreateTexture2D: D3D11_RESOURCE_MISC_GDI_COMPATIBLE requires that the D3D11_BIND_RENDER_TARGET flag be set. [ STATE_CREATION ERROR #103: CREATETEXTURE2D_INVALIDMISCFLAGS]
Then after fixing that, you get:
D3D11 ERROR: ID3D11Device::CreateTexture2D: D3D11_RESOURCE_MISC_GDI_COMPATIBLE requires D3D11_USAGE_DEFAULT. [ STATE_CREATION ERROR #103: CREATETEXTURE2D_INVALIDMISCFLAGS]
And finally:
D3D11 ERROR: ID3D11Device::CreateTexture2D: D3D11_RESOURCE_MISC_GDI_COMPATIBLE requires a B8G8R8A8 format. [ STATE_CREATION ERROR #103: CREATETEXTURE2D_INVALIDMISCFLAGS]
So taking this all together, this works:
D3D11_TEXTURE2D_DESC desc = {};
desc.Width = ...
desc.Height = ...
desc.ArraySize = 1;
desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
desc.MipLevels = 1;
desc.SampleDesc.Count = 1;
desc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE;
Microsoft::WRL::ComPtr<ID3D11Texture2D> texture2D;
DX::ThrowIfFailed(m_d3dDevice->CreateTexture2D(&desc, nullptr, &texture2D));
The limitations are all spelled out on Microsoft Docs.
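For completeness, a hedged sketch of drawing on that texture through GDI once it has been created with the flags above (the ComPtr/ThrowIfFailed style mirrors the snippet; the GDI calls in the middle are placeholders):
Microsoft::WRL::ComPtr<IDXGISurface1> dxgiSurface;
DX::ThrowIfFailed(texture2D.As(&dxgiSurface));      // QueryInterface for IDXGISurface1
HDC hdc = nullptr;
DX::ThrowIfFailed(dxgiSurface->GetDC(FALSE, &hdc)); // FALSE keeps the existing D3D contents
// ... GDI drawing on hdc (SetBkColor, ExtTextOut, etc.) ...
dxgiSurface->ReleaseDC(nullptr);                    // nullptr marks the whole surface dirty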

DirectX shader linking error

I recently heard about shader linking in DirectX, and decided to give it a go, following this MSDN tutorial, and seem to have it mostly working. However, it fails in the link stage, giving the error:
error X9214: ID3D11Linker::Link: failed to generate byte code
I get the same error for both the vertex shader and the pixel shader. Compiling the shader library, creating the graph, connecting the nodes and everything seems to work, but the linking still fails. I've been unable to find any details regarding the error code, and the error message isn't particularly helpful. The full relevant code is attached; I hope that someone can see what I'm doing wrong!
Linking.hlsl:
Texture2DArray inTexture : register(t0);
SamplerState samplerState : register(s0);
cbuffer CameraData : register(b0)
{
float4x4 Model;
float4x4 View;
float4x4 Projection;
};
export void VertexFunction(inout float4 position, inout float2 uv)
{
position = mul(position, Model);
position = mul(position, View);
position = mul(position, Projection);
}
export float4 SampleTexture(float2 uv)
{
return inTexture.Sample(samplerState, float3(uv,0));
}
Shader compilation:
//Compile shader to bytecode (exported functions)
ID3DBlob* shaderBlob = nullptr;
ID3DBlob* errorBlob = nullptr;
HRESULT hr = D3DCompileFromFile(aPath.c_str(), NULL, NULL, NULL,
(std::string("lib") + Graphics::Shader::Linking::CShaderLibrary::ShaderModelSuffix).c_str(), D3DCOMPILE_OPTIMIZATION_LEVEL3, 0, &shaderBlob, &errorBlob);
ErrorCheck(hr, errorBlob);
//Create shader library
ID3D11Module* shaderLibrary = nullptr;
hr = D3DLoadModule(shaderBlob->GetBufferPointer(), shaderBlob->GetBufferSize(), &shaderLibrary);
shaderBlob->Release();
ErrorCheck(hr);
//Create shader library instance
ID3D11ModuleInstance *shaderLibraryInstance = nullptr;
hr = shaderLibrary->CreateInstance("", &shaderLibraryInstance);
ErrorCheck(hr);
//Bind the resources, samplers and constant buffers
hr = shaderLibraryInstance->BindResource(0, 0, 1);
ErrorCheck(hr);
hr = shaderLibraryInstance->BindSampler(0, 0, 1);
ErrorCheck(hr);
hr = shaderLibraryInstance->BindConstantBuffer(0, 0, 0);
ErrorCheck(hr);
return new Graphics::Shader::Linking::CShaderLibrary(*shaderLibrary,*shaderLibraryInstance);
Create the vertex shader from graph (its own function):
//Create the vertex shader graph;
ID3D11FunctionLinkingGraph* vertexShaderGraph = nullptr;
HRESULT result = D3DCreateFunctionLinkingGraph(0, &vertexShaderGraph);
EffectHelper::ErrorCheck(result);
//Define the vertex shader input layout
static const D3D11_PARAMETER_DESC vertexInputParameters[] =
{
{ "inputPos", "POSITION0", D3D_SVT_FLOAT, D3D_SVC_VECTOR, 1,4,D3D_INTERPOLATION_LINEAR, D3D_PF_IN,0,0,0,0 },
{ "inputUV", "TEXCOORD0", D3D_SVT_FLOAT, D3D_SVC_VECTOR, 1,2,D3D_INTERPOLATION_LINEAR,D3D_PF_IN,0,0,0,0}
};
//Create the vertex input node. This will be used by the input assembler
ID3D11LinkingNode* vertexInputNode = nullptr;
result = vertexShaderGraph->SetInputSignature(vertexInputParameters, ARRAYSIZE(vertexInputParameters), &vertexInputNode);
EffectHelper::ErrorCheck(result, vertexShaderGraph);
//Create the vertex function node. This will be sent the data from the input node.
ID3D11LinkingNode* vertexFunctionNode = nullptr;
result = vertexShaderGraph->CallFunction("", &myLibrary, "VertexFunction", &vertexFunctionNode);
EffectHelper::ErrorCheck(result, vertexShaderGraph);
//Pass the value from the input node to the vertex function node (for future info, remember that all function arguments have to be assigned).
result = vertexShaderGraph->PassValue(vertexInputNode, 0, vertexFunctionNode, 0);
EffectHelper::ErrorCheck(result, vertexShaderGraph);
result = vertexShaderGraph->PassValue(vertexInputNode, 1, vertexFunctionNode, 1);
EffectHelper::ErrorCheck(result, vertexShaderGraph);
//Define the output layout for the vertex function
static const D3D11_PARAMETER_DESC vertexShaderOutputParameters[] =
{
{ "outputUV","TEXCOORD0",D3D_SVT_FLOAT, D3D_SVC_VECTOR,1,2,D3D_INTERPOLATION_UNDEFINED,D3D_PF_OUT,0,0,0,0 },
{ "outputPosition", "SV_POSITION", D3D_SVT_FLOAT, D3D_SVC_VECTOR, 1,4,D3D_INTERPOLATION_UNDEFINED, D3D_PF_OUT, 0,0,0,0}
};
//Create the vertex output node
ID3D11LinkingNode* vertexShaderOutputNode = nullptr;
result = vertexShaderGraph->SetOutputSignature(vertexShaderOutputParameters, ARRAYSIZE(vertexShaderOutputParameters), &vertexShaderOutputNode);
EffectHelper::ErrorCheck(result, vertexShaderGraph);
//Pass the value from the function node to the output node
result = vertexShaderGraph->PassValue(vertexFunctionNode, 0, vertexShaderOutputNode, 1);
EffectHelper::ErrorCheck(result, vertexShaderGraph);
result = vertexShaderGraph->PassValue(vertexFunctionNode, 1, vertexShaderOutputNode, 0);
EffectHelper::ErrorCheck(result, vertexShaderGraph);
//Finalize the vertex shader graph
ID3D11ModuleInstance* vertexShaderGraphInstance = nullptr;
result = vertexShaderGraph->CreateModuleInstance(&vertexShaderGraphInstance, nullptr);
EffectHelper::ErrorCheck(result, vertexShaderGraph);
//Create a linker
ID3D11Linker* linker = nullptr;
result = D3DCreateLinker(&linker);
EffectHelper::ErrorCheck(result);
//Hook up the shader library instance
result = linker->UseLibrary(&myLibraryInstance);
EffectHelper::ErrorCheck(result);
//Link the vertex shader. This looks mostly normal, frankly. Interesting. TODO: Something goes wrong here. What? #HelpfulError
ID3DBlob* vertexShaderBlob = nullptr;
ID3DBlob* errorBlob = nullptr;
//Error occurs here
result = linker->Link(vertexShaderGraphInstance, "main", (std::string("vs") + ShaderModelSuffix).c_str(), 0, &vertexShaderBlob, &errorBlob);
EffectHelper::ErrorCheck(result, errorBlob);
//Create the vertex shader. Business as usual?
ID3D11VertexShader* vertexShader = nullptr;
result = MasterSingleton::GetInstance().Get3DEngine().GetFramework().GetDevice().CreateVertexShader(
vertexShaderBlob->GetBufferPointer(),vertexShaderBlob->GetBufferSize(),nullptr,&vertexShader);
EffectHelper::ErrorCheck(result);
Create the pixel shader from graph (also its own function):
//Create pixel shader graph
ID3D11FunctionLinkingGraph* pixelShaderGraph = nullptr;
HRESULT result = D3DCreateFunctionLinkingGraph(0, &pixelShaderGraph);
EffectHelper::ErrorCheck(result);
//Define the pixel shader input layout (will be fed by vertex shader layout)
static const D3D11_PARAMETER_DESC pixelInputDesc[] =
{
{"inputUV","TEXCOORD0",D3D_SVT_FLOAT, D3D_SVC_VECTOR, 1,2,D3D_INTERPOLATION_UNDEFINED,D3D_PF_IN,0,0,0,0},
{"inputPosition","SV_POSITION",D3D_SVT_FLOAT,D3D_SVC_VECTOR,1,4,D3D_INTERPOLATION_UNDEFINED, D3D_PF_IN,0,0,0,0}
};
//Create pixel shader input node
ID3D11LinkingNode* pixelInputNode = nullptr;
result = pixelShaderGraph->SetInputSignature(pixelInputDesc, ARRAYSIZE(pixelInputDesc), &pixelInputNode);
EffectHelper::ErrorCheck(result, pixelShaderGraph);
//Create texture sample function node
ID3D11LinkingNode* textureSampleFunctionNode = nullptr;
result = pixelShaderGraph->CallFunction("", &myLibrary, "SampleTexture", &textureSampleFunctionNode);
EffectHelper::ErrorCheck(result, pixelShaderGraph);
//Pass value from input node to texture sample node
result = pixelShaderGraph->PassValue(pixelInputNode, 0, textureSampleFunctionNode, 0);
EffectHelper::ErrorCheck(result, pixelShaderGraph);
//Define the output parameters from the pixel shader
D3D11_PARAMETER_DESC outputParameterDesc[]=
{
{"outputColor","SV_TARGET",D3D_SVT_FLOAT,D3D_SVC_VECTOR,1,4,D3D_INTERPOLATION_UNDEFINED,D3D_PF_OUT,0,0,0,0}
};
//Set the output node (using the signature)
ID3D11LinkingNode* pixelOutputNode = nullptr;
result = pixelShaderGraph->SetOutputSignature(outputParameterDesc, ARRAYSIZE(outputParameterDesc), &pixelInputNode);
EffectHelper::ErrorCheck(result, pixelShaderGraph);
//Finalize the pixel shader graph
ID3D11ModuleInstance *pixelShaderInstance = nullptr;
result = pixelShaderGraph->CreateModuleInstance(&pixelShaderInstance, nullptr);
EffectHelper::ErrorCheck(result, pixelShaderGraph);
//Create a linker and hook up to library instance
ID3D11Linker *linker = nullptr;
result = D3DCreateLinker(&linker);
EffectHelper::ErrorCheck(result);
//Set library to use
result = linker->UseLibrary(&myLibraryInstance);
EffectHelper::ErrorCheck(result);
//Link the pixel shader
ID3DBlob* shaderBlob;
ID3DBlob* errorBlob;
//And also here
result = linker->Link(pixelShaderInstance, "main", (std::string("ps") + ShaderModelSuffix).c_str(), 0, &shaderBlob, &errorBlob);
EffectHelper::ErrorCheck(result, errorBlob);
//Compile the pixel shader
ID3D11PixelShader* pixelShader = nullptr;
result = MasterSingleton::GetInstance().Get3DEngine().GetFramework().GetDevice().CreatePixelShader(shaderBlob->GetBufferPointer(), shaderBlob->GetBufferSize(), nullptr, &pixelShader);
EffectHelper::ErrorCheck(result);

Understanding animation loading with Assimp

I have made a container class that basically contains all the information I need for rendering an animation. I'm using the Assimp library to load the animation and then assigning the data from scene->mVertices etc. to my array buffers. What I'm having trouble figuring out is how I'm supposed to get the data for the other frames into my buffers!
I know that there is a function called HasAnimations(), and also an aiAnimation **mAnimations. But I can't find any data in there that is relevant to getting the next set of vertex data.
I have managed to load a series of obj files with it and draw them in order to confirm that my class works correctly. But obviously I would prefer, or actually need, to use something else when I want to expand to the real deal, as loading 250 frames individually takes a couple of minutes. (Loading a simple animation should be done in about 5 seconds tops, right?)
I'm open to using any kind of file format, but I need to know how to set it up in Blender so that the animations actually get exported, which I also seem to fail horribly at for now, as I have little experience with Blender.
I've been searching for tutorials on this library and Blender exporting for 2 days now, and found almost nothing useful. I did check out the documentation for Assimp as well, which took me part of the way, but it doesn't explain a thing about how aiAnimation affects the vertices, or how I can get the other frames of data I need.
Well, I did manage to make it work after endless hours! Sort of... I made a model that is transformed in a loop x+5, y+5, x-5, y-5...
What I ended up doing (well, it was the only thing I could think of anyway) is reading the data from scene->mAnimations[], which basically consists of an array of the keyframes only. So I had to interpolate all the vertices myself (which is always a funny task to approach!).
Effectively:
You get the time at which the keyframe should be fully interpolated.
Then subtract where the object currently is to figure out how much you need to move.
Now you need to figure out how much to move each step, so I took the easiest solution and divided it by how many frames the movement should be split over.
Now it was just a matter of updating all my vertices before sending them to the VBO (this step probably varies a little depending on your data setup); see the small sketch below.
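Put as a tiny sketch (the helper and its parameters are illustrative, not the actual code below), the per-frame step is roughly:
// Sketch: spread the remaining distance to the next position key evenly over
// the frames left until that key is due.
aiVector3D StepTowardsKey(const aiVectorKey& nextKey, const aiVector3D& currentPos,
                          int currentFrame, int keyFrame)
{
    aiVector3D remaining = nextKey.mValue - currentPos; // how far there is left to move
    int framesLeft = keyFrame - currentFrame;           // frames until the key should be reached
    return framesLeft > 0 ? remaining / (float)framesLeft : remaining;
}
// Each frame: position += StepTowardsKey(...), then update the VBO with the translated vertices.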
After those steps, I got something that looks like this:
Header:
class AssimpMesh {
private:
struct ShaderProgram {
//Shader data
GLuint program;
string programName;
vector <GLuint> shaders, uniforms;
};
struct MeshData {
//Mesh data
GLuint meshArray;
vector <GLuint> buffers;
vector <string> bufferNames;
//Shader data
ShaderProgram *shader;
vector <aiVector3D> transedVertices;
int totalIndices;
};
struct Frame {
vector <MeshData*> meshes;
};
struct Animation {
string name;
vector <Frame*> frames;
};
//Global shader data
ShaderProgram *globalShader;
//Model data
Assimp::Importer importer;
const aiScene *scene;
//Mesh data
bool initialized, perMeshShading;
vector <Animation*> animations;
int currentFrame, currentAnimation;
Uint32 lastFrameTicks;
Transform *transform;
glm::mat4 projectionView;
aiVector3D lightPosition;
void loadScene(string filePath);
void loadAnimation(Animation *animation, int numFrames);
void initMesh(aiMesh *mesh, MeshData *data);
...
public:
AssimpMesh(string filePath);
~AssimpMesh();
void draw();
...
};
Source:
void AssimpMesh::loadScene(string filePath) {
//Load animation file
scene = importer.ReadFile(filePath.c_str(), aiProcessPreset_TargetRealtime_MaxQuality);
if (scene) {
if (scene->HasAnimations()) {
for (int i = 0; i < scene->mNumAnimations; i++) {
aiAnimation *anime = scene->mAnimations[i];
int framesInAnimation = ceil(anime->mDuration * ANIMATION_IMPORT_FPS);
Animation *animation = new Animation();
animation->name = anime->mName.C_Str();
loadAnimation(animation, framesInAnimation);
animations.push_back(animation);
}
}
else {
Animation *animation = new Animation();
animation->name = "Default";
loadAnimation(animation, 1);
animations.push_back(animation);
}
printf("Done loading '%s'\n", filePath.c_str());
}
else {
//Report error
printf("Assimp error: %s\n", importer.GetErrorString());
}
}
void AssimpMesh::loadAnimation(Animation *animation, int numFrames) {
int nextKeyframe = -1;
int nextKeyframeId = -1;
int transedFrames = 0;
aiVector3D transPos = aiVector3D();
aiVector3D transVec = aiVector3D();
for (int f = 0; f < numFrames; f++) {
Frame *frame = new Frame();
if (f > nextKeyframe && nextKeyframe < numFrames) {
//Get the new keyframe
aiAnimation *anime = scene->mAnimations[animations.size()];
aiNodeAnim *aniNode = anime->mChannels[0];
aiVectorKey key = aniNode->mPositionKeys[++nextKeyframeId];
nextKeyframe = ceil(key.mTime * ANIMATION_IMPORT_FPS);
if (!nextKeyframeId) {
transVec = key.mValue;
transPos = key.mValue;
}
else {
int transFrames = nextKeyframe - (f - 1);
aiVector3D transDir = key.mValue - transPos;
transPos = key.mValue;
transVec = transDir;
transVec /= transFrames;
transedFrames = 0;
}
}
if (scene->HasLights()) {
aiLight *light = scene->mLights[0];
//lightPosition = light->mPosition;
}
//Put data into vertex arrays
transedFrames++;
aiMesh *mesh;
MeshData *data;
for (int i = 0; i < scene->mNumMeshes; i++) {
mesh = scene->mMeshes[i];
data = new MeshData();
if (!i) {
for (int j = 0; j < mesh->mNumVertices; j++) {
if (!f) {
data->transedVertices.push_back(mesh->mVertices[j] + transVec);
}
else {
data->transedVertices.push_back(animation->frames[f-1]->meshes[i]->transedVertices[j] + transVec);
}
}
}
//Assign VBO
initMesh(mesh, data);
//Assign shader
if (perMeshShading) {
initShader(mesh, data);
setUniforms(mesh, data);
}
frame->meshes.push_back(data);
}
animation->frames.push_back(frame);
}
}
void AssimpMesh::initMesh(aiMesh *mesh, MeshData *data) {
//Buffer for temporary storage of new ids
GLuint id;
//Make vertex array
if (!initialized) {
glGenVertexArrays(1, &id);
}
data->meshArray = id;
//Tell OpenGL to use this array
glBindVertexArray(id);
//Assign vertices
if (mesh->HasPositions()) {
//Make buffer
if (!initialized) {
glGenBuffers(1, &id);
}
data->buffers.push_back(id);
data->bufferNames.push_back("Positions");
//Set buffer data
glBindBuffer(GL_ARRAY_BUFFER, id);
if (data->transedVertices.size()) {
glBufferData(GL_ARRAY_BUFFER, sizeof(aiVector3D) * data->transedVertices.size(), &data->transedVertices[0], GL_STATIC_DRAW);
}
else {
glBufferData(GL_ARRAY_BUFFER, sizeof(aiVector3D) * mesh->mNumVertices, &mesh->mVertices[0], GL_STATIC_DRAW);
}
//Set shader attribute data
glEnableVertexAttribArray(VBO_VERTEX);
glVertexAttribPointer(VBO_VERTEX, 3, GL_FLOAT, GL_FALSE, NULL, NULL);
}
unsigned int matId = mesh->mMaterialIndex;
aiMaterial *material = scene->mMaterials[matId];
vector <aiColor3D> colors;
aiColor3D diffuse(0, 0, 0);
material->Get(AI_MATKEY_COLOR_DIFFUSE, diffuse);
for (int i = 0; i < mesh->mNumVertices; i++) {
colors.push_back(diffuse);
}
//Make buffer
if (!initialized) {
glGenBuffers(1, &id);
}
data->buffers.push_back(id);
data->bufferNames.push_back("Colors");
//Set buffer data
glBindBuffer(GL_ARRAY_BUFFER, id);
glBufferData(GL_ARRAY_BUFFER, sizeof(aiColor3D) * mesh->mNumVertices, &colors.front(), GL_STATIC_DRAW);
//Set shader attribute data
glEnableVertexAttribArray(VBO_COLOR);
glVertexAttribPointer(VBO_COLOR, 3, GL_FLOAT, GL_FALSE, NULL, NULL);
//Assign texture coords
if (mesh->HasTextureCoords(0)) {
//Make buffer
if (!initialized) {
glGenBuffers(1, &id);
}
data->buffers.push_back(id);
data->bufferNames.push_back("TextureCoords");
//Set buffer data
glBindBuffer(GL_ARRAY_BUFFER, id);
glBufferData(GL_ARRAY_BUFFER, sizeof(aiVector3D) * mesh->mNumVertices, &mesh->mTextureCoords[0], GL_STATIC_DRAW);
//Set shader attribute data
glEnableVertexAttribArray(VBO_TEXCORD);
glVertexAttribPointer(VBO_TEXCORD, 3, GL_FLOAT, GL_FALSE, NULL, NULL);
}
//Assign normals
if (mesh->HasNormals()) {
//Make buffer
if (!initialized) {
glGenBuffers(1, &id);
}
data->buffers.push_back(id);
data->bufferNames.push_back("Normals");
//Set buffer data
glBindBuffer(GL_ARRAY_BUFFER, id);
glBufferData(GL_ARRAY_BUFFER, sizeof(aiVector3D) * mesh->mNumVertices, &mesh->mNormals[0], GL_STATIC_DRAW);
//Set shader attribute data
glEnableVertexAttribArray(VBO_NORMAL);
glVertexAttribPointer(VBO_NORMAL, 3, GL_FLOAT, GL_FALSE, NULL, NULL);
}
if (mesh->HasFaces()) {
vector <unsigned int> indices;
aiFace face;
for (int i = 0; i < mesh->mNumFaces; i++) {
face = mesh->mFaces[i];
for (int j = 0; j < face.mNumIndices; j++) {
indices.push_back(face.mIndices[j]);
}
}
data->totalIndices = indices.size();
//Make buffer
if (!initialized) {
glGenBuffers(1, &id);
}
data->buffers.push_back(id);
data->bufferNames.push_back("Faces");
//Set buffer data
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, id);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned int) * indices.size(), &indices.front(), GL_STATIC_DRAW);
}
}
Of course, it doesn't work for everything yet; actually only translation, and only of the entire model. Apparently it doesn't read the name values correctly, so I can't tell which meshes the animation is meant for. But it got me going; maybe someone will find this helpful. =)
