AUGraph sine wave is corrupt

I have the following AUGraph code that connects a sine-wave generator (a render callback) to a multichannel mixer, and the mixer to an output device. That part is OK; however, when you play it, the tone generated seems to be corrupt... it's almost there, but something is wrong.
Any ideas?
import Cocoa
import CoreAudio
import AudioToolbox
import AudioUnit
import AVFoundation

let sampleRate: Float64 = 41000.0

class ViewController: NSViewController {

    var t = 0

    let callback2: AURenderCallback = { (inRefCon: UnsafeMutablePointer<Void>,
                                         ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                                         inTimeStamp: UnsafePointer<AudioTimeStamp>,
                                         inBusNumber: UInt32,
                                         inNumberFrames: UInt32,
                                         ioData: UnsafeMutablePointer<AudioBufferList>) in

        let delta: Float = Float(880 * 2 * M_PI / sampleRate)
        let abl = UnsafeMutableAudioBufferListPointer(ioData)
        var x: Float = 0
        for buffer: AudioBuffer in abl {
            //x = self._x
            memset(buffer.mData, 0, Int(buffer.mDataByteSize))
            let s = sizeof(Float)
            let r = sizeof(Float32)
            let f = abl.count
            let buf: UnsafeMutablePointer<Float> = unsafeBitCast(buffer.mData, UnsafeMutablePointer<Float>.self)
            for var i: Int = 0; i < Int(inNumberFrames); i++ {
                buf[i] = sin(x)
                x += delta
            }
            memcpy(buffer.mData, buf, Int(buffer.mDataByteSize))
            let x = 0
        }
        return noErr
    }
    struct MyAUGraphPlayer {
        var streamFormat: AudioStreamBasicDescription!
        var graph: AUGraph = AUGraph()
        var outputNode: AUNode!
        var mixerNode: AUNode!
        var outputUnit: AudioUnit!
        var mixerUnit: AudioUnit!
        var firstOutputSampleTime: Float64 = 0.0

        init() {
        }
    }

    func addAUNode(graph: AUGraph, inout desc: AudioComponentDescription) -> AUNode {
        var outputNode: AUNode = AUNode()
        let x = AUGraphAddNode(graph, &desc, &outputNode)
        print("x: \(x)")
        return outputNode
    }
    override func viewDidLoad() {
        super.viewDidLoad()
        doit()
    }

    func doit() {
        var desc: AudioStreamBasicDescription = AudioStreamBasicDescription()
        desc.mSampleRate = sampleRate
        desc.mFormatID = kAudioFormatLinearPCM
        desc.mFormatFlags = kAudioFormatFlagsNativeFloatPacked
        desc.mFramesPerPacket = 1
        desc.mChannelsPerFrame = 2
        desc.mBitsPerChannel = UInt32(sizeof(Float32) * 8)
        desc.mBytesPerFrame = desc.mChannelsPerFrame * (desc.mBitsPerChannel / 8)
        desc.mBytesPerPacket = desc.mBytesPerFrame * desc.mFramesPerPacket

        var graph: AUGraph = AUGraph()
        var outputNode: AUNode = AUNode()
        var mixerNode: AUNode = AUNode()
        var outputUnit: AudioUnit = AudioUnit()
        var mixerUnit: AudioUnit = AudioUnit()

        let error = NewAUGraph(&graph)
        print("error: \(error)")

        // Output
        var outputDesc: AudioComponentDescription = AudioComponentDescription(
            componentType: OSType(kAudioUnitType_Output),
            componentSubType: OSType(kAudioUnitSubType_DefaultOutput),
            componentManufacturer: OSType(kAudioUnitManufacturer_Apple),
            componentFlags: 0,
            componentFlagsMask: 0)
        let a1 = AUGraphAddNode(graph, &outputDesc, &outputNode)
        print("a1: \(a1)")

        // Mixer
        var mixerDesc: AudioComponentDescription = AudioComponentDescription(
            componentType: OSType(kAudioUnitType_Mixer),
            componentSubType: OSType(kAudioUnitSubType_StereoMixer),
            componentManufacturer: OSType(kAudioUnitManufacturer_Apple),
            componentFlags: 0,
            componentFlagsMask: 0)
        let b1 = AUGraphAddNode(graph, &mixerDesc, &mixerNode)
        print("b1: \(b1)")

        // Connect nodes
        let y = AUGraphConnectNodeInput(graph, mixerNode, 0, outputNode, 0)
        print("y: \(y)")

        // Open
        let open = AUGraphOpen(graph)
        print("graph should be open: \(open)")
        let ufa = AUGraphNodeInfo(graph, mixerNode, nil, &mixerUnit)
        print("ufa: \(ufa)")
        let uf = AUGraphNodeInfo(graph, outputNode, nil, &outputUnit)
        print("uf: \(uf)")

        // Output stream formats (commented out while debugging)
        // let w = AudioUnitSetProperty(outputUnit, kAudioUnitProperty_StreamFormat,
        //                              kAudioUnitScope_Output, 0, &desc,
        //                              UInt32(sizeof(AudioStreamBasicDescription)))
        // print("w: \(w)")
        // let w2 = AudioUnitSetProperty(outputUnit, kAudioUnitProperty_StreamFormat,
        //                               kAudioUnitScope_Input, 0, &desc,
        //                               UInt32(sizeof(AudioStreamBasicDescription)))
        // print("w2: \(w2)")

        var numbuses: UInt32 = 1
        // let gg = AudioUnitSetProperty(mixerUnit, kAudioUnitProperty_ElementCount,
        //                               kAudioUnitScope_Input, 0, &numbuses, UInt32(sizeof(UInt32)))
        // print("gg: \(gg)")
        // let s = sizeof(UInt32)
        // numbuses = 1
        // let gg2 = AudioUnitSetProperty(mixerUnit, kAudioUnitProperty_ElementCount,
        //                                kAudioUnitScope_Output, 0, &numbuses, UInt32(sizeof(UInt32)))
        // print("gg2: \(gg2)")
        // let m1 = AudioUnitSetProperty(mixerUnit, kAudioUnitProperty_StreamFormat,
        //                               kAudioUnitScope_Output, 0, &desc,
        //                               UInt32(sizeof(AudioStreamBasicDescription)))
        // print("m1: \(m1)")

        numbuses = 1
        for (var i: UInt32 = 0; i < numbuses; ++i) {
            let yy = AudioUnitSetParameter(mixerUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Output, i, 1, 0)
            print("yy: \(yy)")
            let xx = AudioUnitSetParameter(mixerUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, i, 1, 0)
            print("xx: \(xx)")
            let zz = AudioUnitSetParameter(mixerUnit, kMultiChannelMixerParam_Enable, kAudioUnitScope_Input, i, 1, 0)
            print("zz: \(zz)")
            let aa = AudioUnitSetParameter(mixerUnit, kMultiChannelMixerParam_Enable, kAudioUnitScope_Output, i, 1, 0)
            print("aa: \(aa)")

            var rcbs: AURenderCallbackStruct = AURenderCallbackStruct(inputProc: callback2, inputProcRefCon: &graph)
            let result1 = AUGraphSetNodeInputCallback(graph, mixerNode, i, &rcbs)
            print("result1: \(result1)")

            // let sf1 = AudioUnitSetProperty(mixerUnit, kAudioUnitProperty_StreamFormat,
            //                                kAudioUnitScope_Input, i, &desc,
            //                                UInt32(sizeof(AudioStreamBasicDescription)))
            // print("sf1: \(sf1)")
            // let sf2 = AudioUnitSetProperty(mixerUnit, kAudioUnitProperty_StreamFormat,
            //                                kAudioUnitScope_Output, i, &desc,
            //                                UInt32(sizeof(AudioStreamBasicDescription)))
            // print("sf2: \(sf2)")
        }

        // let sf2 = AudioUnitSetProperty(mixerUnit, kAudioUnitProperty_StreamFormat,
        //                                kAudioUnitScope_Output, 0, &desc,
        //                                UInt32(sizeof(AudioStreamBasicDescription)))
        // print("sf2: \(sf2)")
        // let o1 = AudioUnitSetProperty(outputUnit, kAudioUnitProperty_StreamFormat,
        //                               kAudioUnitScope_Output, 1, &desc,
        //                               UInt32(sizeof(AudioStreamBasicDescription)))
        // print("o1: \(o1)")

        let yu = AUGraphInitialize(graph)
        print("yu: \(yu)")
        let ee = AUGraphStart(graph)
        print("ee: \(ee)")
        CAShow(UnsafeMutablePointer(graph))
    }
}

You ask for 2 channels per frame, but only provide enough bytes per packet for 1 channel. Check the error return values on your audio property setters.
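Concretely: with 2 float channels per frame, a consistent interleaved format carries mBytesPerFrame = mBytesPerPacket = 2 * 4 = 8 bytes, and the render callback then has to write 2 * inNumberFrames floats into the buffer, one sample per channel per frame, not just inNumberFrames. A minimal sketch of such a callback in C (the SineState carried in inRefCon is hypothetical, not the poster's code):

#include <AudioToolbox/AudioToolbox.h>
#include <math.h>

// Hypothetical render state; install via AURenderCallbackStruct.inputProcRefCon.
typedef struct { Float32 phase; Float32 delta; } SineState;

static OSStatus renderStereoSine(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList *ioData)
{
    SineState *st = (SineState *)inRefCon;
    // One interleaved stereo buffer: 2 * inNumberFrames Float32 samples in total.
    Float32 *out = (Float32 *)ioData->mBuffers[0].mData;
    for (UInt32 f = 0; f < inNumberFrames; f++) {
        Float32 s = sinf(st->phase);
        out[2 * f]     = s;   // left sample of frame f
        out[2 * f + 1] = s;   // right sample of frame f
        st->phase += st->delta;
    }
    return noErr;
}

Note also that the phase has to persist across callbacks; the original resets x to 0 on every render, which would by itself produce an audible discontinuity at each buffer boundary.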


Mix vertex and curveVertex in a shape?

I am trying to create a shape with both straight lines and curved parts, where all the points connect and the interior is filled with a color. I cannot find a way to do this in a single shape. Is there one? For example, a rectangle with an inverted curve on each end? I can do it with arcs and lines, as in the code below, but I have to think there is an easier way, using beginShape() or something similar. Also, even using the lines and arcs, I am not sure how to fill the result.
arc(200, 200, 150, 150, radians(0), radians(90), OPEN);
arc(200, 200, 50, 50, radians(0), radians(90), OPEN);
let x1 = 200 + Math.cos(radians(0)) * 25;
let y1 = 200 + Math.sin(radians(0)) * 25;
let x2 = 200 + Math.cos(radians(90)) * 25;
let y2 = 200 + Math.sin(radians(90)) * 25;
let x3 = 200 + Math.cos(radians(0)) * 75;
let y3 = 200 + Math.sin(radians(0)) * 75;
let x4 = 200 + Math.cos(radians(90)) * 75;
let y4 = 200 + Math.sin(radians(90)) * 75;
line(x1, y1, x3, y3)
line(x2, y2, x4, y4)
This should answer part of your question experimentally. As you can see by playing with the sketch below, if there is a single curveVertex() in the shape, all of the vertices created with vertex() will also be treated as curveVertex()-type vertices.
I think you should be able to achieve your goal with bezierVertex().
const InteractionThreshold = 5;
const SqInteractionThreshold = InteractionThreshold * InteractionThreshold;

function lineEditor(p) {
  let controlPoints = [
    { x: 50, y: 50, type: 'vertex' },
    { x: 250, y: 250, type: 'vertex' },
  ];

  p.setup = () => {
    p.createCanvas(300, 300);
    p.rectMode(p.RADIUS);
    p.ellipseMode(p.RADIUS);
    p.noFill();
    p.noLoop();
  };

  p.draw = () => {
    p.background(200);
    p.beginShape();
    for (let i = 0; i < controlPoints.length; i++) {
      let pt = controlPoints[i];
      switch (pt.type) {
        case 'vertex':
          p.vertex(pt.x, pt.y);
          break;
        case 'curve':
          p.curveVertex(pt.x, pt.y);
          break;
      }
    }
    p.endShape();

    p.push();
    p.stroke('green');
    p.drawingContext.setLineDash([5, 15]);
    p.beginShape();
    for (let i = 0; i < controlPoints.length; i++) {
      let pt = controlPoints[i];
      p.vertex(pt.x, pt.y);
    }
    p.endShape();
    p.drawingContext.setLineDash([]);
    p.pop();

    p.push();
    p.fill("red");
    p.noStroke();
    for (let i = 0; i < controlPoints.length; i++) {
      let pt = controlPoints[i];
      switch (pt.type) {
        case 'curve':
          p.circle(pt.x, pt.y, 4);
          break;
        case 'vertex':
          p.push();
          p.translate(pt.x, pt.y);
          p.rotate(p.PI / 4);
          p.square(0, 0, 4);
          p.pop();
          break;
      }
    }
    p.pop();
  };

  let dragging;

  p.mousePressed = function() {
    // is the mouse over a point
    let closest = getClosest(p.mouseX, p.mouseY);
    if (closest && closest.sqdist < SqInteractionThreshold) {
      dragging = closest.ix;
    }
  };

  p.mouseReleased = function() {
    dragging = undefined;
  };

  p.mouseDragged = function() {
    if (dragging !== undefined) {
      controlPoints[dragging].x = p.constrain(p.mouseX, 0, p.width);
      controlPoints[dragging].y = p.constrain(p.mouseY, 0, p.height);
      p.redraw();
    }
  };

  p.mouseClicked = function() {
    if (p.keyIsDown(p.SHIFT)) {
      // is the mouse over a point
      let closest = getClosest(p.mouseX, p.mouseY);
      if (closest && closest.sqdist < SqInteractionThreshold) {
        let pt = controlPoints[closest.ix];
        switch (pt.type) {
          case 'vertex':
            pt.type = 'curve';
            break;
          case 'curve':
            pt.type = 'vertex';
            break;
        }
        p.redraw();
      }
    }
  };

  p.doubleClicked = function() {
    if (p.keyIsDown(p.SHIFT)) {
      if (controlPoints.length > 2) {
        // delete
        let closest = getClosest(p.mouseX, p.mouseY);
        if (closest && closest.sqdist < SqInteractionThreshold) {
          controlPoints.splice(closest.ix, 1);
          p.redraw();
        }
      }
    } else {
      // insert
      let closest = getClosestSegment(p.mouseX, p.mouseY);
      if (closest.error < InteractionThreshold) {
        controlPoints.splice(closest.end, 0, { x: p.mouseX, y: p.mouseY, type: 'vertex' });
        p.redraw();
      }
    }
  };

  function getClosest(x, y) {
    let closest;
    for (let i = 0; i < controlPoints.length; i++) {
      let dx = x - controlPoints[i].x;
      let dy = y - controlPoints[i].y;
      let sqdist = dx * dx + dy * dy;
      if (!closest || sqdist < closest.sqdist) {
        closest = { sqdist, ix: i };
      }
    }
    return closest;
  }

  function getClosestSegment(x, y) {
    let closest;
    for (let i = 0; i < controlPoints.length - 1; i++) {
      let start = controlPoints[i];
      let end = controlPoints[i + 1];
      let angle = p.atan2(end.y - start.y, end.x - start.x);
      let len = p.dist(start.x, start.y, end.x, end.y);
      // convert [mouseX, mouseY] to a position relative to start
      let vec = p.createVector(x - start.x, y - start.y);
      vec = p.createVector(
        p.cos(angle) * vec.x + p.sin(angle) * vec.y,
        -p.sin(angle) * vec.x + p.cos(angle) * vec.y,
      );
      let error;
      if (vec.x < 0) {
        error = p.dist(0, 0, vec.x, vec.y);
      } else if (vec.x > len) {
        error = p.dist(len, 0, vec.x, vec.y);
      } else {
        // There is no X error, only consider the Y
        error = p.abs(vec.y);
      }
      if (!closest || error < closest.error) {
        closest = { error, start: i, end: i + 1 };
      }
    }
    return closest;
  }
}

let lineEditorSketch = new p5(lineEditor);
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.4.0/p5.js"></script>

WriteFile on Com port indicates Tx complete, but no data transmitted

I am using VS 2013 Professional in an MFC project.
I have been using my software to receive data from the COM port for some time, but recently needed to add transmission capability.
The init code is:
BOOL PASCAL FAR SetupConnect(pCONNECTION pCon, pCOMCONFIG pCfg)
{
    DCB dcb;
    pSERBUF pSB = pCon->BufStruct;
    // pSERBUF *ppSB = (pSERBUF*)pCon->BufStruct;
    // pSB = *ppSB;

    dcb.DCBlength = sizeof(DCB);
    CheckComs();                                // Gets available COM ports
    pCon->Port = pNames[0].PortNames[3] - 0x30;

    if (pCon->BufStruct == NULL)                // This is a personal Communications structure
    {   // Init
        pCon->hSB = GlobalAlloc(GHND, sizeof(SERBUF));
        if (pCon->hSB == NULL)
        {
            // return INVALID_HANDLE_VALUE;
            return 0;
        }
        pSB = (pSERBUF)GlobalLock(pCon->hSB);
        pSB->idComDev = INVALID_HANDLE_VALUE;
        pCon->BufStruct = pSB;
    }
    else return (0);

    if (pSB->idComDev == INVALID_HANDLE_VALUE)
    {
        pSB->idComDev = CreateFile(pNames[0].PortNames, GENERIC_READ | GENERIC_WRITE,
                                   0,           // exclusive access
                                   NULL,        // no security
                                   OPEN_EXISTING,
                                   FILE_ATTRIBUTE_NORMAL | FILE_FLAG_OVERLAPPED,
                                   NULL);
    }

    // Current configuration
    GetCommState(pSB->idComDev, &dcb);

    // Setup baudrate, parity, etc.
    dcb.BaudRate = pCfg->dwBaudRate;
    dcb.ByteSize = pCfg->bDataBits;
    dcb.Parity   = pCfg->bParity;
    dcb.StopBits = pCfg->bStopBits;

    // Setup Flow Control
    dcb.fOutxDsrFlow = pCfg->handshake_DTR;
    dcb.fDtrControl  = DTR_CONTROL_ENABLE;      // DTR high while port open
    dcb.fOutxCtsFlow = pCfg->handshake_RTS;
    dcb.fRtsControl  = RTS_CONTROL_DISABLE;     // Toggle RTS with EscapeCommFunction

    // XON/XOFF Not Used
    dcb.fInX    = FALSE;
    dcb.fOutX   = FALSE;
    dcb.fBinary = TRUE;
    dcb.fParity = TRUE;

    // return TRUE if everything looks cool
    return (SetCommState(pSB->idComDev, &dcb));
}
And:
CSerCom::CSerCom()
{
    pCon = &Con;
    pCfg = &Cfg;
    m_SerHwnd = this;

    pCfg->dwBaudRate = 115200;
    pCfg->bDataBits = 8;
    pCfg->bParity = NOPARITY;
    pCfg->bStopBits = TWOSTOPBITS;

    // here
    SetupConnect(pCon, pCfg);

    pSERBUF pSB = pCon->BufStruct;   // pSB is set in SetUpConnect
    if (pSB->idComDev == INVALID_HANDLE_VALUE)
    {
        // device open failure
        // hardware not there or someone else controls it!
        GlobalUnlock(pCon->hSB);
        GlobalFree(pCon->hSB);
        pCon->BufStruct = NULL;
        // TODO stop this from going any further
        HandleFailure();
    }
    else   // Only continue if Port is available
    {
        // Clear Buffer
        SetupComm(pSB->idComDev, 4096, 4096);
        PurgeComm(pSB->idComDev, PURGE_TXABORT | PURGE_RXABORT | PURGE_TXCLEAR | PURGE_RXCLEAR);

        // create the overlapped events
        memset(&(pSB->osRead), 0, sizeof(OVERLAPPED));
        memset(&(pSB->osWrite), 0, sizeof(OVERLAPPED));
        pSB->osRead.hEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
        pSB->osWrite.hEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
        if ((pSB->osRead.hEvent == NULL) || (pSB->osWrite.hEvent == NULL))
        {
            ReleaseNetResources(pCon);
            CloseHandle(pSB->idComDev);
            pSB->idComDev = INVALID_HANDLE_VALUE;
            HandleFailure();
            // return (pSB->idComDev);
        }

        // allocate & lock the mem
        // (used to contain data points to & from the MODBUS
        // as well as the receive buffer for incoming serial data)
        pSB->hRcv = GlobalAlloc(GHND, MAX_RX_LEN);
        if (pSB->hRcv == NULL)
        {
            ReleaseNetResources(pCon);
            CloseHandle(pSB->idComDev);
            pSB->idComDev = INVALID_HANDLE_VALUE;
            HandleFailure();
            // return (pSB->idComDev);
        }
        pSB->pRcv = (char *)GlobalLock(pSB->hRcv);

        pSB->hTx = (char *)GlobalAlloc(GHND, MAX_TX_LEN);
        if (pSB->hTx == NULL)
        {
            ReleaseNetResources(pCon);
            CloseHandle(pSB->idComDev);
            pSB->idComDev = INVALID_HANDLE_VALUE;
            HandleFailure();
            // return (pSB->idComDev);
        }
        pSB->pTx = (char *)GlobalLock(pSB->hTx);

        // remember the setup params
        pSB->TimeOut = 3;   //CalculateTimeOut(pCfg->dwBaudRate);
        // pSB->TimerId = TimerId;

        // initialize the status counters
        // pSB->ValidCt = 0;
        // pSB->InvalidCt = 0;
        pSB->RxInIdx = 0;
        // pSB->RTS_Delay[0] = pCfg->RTS_Delay[0];
        // pSB->RTS_Delay[1] = pCfg->RTS_Delay[1];
        pSB->RTS_Delay[0] = 100;
        pSB->RTS_Delay[1] = 100;

        // setup the Comm Timeouts
        CommTimeOuts.ReadIntervalTimeout = 0xffffffff;
        CommTimeOuts.ReadTotalTimeoutMultiplier = 0;
        CommTimeOuts.ReadTotalTimeoutConstant = 1000;
        CommTimeOuts.WriteTotalTimeoutMultiplier = 0;
        CommTimeOuts.WriteTotalTimeoutConstant = 1000;
        SetCommTimeouts(pSB->idComDev, &CommTimeOuts);

        // if everything looks good to here,
        // create the Receive Thread & return the CONNECT handle
        pSB->hIOThread = CreateThread((LPSECURITY_ATTRIBUTES)NULL,
                                      0,
                                      (LPTHREAD_START_ROUTINE)SerProc,
                                      (LPVOID)pCon,
                                      0,
                                      &dwThreadID);
        if (pSB->hIOThread == NULL)
        {
            ReleaseNetResources(pCon);
            CloseHandle(pSB->idComDev);
            pSB->idComDev = INVALID_HANDLE_VALUE;
            HandleFailure();
            // return (pSB->idComDev);
        }
        hIOT = pSB->hIOThread;
    }
}
So with that set up, I enter a thread loop in which I have the following:
// wait indefinitely for something to happen
WaitCommEvent(pSB->idComDev, &dwEvtMask, NULL);

// Catch Rx event
if ((dwEvtMask & EV_RXCHAR) == EV_RXCHAR)
{
    Edit1_txt.Format(_T("Rx'd"));
    E1->SetWindowText(Edit1_txt);
    CMFCView->UpdateWindow();

    // only try to read number of bytes in queue
    ClearCommError(pSB->idComDev, &dwErrorFlags, &ComStat);
    dwLength = ComStat.cbInQue;

    // Read data bytes into connection Rcv Buffer at current RxInIdx
    if (dwLength > 0)
    {
        fReadStat = ReadFile(pSB->idComDev,
                             &(pSB->pRcv[pSB->RxInIdx]),
                             dwLength,
                             &bytesread,
                             &(pSB->osRead));
        if (!fReadStat)
        {
            if (GetLastError() == ERROR_IO_PENDING)
            {
                // We have to wait for read to complete.
                while (!GetOverlappedResult(pSB->idComDev,
                                            &(pSB->osRead), &bytesread, FALSE))
                {
                    dwErrorFlags = GetLastError();
                    if (dwErrorFlags != ERROR_IO_INCOMPLETE)
                        // an error occurred, try to recover
                        ClearCommError(pSB->idComDev, &dwErrorFlags, &ComStat);
                }
            }
            else
            {
                // some other error occurred
                dwLength = 0;
                ClearCommError(pSB->idComDev, &dwErrorFlags, &ComStat);
            }
        } // End of Read Error
    } // End of Read Char

    if (ComStat.cbInQue < 500)
    {
        // update the receive index
        pSB->RxInIdx += dwLength;
        wSleepime = GetTickCount();   // hkk 7/16/99 for console app
        ParseAPI(pSB);
    }
    else
        ComStat.cbInQue = 0;
}

// At some point in the program pSB->TxOutIdx is set to some positive value
if (pSB->TxOutIdx > 0)
{
    dwLength = pSB->TxOutIdx;
    fWriteStat = WriteFile(pSB->idComDev,
                           &(pSB->pTx[pSB->TxOutIdx]),
                           dwLength,
                           &byteswritten,
                           &(pSB->osWrite));
    if (!fWriteStat)
    {
        if (GetLastError() == ERROR_IO_PENDING)
        {
            while (!GetOverlappedResult(pSB->idComDev,
                                        &(pSB->osWrite), &byteswritten, FALSE))
            {
                dwErrorFlags = GetLastError();
                if (dwErrorFlags != ERROR_IO_INCOMPLETE)
                    // an error occurred, try to recover
                    ClearCommError(pSB->idComDev, &dwErrorFlags, &ComStat);
            }
        }
        pSB->TxOutIdx -= byteswritten;
    }
}
}
This detects that the Tx buffer has data (pSB->TxOutIdx > 0) and transmits it.
The transmit fails with an IO-pending error, but after GetOverlappedResult executes, the bytes-written count shows the length desired.
However, no data comes out of the port. I have checked, and the port found and used is correct.
Wassup?
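For comparison, the canonical overlapped-write pattern is sketched below in plain C (hypothetical names; it assumes a handle opened with FILE_FLAG_OVERLAPPED and a manual-reset event, as in the setup code above). Note that it transmits starting at the beginning of the buffer and waits on the OVERLAPPED event for completion, which is worth checking against the &pTx[TxOutIdx] start address used in the loop above:

#include <windows.h>

// Minimal overlapped-write sketch; hPort, txBuf and txLen are illustrative.
BOOL SendBuffer(HANDLE hPort, const char *txBuf, DWORD txLen, OVERLAPPED *osWrite)
{
    DWORD written = 0;
    ResetEvent(osWrite->hEvent);   // manual-reset event created at init time
    if (!WriteFile(hPort, txBuf, txLen, &written, osWrite))
    {
        if (GetLastError() != ERROR_IO_PENDING)
            return FALSE;          // a genuine write failure
        // Block until the driver finishes the transmit (bWait = TRUE).
        if (!GetOverlappedResult(hPort, osWrite, &written, TRUE))
            return FALSE;
    }
    return (written == txLen);     // all bytes were handed to the driver
}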

RGB32 data resource mapping using DirectX memcpy

I have been trying to solve this problem for a month of googling, but now I have to ask for help here.
I want to render an ffmpeg-decoded frame. The frame is converted to RGB32 format, and I try to render it with a Direct3D 11 2D texture.
ZeroMemory(&TextureDesc, sizeof(TextureDesc));
TextureDesc.Height = pFrame->height;
TextureDesc.Width = pFrame->width;
TextureDesc.MipLevels = 1;
TextureDesc.ArraySize = 1;
TextureDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;   // 16 bytes per pixel
TextureDesc.SampleDesc.Count = 1;
TextureDesc.SampleDesc.Quality = 0;
TextureDesc.Usage = D3D11_USAGE_DYNAMIC;
TextureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
TextureDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
TextureDesc.MiscFlags = 0;

result = m_device->CreateTexture2D(&TextureDesc, NULL, &m_2DTex);
if (FAILED(result)) return false;

ShaderResourceViewDesc.Format = TextureDesc.Format;
ShaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
ShaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
ShaderResourceViewDesc.Texture2D.MipLevels = 1;

D3D11_MAPPED_SUBRESOURCE S_mappedResource_tt = { 0, };
ZeroMemory(&S_mappedResource_tt, sizeof(D3D11_MAPPED_SUBRESOURCE));

result = m_deviceContext->Map(m_2DTex, 0, D3D11_MAP_WRITE_DISCARD, 0, &S_mappedResource_tt);
if (FAILED(result)) return false;

BYTE* mappedData = reinterpret_cast<BYTE *>(S_mappedResource_tt.pData);
for (auto i = 0; i < pFrame->height; ++i) {
    memcpy(mappedData, pFrame->data, pFrame->linesize[0]);
    mappedData += S_mappedResource_tt.RowPitch;
    pFrame->data[0] += pFrame->linesize[0];
}
m_deviceContext->Unmap(m_2DTex, 0);

result = m_device->CreateShaderResourceView(m_2DTex, &ShaderResourceViewDesc, &m_ShaderResourceView);
if (FAILED(result)) return false;

m_deviceContext->PSSetShaderResources(0, 1, &m_ShaderResourceView);
But it shows me just a black screen (nothing renders).
I guess the memcpy size is wrong.
The biggest problem is that I don't know what the problem is.
Question 1: Is there any problem with how I create the 2D texture for mapping?
Question 2: What size should the memcpy parameters be (related to the format)?
I based my code on the links below.
[1] https://www.gamedev.net/forums/topic/667097-copy-2d-array-into-texture2d/
[2] https://www.gamedev.net/forums/topic/645514-directx-11-maping-id3d11texture2d/
[3] https://www.gamedev.net/forums/topic/606100-solved-dx11-updating-texture-data/
Thank you for reading; please reply.
Nobody replied, so I solved the issue myself.
I modified some code, and I'm not sure that this alone solves the problem; the black screen turned out to be caused by my matrix.
D3D11_TEXTURE2D_DESC TextureDesc;
D3D11_RENDER_TARGET_VIEW_DESC RenderTargetViewDesc;
D3D11_SHADER_RESOURCE_VIEW_DESC ShaderResourceViewDesc;

ZeroMemory(&TextureDesc, sizeof(TextureDesc));
TextureDesc.Height = pFrame->height;
TextureDesc.Width = pFrame->width;
TextureDesc.MipLevels = 1;
TextureDesc.ArraySize = 1;
TextureDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; /*DXGI_FORMAT_R8G8B8A8_UNORM_SRGB;*/ // 4 bytes per pixel (32 bits)
TextureDesc.SampleDesc.Count = 1;
TextureDesc.SampleDesc.Quality = 0;
TextureDesc.Usage = D3D11_USAGE_DYNAMIC;
TextureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
TextureDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
TextureDesc.MiscFlags = 0;

DWORD* pInitImage = new DWORD[pFrame->width * pFrame->height];
memset(pInitImage, 0, sizeof(DWORD) * pFrame->width * pFrame->height);

D3D11_SUBRESOURCE_DATA InitData;
InitData.pSysMem = pInitImage;
InitData.SysMemPitch = pFrame->width * sizeof(DWORD);
InitData.SysMemSlicePitch = 0;

result = m_device->CreateTexture2D(&TextureDesc, &InitData, &m_2DTex);
if (FAILED(result)) return false;

ShaderResourceViewDesc.Format = TextureDesc.Format;
ShaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
ShaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
ShaderResourceViewDesc.Texture2D.MipLevels = 1;

result = m_device->CreateShaderResourceView(m_2DTex, &ShaderResourceViewDesc, &m_ShaderResourceView);
if (FAILED(result)) return false;

D3D11_MAPPED_SUBRESOURCE S_mappedResource_tt;
ZeroMemory(&S_mappedResource_tt, sizeof(S_mappedResource_tt));
DWORD Stride = pFrame->linesize[0];

result = m_deviceContext->Map(m_2DTex, 0, D3D11_MAP_WRITE_DISCARD, 0, &S_mappedResource_tt);
if (FAILED(result)) return false;

BYTE* pFrameData = pFrame->data[0];                    // source: start of the decoded RGB32 frame
BYTE* mappedData = (BYTE *)S_mappedResource_tt.pData;  // destination: the mapped texture
for (auto i = 0; i < pFrame->height; i++) {
    memcpy(mappedData, pFrameData, Stride);            // copy one row at a time
    mappedData += S_mappedResource_tt.RowPitch;        // destination rows are RowPitch apart
    pFrameData += Stride;                              // source rows are linesize[0] apart
}
m_deviceContext->Unmap(m_2DTex, 0);
It works well. I hope it will be helpful to those who are doing the same thing as me.
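To make the size arithmetic behind Question 2 explicit: with DXGI_FORMAT_R8G8B8A8_UNORM each pixel is 4 bytes, so one row of real pixel data is width * 4 bytes, while the source pitch (FFmpeg's linesize[0]) and the destination pitch (D3D11_MAPPED_SUBRESOURCE.RowPitch) may each be larger because of alignment padding. That is why a single whole-image memcpy is wrong in general and the copy goes row by row. The same pattern as a standalone C helper (a hypothetical function, not part of the D3D11 API):

#include <string.h>
#include <stdint.h>
#include <stddef.h>

// Row-by-row image copy honoring both pitches; for R8G8B8A8 use bytesPerPixel = 4.
static void copy_image_rows(uint8_t *dst, size_t dstPitch,
                            const uint8_t *src, size_t srcPitch,
                            size_t width, size_t height, size_t bytesPerPixel)
{
    size_t rowBytes = width * bytesPerPixel;   // bytes of actual pixels per row
    for (size_t y = 0; y < height; ++y) {
        memcpy(dst, src, rowBytes);            // copy the pixels, skip any padding
        dst += dstPitch;                       // each side advances by its own pitch
        src += srcPitch;
    }
}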

Remote IO Play with constant noise

Guys! I am having trouble using Remote IO to play back streamed audio. I verified the PCM frame data before I put it in, and it's correct, so I'm confused. Could you help me? Thanks a lot!
Below is my code.
- (void)initializeAudioPlay
{
    OSStatus status;

    // Describe audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Get component
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);

    // Get audio units
    status = AudioComponentInstanceNew(inputComponent, &audioPlayUnit);
    [self checkStatus:status];

    // Enable IO for playback
    UInt32 flag = 1;
    //kAUVoiceIOProperty_VoiceProcessingEnableAGC
    status = AudioUnitSetProperty(audioPlayUnit, kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input, kOutputBus, &flag, sizeof(flag));
    [self checkStatus:status];

    // Describe format
    AudioStreamBasicDescription audioFormat;
    memset(&audioFormat, 0, sizeof(audioFormat));
    audioFormat.mSampleRate = 8000;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagsCanonical; //kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagIsSignedInteger;
    /*kAudioFormatFlagsCanonical
     | (kAudioUnitSampleFractionBits << kLinearPCMFormatFlagsSampleFractionShift)*/
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mBytesPerFrame = (audioFormat.mBitsPerChannel / 8) * audioFormat.mChannelsPerFrame;
    audioFormat.mBytesPerPacket = audioFormat.mBytesPerFrame;

    // Apply format
    status = AudioUnitSetProperty(audioPlayUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    [self checkStatus:status];

    float value = (float)10 / 255.0;
    AudioUnitSetParameter(audioPlayUnit, kAudioUnitParameterUnit_LinearGain, kAudioUnitScope_Input, 0, value, 0);

    AudioChannelLayout new_layout;
    new_layout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    AudioUnitSetProperty(audioPlayUnit,
                         kAudioUnitProperty_AudioChannelLayout,
                         kAudioUnitScope_Global,
                         0, &new_layout, sizeof(new_layout));

    UInt32 bypassEffect = kAudioUnitProperty_RenderQuality;
    status = AudioUnitSetProperty(audioPlayUnit,
                                  kAudioUnitProperty_RenderQuality,
                                  kAudioUnitScope_Global,
                                  0,
                                  &bypassEffect,
                                  sizeof(bypassEffect));
    [self checkStatus:status];

    // Set output callback
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = playCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioPlayUnit,
                                  kAudioUnitProperty_SetRenderCallback,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &callbackStruct,
                                  sizeof(callbackStruct));
    [self checkStatus:status];

    flag = 0;

    // Initialize
    status = AudioUnitInitialize(audioPlayUnit);
    [self checkStatus:status];
    DGLog(@"audio play unit initialize = %d", status);

    circularBuf = [[CircularBuf alloc] initWithBufLen:kBufferLength];

    /*
    AudioSessionInitialize(NULL, NULL, NULL, NULL);
    Float64 rate = 32000.0;
    AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareSampleRate, sizeof(rate), &rate);
    Float32 volume = 20.0;
    UInt32 size = sizeof(Float32);
    AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareIOBufferDuration,
                            &size, &volume);
    //float aBufferLength = 0.185759637188209;
    //AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof(aBufferLength), &aBufferLength);
    AudioSessionSetActive(YES);
    */
    AudioSessionInitialize(NULL, NULL, NULL, nil);
    AudioSessionSetActive(true);
    UInt32 sessionCategory = kAudioSessionCategory_MediaPlayback;
    /* for iPhone we need to do this to route the audio to the speaker */
    status = AudioSessionSetProperty(kAudioSessionProperty_AudioCategory,
                                     sizeof(sessionCategory),
                                     &sessionCategory);
    //NSLog(@"Error: %d", status);
    // UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
    // status = AudioSessionSetProperty(kAudioSessionProperty_OverrideAudioRoute,
    //                                  sizeof(audioRouteOverride),
    //                                  &audioRouteOverride);
    UInt32 audioMixed = 1;
    status = AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryMixWithOthers,
                                     sizeof(audioMixed),
                                     &audioMixed);
}
- (void)processAudio:(AudioBuffer *)buffer
{
    short pcmTemp[160];
    unsigned char *amrBuffer = NULL;
    AudioUnitSampleType sample;
    int i = 0;
    int j = 0;

    if ([circularBuf isReadTwoRegion]) {
        amrBuffer = [circularBuf ReadData];
    } else {
        amrBuffer = [circularBuf ReadData];
        i = [circularBuf ReadPos];
    }
    j = i + circularBuf.Length;

    if (j - i >= 320) {
        memcpy((void *)pcmTemp, (void *)amrBuffer, 320);
        for (i = 0; i < 160; i++)
        {
            sample = 3.162277 * pcmTemp[i];   // +10 dB gain: 10^(10/20) ≈ 3.1623
            if (sample > 32767) sample = 32767;
            else if (sample < -32768) sample = -32768;
            buffData[i] = sample;
        }
        memcpy(buffer->mData, buffData, buffer->mDataByteSize);
        [circularBuf AdvanceReadPos:320];
    }
    else
    {
        memset(buffer->mData, 0, buffer->mDataByteSize);
    }
}
/**
 This callback is called when the audioUnit needs new data to play through the
 speakers. If you don't have any, just don't write anything in the buffers.
 */
static OSStatus playCallback(void *inRefCon,
                             AudioUnitRenderActionFlags *ioActionFlags,
                             const AudioTimeStamp *inTimeStamp,
                             UInt32 inBusNumber,
                             UInt32 inNumberFrames,
                             AudioBufferList *ioData)
{
    // Notes: ioData contains buffers (may be more than one!)
    // Fill them up as much as you can. Remember to set the size value in each buffer to match how
    // much data is in the buffer.
    AudioPlay *audioPlay = (AudioPlay *)inRefCon;
    for (int i = 0; i < ioData->mNumberBuffers; i++) {
        memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
    }
    ioData->mBuffers[0].mNumberChannels = 1;
    [audioPlay processAudio:&ioData->mBuffers[0]];
    return noErr;
}

iOS RemoteIO - AudioUnitAddRenderNotify Callback

I'm trying to record from RemoteIO using AudioUnitAddRenderNotify, like this.
Basically, I'm not able to get the samples from bus 1, which is my input bus. The recordingCallback never gets past this check:

if (*ioActionFlags & kAudioUnitRenderAction_PostRender || inBusNumber != 1) {
    return noErr;
}

But I was told that the recordingCallback should be called for each bus on every render cycle, i.e. first with inBusNumber == 0 (the output, RemoteIO out) and then with inBusNumber == 1 (the input/recording bus).
What can I do to get recordingCallback called on my input bus so that I can record?
Thanks.
Pier.
Here's the callback.
static OSStatus recordingCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData)
{
    NSLog(@"Entered recording callback");

    // Only do pre render on bus 1
    if (*ioActionFlags & kAudioUnitRenderAction_PostRender || inBusNumber != 1) {
        return noErr;
    }

    RIO *rio = (RIO *)inRefCon;
    AudioUnit rioUnit = rio->theAudioUnit;
    //ExtAudioFileRef eaf = rio->outEAF;
    AudioBufferList abl = rio->audioBufferList;

    SInt32 samples[NUMBER_OF_SAMPLES];   // A large enough size to not have to worry about buffer overrun
    abl.mNumberBuffers = 1;
    abl.mBuffers[0].mData = &samples;
    abl.mBuffers[0].mNumberChannels = 1;
    abl.mBuffers[0].mDataByteSize = inNumberFrames * sizeof(SInt16);

    OSStatus result;
    result = AudioUnitRender(rioUnit,
                             ioActionFlags,
                             inTimeStamp,
                             inBusNumber,
                             inNumberFrames,
                             &abl);
    if (noErr != result) { NSLog(@"Obtain recorded samples error! Error : %ld", result); }
    NSLog(@"Bus %ld", inBusNumber);

    // React to a recording flag; if recording, save the abl into our own buffer, else ignore
    if (rio->recording)
    {
        TPCircularBufferProduceBytes(&rio->buffer, abl.mBuffers[0].mData, inNumberFrames * sizeof(SInt16));
        //rio->timeIncurred += ((float)inNumberFrames) / 44100.0;
        //NSLog(@"Self-calculated time incurred: %f", rio->timeIncurred);
    }
    return noErr;
}
Here's the code that sets up the callback.
- (void)setupAudioUnitRemoteIO {
    UInt32 framesPerSlice = 0;
    UInt32 framesPerSlicePropertySize = sizeof(framesPerSlice);
    UInt32 sampleRatePropertySize = sizeof(_graphSampleRate);

    // Describe audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Get component
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);

    // Get audio units
    status = AudioComponentInstanceNew(inputComponent, &_remoteIOUnit);
    if (noErr != status) { NSLog(@"Get audio units error"); return; }

    // Enable IO for recording
    UInt32 flag = 1;
    status = AudioUnitSetProperty(_remoteIOUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  kInputBus,
                                  &flag,
                                  sizeof(flag));
    if (noErr != status) { NSLog(@"Enable IO for recording error"); return; }

    // Enable IO for playback
    status = AudioUnitSetProperty(_remoteIOUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Output,
                                  kOutputBus,
                                  &flag,
                                  sizeof(flag));
    if (noErr != status) { NSLog(@"Enable IO for playback error"); return; }

    // Obtain the value of the maximum-frames-per-slice from the I/O unit.
    status = AudioUnitGetProperty(_remoteIOUnit,
                                  kAudioUnitProperty_MaximumFramesPerSlice,
                                  kAudioUnitScope_Global,
                                  0,
                                  &framesPerSlice,
                                  &framesPerSlicePropertySize);

    // Describe format
    audioFormat.mSampleRate = 44100.00;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mBytesPerPacket = 2;
    audioFormat.mBytesPerFrame = 2;

    // Apply format
    status = AudioUnitSetProperty(_remoteIOUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    if (noErr != status) { NSLog(@"Apply format to input bus error"); return; }

    status = AudioUnitSetProperty(_remoteIOUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    if (noErr != status) { NSLog(@"Apply format to output bus error"); return; }

    rio.theAudioUnit = _remoteIOUnit;   // Need this, as used in callbacks to refer to remoteIO

    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = &rio;
    status = AudioUnitAddRenderNotify(_remoteIOUnit, callbackStruct.inputProc, callbackStruct.inputProcRefCon);
    NSAssert(status == noErr, @"Problem adding recordingCallback to RemoteIO. Error code: %d '%.4s'", (int)status, (const char *)&status);
I managed to resolve this by not using AudioUnitAddRenderNotify, and by using the following code, which installs the callback on the input bus instead:
AURenderCallbackStruct callbackStruct;
callbackStruct.inputProc = recordingCallback;
callbackStruct.inputProcRefCon = &rio;
status = AudioUnitSetProperty(_remoteIOUnit,
                              kAudioOutputUnitProperty_SetInputCallback,
                              kAudioUnitScope_Global,
                              kInputBus,
                              &callbackStruct,
                              sizeof(callbackStruct));
if (noErr != status) { NSLog(@"Set input callback error"); return; }
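For anyone following along, a minimal sketch of that input-callback pattern in plain C (illustrative names, not this project's code): when the callback is installed with kAudioOutputUnitProperty_SetInputCallback, it receives ioData == NULL, so you supply your own AudioBufferList and pull the captured samples with AudioUnitRender on the input element (bus 1).

#include <AudioToolbox/AudioToolbox.h>

static AudioUnit gIOUnit;   // illustrative: the RemoteIO unit, set during setup

static OSStatus inputCallback(void *inRefCon,
                              AudioUnitRenderActionFlags *ioActionFlags,
                              const AudioTimeStamp *inTimeStamp,
                              UInt32 inBusNumber,
                              UInt32 inNumberFrames,
                              AudioBufferList *ioData)   // NULL for input callbacks
{
    SInt16 samples[4096];   // assumes inNumberFrames <= 4096 for this sketch
    AudioBufferList abl;
    abl.mNumberBuffers = 1;
    abl.mBuffers[0].mNumberChannels = 1;
    abl.mBuffers[0].mData = samples;
    abl.mBuffers[0].mDataByteSize = inNumberFrames * sizeof(SInt16);

    // Pull the captured audio from the input element (bus 1).
    OSStatus err = AudioUnitRender(gIOUnit, ioActionFlags, inTimeStamp,
                                   1 /* input bus */, inNumberFrames, &abl);
    if (err != noErr) return err;

    // ...hand abl.mBuffers[0] to a ring buffer, file writer, etc....
    return noErr;
}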
