Is it possible to record sound played on the sound card? (Windows)

Is it even remotely possible to record sound that is being played on the sound card?
Suppose I am playing an audio file; what I need is to redirect the output to disk. DirectShow might be feasible.
Any help is greatly appreciated, thanks.

You need to enable the audio loopback device, and then you will be able to record from it in a standard way with all the well-known APIs (including DirectShow). See:
Loopback Recording
Enabling "Stereo Mix" in Vista
Capturing Window's audio in C#
Once enabled, you will see the device in DirectShow applications.

Check out NAudio and this thread for a WasapiLoopbackCapture class that takes the output from your sound card and turns it into a wave-in device you can record from:
https://naudio.codeplex.com/discussions/203605/
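If the capture format itself is acceptable, a minimal sketch (assuming NAudio; the file name is arbitrary) that writes the loopback capture straight to a WAV file looks like this:

using NAudio.Wave;

// Record whatever the sound card plays into loopback.wav, in the device's native format.
var capture = new WasapiLoopbackCapture();
var writer = new WaveFileWriter("loopback.wav", capture.WaveFormat);
capture.DataAvailable += (s, e) => writer.Write(e.Buffer, 0, e.BytesRecorded);
capture.RecordingStopped += (s, e) => { writer.Dispose(); capture.Dispose(); };
capture.StartRecording();
// ... later, when done: capture.StopRecording();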

My solution (C#, NAudio library v1.9.0):
private WasapiLoopbackCapture waveInStream; // captures the sound card output
private WaveFileWriter writer;

waveInStream = new WasapiLoopbackCapture(); // record the sound card
waveInStream.DataAvailable += new EventHandler<WaveInEventArgs>(this.OnDataAvailable); // sound data event
waveInStream.RecordingStopped += new EventHandler<StoppedEventArgs>(this.OnDataStopped); // recording stopped event
MessageBox.Show(waveInStream.WaveFormat.Encoding + " - " + waveInStream.WaveFormat.SampleRate + " - " + waveInStream.WaveFormat.BitsPerSample + " - " + waveInStream.WaveFormat.Channels);
// IEEEFLOAT - 48000 - 32 - 2
// Explanation: IEEEFLOAT = encoding | 48000 Hz | 32-bit | channels: 2 = stereo, 1 = mono
writer = new WaveFileWriter("out.wav", new WaveFormat(48000, 24, 2));
waveInStream.StartRecording(); // start recording
Events
WaveFormat myOutFormat = new WaveFormat(48000, 24, 2); // target format: standard PCM
private void OnDataAvailable(object sender, WaveInEventArgs e)
{
    // e.Buffer arrives in the capture format (IEEE float here)
    //MessageBox.Show(e.Buffer + " - " + e.BytesRecorded);
    // if you need a different encoding, convert it first (see WaveFormatConvert below)
    byte[] output = WaveFormatConvert(e.Buffer, e.BytesRecorded, waveInStream.WaveFormat, myOutFormat);
    writer.Write(output, 0, output.Length);
}
private void OnDataStopped(object sender, StoppedEventArgs e)
{
    if (writer != null)
    {
        writer.Close();
    }
    if (waveInStream != null)
    {
        waveInStream.Dispose();
    }
}
WaveFormatConvert (optional)
public byte[] WaveFormatConvert(byte[] input, int length, WaveFormat inFormat, WaveFormat outFormat)
{
    if (length == 0)
        return new byte[0];
    using (var memStream = new MemoryStream(input, 0, length))
    using (var inputStream = new RawSourceWaveStream(memStream, inFormat))
    using (var resampler = new MediaFoundationResampler(inputStream, outFormat))
    {
        resampler.ResamplerQuality = 60; // 1 = lowest, 60 = highest quality
        // read back the converted stream
        byte[] buffer = new byte[length];
        using (var stream = new MemoryStream())
        {
            int read;
            while ((read = resampler.Read(buffer, 0, length)) > 0)
            {
                stream.Write(buffer, 0, read);
            }
            return stream.ToArray();
        }
    }
}

Related

Save image from Elgato Game Capture using DirectShow

I'm using an Elgato Game Capture HD60 to live-preview a GoPro Hero 5 in my application. Now I want to save the stream as a JPG in my folder, but I can't find out how.
To bind the pin
DsROTEntry rot; // registers the graph in the Running Object Table (e.g. so GraphEdit can connect to it)
IFilterGraph2 graph;
ICaptureGraphBuilder2 captureGraph;
IBaseFilter elgatoFilter;
IBaseFilter smartTeeFilter;
IBaseFilter videoRendererFilter;
Size videoSize;
private IPin GetPin(PinDirection pinDir, IBaseFilter filter)
{
    IEnumPins epins;
    int hr = filter.EnumPins(out epins);
    if (hr < 0)
        return null;
    IntPtr fetched = Marshal.AllocCoTaskMem(4);
    IPin[] pins = new IPin[1];
    epins.Reset();
    while (epins.Next(1, pins, fetched) == 0)
    {
        PinInfo pinfo;
        pins[0].QueryPinInfo(out pinfo);
        bool found = (pinfo.dir == pinDir);
        DsUtils.FreePinInfo(pinfo);
        if (found)
            return pins[0];
        Marshal.ReleaseComObject(pins[0]); // release pins we don't keep
    }
    return null;
}
private IPin GetPin(PinDirection pinDir, string name, IBaseFilter filter)
{
    IEnumPins epins;
    int hr = filter.EnumPins(out epins);
    if (hr < 0)
        return null;
    IntPtr fetched = Marshal.AllocCoTaskMem(4);
    IPin[] pins = new IPin[1];
    epins.Reset();
    while (epins.Next(1, pins, fetched) == 0)
    {
        PinInfo pinfo;
        pins[0].QueryPinInfo(out pinfo);
        bool found = (pinfo.dir == pinDir && pinfo.name == name);
        DsUtils.FreePinInfo(pinfo);
        if (found)
            return pins[0];
        Marshal.ReleaseComObject(pins[0]); // release pins we don't keep
    }
    return null;
}
And to start the stream
private void button1_Click(object sender, EventArgs e)
{
    // Set the video size to use for capture and recording
    videoSize = new Size(1280, 720);

    // Initialize filter graph and capture graph
    graph = (IFilterGraph2)new FilterGraph();
    captureGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    captureGraph.SetFiltergraph(graph);
    rot = new DsROTEntry(graph);

    // Create filter for Elgato
    Guid elgatoGuid = new Guid("39F50F4C-99E1-464A-B6F9-D605B4FB5918");
    Type comType = Type.GetTypeFromCLSID(elgatoGuid);
    elgatoFilter = (IBaseFilter)Activator.CreateInstance(comType);
    graph.AddFilter(elgatoFilter, "Elgato Video Capture Filter");

    // Create smart tee filter, add to graph, connect Elgato's video out to smart tee in
    smartTeeFilter = (IBaseFilter)new SmartTee();
    graph.AddFilter(smartTeeFilter, "Smart Tee");
    IPin outPin = GetPin(PinDirection.Output, "Video", elgatoFilter);
    IPin inPin = GetPin(PinDirection.Input, smartTeeFilter);
    graph.Connect(outPin, inPin);

    // Create video renderer filter, add it to graph, connect smart tee's Preview pin to the renderer's input pin
    videoRendererFilter = (IBaseFilter)new VideoRenderer();
    graph.AddFilter(videoRendererFilter, "Video Renderer");
    outPin = GetPin(PinDirection.Output, "Preview", smartTeeFilter);
    inPin = GetPin(PinDirection.Input, videoRendererFilter);
    graph.Connect(outPin, inPin);

    // Render stream from video renderer
    captureGraph.RenderStream(PinCategory.Preview, MediaType.Video, videoRendererFilter, null, null);

    // Set the video preview to be the videoFeed panel
    IVideoWindow vw = (IVideoWindow)graph;
    vw.put_Owner(pictureBox1.Handle);
    vw.put_MessageDrain(this.Handle);
    vw.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings | WindowStyle.ClipChildren);
    vw.SetWindowPosition(0, 0, 1280, 720);

    // Start the preview
    IMediaControl mediaControl = graph as IMediaControl;
    mediaControl.Run();
}
Can you run the filter graph successfully, and at which step do you get an error?
You can look at the sample code in \Samples\Capture\PlayCap to see how to build a video capture filter graph.
If you want to grab a video snapshot, see the sample code in \Samples\Capture\DxSnap. You can modify the video source index and snapshot size to get what you want:
const int VIDEODEVICE = 0; // zero based index of video capture device to use
const int VIDEOWIDTH = 2048; // Depends on video device caps
const int VIDEOHEIGHT = 1536; // Depends on video device caps
const int VIDEOBITSPERPIXEL = 24; // BitsPerPixel values determined by device
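DxSnap grabs a frame through DirectShow's sample grabber; once you have the raw buffer, saving it as a JPG is only a few lines of System.Drawing. A sketch, where bufferPtr, width and height are assumed to come from your grabber callback (and width is a multiple of 4, so the stride is simply width * 3):

// Sketch: turn a 24bpp RGB sample-grabber buffer into a JPG file.
using (var bmp = new Bitmap(width, height, width * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, bufferPtr))
{
    bmp.RotateFlip(RotateFlipType.RotateNoneFlipY); // DirectShow RGB buffers are bottom-up
    bmp.Save("snapshot.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
}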

BufferedSoundStream can't play wav files

I'm making an app for my drum classes, and to make it cross-platform I've chosen Urho.Sharp, because it has a low-level sound API as well as rich graphics capabilities.
As a first step I'm making a metronome app, and for that I'm filling a BufferedSoundStream with audio followed by the needed silence, as described here: https://github.com/xamarin/urho-samples/blob/master/FeatureSamples/Core/29_SoundSynthesis/SoundSynthesis.cs
But the resulting sound is not a sound at all; it's as if random bits got into the buffered stream.
This is my code:
/// <summary>
/// This code initializes the sound subsystem.
/// </summary>
void CreateSound()
{
    // Sound source needs a node so that it is considered enabled
    node = new Node();
    SoundSource source = node.CreateComponent<SoundSource>();
    soundStream = new BufferedSoundStream();
    // Set format: 44100 Hz, sixteen bit, stereo
    soundStream.SetFormat(44100, true, true);
    // Start playback. We don't have data in the stream yet, but the
    // SoundSource will wait until there is data, as the stream is by
    // default in the "don't stop at end" mode.
    source.Play(soundStream);
}
/// <summary>
/// This code preloads all sound resources.
/// </summary>
readonly Dictionary<PointSoundType, string> SoundsMapping = new Dictionary<PointSoundType, string>
{
    {PointSoundType.beat, "wav/beat.wav"},
    {PointSoundType.click, "wav/click.wav"},
    {PointSoundType.click_accent, "wav/click_accent.wav"},
    {PointSoundType.crash, "wav/crash.wav"},
    {PointSoundType.foot_hh, "wav/foot_hh.wav"},
    {PointSoundType.hh, "wav/hh.wav"},
    {PointSoundType.open_hh, "wav/open_hh.wav"},
    {PointSoundType.ride, "wav/ride.wav"},
    {PointSoundType.snare, "wav/snare.wav"},
    {PointSoundType.tom_1, "wav/tom_1.wav"},
    {PointSoundType.tom_2, "wav/tom_2.wav"},
};
Dictionary<PointSoundType, Sound> SoundCache = new Dictionary<PointSoundType, Sound>();

private void LoadSoundResources()
{
    // preload all sounds
    foreach (var s in SoundsMapping)
    {
        SoundCache[s.Key] = ResourceCache.GetSound(s.Value);
        Debug.WriteLine("resource loaded: " + s.Value + ", length = " + SoundCache[s.Key].Length);
    }
}
/// <summary>
/// This code fills up the stream with audio.
/// </summary>
private void UpdateSound()
{
    // Try to keep 1/10 second of sound in the buffer, to avoid both dropouts and unnecessary latency
    //float targetLength = 1.0f / 10.0f;
    // temporarily increase the buffer to 1 second
    float targetLength = 1.0f;
    float requiredLength = targetLength - soundStream.BufferLength;
    if (requiredLength < 0.0f)
        return;
    uint numSamples = (uint)(soundStream.Frequency * requiredLength);
    // check if the stream is still full
    if (numSamples == 0)
        return;
    var silencePause = new short[44100]; // a buffer of silence
    // queue every sound, each followed by silence
    foreach (var s in SoundCache)
    {
        soundStream.AddData(s.Value.Handle, s.Value.DataSize);
        // add silence
        soundStream.AddData(silencePause, 0, silencePause.Length);
    }
}
Make sure your wav files are in the resource cache. Then don't play the BufferedSoundStream, but the Urho.Audio.Sound itself. This is just a different overload of the same method, Urho.Audio.SoundSource.Play(), but it works.
int PlaySound(string sSound)
{
    var cache = Application.Current.ResourceCache;
    Urho.Audio.Sound sound = cache.GetSound(sSound);
    if (sound != null)
    {
        Node soundNode = scene.CreateChild("Sound");
        Urho.Audio.SoundSource soundSource = soundNode.CreateComponent<Urho.Audio.SoundSource>();
        soundSource.Play(sound);
        soundSource.Gain = 0.99f;
        return 1;
    }
    return 0;
}
Since you're using urho-samples, you can start each drum sample from the OnUpdate override, something like this:
public float fRun = 0.0f;
public int iRet = 0; // keep counting the played sounds

public override void OnUpdate(float timeStep)
{
    fRun = fRun + timeStep;
    int iMS = (int)(10f * fRun); // tenths of a second
    if (iMS == 100) iRet = iRet + PlaySound("wav/hh.wav");
    if (iMS == 120) iRet = iRet + PlaySound("wav/hh.wav");
    if (iMS == 140) iRet = iRet + PlaySound("wav/hh.wav");
    if (iMS == 160) iRet = iRet + PlaySound("wav/open_hh.wav");
    if (iMS >= 160) fRun = 0.8f; // loop back so the pattern repeats
}

Windows 8.1 Store XAML: save InkManager in a string

I'm trying to save what I have drawn with the pencil as a string. I do this with the SaveAsync() method to put it in an IOutputStream, then convert that IOutputStream to a Stream using the AsStreamForWrite() method. From this point things should go fine; however, I get a lot of problems after this part. If I use, for example, this code block:
using (var stream = new MemoryStream())
{
    byte[] buffer = new byte[2048]; // read in chunks of 2KB
    int bytesRead = (int)size;
    while (bytesRead < 0)
    {
        stream.Write(buffer, 0, bytesRead);
    }
    byte[] result = stream.ToArray();
    // TODO: do something with the result
}
I get this exception:
"Offset and length were out of bounds for the array or count is greater than the number of elements from index to the end of the source collection."
Or if I try to convert the stream into an image using InMemoryRandomAccessStream like this:
InMemoryRandomAccessStream ras = new InMemoryRandomAccessStream();
await s.CopyToAsync(ras.AsStreamForWrite());
my InMemoryRandomAccessStream variable is always zero in size.
I also tried
StreamReader.ReadToEnd();
but it returns an empty string.
Found the answer here:
http://social.msdn.microsoft.com/Forums/windowsapps/en-US/2359f360-832e-4ce5-8315-7f351f2edf6e/stream-inkmanager-strokes-to-string
private async void ReadInk(string base64)
{
    if (!string.IsNullOrEmpty(base64))
    {
        var bytes = Convert.FromBase64String(base64);
        using (var inMemoryRAS = new InMemoryRandomAccessStream())
        {
            await inMemoryRAS.WriteAsync(bytes.AsBuffer());
            await inMemoryRAS.FlushAsync();
            inMemoryRAS.Seek(0);
            await m_InkManager.LoadAsync(inMemoryRAS);
            if (m_InkManager.GetStrokes().Count > 0)
            {
                // You would do whatever you want with the strokes
                // RenderStrokes();
            }
        }
    }
}
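For the opposite direction, saving the strokes to a base64 string, a mirror-image sketch (a hypothetical helper built on the same m_InkManager; DataReader comes from Windows.Storage.Streams):

private async Task<string> SaveInkAsync()
{
    using (var inMemoryRAS = new InMemoryRandomAccessStream())
    {
        // Serialize the strokes (ISF format) into the stream
        await m_InkManager.SaveAsync(inMemoryRAS);
        // Read the raw bytes back out and base64-encode them
        var reader = new DataReader(inMemoryRAS.GetInputStreamAt(0));
        var bytes = new byte[inMemoryRAS.Size];
        await reader.LoadAsync((uint)inMemoryRAS.Size);
        reader.ReadBytes(bytes);
        return Convert.ToBase64String(bytes);
    }
}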

Xuggler IllegalArgumentException

I'm trying to convert a movie (.mp4) to audio (.mp3) with Xuggler. I'm using Processing on OS X.
IMediaReader reader = ToolFactory.makeReader("/Users/nouv/Desktop/video1.mp4");
IMediaWriter writer = ToolFactory.makeWriter("/Users/nouv/Desktop/audioOutput.mp3", reader);
int sampleRate = 22050;
int channels = 1;
writer.addAudioStream(0, 0, ICodec.ID.CODEC_ID_MP3, channels, sampleRate);
reader.addListener(writer);
try {
    while (reader.readPacket() == null)
        ;
} finally {}
I get this error: IllegalArgumentException: stream[0] is not video
Exception in thread "Animation Thread" java.lang.IllegalArgumentException: stream[0] is not video
at com.xuggle.mediatool.MediaWriter.encodeVideo(MediaWriter.java:754)
at com.xuggle.mediatool.MediaWriter.encodeVideo(MediaWriter.java:783)
at com.xuggle.mediatool.MediaWriter.onVideoPicture(MediaWriter.java:1434)
at com.xuggle.mediatool.AMediaToolMixin.onVideoPicture(AMediaToolMixin.java:166)
at com.xuggle.mediatool.MediaReader.dispatchVideoPicture(MediaReader.java:610)
at com.xuggle.mediatool.MediaReader.decodeVideo(MediaReader.java:519)
at com.xuggle.mediatool.MediaReader.readPacket(MediaReader.java:475)
at xuggle.setup(xuggle.java:135)
at processing.core.PApplet.handleDraw(PApplet.java:2117)
at processing.core.PGraphicsJava2D.requestDraw(PGraphicsJava2D.java:193)
at processing.core.PApplet.run(PApplet.java:2020)
at java.lang.Thread.run(Thread.java:680)
I tried with .mp4, .mov and .flv movies and I get the same error.
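// Why this happens: the MediaWriter was created from the reader, so the reader
// dispatches its decoded video pictures to the writer as well; since the only
// stream added to the writer is the MP3 audio stream, MediaWriter.encodeVideo()
// fails with "stream[0] is not video". The code below instead copies just the
// audio packets at the container level, so video is never decoded: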
IContainer container = IContainer.make();
int result = container.open(inputFilename, IContainer.Type.READ, null);
// check if the operation was successful
if (result < 0)
    throw new RuntimeException("Failed to open media file");

int numStreams = container.getNumStreams();
int audioStreamId = -1;

IContainer writer = IContainer.make();
writer.open(outputFilename, IContainer.Type.WRITE, IContainerFormat.make());

for (int i = 0; i < numStreams; i++) {
    IStream stream = container.getStream(i);
    IStreamCoder coder = stream.getStreamCoder();
    IStreamCoder audioCoder = IStreamCoder.make(IStreamCoder.Direction.ENCODING, coder);
    if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_AUDIO) {
        coder.open(IMetaData.make(), IMetaData.make());
        audioStreamId = i;
        ICodec inputCodec = ICodec.findDecodingCodec(ICodec.ID.CODEC_ID_MP3);
        if (inputCodec == null)
            throw new IllegalArgumentException("could not find input codec id");
        if (audioStreamId == -1)
            throw new RuntimeException("could not find audio stream in container: " + inputFilename);
        writer.addNewStream(audioCoder);
        if (writer.writeHeader() == 0) {
            IPacket packet = IPacket.make();
            while (container.readNextPacket(packet) >= 0) {
                if (packet.getStreamIndex() == audioStreamId) {
                    if (coder.isOpen()) {
                        writer.writePacket(packet);
                    } else {
                        throw new RuntimeException("Could not open Coder");
                    }
                }
            }
        } else {
            throw new RuntimeException("Header not written for writer container.");
        }
    }
    coder.close();
    audioCoder.close();
}
writer.writeTrailer();
writer.close();
hope this helps... :)

Posting file on Background Agent / HttpWebRequest stream buffer keeps growing?

I need to POST a 5MB file from within a ResourceIntensiveTask, where the OS sets a max memory usage of 5MB.
So I'm trying to stream the file directly from storage, but the Stream associated with the HttpWebRequest keeps growing in size. This is the code:
public void writeStream(Stream writer, string filesource, string filename)
{
    var store = System.IO.IsolatedStorage.IsolatedStorageFile.GetUserStoreForApplication();
    var f = store.OpenFile(filesource, FileMode.Open, FileAccess.Read);
    store.Dispose();

    byte[] buffer = Encoding.UTF8.GetBytes(String.Format(@"Content-Disposition: form-data; name=""file""; filename=""{0}""\n", filename));
    writer.Write(buffer, 0, buffer.Length);
    buffer = Encoding.UTF8.GetBytes("Content-Type: application/octet-stream\n");
    writer.Write(buffer, 0, buffer.Length);

    long initialMemory = Microsoft.Phone.Info.DeviceStatus.ApplicationCurrentMemoryUsage;

    buffer = new byte[2048];
    int DataRead = 0;
    do
    {
        DataRead = f.Read(buffer, 0, 2048);
        if (DataRead > 0)
        {
            writer.Write(buffer, 0, DataRead);
            Array.Clear(buffer, 0, 2048);
        }
    } while (DataRead > 0);

    double increasedMemory = ((double)Microsoft.Phone.Info.DeviceStatus.ApplicationCurrentMemoryUsage - initialMemory) / 1000000;

    buffer = Encoding.UTF8.GetBytes("\n--" + boundary + "\n--");
    writer.Write(buffer, 0, buffer.Length);
    writer.Flush();
}
The increasedMemory debug variable measures the memory difference before and after the file is read and streamed to the HttpWebRequest, and it comes out at almost exactly the size of the file (5MB), which means the process memory is growing by 5MB.
I am also setting AllowReadStreamBuffering = false on the HttpWebRequest.
How do I keep memory low? How can I upload large files when the memory usage limit is 5MB?
The problem is that, without being able to turn off write buffering, the connection to the server is not even made until BeginGetResponse() is called after closing the request stream (verified with Wireshark).
The only way I can think of to get around this would be to use sockets directly (although that will be way more complicated if using an SSL connection).
This code works for me and doesn't increase memory usage while sending data to the server. I haven't tested it in a background task but don't see any reason it wouldn't work.
Socket _socket;
const int BUFFERSIZE = 4096;
byte[] writebuffer = new byte[BUFFERSIZE];
string hostName = "www.testdomain.com";
string hostPath = "/test/testupload.aspx";
IsolatedStorageFileStream isoFile;

public void SocketPOST(string hostName, string filesource)
{
    using (IsolatedStorageFile store = IsolatedStorageFile.GetUserStoreForApplication())
    {
        if (store.FileExists(filesource))
        {
            isoFile = store.OpenFile(filesource, FileMode.Open, FileAccess.Read);
        }
    }
    _socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
    _socket.SetNetworkRequirement(NetworkSelectionCharacteristics.NonCellular);
    SocketAsyncEventArgs socketEventArg = new SocketAsyncEventArgs();
    socketEventArg.RemoteEndPoint = new DnsEndPoint(hostName, 80);
    socketEventArg.Completed += new EventHandler<SocketAsyncEventArgs>(Socket_Completed);
    _socket.ConnectAsync(socketEventArg);
}
private void Socket_Completed(object sender, SocketAsyncEventArgs e)
{
    if (e.SocketError == SocketError.Success)
    {
        switch (e.LastOperation)
        {
            case SocketAsyncOperation.Connect: // Connected, so start sending data, headers first
                if (e.ConnectSocket.Connected)
                {
                    StringBuilder sbHeaders = new StringBuilder("POST " + hostPath + " HTTP/1.1\r\n");
                    sbHeaders.Append("HOST: " + hostName + "\r\n");
                    sbHeaders.Append("USER-AGENT: MyWP7App/1.0\r\n");
                    sbHeaders.Append("Content-Type: text/plain; charset=\"utf-8\"\r\n");
                    sbHeaders.Append("Content-Length: " + isoFile.Length.ToString() + "\r\n\r\n");
                    byte[] headerBuffer = Encoding.UTF8.GetBytes(sbHeaders.ToString());
                    e.SetBuffer(headerBuffer, 0, headerBuffer.Length);
                    if (!e.ConnectSocket.SendAsync(e)) Socket_Completed(e.ConnectSocket, e);
                }
                break;
            case SocketAsyncOperation.Send:
            case SocketAsyncOperation.SendTo: // Previous buffer sent, so send the next one if the stream isn't finished
                Array.Clear(writebuffer, 0, BUFFERSIZE);
                int DataRead = 0;
                DataRead = isoFile.Read(writebuffer, 0, BUFFERSIZE);
                if (DataRead > 0)
                {
                    e.SetBuffer(writebuffer, 0, DataRead);
                    if (!_socket.SendAsync(e)) Socket_Completed(e.ConnectSocket, e);
                }
                else
                {
                    isoFile.Dispose();
                    if (!_socket.ReceiveAsync(e)) Socket_Completed(e.ConnectSocket, e);
                }
                break;
            case SocketAsyncOperation.Receive:
            case SocketAsyncOperation.ReceiveFrom:
                if (e.BytesTransferred > 0)
                {
                    string response = Encoding.UTF8.GetString(e.Buffer, e.Offset, e.BytesTransferred).Trim('\0');
                    // Check the response if necessary
                    e.ConnectSocket.Shutdown(SocketShutdown.Both);
                    e.ConnectSocket.Dispose();
                }
                break;
            default:
                break;
        }
    }
}
Note: I've left a lot of the error handling out to keep the example short.
SSL Note: Because SSL works at the TCP level and WP7 doesn't currently support SSL sockets (SslStream), you would need to handle the certificate handshake, cipher exchange, etc. yourself to set up the SSL connection on the socket, and then encrypt everything being sent (and decrypt everything received) with the agreed algorithms. There has been some success using the Bouncy Castle API, so that could be possible (see this blog post).
One thing I noticed: you forgot to dispose f!
I personally would use code like this:
public void writeStream(Stream writer, string filesource, string filename)
{
    using (var store = System.IO.IsolatedStorage.IsolatedStorageFile.GetUserStoreForApplication())
    {
        long initialMemory = Microsoft.Phone.Info.DeviceStatus.ApplicationCurrentMemoryUsage;
        using (var f = store.OpenFile(filesource, FileMode.Open, FileAccess.Read))
        {
            byte[] buffer = Encoding.UTF8.GetBytes(string.Format(@"Content-Disposition: form-data; name=""file""; filename=""{0}""\n", filename));
            writer.Write(buffer, 0, buffer.Length);
            buffer = Encoding.UTF8.GetBytes("Content-Type: application/octet-stream\n");
            writer.Write(buffer, 0, buffer.Length);
            buffer = new byte[2048];
            int DataRead = 0;
            do
            {
                DataRead = f.Read(buffer, 0, 2048);
                if (DataRead > 0)
                {
                    writer.Write(buffer, 0, DataRead);
                }
            } while (DataRead > 0);
            buffer = Encoding.UTF8.GetBytes("\n--" + boundary + "\n--");
            writer.Write(buffer, 0, buffer.Length);
            writer.Flush();
        }
        double increasedMemory = ((double)Microsoft.Phone.Info.DeviceStatus.ApplicationCurrentMemoryUsage - initialMemory) / 1000000;
    }
}
The boundary variable seems to be missing, so a coding error still remains here!
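For illustration, a typical definition might look like this (a hypothetical sketch; the value is arbitrary, but it must match the boundary declared in the request's Content-Type header):

// Hypothetical sketch: define the boundary and declare it on the request.
string boundary = "----WP7Boundary" + DateTime.Now.Ticks.ToString("x");
HttpWebRequest request = (HttpWebRequest)WebRequest.Create("http://example.com/upload");
request.Method = "POST";
request.ContentType = "multipart/form-data; boundary=" + boundary;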
