I am referring to the following document section, "Windows Media API for Capturing the Screen", to capture the screen, and the original code works OK. When I add an additional feature (I just added several lines to make it record from the default audio device), it fails at the following line (I debugged, and it looks like it fails at the COM layer). Any hints? I posted both the original source code and my modified source code.
"if(FAILED(hr=pSrcGrp->put_Profile(variant_t(pProfile))))"
http://www.geocities.com/krishnapg/screencap.html
Original Source Code:
http://www.geocities.com/krishnapg/WMEncScrnCap.zip
My modified source code (I only modified function InitEncoder)
// Creates the Windows Media Encoder, adds one source group containing a
// screen-capture video source plus the default audio device, selects the
// custom profile and prepares the encoder to write szOutputFileName.
//
// Returns S_OK on success, otherwise the failing HRESULT (an error message
// box is shown via ErrorMessage before returning).
//
// NOTE(review): IWMEncSourceGroup::put_Profile fails when the profile does
// not match the sources in the group. Since an audio source was added to
// the group, the profile built by SetupScreenCaptureProfile() must also
// contain an audio stream/codec — verify it adds one, or put_Profile will
// reject the profile at the COM layer.
HRESULT InitEncoder(LPCTSTR szOutputFileName)
{
    HRESULT hr = E_FAIL;
    CComVariant varValue;
    IWMEncSourceGroupCollection* pSrcGrpCollection = NULL;
    IWMEncSourceGroup* pSrcGrp = NULL;
    IWMEncSource* pSrc = NULL;      // video source (owned; released below)
    IWMEncSource* paSrc = NULL;     // audio source (owned; released below)
    IPropertyBag* pPropertyBag = NULL;
    IWMEncVideoSource2* pSrcVid = NULL;
    IWMEncAudioSource* pSrcAud = NULL;
    IWMEncFile* pOutFile = NULL;
    IWMEncProfile* pProfile = NULL;
    if(FAILED(CoCreateInstance(CLSID_WMEncoder, NULL, CLSCTX_INPROC_SERVER, IID_IWMEncoder2, (void**)&g_pEncoder)))
    {
        ErrorMessage("Unable to Create Encoder Object");
        return E_FAIL;
    }
    // One application can have many source groups; we only need one here.
    if(FAILED(g_pEncoder->get_SourceGroupCollection(&pSrcGrpCollection)))
    {
        ErrorMessage("Unable to Get Source Group Collection");
        return E_FAIL;
    }
    do
    {
        // Each source group can hold one video and one audio input.
        if(FAILED(hr = pSrcGrpCollection->Add(CComBSTR("SourceGroup1"), &pSrcGrp)))
        {
            ErrorMessage("Unable to Add A Source Group to the Collection");
            break;
        }
        if(FAILED(hr = pSrcGrp->AddSource(WMENC_VIDEO, &pSrc)))      // add the video source
        {
            ErrorMessage("Unable to Add Video Source to the Source Group");
            break;
        }
        if(FAILED(hr = pSrcGrp->AddSource(WMENC_AUDIO, &paSrc)))     // add the audio source
        {
            ErrorMessage("Unable to Add Audio Source to the Source Group");
            break;
        }
        if(FAILED(hr = pSrc->QueryInterface(IID_IWMEncVideoSource2, (void**)&pSrcVid)))
        {
            ErrorMessage("Unable to Query interface for Video Source");
            break;
        }
        if(FAILED(hr = paSrc->QueryInterface(IID_IWMEncAudioSource, (void**)&pSrcAud)))
        {
            ErrorMessage("Unable to Query interface for Audio Source");
            break;
        }
        // The video input source device — must be the "ScreenCap" device.
        if(FAILED(hr = pSrcVid->SetInput(CComBSTR("ScreenCap://ScreenCapture1"))))
        {
            ErrorMessage("Unable to Set Video Input Source");
            break;
        }
        // The audio input source device — the "Default_Audio_Device" device.
        if(FAILED(hr = pSrcAud->SetInput(CComBSTR("Device://Default_Audio_Device"))))
        {
            ErrorMessage("Unable to Set Audio Input Source");
            break;
        }
        // Screen-capture options are exposed through the video source's
        // property bag.
        if(FAILED(hr = pSrcVid->QueryInterface(IID_IPropertyBag, (void**)&pPropertyBag)))
        {
            ErrorMessage("Unable to Query Interface for Property bag");
            break;
        }
        varValue = CAPTURE_FULLSCREEN;
        if(FAILED(hr = pPropertyBag->Write(WMSCRNCAP_ENTIRESCREEN, &varValue))) // full-screen capture on/off
        {
            ErrorMessage("Unable to Set Capture Screen Property");
            break;
        }
        // Example: capture a sub-rectangle instead of the full screen
        // (sizes must be even):
        //   varValue = false;
        //   hr = pPropertyBag->Write(WMSCRNCAP_ENTIRESCREEN, &varValue);
        //   varValue = nLeft;   hr = pPropertyBag->Write(WMSCRNCAP_WINDOWLEFT,   &varValue);
        //   varValue = nRight;  hr = pPropertyBag->Write(WMSCRNCAP_WINDOWRIGHT,  &varValue);
        //   varValue = nTop;    hr = pPropertyBag->Write(WMSCRNCAP_WINDOWTOP,    &varValue);
        //   varValue = nBottom; hr = pPropertyBag->Write(WMSCRNCAP_WINDOWBOTTOM, &varValue);
        //   varValue = true;    hr = pPropertyBag->Write(WMSCRNCAP_FLASHRECT,    &varValue);
        if(FAILED(hr = SetupScreenCaptureProfile()))   // build the custom profile (g_pProfile)
        {
            break;
        }
        if(FAILED(hr = g_pProfile->QueryInterface(IID_IWMEncProfile, (void**)&pProfile)))
        {
            ErrorMessage("Unable to Query Interface For Profile");
            break;
        }
        // Select the custom profile into the source group. This is the call
        // that fails when the profile lacks a stream for one of the group's
        // sources (see NOTE above).
        if(FAILED(hr = pSrcGrp->put_Profile(variant_t(pProfile))))
        {
            ErrorMessage("Unable to Set Profile For Source Group");
            break;
        }
        if(FAILED(hr = g_pEncoder->get_File(&pOutFile)))
        {
            ErrorMessage("Unable to Get Encoder Output File Object");
            break;
        }
        if(FAILED(hr = pOutFile->put_LocalFileName(CComBSTR(szOutputFileName)))) // target output file
        {
            ErrorMessage("Unable to Set Output File Name");
            break;
        }
        if(FAILED(hr = g_pEncoder->PrepareToEncode(VARIANT_TRUE))) // preparing here reduces start latency
        {
            ErrorMessage("Unable to Prepare for Encoding");
            break;
        }
    } while(false);
    // Release every local COM interface regardless of success/failure.
    if(pProfile)
    {
        pProfile->Release();
        pProfile = NULL;
    }
    if(pOutFile)
    {
        pOutFile->Release();
        pOutFile = NULL;
    }
    if(pPropertyBag)
    {
        pPropertyBag->Release();
        pPropertyBag = NULL;
    }
    if(pSrcAud)             // BUG FIX: was leaked in the original code
    {
        pSrcAud->Release();
        pSrcAud = NULL;
    }
    if(pSrcVid)
    {
        pSrcVid->Release();
        pSrcVid = NULL;
    }
    if(paSrc)               // BUG FIX: was leaked in the original code
    {
        paSrc->Release();
        paSrc = NULL;
    }
    if(pSrc)
    {
        pSrc->Release();
        pSrc = NULL;
    }
    if(pSrcGrp)
    {
        pSrcGrp->Release();
        pSrcGrp = NULL;
    }
    if(pSrcGrpCollection)
    {
        pSrcGrpCollection->Release();
        pSrcGrpCollection = NULL;
    }
    return hr;
}
You may need to reset the audio:
Open Control Panel -> open Sound
In Playback, you'll see your speaker as default playback.
Right-click on it -> go to Properties -> go to the Advanced tab-> click on Restore Default
There is probably nothing wrong with your code. Windows Vista is notorious for not allowing the output/input device to be set programmatically.
Related
I am following this article for Select Multiple Images From Gallery in Xamarin Forms.
I completed the feature in android part but the picture path contains only the picture name, extensions are missing when saving path.
To upload the image to the server I need the complete image name with extension. So how can I save the complete path of the selected images with the extension?
Following method capture the image path:
// Resolves an Android content:// URI to the real file-system path of the
// image, or null if it cannot be resolved (a toast is shown in that case).
// BUG FIX: the original never closed its cursors, leaking them on every call;
// they are now closed in a finally block.
public String GetRealPathFromURI(Android.Net.Uri contentURI)
{
    ICursor imageCursor = null;
    ICursor idCursor = null;
    try
    {
        string fullPathToImage = "";
        imageCursor = ContentResolver.Query(contentURI, null, null, null, null);
        imageCursor.MoveToFirst();
        // Fast path: the URI's row carries the DATA (file path) column.
        int idx = imageCursor.GetColumnIndex(MediaStore.Images.ImageColumns.Data);
        if (idx != -1)
        {
            fullPathToImage = imageCursor.GetString(idx);
        }
        else
        {
            // Document URI: extract the media-store id ("image:1234" -> "1234")
            // and look the path up in the internal, then external, store.
            var docID = DocumentsContract.GetDocumentId(contentURI);
            var id = docID.Split(':')[1];
            var whereSelect = MediaStore.Images.ImageColumns.Id + "=?";
            var projections = new string[] { MediaStore.Images.ImageColumns.Data };
            idCursor = ContentResolver.Query(MediaStore.Images.Media.InternalContentUri, projections, whereSelect, new string[] { id }, null);
            if (idCursor.Count == 0)
            {
                idCursor.Close();
                idCursor = ContentResolver.Query(MediaStore.Images.Media.ExternalContentUri, projections, whereSelect, new string[] { id }, null);
            }
            var colData = idCursor.GetColumnIndexOrThrow(MediaStore.Images.ImageColumns.Data);
            idCursor.MoveToFirst();
            fullPathToImage = idCursor.GetString(colData);
        }
        return fullPathToImage;
    }
    catch (Exception)
    {
        Toast.MakeText(Xamarin.Forms.Forms.Context, "Unable to get path", ToastLength.Long).Show();
    }
    finally
    {
        // Always release cursor resources.
        if (imageCursor != null)
        {
            imageCursor.Close();
        }
        if (idCursor != null)
        {
            idCursor.Close();
        }
    }
    return null;
}
The extension (.png or .jpg) was not actually missing from GetRealPathFromURI(); it was lost in ImageHelpers.SaveFile(). So I extracted the file name from the path into another variable using Path.GetFileName(), as below, and passed the complete file name when calling ImageHelpers.SaveFile().
var fileName = Path.GetFileName(picturepath);
I'm trying to get the following example working in a Xamarin.Mac project.
It's an AUGraph that connects a mixer to the default output. The mixer has one input which is a render callback that generates a sine wav.
// Build the AUGraph: default output device fed by a multichannel mixer
// whose single input bus is driven by a render callback.
var graph = new AUGraph();
var output = graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Default));
var mixer = graph.AddNode(AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel));
// The graph must be opened before the nodes' AudioUnits can be retrieved.
if (graph.TryOpen() != 0)
{
throw new Exception();
}
var mixNode = graph.GetNodeInfo(mixer);
// configure mixer
if (mixNode.SetElementCount(AudioUnitScopeType.Input, 1) != AudioUnitStatus.OK)
{
throw new Exception();
}
// Bus 0 of the mixer is fed by our sine-wave render callback.
if (mixNode.SetRenderCallback(HandleRenderDelegate, AudioUnitScopeType.Global, 0) != AudioUnitStatus.OK)
{
throw new Exception();
}
var outNode = graph.GetNodeInfo(output);
// define stream description
// 32-bit native-float LPCM, stereo, 44.1 kHz, one frame per packet.
var desc = new AudioStreamBasicDescription();
desc.BitsPerChannel = 32;
desc.BytesPerFrame = 4;
desc.BytesPerPacket = 4;
desc.Format = AudioFormatType.LinearPCM;
desc.FormatFlags = AudioStreamBasicDescription.AudioFormatFlagsAudioUnitNativeFloat;
desc.FramesPerPacket = 1;
desc.ChannelsPerFrame = 2;
desc.SampleRate = 44100;
// set mixer input format
if (mixNode.SetFormat(desc, AudioUnitScopeType.Input, 0) != AudioUnitStatus.OK)
{
throw new Exception();
}
// connect mixer's output to the output
// NOTE(review): "ConnnectNodeInput" (triple n) appears to be the actual
// Xamarin binding name — confirm against the installed Xamarin.Mac version.
if (graph.ConnnectNodeInput(mixer, 0, output, 0) != AUGraphError.OK)
{
throw new Exception();
}
// set format of mixer's output
desc = mixNode.GetAudioFormat(AudioUnitScopeType.Output);
desc.SampleRate = 44100;
if (outNode.SetFormat(desc, AudioUnitScopeType.Input,0) != AudioUnitStatus.OK)
{
throw new Exception();
}
if (mixNode.SetFormat(desc, AudioUnitScopeType.Output) != AudioUnitStatus.OK)
{
throw new Exception();
}
if (graph.Initialize() != AUGraphError.OK)
{
throw new Exception();
}
// NOTE(review): per the accepted fix below, the mixer's output volume
// defaults to 0 on macOS — set it explicitly or no sound is heard.
if (graph.Start() != AUGraphError.OK)
{
throw new Exception();
}
The callback:
// Running sample index across callbacks; drives the sine phase.
int sample = 0;

// Render callback: fills both channels of the supplied buffers with a
// 440 Hz sine tone at a 44.1 kHz sample rate.
unsafe AudioUnitStatus HandleRenderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
{
    // Non-interleaved layout: buffer 0 is the left channel, buffer 1 the right.
    var leftChannel = (float*)data[0].Data;
    var rightChannel = (float*)data[1].Data;
    for (var frame = 0; frame < numberFrames; frame++)
    {
        var amplitude = (float)Math.Sin(sample * 2 * Math.PI * 440 / 44100);
        leftChannel[frame] = amplitude;
        rightChannel[frame] = amplitude;
        sample++;
    }
    return AudioUnitStatus.OK;
}
The callback is running and the buffer is being filled but no sound is generated. The example works in iOS with AudioTypeOutput.Remote being used in place of AudioTypeOutput.Default but for some reason macOS is not playing the sound. Any ideas?
The output volume of the mixer had to be set manually because it defaults to 0.
In my application I am using TwainDotNet library and I am scanning a specific area from flatbed.
// Configures TwainDotNet scan settings and starts scanning.
// Intended behavior: the first scan captures the full page; subsequent
// scans capture only up to _prasymas.maxScanAukstis centimeters of height.
// Returns the 'prasymas' argument unchanged.
public Prasymas scanForm(Prasymas prasymas, bool isItFirstScan)
{
    Enabled = false;
    _settings = new ScanSettings();
    _settings.UseDocumentFeeder = false;
    _settings.ShowTwainUI = false;
    _settings.ShowProgressIndicatorUI = true;
    _settings.UseDuplex = false;
    // The original "false ? Fax : ColourPhotocopier" always chose the
    // colour-photocopier resolution — use it directly.
    _settings.Resolution = ResolutionSettings.ColourPhotocopier;
    _settings.Page = PageSettings.Default;
    if (!isItFirstScan)
    {
        _prasymas = prasymas;
        // Restrict the scan area to the requested height from the top-left.
        AreaSettings = new AreaSettings(Units.Centimeters, 0.0f, 0.0f, (float)(_prasymas.maxScanAukstis), 0.0f);
        // BUG FIX: the area was built but never assigned to the settings
        // ("_settings.Area = !false ? null : AreaSettings" was always null),
        // so TWAIN drivers scanned the full page every time.
        _settings.Area = AreaSettings;
    }
    else
    {
        // First scan: capture the full page.
        _settings.Area = null;
    }
    _settings.ShouldTransferAllPages = true;
    // NOTE(review): some TWAIN drivers ignore the Area when rotation
    // settings are supplied — if area selection still fails, try omitting
    // the Rotation block entirely (see the accepted answer).
    _settings.Rotation = new RotationSettings()
    {
        AutomaticRotate = false,
        AutomaticBorderDetection = false
    };
    try
    {
        _twain.SelectSource();
        _twain.StartScanning(_settings);
    }
    catch (TwainException ex)
    {
        if (ex.Message == "Error opening data source")
        {
            // Driver failed to open: let the user pick a source and retry.
            _twain.SelectSource();
            scanForm(_prasymas, isItFirstScan);
            Enabled = true;
        }
        else
            throw;   // BUG FIX: "throw ex" reset the stack trace
    }
    catch (Exception exc)
    {
        if (exc.Message == "Pasiuto skeneris")
        {
            // Scanner went haywire: restart with a full-page scan.
            scanForm(_prasymas, true);
        }
        else
            throw;   // BUG FIX: "throw exc" reset the stack trace
    }
    Enabled = true;
    return prasymas;
}
But the result is: when scanning starts and the scan source is a TWAIN driver, it scans the full page; if I choose the WIA driver, I get the specifically selected area.
The main idea is that when the app starts for the first time it scans the full page, and on subsequent scans it scans only a specific height of the page.
After reading documentation I found out that just needed to comment out rotation part and everything started to work. I can scan selected area.
If I run my code in Mozilla Firefox, after I click Scan in the UI the Select Source window opens and then it crashes.
But if I run the same code in Chrome, it scans the image in the scanner; after that, if I click Scan in the new window, it scans properly but crashes during the file transfer, and Chrome shows the error message "A plugin (Shockwave Flash) isn't responding".
What might be the problem?
// Starts a TWAIN acquisition of up to no_of_pages pages.
// BUG FIX: MaxImagesInBuffer was set AFTER AcquireImage() had already
// started the (asynchronous) acquisition, so OnPostTransferCallback could
// compare against a stale buffer limit. Configure the device fully before
// acquiring.
function onScan(no_of_pages)
{
    if (DWObject)
    {
        if (DWObject.SourceCount > 0)
        {
            DWObject.SelectSource();
            DWObject.IfDisableSourceAfterAcquire = true;
            DWObject.MaxImagesInBuffer = no_of_pages;  // set the limit first
            DWObject.AcquireImage();
        }
        else
            alert("No TWAIN compatible drivers detected.");
    }
}
// Hook called by the Dynamsoft loader: route SDK diagnostic messages
// to our own handler instead of the default output.
function Dynamsoft_ChangeConfig(config) {
    config.onPrintMsg = g_DWT_PrintMsg;
}
// Surfaces a Dynamic Web TWAIN SDK message to the user.
function g_DWT_PrintMsg(strMessage)
{
    alert(strMessage);
}
// Fired after each page transfer. Once the buffer holds the requested
// number of pages, release the TWAIN source and notify the Flash UI.
function OnPostTransferCallback()
{
    try {
        var allPagesReceived =
            DWObject.MaxImagesInBuffer == DWObject.HowManyImagesInBuffer;
        if (!allPagesReceived) {
            // More pages still expected — nothing to do yet (TBD).
            return;
        }
        DWObject.CloseSource();
        sendToFlash();
    } catch (err) {
        alert(err.message);
    }
}
// Callback bridge into the embedded Flash movie: pushes the number of
// buffered images into the ActionScript side.
function sendToFlash()
{
    try {
        var flashMovie = window.document.flashContent;
        flashMovie.sendToActionScript(DWObject.HowManyImagesInBuffer);
        // Alternative embed id:
        // document.getElementById("ICANSWF").sendToActionScript();
    } catch (err) {
        alert(err.message);
    }
}
//call from flash for uploading documents
// Uploads every image in the TWAIN buffer to the server via HTTP POST as
// "<serialNo>_<index>.png" and returns an array of {label, source} entries
// for the successfully uploaded files.
// NOTE(review): "our host" is a placeholder — verify the real server host.
function onUpload(serialNo)
{
//alert("upload the file");
var imageArr = new Array();
try{
var imageName;
var uploadPage;
var serverHost;
var CurrentPathName = unescape(location.pathname); // get current PathName in plain ASCII
var CurrentPath = CurrentPathName.substring(0, CurrentPathName.lastIndexOf("/") + 1);
uploadPage = CurrentPath+"TempUpload.php";
//uploadPage = CurrentPath+"UploadDocument.php";
//serverHost = "blabla";
//window.Plugin.HTTPPort =1451;
serverHost = "our host";
DWObject.HTTPPort = 80;
DWObject.IfSSL = false;
//alert(Plugin.HowManyImagesInBuffer);
// Upload each buffered image in turn.
for(var i=0;i < DWObject.HowManyImagesInBuffer;i++)
{
imageName = serialNo+"_"+(i+1)+".png";
DWObject.HTTPUploadThroughPost(serverHost,i,uploadPage,imageName);
// ErrorCode 0 means the upload succeeded.
if (DWObject.ErrorCode == 0)
{
//alert(imageName);
imageArr.push({"label":imageName,"source":"http://"+serverHost+":"+DWObject.HTTPPort+"/icanindonesia/AppData/Temp/"+imageName}); //Push image name and location in an array
}
else // upload failed — report the SDK error (original comment "succeded" was wrong)
{
alert(DWObject.ErrorString);
//imageArr[i] = imageName;
//alert(imageArr[i]);
}
}
}catch(err){
//alert("onUpload");
alert(err.message);
}
console.log(imageArr);
return imageArr;
}
// Opens the download URL in a browser window named 'Download'.
function startDownload(url)
{
    window.open(url, 'Download');
}
// Opens the document URL in a new tab/window titled "ican image viewer".
function openDocument(url)
{
    window.open(url, '_blank', "ican image viewer");
}
@priya, this is Rachel from Dynamsoft. Thanks for using our Dynamic Web TWAIN SDK. Which version of Firefox and Chrome are you using? We now also have a newer version of Dynamic Web TWAIN which you may try. Please contact our support team to get better help.
I'm trying to make a timecode counter for a video player based on GMFBridge and DirectShow.
I'm using a Timer to call GetCurrentPosition() every 200ms but I believe it's not accurate. I'd like at least to get the frame number (from start) of the current frame when a video is running.
Can this actually be done?
I'm using DirectShowLib .NET library.
To my knowledge this is hard to achieve, in a solution I work on I did the following to get 'frame number':
// Total frame count of the media, derived from its duration divided by
// the average per-frame time reported by the video header.
public int NumberOfFrames
{
    get { return (int)(Duration / AverageTimePerFrame); }
}
// Average duration of one frame in seconds. AvgTimePerFrame is expressed
// in 100-nanosecond units, hence the division by 1e7.
public double AverageTimePerFrame
{
    get { return videoInfoHeader.AvgTimePerFrame / 10000000.0; }
}
// Converts a playback time (seconds) to a zero-based frame index, clamped
// to the valid range [0, NumberOfFrames - 1].
// BUG FIX: the original could return a negative index when currentTime < 0
// or when Duration is 0 (noOfFrames - 1 == -1); clamp the lower bound to 0.
public int GetCurrentFrame(double currentTime)
{
    int noOfFrames = (int)(Duration / AverageTimePerFrame);
    int frame = Convert.ToInt32(Math.Min(noOfFrames - 1, Math.Floor(currentTime / AverageTimePerFrame)));
    return Math.Max(0, frame);
}
I got the videoInfoHeader by doing:
// Get the media type from the SampleGrabber
AMMediaType media = new AMMediaType();
hr = sampGrabber.GetConnectedMediaType(media);
DsError.ThrowExceptionForHR(hr);
// Only the classic VIDEOINFOHEADER format layout is handled here.
if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
{
throw new NotSupportedException("Unknown Grabber Media Format");
}
// Grab the size info
videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
// Free the unmanaged format block so it is not leaked.
DsUtils.FreeAMMediaType(media);
However this is obviously tailored to my own use-case, hopefully it helps you a bit though. Good luck!
Updated
Added CurrentTime code (the locker is for my own usage you can most likely remove that):
// Current playback position in seconds, backed by the filter graph's
// IMediaPosition interface. Access is serialized through 'locker'.
public double CurrentTime
{
set
{
lock (locker)
{
// NOTE(review): the 'as' cast yields null if the graph does not expose
// IMediaPosition — a null check before use would avoid an NRE; confirm
// the graph always implements it in this application.
IMediaPosition mediaPos = fFilterGraph as IMediaPosition;
int hr;
// Out-of-range seek requests are silently ignored rather than thrown.
if (value >= 0 && value <= Duration)
{
hr = mediaPos.put_CurrentPosition(value);
DsError.ThrowExceptionForHR(hr);
}
}
}
get
{
lock (locker)
{
IMediaPosition mediaPos = fFilterGraph as IMediaPosition;
int hr;
double currentTime;
hr = mediaPos.get_CurrentPosition(out currentTime);
DsError.ThrowExceptionForHR(hr);
return currentTime;
}
}
}