I have an app that records and then plays back an audio file. Currently the playback comes out of the earpiece speaker. Could someone tell me how to force the audio out of the loudspeaker instead, in Swift?
Below is one of the functions I'm using to play the audio file:
@IBAction func playAudioVader(sender: UIButton) {
    playAudioWithVariablePitch(-1000)
}

func playAudioWithVariablePitch(pitch: Float) {
    audioPlayer.stop()
    audioEngine.stop()
    audioEngine.reset()

    let audioPlayerNode = AVAudioPlayerNode()
    audioEngine.attachNode(audioPlayerNode)

    let changePitchEffect = AVAudioUnitTimePitch()
    changePitchEffect.pitch = pitch
    audioEngine.attachNode(changePitchEffect)

    audioEngine.connect(audioPlayerNode, to: changePitchEffect, format: nil)
    audioEngine.connect(changePitchEffect, to: audioEngine.outputNode, format: nil)

    audioPlayerNode.scheduleFile(audioFile, atTime: nil, completionHandler: nil)
    audioEngine.startAndReturnError(nil)
    audioPlayerNode.play()
}
override func viewDidLoad() {
    super.viewDidLoad()
    audioPlayer = AVAudioPlayer(contentsOfURL: receivedAudio.filePathURL, error: nil)
    audioPlayer.enableRate = true
    audioEngine = AVAudioEngine()
    audioFile = AVAudioFile(forReading: receivedAudio.filePathURL, error: nil)
}
EDIT July 2017: Refer to Husam's answer below for the Swift 2.0 solution.
As of Swift 1.2, you use overrideOutputAudioPort with AVAudioSessionPortOverride.Speaker on the shared AVAudioSession. It can be implemented like this:
if !session.overrideOutputAudioPort(AVAudioSessionPortOverride.Speaker, error: &error) {
    println("could not set output to speaker")
    if let e = error {
        println(e.localizedDescription)
    }
}
I'm working on an app that uses this now, and I have a function called setSessionPlayAndRecord, which looks like this:
func setSessionPlayAndRecord() {
    let session: AVAudioSession = AVAudioSession.sharedInstance()
    var error: NSError?

    if !session.setCategory(AVAudioSessionCategoryPlayAndRecord, error: &error) {
        println("could not set session category")
        if let e = error {
            println(e.localizedDescription)
        }
    }
    if !session.overrideOutputAudioPort(AVAudioSessionPortOverride.Speaker, error: &error) {
        println("could not set output to speaker")
        if let e = error {
            println(e.localizedDescription)
        }
    }
    if !session.setActive(true, error: &error) {
        println("could not make session active")
        if let e = error {
            println(e.localizedDescription)
        }
    }
}
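In the question's code, one natural place to call this is viewDidLoad, before creating the player. A sketch based on the setup shown in the question (same properties and dialect):

override func viewDidLoad() {
    super.viewDidLoad()
    setSessionPlayAndRecord()   // route playback to the loudspeaker before playing
    audioPlayer = AVAudioPlayer(contentsOfURL: receivedAudio.filePathURL, error: nil)
    audioPlayer.enableRate = true
    audioEngine = AVAudioEngine()
    audioFile = AVAudioFile(forReading: receivedAudio.filePathURL, error: nil)
}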
Swift 2.0 Code
func setSessionPlayerOn() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)
    } catch _ {
    }
    do {
        try AVAudioSession.sharedInstance().setActive(true)
    } catch _ {
    }
    do {
        try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSessionPortOverride.Speaker)
    } catch _ {
    }
}

func setSessionPlayerOff() {
    do {
        try AVAudioSession.sharedInstance().setActive(false)
    } catch _ {
    }
}
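For reference, in current Swift (4 and later) the same setup can be written with the .defaultToSpeaker category option, which routes playAndRecord output to the loudspeaker without a manual port override. A minimal sketch, untested here:

func setSessionPlayerOn() {
    let session = AVAudioSession.sharedInstance()
    do {
        // .defaultToSpeaker sends playAndRecord output to the loudspeaker by default
        try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker])
        try session.setActive(true)
    } catch {
        print("audio session setup failed: \(error.localizedDescription)")
    }
}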
Related
I'm trying to upgrade my app to Swift 4, but the barcode reader is not working.
I have isolated the barcode reader code, and it is still not working. The camera works, but it does not detect the barcode.
The code worked just fine on Swift 3 / iOS 10.
This is the complete code:
import AVFoundation
import UIKit

class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidLoad() {
        super.viewDidLoad()

        view.backgroundColor = UIColor.black
        captureSession = AVCaptureSession()

        let videoCaptureDevice = AVCaptureDevice.default(for: AVMediaType.video)
        let videoInput: AVCaptureDeviceInput

        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice!)
        } catch {
            return
        }

        if (captureSession.canAddInput(videoInput)) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }

        let metadataOutput = AVCaptureMetadataOutput()

        if (captureSession.canAddOutput(metadataOutput)) {
            captureSession.addOutput(metadataOutput)
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.ean8, AVMetadataObject.ObjectType.ean13, AVMetadataObject.ObjectType.pdf417]
        } else {
            failed()
            return
        }

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        view.layer.addSublayer(previewLayer)

        captureSession.startRunning()
    }

    func failed() {
        let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "OK", style: .default))
        present(ac, animated: true)
        captureSession = nil
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        if (captureSession?.isRunning == false) {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        if (captureSession?.isRunning == true) {
            captureSession.stopRunning()
        }
    }

    // Swift 3-era delegate method: under Swift 4 this is never called (see the answers below)
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
        captureSession.stopRunning()
        if let metadataObject = metadataObjects.first {
            let readableObject = metadataObject as! AVMetadataMachineReadableCodeObject
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            found(code: readableObject.stringValue!)
        }
        dismiss(animated: true)
    }

    func found(code: String) {
        print(code)
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }
}
I am using iOS 11 (beta 9) on my iPhone.
Any idea? Thank you.
I figured it out, though Apple didn't make it obvious: the callback in the AVCaptureMetadataOutputObjectsDelegate protocol has been renamed, and the parameter names are different!
So, replace

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!)

with

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection)

After this change my view controller scans QR codes as before. The method takes the same parameters, but because the selector and parameter names differ, the old version is simply never called. Change the function and parameter names and build/run.
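For context, the corrected method dropped into the view controller above would look like this (same body as the question's code, only the signature changes):

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    captureSession.stopRunning()
    if let metadataObject = metadataObjects.first {
        let readableObject = metadataObject as! AVMetadataMachineReadableCodeObject
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
        found(code: readableObject.stringValue!)
    }
    dismiss(animated: true)
}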
After changing the delegate callback

from

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!)

to

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection)

I also needed to set all available types for metadataObjectTypes, as below:

output.metadataObjectTypes = output.availableMetadataObjectTypes
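In the question's setup code, that line would replace the hard-coded list of types (shown here with the question's metadataOutput variable):

metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = metadataOutput.availableMetadataObjectTypes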
After changing your code from:
func metadataOutput(captureOutput: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {}
to:
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {}
everything works again.
You can use QRCodeScanner83 to scan barcodes:

import QRCodeScanner83
import AVFoundation

...

guard let vc = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(identifier: "CodeScannerViewController") as? CodeScannerViewController else {
    return
}
vc.callbackCodeScanned = { code in
    print("SCANNED CODE: \(code)")
    vc.dismiss(animated: true, completion: nil)
}
self.present(vc, animated: true, completion: nil)

If you need a custom UI, you can subclass CodeScannerViewController and set CodeScannerViewController.delegate to receive updates on the scanner state.
I would like to open the camera and the photo library in landscape mode, but I have not been able to do it.
Could someone please explain and give me an example?
I have used the code below, but some code seems to be missing (for instance, the imports). I need a complete example showing how to open the library and the camera, and how to save to the camera roll.
override func viewWillTransitionToSize(size: CGSize, withTransitionCoordinator coordinator: UIViewControllerTransitionCoordinator) {
    super.viewWillTransitionToSize(size, withTransitionCoordinator: coordinator)

    if let connection = self.previewLayer?.connection {
        let currentDevice: UIDevice = UIDevice.currentDevice()
        let orientation: UIDeviceOrientation = currentDevice.orientation
        let previewLayerConnection: AVCaptureConnection = connection

        if previewLayerConnection.supportsVideoOrientation {
            // note: the landscape cases are swapped on purpose, because
            // UIDeviceOrientation and AVCaptureVideoOrientation are mirrored
            switch orientation {
            case .Portrait:
                previewLayerConnection.videoOrientation = AVCaptureVideoOrientation.Portrait
            case .LandscapeRight:
                previewLayerConnection.videoOrientation = AVCaptureVideoOrientation.LandscapeLeft
            case .LandscapeLeft:
                previewLayerConnection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
            case .PortraitUpsideDown:
                previewLayerConnection.videoOrientation = AVCaptureVideoOrientation.PortraitUpsideDown
            default:
                previewLayerConnection.videoOrientation = AVCaptureVideoOrientation.Portrait
            }
        }
    }
}
I should also mention that I already have some code implemented. I would find it easier to implement this without using additional frameworks like AVFoundation.
I am currently using this code to open the library and the camera:
@IBAction func openLibrary(sender: AnyObject) { // Library button action
    imagePicker.allowsEditing = false
    imagePicker.sourceType = .PhotoLibrary
    presentViewController(imagePicker, animated: true, completion: nil)
}

@IBAction func openCamera(sender: AnyObject) { // Camera button action
    imagePicker.allowsEditing = false
    imagePicker.sourceType = .Camera
    presentViewController(imagePicker, animated: true, completion: nil)
}
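Since the question also asks about saving to the camera roll, here is a minimal sketch in the same Swift 2-era syntax, assuming this view controller conforms to UIImagePickerControllerDelegate and UINavigationControllerDelegate and is set as imagePicker.delegate (the delegate method and UIImageWriteToSavedPhotosAlbum are standard UIKit; the placement is illustrative):

func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
    if let image = info[UIImagePickerControllerOriginalImage] as? UIImage {
        // save the picked or captured photo to the camera roll
        UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
    }
    picker.dismissViewControllerAnimated(true, completion: nil)
}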
In your app delegate, use this function:

func application(application: UIApplication, supportedInterfaceOrientationsForWindow window: UIWindow?) -> Int {
    if let presentedView = self.window?.rootViewController?.presentedViewController as? CameraViewController {
        // camera view: lock to landscape
        return Int(UIInterfaceOrientationMask.Landscape.rawValue)
    } else {
        // other views: return your orientation mask here, e.g. allow all
        return Int(UIInterfaceOrientationMask.All.rawValue)
    }
}
Swift 2
func application(application: UIApplication, supportedInterfaceOrientationsForWindow window: UIWindow?) -> UIInterfaceOrientationMask {
    if let presentedView = self.window?.rootViewController?.presentedViewController as? YourCameraViewController {
        // camera view
        return UIInterfaceOrientationMask.Landscape
    } else {
        // other views: return your orientation mask here, e.g. allow all
        return UIInterfaceOrientationMask.All
    }
}
Basically I am working on a sleep monitoring application that also monitors heart rate. I don't want to start a workout session, but that seems to be the way Apple requires it to work!
Here's the heart-rate-only code I am using:
@IBOutlet private weak var label: WKInterfaceLabel!
@IBOutlet private weak var deviceLabel: WKInterfaceLabel!
@IBOutlet private weak var heart: WKInterfaceImage!
@IBOutlet private weak var startStopButton: WKInterfaceButton!

let healthStore = HKHealthStore()

// state of the app - is the workout activated
var workoutActive = false

// define the activity type and location
var workoutSession: HKWorkoutSession?
let heartRateUnit = HKUnit(fromString: "count/min")
var anchor = HKQueryAnchor(fromValue: Int(HKAnchoredObjectQueryNoAnchor))

override func awakeWithContext(context: AnyObject?) {
    super.awakeWithContext(context)
}
override func willActivate() {
    super.willActivate()

    guard HKHealthStore.isHealthDataAvailable() == true else {
        label.setText("not available")
        return
    }
    guard let quantityType = HKQuantityType.quantityTypeForIdentifier(HKQuantityTypeIdentifierHeartRate) else {
        displayNotAllowed()
        return
    }

    let dataTypes = Set(arrayLiteral: quantityType)
    healthStore.requestAuthorizationToShareTypes(nil, readTypes: dataTypes) { (success, error) -> Void in
        if success == false {
            self.displayNotAllowed()
        }
    }
}

func displayNotAllowed() {
    label.setText("not allowed")
}
func workoutSession(workoutSession: HKWorkoutSession, didChangeToState toState: HKWorkoutSessionState, fromState: HKWorkoutSessionState, date: NSDate) {
    switch toState {
    case .Running:
        workoutDidStart(date)
    case .Ended:
        workoutDidEnd(date)
    default:
        print("Unexpected state \(toState)")
    }
}

func workoutSession(workoutSession: HKWorkoutSession, didFailWithError error: NSError) {
    // Do nothing for now
    NSLog("Workout error: \(error.userInfo)")
}
func workoutDidStart(date: NSDate) {
    if let query = createHeartRateStreamingQuery(date) {
        healthStore.executeQuery(query)
    } else {
        label.setText("cannot start")
    }
}

func workoutDidEnd(date: NSDate) {
    if let query = createHeartRateStreamingQuery(date) {
        healthStore.stopQuery(query)
        label.setText("---")
    } else {
        label.setText("cannot stop")
    }
}
// MARK: - Actions
@IBAction func startBtnTapped() {
    if (self.workoutActive) {
        // finish the current workout
        self.workoutActive = false
        self.startStopButton.setTitle("Start")
        if let workout = self.workoutSession {
            healthStore.endWorkoutSession(workout)
        }
    } else {
        // start a new workout
        self.workoutActive = true
        self.startStopButton.setTitle("Stop")
        startWorkout()
    }
}

func startWorkout() {
    self.workoutSession = HKWorkoutSession(activityType: HKWorkoutActivityType.CrossTraining, locationType: HKWorkoutSessionLocationType.Indoor)
    self.workoutSession?.delegate = self
    healthStore.startWorkoutSession(self.workoutSession!)
}
func createHeartRateStreamingQuery(workoutStartDate: NSDate) -> HKQuery? {
    // adding a predicate will not work
    // let predicate = HKQuery.predicateForSamplesWithStartDate(workoutStartDate, endDate: nil, options: HKQueryOptions.None)
    guard let quantityType = HKObjectType.quantityTypeForIdentifier(HKQuantityTypeIdentifierHeartRate) else { return nil }

    let heartRateQuery = HKAnchoredObjectQuery(type: quantityType, predicate: nil, anchor: anchor, limit: Int(HKObjectQueryNoLimit)) { (query, sampleObjects, deletedObjects, newAnchor, error) -> Void in
        guard let newAnchor = newAnchor else { return }
        self.anchor = newAnchor
        self.updateHeartRate(sampleObjects)
    }
    heartRateQuery.updateHandler = { (query, samples, deleteObjects, newAnchor, error) -> Void in
        self.anchor = newAnchor!
        self.updateHeartRate(samples)
    }
    return heartRateQuery
}
func updateHeartRate(samples: [HKSample]?) {
    guard let heartRateSamples = samples as? [HKQuantitySample] else { return }

    dispatch_async(dispatch_get_main_queue()) {
        guard let sample = heartRateSamples.first else { return }
        let value = sample.quantity.doubleValueForUnit(self.heartRateUnit)
        self.label.setText(String(UInt16(value)))

        // retrieve source from sample
        let name = sample.sourceRevision.source.name
        self.updateDeviceName(name)
        self.animateHeart()
    }
}

func updateDeviceName(deviceName: String) {
    deviceLabel.setText(deviceName)
}

func animateHeart() {
    self.animateWithDuration(0.5) {
        self.heart.setWidth(60)
        self.heart.setHeight(90)
    }

    let when = dispatch_time(DISPATCH_TIME_NOW, Int64(0.5 * double_t(NSEC_PER_SEC)))
    let queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
    dispatch_after(when, queue) {
        dispatch_async(dispatch_get_main_queue(), {
            self.animateWithDuration(0.5, animations: {
                self.heart.setWidth(50)
                self.heart.setHeight(80)
            })
        })
    }
}
To summarize, the unexpected observations are:
1. The time I monitor the heart rate contributes to the green ring in the Activity app.
2. An unexpectedly high number of calories is recorded, even when the person is in bed or asleep!
Can you please help with the correct code to monitor and display a person's heart rate at regular intervals during sleep, without contributing to the green ring or recording extra calories?
Thanks a lot in advance!
Starting a workout and running the heart rate monitor will drain the Apple Watch's battery in about 6 hours (from a full charge), so having it run continuously while sleeping is probably not realistic at this time.
From what I can tell, starting a workout with HKWorkoutSession does two things for your app: it keeps your app in the foreground, and it starts taking a heart rate sample every few seconds. Have you considered not starting it? Your HealthKit queries will still work as is, and the heart rate monitor still records the user's heart rate every 15 minutes or so. The main thing you lose is keeping your app in the foreground, and I wonder whether you need that (since the user will be asleep).
To retrieve the last heart rate sample from HealthKit:

func getLatestHeartRate() {
    let quantityType = HKObjectType.quantityTypeForIdentifier(HKQuantityTypeIdentifierHeartRate)!
    let sortDescriptor = NSSortDescriptor(key: HKSampleSortIdentifierStartDate, ascending: false)

    let sampleQuery = HKSampleQuery(sampleType: quantityType, predicate: nil, limit: 1, sortDescriptors: [sortDescriptor]) { (sampleQuery, results, error) -> Void in
        // handle the returned sample(s) here
    }
    self.healthStore.executeQuery(sampleQuery)
}
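The results handler above is left empty; here is one way it could be filled in, as a sketch that assumes the heartRateUnit and label properties from the question's code are in scope:

let sampleQuery = HKSampleQuery(sampleType: quantityType, predicate: nil, limit: 1, sortDescriptors: [sortDescriptor]) { (sampleQuery, results, error) -> Void in
    guard let sample = results?.first as? HKQuantitySample else { return }
    let bpm = sample.quantity.doubleValueForUnit(self.heartRateUnit)
    dispatch_async(dispatch_get_main_queue()) {
        // UI updates belong on the main queue
        self.label.setText(String(UInt16(bpm)))
    }
}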
I am trying to play audio from parse.com. I am getting the PFFile, but I am unable to play the audio.
func testing() {
    let query = PFQuery(className: "Attractions")
    query.whereKey("objectId", equalTo: "hDyP0SwbAQ")
    query.findObjectsInBackgroundWithBlock { (result, error) -> Void in
        for obj in result! {
            let path = obj.objectForKey("attraction_file") as! PFFile
            self.playWithURL(path)
        }
    }
}

func playWithURL(url: PFFile) {
    do {
        let audioPlayer = try! AVAudioPlayer(contentsOfURL: url)
        audioPlayer.prepareToPlay()
        audioPlayer.play()
    }
}
Thanks in advance.
I found the answer.
Earlier I was using AVAudioPlayer; that's why I was getting the error. AVAudioPlayer needs a local file (or data), whereas AVPlayer can stream from a remote URL.
Now I am using AVPlayer and my code works perfectly.
var avAudioPlayer: AVPlayer? // declared in the class

func testing() {
    let query = PFQuery(className: "Attractions")
    query.whereKey("objectId", equalTo: "hDyP0SwbAQ")
    query.findObjectsInBackgroundWithBlock { (result, error) -> Void in
        for obj in result! {
            let path = obj.objectForKey("SongFile") as! PFFile
            self.playWithURL(path.url!) // PFFile.url holds the remote URL string
        }
    }
}

func playWithURL(url: String) {
    self.avAudioPlayer = AVPlayer(URL: NSURL(string: url)!)
    avAudioPlayer?.play()
}
I am setting up an iOS 8 app to request HealthKit store authorization to share types. The read/write request screen shows fine, and on tapping Done, I see the completion callback immediately. In this callback, I push a new view controller. I set a breakpoint on the code that programmatically pushes the next view controller, and it is hit immediately, but the transition doesn't occur until about 10 seconds later.
Some code:
@IBAction func enable(sender: AnyObject) {
    let hkManager = HealthKitManager()
    hkManager.setupHealthStoreIfPossible { (success, error) -> Void in
        if let error = error {
            println("error = \(error)")
        } else {
            println("enable HK success = \(success)")
            self.nextStep()
        }
    }
}

func nextStep() {
    self.nav!.pushViewController(nextController, animated: true)
}
class HealthKitManager: NSObject {
    let healthStore: HKHealthStore!

    override init() {
        healthStore = HKHealthStore() // stored properties must be set before super.init()
        super.init()
    }

    class func isHealthKitAvailable() -> Bool {
        return HKHealthStore.isHealthDataAvailable()
    }

    func setupHealthStoreIfPossible(completion: ((Bool, NSError!) -> Void)!) {
        if HealthKitManager.isHealthKitAvailable() {
            healthStore.requestAuthorizationToShareTypes(dataTypesToWrite(), readTypes: dataTypesToRead(), completion: { (success, error) -> Void in
                completion(success, error)
            })
        }
    }

    func dataTypesToWrite() -> NSSet {
        let runningType = HKObjectType.quantityTypeForIdentifier(HKQuantityTypeIdentifierDistanceWalkingRunning)
        let stepType = HKObjectType.quantityTypeForIdentifier(HKQuantityTypeIdentifierStepCount)
        return NSSet(objects: runningType, stepType)
    }

    func dataTypesToRead() -> NSSet {
        let runningType = HKObjectType.quantityTypeForIdentifier(HKQuantityTypeIdentifierDistanceWalkingRunning)
        let stepType = HKObjectType.quantityTypeForIdentifier(HKQuantityTypeIdentifierStepCount)
        let climbedType = HKObjectType.quantityTypeForIdentifier(HKQuantityTypeIdentifierFlightsClimbed)
        return NSSet(objects: runningType, stepType, climbedType)
    }
}
Any thoughts on what is causing the time delay for the transition?
The problem was that the completion block is called on a background queue. I just dispatched the transition back onto the main queue, as follows:
hkManager.setupHealthStoreIfPossible { (success, error) -> Void in
    if let error = error {
        println("error = \(error)")
    } else {
        dispatch_async(dispatch_get_main_queue(), {
            println("enable HK success = \(success)")
            self.nextStep()
        })
    }
}