首页
学习
活动
专区
圈层
工具
发布
社区首页 >问答首页 >AVCapturePhotoOutput iOS摄像头超暗

AVCapturePhotoOutput iOS摄像头超暗
EN

Stack Overflow用户
提问于 2018-01-28 01:24:14
回答 2查看 1.1K关注 0票数 2

我有一个应用程序设置,以使用相机的照片(在计时器的基础上),以检测人脸的存在。当我向应用程序提供一张我添加到资源中的照片时,检测过程运行得相当好。然而,当我尝试直接使用相机的输出,甚至在将图像保存到文件后,结果图像非常暗,以至于人脸识别完全不可靠。

如果我显示相机看到的图像,它看起来是正确的。我捕获了以下两张图片--一张是从摄像机现场看到的,另一张是从AVCapturePhotoOutput创建的相同视图。如果我只是在图像视图中显示捕获的图像,同样的黑暗也会发生。

注意注释:“我在这里放置了断点并截取了屏幕截图”。然后,当代码完成时,我拍摄了第二个屏幕截图。这些照片是在强光下拍摄的。

下面是基本代码:

代码语言:javascript
复制
class CRSFaceRecognitionViewController: UIViewController, UIImagePickerControllerDelegate {

var sentBy : String?

//timers
var faceTimer : Timer?
var frvcTimer : Timer?

//capture
var captureSession = AVCaptureSession()
var settings = AVCapturePhotoSettings()
var backCamera : AVCaptureDevice?
var frontCamera : AVCaptureDevice?
var currentCamera : AVCaptureDevice?

var photoOutput : AVCapturePhotoOutput?
var cameraPreviewLayer : AVCaptureVideoPreviewLayer?

var image : UIImage?
var outputImage : UIImage?
@IBOutlet weak var imageView: UIImageView!

//MARK: - Setup

override func viewDidLoad() {
    super.viewDidLoad()
}//viewDidLoad

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(true)
}//viewWillAppear

override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(true)

    //check for camera
    if (UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.camera)) {

        setupCaptureSession()
        setupDevices()
        setupInputOutput()
        setupPreviewLayer()

        startRunningCaptureSession()

        photoOutput?.capturePhoto(with:settings, delegate: self)

    } else {
        print("Camera not present")
    }

}//viewDidAppear

//MARK: - Video

@objc func showFaceRecognitionViewController() {
    //all this does is present the image in a new ViewController imageView
    performSegue(withIdentifier: "showSavedCameraPhoto", sender: self)
}//showThePhotoView

func setupCaptureSession() {
    captureSession.sessionPreset = AVCaptureSession.Preset.photo
}//setupCaptureSession

func setupDevices() {

    let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: .video, position: .unspecified)

    let devices = deviceDiscoverySession.devices
    for device in devices {

        if device.position == AVCaptureDevice.Position.back {
            backCamera = device
        } else if device.position == AVCaptureDevice.Position.front {
            frontCamera = device
        }//if else

    }//for in

    currentCamera = frontCamera

}//setupDevices

func setupInputOutput() {

    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
        captureSession.addInput(captureDeviceInput)
        photoOutput = AVCapturePhotoOutput()
        photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: {(success, error) in
            print("in photoOutput completion handler")
        })
        captureSession.addOutput(photoOutput!)
    } catch {
        print("Error creating AVCaptureDeviceInput:", error)
    }//do catch

}//setupInputOutput

func setupPreviewLayer() {
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session : captureSession)
    cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
    cameraPreviewLayer?.frame = view.frame
    view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}//setupPreviewLayer


func startRunningCaptureSession() {
    captureSession.startRunning()
}//startRunningCaptureSession


//MARK: - Segue

override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    if segue.identifier == "showSavedCameraPhoto" {
        let controller = segue.destination as! JustToSeeThePhotoViewController
        controller.inImage = outputImage
    }//if segue

}//prepare


//MARK: - Look for Faces

func findTheFaces() {
    let myView : UIView = self.view

    guard let outImage = outputImage else {return}

    let imageView = UIImageView(image: outImage)
    imageView.contentMode = .scaleAspectFit

    let scaledHeight = myView.frame.width / outImage.size.width * outImage.size.height

    imageView.frame = CGRect(x: 0, y: 0, width: myView.frame.width, height: myView.frame.height)
    imageView.backgroundColor = UIColor.blue

    myView.addSubview(imageView)

    let request = VNDetectFaceRectanglesRequest { (req, err) in

        if let err = err {
            print("VNDetectFaceRectanglesRequest failed to run:", err)
            return
        }//if let err

        print(req.results ?? "req.results is empty")

        req.results?.forEach({ (res) in

            DispatchQueue.main.async {

                guard let faceObservation = res as? VNFaceObservation else {return}

                let x = myView.frame.width * faceObservation.boundingBox.origin.x

                let width = myView.frame.width * faceObservation.boundingBox.width
                let height = scaledHeight * faceObservation.boundingBox.height

                let y = scaledHeight * (1 - faceObservation.boundingBox.origin.y) - height

                let redView = UIView()
                redView.backgroundColor = .red
                redView.alpha = 0.4
                redView.frame = CGRect(x: x, y: y, width: width, height: height)
                myView.addSubview(redView)

                print("faceObservation bounding box:")
                print(faceObservation.boundingBox)

                //if you get here, then you have a face bounding box

            }//main
        })//forEach block


    }//let request

    guard let cgImage = outImage.cgImage else {return}

    DispatchQueue.global(qos: .utility).async {
        let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])

        do {
            try handler.perform([request])

            print("handler request was successful")
            self.performSegue(withIdentifier: "showSavedCameraPhoto", sender: self)

        } catch let reqErr {
            print("Failed to perform request:", reqErr)
        }
    }//DispatchQueue

}//findTheFaces

//MARK: - Memory

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
}//didReceiveMemoryWarning

}//class


extension CRSFaceRecognitionViewController : AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {

    if let imageData = photo.fileDataRepresentation() {

        print(imageData)
        outputImage = UIImage(data : imageData)

        //
        //I put breakpoint here and took a screen shot
        //

        if let outImage = outputImage?.updateImageOrientionUpSide() {
            self.outputImage = outImage
        }

        DispatchQueue.main.async {
            self.findTheFaces()
        }

    }//if let imageData
}//photoOutput

}//extension

extension UIImage {

//you need to do this to ensure that the image is in portrait mode
//the face recognition method will not work if the face is horizontal
func updateImageOrientionUpSide() -> UIImage? {
    if self.imageOrientation == .up {
        return self
    }

    UIGraphicsBeginImageContextWithOptions(self.size, false, self.scale)
    self.draw(in: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height))
    if let normalizedImage:UIImage = UIGraphicsGetImageFromCurrentImageContext() {
        UIGraphicsEndImageContext()
        return normalizedImage
    }
    UIGraphicsEndImageContext()
    return nil
}//updateImageOrientionUpSide

}//image

我一定是把相机拍错了。任何帮助都将不胜感激。Swift 4、iOS 11.2.5、Xcode 9.2

EN

回答 2

Stack Overflow用户

回答已采纳

发布于 2018-05-14 13:43:29

看起来好像我有太多的异步片段。我将代码分解为每个主要部分的单独函数-异步或非同步,并将它们全部放入一个DispatchGroup中。这似乎解决了问题。

票数 1
EN

Stack Overflow用户

发布于 2018-03-03 04:14:05

我会尝试在startRunningCaptureSession()photoOutput?.capturePhoto(with:settings, delegate: self)之间添加延迟

例如,

DispatchQueue.main.asyncAfter(deadline: .now() + .seconds(4), execute: { // take a photo startRunningCaptureSession() photoOutput?.capturePhoto(with:settings, delegate: self) })

票数 1
EN
页面原文内容由Stack Overflow提供。腾讯云小微IT领域专用引擎提供翻译支持
原文链接:

https://stackoverflow.com/questions/48478430

复制
相关文章

相似问题

领券
问题归档专栏文章快讯文章归档关键词归档开发者手册归档开发者手册 Section 归档