背景
1. Xcode 9.2
1. Swift 4
1. iOS 11.4
摄像头的访问基础设定都做好了的!
在网上查很多资料借鉴了网上大神的编程写了一个AVFoundation的摄像头视频处理。
想要做的事,就像OpenCV使用摄像头识别东西一样,编程一个还没有开始任何图像处理,但读取了每一帧摄像的基础demo,以便以后使用。现在把程序写出来了,但是摄像头没有打开,也没有错误,不知道该怎么办好,求助大神帮忙看看.
class ViewController: UIViewController,AVCaptureDelegate {
@IBOutlet weak var imageView: UIImageView!
var avCapture = AVCapture()
override func viewDidLoad() {
super.viewDidLoad()
avCapture.delegate = self
}
func capture(image: UIImage) {
imageView.image = image
}
}
protocol AVCaptureDelegate {
func capture(image: UIImage)
}
class AVCapture:NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
var captureSession: AVCaptureSession!
var delegate: AVCaptureDelegate?
var counter = 0
let DISMISS_COUNT = 1
override init(){
super.init()
captureSession = AVCaptureSession()
captureSession.sessionPreset = AVCaptureSession.Preset.hd1920x1080
let videoDevice = AVCaptureDevice.default(for: AVMediaType.video)
videoDevice?.activeVideoMinFrameDuration = CMTimeMake(1, 30)// 1/30秒 (1秒間30frame)
let videoInput = try! AVCaptureDeviceInput.init(device: videoDevice!)
captureSession.addInput(videoInput)
let videoDataOutput = AVCaptureVideoDataOutput()
videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable as! String : Int(kCVPixelFormatType_32BGRA)]
videoDataOutput.alwaysDiscardsLateVideoFrames = true
captureSession.addOutput(videoDataOutput)
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
}
}
// SampleBuffer UIImage作成
func imageFromSampleBuffer(sampleBuffer :CMSampleBuffer) -> UIImage {
let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))
// 画像情報
let base = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)!
let bytesPerRow = UInt(CVPixelBufferGetBytesPerRow(imageBuffer))
let width = CVPixelBufferGetWidth(imageBuffer)
let height = CVPixelBufferGetHeight(imageBuffer)
CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))
let colorSpace = CGColorSpaceCreateDeviceRGB()
let bitsPerCompornent = 8
let bitmapInfo = CGBitmapInfo(rawValue: (CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue) as UInt32)
let newContext = CGContext(data: base, width: width, height: height, bitsPerComponent: Int(bitsPerCompornent), bytesPerRow: Int(bytesPerRow), space: colorSpace, bitmapInfo: bitmapInfo.rawValue)! as CGContext
// 画像作成
let imageRef = newContext.makeImage()!
let image = UIImage(cgImage: imageRef, scale: 1.0, orientation: UIImageOrientation.right)
return image
}
func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
if (counter % DISMISS_COUNT) == 0 {
let image = imageFromSampleBuffer(sampleBuffer: sampleBuffer)
delegate?.capture(image: image)
if let camData = CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix, nil) as? Data {
let matrix: matrix_float3x3 = camData.withUnsafeBytes { $0.pointee }
print(matrix)
}
}
counter += 1
}
}