import UIKit
import CoreImage
import AVFoundation

/// Live camera preview that pushes every video frame through a Core Image
/// photo-effect filter, renders it into a `CALayer`, and can freeze the most
/// recent filtered frame into an image view when the capture button is tapped.
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    var filter: CIFilter!
    var ciImage: CIImage!                 // last filtered frame, kept for captureScreen(_:)
    var videoLayer: CALayer!              // live preview layer
    var imageView: UIImageView!           // shows the frozen frame after capture
    var avCaptureSession: AVCaptureSession!

    // Reuse a single CIContext: creating one per frame is prohibitively expensive.
    var context: CIContext = {
        return CIContext(eaglContext: EAGLContext(api: EAGLRenderingAPI.openGLES2)!, options: nil)
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        // NOTE: the original used curly quotes (“…”) around this literal, which
        // does not compile; straight quotes restored.
        filter = CIFilter(name: "CIPhotoEffectTransfer")
        buildUI()
        buildSession()
    }

    /// Builds the preview layer, the frozen-frame image view and the capture button.
    func buildUI() {
        videoLayer = CALayer()
        videoLayer.anchorPoint = CGPoint.zero
        videoLayer.bounds = view.bounds
        self.view.layer.insertSublayer(videoLayer, at: 0)

        imageView = UIImageView(frame: view.bounds)
        self.view.addSubview(imageView)

        let button = UIButton(frame: CGRect(x: 0, y: 420, width: 320, height: 60))
        button.setTitle("截取图片", for: .normal)   // .normal == UIControlState(rawValue: 0)
        button.backgroundColor = UIColor.black
        // The source was truncated mid-call here; the only UIButton-taking
        // action in this class is captureScreen(_:), restored as the target.
        button.addTarget(self, action: #selector(captureScreen(_:)), for: .touchUpInside)
        self.view.addSubview(button)
    }

    /// Configures and starts the AVCaptureSession that delivers 32BGRA frames
    /// to `captureOutput(_:didOutputSampleBuffer:from:)`.
    func buildSession() {
        avCaptureSession = AVCaptureSession()
        avCaptureSession.beginConfiguration()
        avCaptureSession.sessionPreset = AVCaptureSessionPresetHigh

        // Fail gracefully instead of crashing with `try!` when no camera
        // exists (e.g. running in the simulator).
        guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo),
              let deviceInput = try? AVCaptureDeviceInput(device: captureDevice) else {
            avCaptureSession.commitConfiguration()
            return
        }
        if avCaptureSession.canAddInput(deviceInput) {
            avCaptureSession.addInput(deviceInput)
        }

        let dataOutput = AVCaptureVideoDataOutput()
        // 32BGRA so the CVPixelBuffer can be wrapped directly in a CIImage.
        dataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: Int(kCVPixelFormatType_32BGRA)]
        dataOutput.alwaysDiscardsLateVideoFrames = true
        if avCaptureSession.canAddOutput(dataOutput) {
            avCaptureSession.addOutput(dataOutput)
        }

        // A *serial* queue: the original used .concurrent, which lets frames be
        // processed (and drawn) out of order.
        let queue = DispatchQueue(label: "VideoQueue")
        dataOutput.setSampleBufferDelegate(self, queue: queue)

        avCaptureSession.commitConfiguration()
        avCaptureSession.startRunning()
    }

    /// Per-frame callback: filter the frame, rotate it to match the device
    /// orientation, and display it in the preview layer.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        autoreleasepool {
            // Bail out instead of force-unwrapping: a dropped/empty sample
            // buffer must not crash the capture pipeline.
            guard let imgBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
            var ciImage = CIImage(cvPixelBuffer: imgBuffer)

            self.filter.setValue(ciImage, forKey: kCIInputImageKey)
            guard let filtered = self.filter.outputImage else { return }
            ciImage = filtered

            // Camera frames arrive landscape-oriented; rotate to match the
            // current device orientation. (Face up/down and .landscapeLeft
            // need no rotation, as in the original.)
            switch UIDevice.current.orientation {
            case .portraitUpsideDown:
                ciImage = ciImage.applying(CGAffineTransform(rotationAngle: CGFloat.pi / 2))
            case .portrait:
                ciImage = ciImage.applying(CGAffineTransform(rotationAngle: -CGFloat.pi / 2))
            case .landscapeRight:
                ciImage = ciImage.applying(CGAffineTransform(rotationAngle: CGFloat.pi))
            default:
                break
            }

            self.ciImage = ciImage
            let cgImage = self.context.createCGImage(ciImage, from: ciImage.extent)

            // async, not sync: a synchronous hop to the main queue from the
            // capture queue can deadlock if the main thread is itself blocked
            // waiting on the session (e.g. stopRunning()).
            DispatchQueue.main.async {
                self.videoLayer.contents = cgImage
            }
        }
    }

    /// Stops capture, removes the live preview, and freezes the most recent
    /// filtered frame into `imageView`.
    func captureScreen(_ sender: UIButton) {
        avCaptureSession.stopRunning()
        videoLayer.removeFromSuperlayer()
        sender.isHidden = true

        // Guard: ciImage is nil if the button is tapped before any frame arrived.
        if let ciImage = self.ciImage {
            imageView.image = UIImage(ciImage: ciImage)
        }
    }
}