转至控制器需要 5-10 秒才能显示
Segue to controller takes 5-10 seconds to display
我正在使用 swift 4 并且有一个应用程序,人们可以通过我的应用程序打开他们的 phone 相机。我有一个名为 CameraController
的 ViewController,它具有默认值 UIView
,我在名为 CameraView
的顶部有一个视图,它显示用户相机和其他按钮.
当我单击其中一个按钮时,它会通过 segue (PlacesController
) 将我带到另一个视图控制器。当我关闭 PlacesController
我回到 CameraController
但是子视图现在需要大约 8 或 10 秒才能再次显示。
有什么方法可以在保持当前子视图的同时转到另一个 Controller?
同样的问题是,当我转到我的 segue 控制器 PlaceController
然后返回我的 CameraController
时,大约需要 8 或 10 秒才能看到相机和子图层。特别是下面这段代码,我想知道我是否可以让我的子层保持运行,因为等待 10 秒才能显示实在太久了。
self.CameraView.layer.insertSublayer(previewLayer!, at: 0)
这是我的代码:
/// View controller that shows a live camera preview inside `CameraView`
/// and segues to `PlacesController`.
class CameraController: UIViewController {

    @IBOutlet weak var CameraView: UIView!

    /// Layer that renders the live camera feed; inserted below the buttons.
    var previewLayer: AVCaptureVideoPreviewLayer?
    let captureSession = AVCaptureSession()
    // BUG FIX: `stillImageOutput` was referenced but never declared in the
    // original snippet. `AVCaptureStillImageOutput` is assumed because the
    // code sets `outputSettings` — TODO confirm against the real project.
    let stillImageOutput = AVCaptureStillImageOutput()

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // BUG FIX: the original wrapped this call in `DispatchQueue.main.async`,
        // which still runs the blocking `startRunning()` on the main thread and
        // causes the reported 8-10 s freeze. Layer setup stays on main; the
        // blocking work is moved to a background queue inside beginSession().
        self.beginSession()
    } // BUG FIX: this closing brace was missing in the original paste,
      // which left beginSession() nested inside viewDidAppear.

    /// Attaches the preview layer to `CameraView` (main thread), then
    /// configures outputs and starts the session on a background queue,
    /// because `AVCaptureSession.startRunning()` is a blocking call.
    func beginSession() {
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.CameraView.layer.insertSublayer(previewLayer!, at: 0)
        previewLayer?.frame = self.CameraView.layer.bounds
        previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill

        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            guard let self = self else { return }
            self.stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecType.jpeg]
            if self.captureSession.canAddOutput(self.stillImageOutput) {
                self.captureSession.addOutput(self.stillImageOutput)
            }
            // Blocking call — must never run on the main queue.
            self.captureSession.startRunning()
        }
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        if segue.identifier == "PlacesController" {
            // BUG FIX: the original annotated the constant as `ColorC` while
            // force-casting to `PlacesController` — a type mismatch that cannot
            // compile. Use a safe conditional cast instead of `as!`.
            guard let placesController = segue.destination as? PlacesController else { return }
            placesController.delegate = self
        }
    }
}
// MARK: - PlacesController

/// Destination of the "PlacesController" segue; dismisses itself to
/// return to the presenting CameraController.
class PlacesController: UIViewController {

    /// Closes this screen and returns control to the presenter.
    @IBAction func backAction(_ sender: Any) {
        dismiss(animated: true, completion: nil)
    }
}
AVCaptureSession 的 startRunning() 调用正在阻塞您的主线程,因此造成了延迟。
正如 startRunning()
’s Apple Doc 中所说:
The startRunning() method is a blocking call which can take some time,
therefore you should perform session setup on a serial queue so that
the main queue isn't blocked (which keeps the UI responsive).
除了 Lyndsey Scott 的回答之外,还可以参考以下代码:
// Sets up and starts an AVCaptureSession entirely on a background queue,
// because startRunning() is a blocking call, then hands the running
// session back to the caller on the main thread.
// BUG FIX: in the original paste the device-discovery lines (which use
// `self?`) appeared OUTSIDE the async closure, which does not compile and
// contradicts the answer's own advice; they belong inside it.
DispatchQueue.global(qos: .userInitiated).async { [weak self] in
    let backCamera: AVCaptureDevice? = AVCaptureDevice.default(
        AVCaptureDevice.DeviceType.builtInWideAngleCamera,
        for: AVMediaType.video,
        position: AVCaptureDevice.Position.back)
    guard let camera = backCamera,
          let deviceInput = try? AVCaptureDeviceInput(device: camera) else {
        // Report failure on the main thread, mirroring the success path.
        DispatchQueue.main.async { self?.didReceive(captureSession: nil) }
        return
    }
    let deviceOutput = AVCapturePhotoOutput()
    let cameraSession = AVCaptureSession()
    cameraSession.beginConfiguration()
    cameraSession.sessionPreset = .low
    if cameraSession.canAddInput(deviceInput) {
        cameraSession.addInput(deviceInput)
    }
    if cameraSession.canAddOutput(deviceOutput) {
        cameraSession.addOutput(deviceOutput)
    }
    cameraSession.commitConfiguration()
    // Blocking call — safe here because we are off the main queue.
    cameraSession.startRunning()
    DispatchQueue.main.async {
        self?.didReceive(captureSession: cameraSession)
    }
}
Xcode 10.0, Swift 4.2
我正在使用 swift 4 并且有一个应用程序,人们可以通过我的应用程序打开他们的 phone 相机。我有一个名为 CameraController
的 ViewController,它具有默认值 UIView
,我在名为 CameraView
的顶部有一个视图,它显示用户相机和其他按钮.
当我单击其中一个按钮时,它会通过 segue (PlacesController
) 将我带到另一个视图控制器。当我关闭 PlacesController
我回到 CameraController
但是子视图现在需要大约 8 或 10 秒才能再次显示。
有什么方法可以在保持当前子视图的同时转到另一个 Controller?
同样的问题是,当我转到我的 segue 控制器 PlaceController
然后返回我的 CameraController
时,大约需要 8 或 10 秒才能看到相机和子图层。特别是下面这段代码,我想知道我是否可以让我的子层保持运行,因为等待 10 秒才能显示实在太久了。
self.CameraView.layer.insertSublayer(previewLayer!, at: 0)
这是我的代码:
/// View controller that shows a live camera preview inside `CameraView`
/// and segues to `PlacesController`.
class CameraController: UIViewController {

    @IBOutlet weak var CameraView: UIView!

    /// Layer that renders the live camera feed; inserted below the buttons.
    var previewLayer: AVCaptureVideoPreviewLayer?
    let captureSession = AVCaptureSession()
    // BUG FIX: `stillImageOutput` was referenced but never declared in the
    // original snippet. `AVCaptureStillImageOutput` is assumed because the
    // code sets `outputSettings` — TODO confirm against the real project.
    let stillImageOutput = AVCaptureStillImageOutput()

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // BUG FIX: the original wrapped this call in `DispatchQueue.main.async`,
        // which still runs the blocking `startRunning()` on the main thread and
        // causes the reported 8-10 s freeze. Layer setup stays on main; the
        // blocking work is moved to a background queue inside beginSession().
        self.beginSession()
    } // BUG FIX: this closing brace was missing in the original paste,
      // which left beginSession() nested inside viewDidAppear.

    /// Attaches the preview layer to `CameraView` (main thread), then
    /// configures outputs and starts the session on a background queue,
    /// because `AVCaptureSession.startRunning()` is a blocking call.
    func beginSession() {
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.CameraView.layer.insertSublayer(previewLayer!, at: 0)
        previewLayer?.frame = self.CameraView.layer.bounds
        previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill

        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            guard let self = self else { return }
            self.stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecType.jpeg]
            if self.captureSession.canAddOutput(self.stillImageOutput) {
                self.captureSession.addOutput(self.stillImageOutput)
            }
            // Blocking call — must never run on the main queue.
            self.captureSession.startRunning()
        }
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        if segue.identifier == "PlacesController" {
            // BUG FIX: the original annotated the constant as `ColorC` while
            // force-casting to `PlacesController` — a type mismatch that cannot
            // compile. Use a safe conditional cast instead of `as!`.
            guard let placesController = segue.destination as? PlacesController else { return }
            placesController.delegate = self
        }
    }
}
// MARK: - PlacesController

/// Destination of the "PlacesController" segue; dismisses itself to
/// return to the presenting CameraController.
class PlacesController: UIViewController {

    /// Closes this screen and returns control to the presenter.
    @IBAction func backAction(_ sender: Any) {
        dismiss(animated: true, completion: nil)
    }
}
AVCaptureSession 的 startRunning() 调用正在阻塞您的主线程,因此造成了延迟。
正如 startRunning()
’s Apple Doc 中所说:
The startRunning() method is a blocking call which can take some time, therefore you should perform session setup on a serial queue so that the main queue isn't blocked (which keeps the UI responsive).
除了 Lyndsey Scott 的回答之外,还可以参考以下代码:
// Sets up and starts an AVCaptureSession entirely on a background queue,
// because startRunning() is a blocking call, then hands the running
// session back to the caller on the main thread.
// BUG FIX: in the original paste the device-discovery lines (which use
// `self?`) appeared OUTSIDE the async closure, which does not compile and
// contradicts the answer's own advice; they belong inside it.
DispatchQueue.global(qos: .userInitiated).async { [weak self] in
    let backCamera: AVCaptureDevice? = AVCaptureDevice.default(
        AVCaptureDevice.DeviceType.builtInWideAngleCamera,
        for: AVMediaType.video,
        position: AVCaptureDevice.Position.back)
    guard let camera = backCamera,
          let deviceInput = try? AVCaptureDeviceInput(device: camera) else {
        // Report failure on the main thread, mirroring the success path.
        DispatchQueue.main.async { self?.didReceive(captureSession: nil) }
        return
    }
    let deviceOutput = AVCapturePhotoOutput()
    let cameraSession = AVCaptureSession()
    cameraSession.beginConfiguration()
    cameraSession.sessionPreset = .low
    if cameraSession.canAddInput(deviceInput) {
        cameraSession.addInput(deviceInput)
    }
    if cameraSession.canAddOutput(deviceOutput) {
        cameraSession.addOutput(deviceOutput)
    }
    cameraSession.commitConfiguration()
    // Blocking call — safe here because we are off the main queue.
    cameraSession.startRunning()
    DispatchQueue.main.async {
        self?.didReceive(captureSession: cameraSession)
    }
}
Xcode 10.0, Swift 4.2