Enhance CameraPreviewView and ScannerView with improved session management and UI updates; add pause and resume functionality for the camera, prevent duplicate detection processing, and ensure proper handling of scanning state transitions for a better user experience.

parent d40cb9eb99
commit e526f6cbce

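For orientation, the sketch below (not part of this commit) shows how the new pause/resume calls are intended to be driven from the view layer; ScannerViewModelStub, its print-based methods, and the button actions are hypothetical stand-ins for the real ScannerViewModel and ScannerView.

import SwiftUI

// Hypothetical stand-in for the real ScannerViewModel; only the calls used below are stubbed.
final class ScannerViewModelStub: ObservableObject {
    func stopScanning() { print("stopScanning") }
    func pauseCamera() { print("pauseCamera") }
    func resumeCamera() { print("resumeCamera") }
    func resetDetection() { print("resetDetection") }
    func restartScanning() { print("restartScanning") }
}

// Sketch of the intended state transitions: pause while a result is previewed,
// then clear detection state and resume when the user returns to scanning.
struct ScannerFlowSketch: View {
    @StateObject private var viewModel = ScannerViewModelStub()
    @State private var showPreviewPause = false

    var body: some View {
        VStack(spacing: 16) {
            Button("Preview result") {
                showPreviewPause = true
                viewModel.pauseCamera()        // stop the capture session while the result is shown
            }
            Button("Back to scanning") {
                showPreviewPause = false
                viewModel.resetDetection()     // clear detectedCodes and the processing flag
                viewModel.restartScanning()
                viewModel.resumeCamera()       // restart the capture session on a background queue
            }
        }
    }
}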
@@ -8,11 +8,13 @@ struct CameraPreviewView: UIViewRepresentable {
    func makeUIView(context: Context) -> UIView {
        let view = UIView()
        view.backgroundColor = .black
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.frame = view.bounds
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)
+       // Publish the preview layer on the main queue so SwiftUI state is not mutated during view creation
        DispatchQueue.main.async {
            self.previewLayer = previewLayer
        }
@@ -21,12 +23,12 @@ struct CameraPreviewView: UIViewRepresentable {
    }

    func updateUIView(_ uiView: UIView, context: Context) {
-       if let previewLayer = uiView.layer.sublayers?.first as? AVCaptureVideoPreviewLayer {
-           DispatchQueue.main.async {
-               previewLayer.frame = uiView.bounds
-               self.previewLayer = previewLayer
-           }
-       }
+       guard let previewLayer = uiView.layer.sublayers?.first as? AVCaptureVideoPreviewLayer else { return }
+       // Keep the preview layer sized to the hosting view and publish the reference on the main queue
+       DispatchQueue.main.async {
+           previewLayer.frame = uiView.bounds
+           self.previewLayer = previewLayer
+       }
    }
}

@@ -119,12 +119,16 @@ struct CodePositionMarker: View {
            return CGPoint(x: screenSize.width / 2, y: screenSize.height / 2)
        }
        guard previewLayer.session?.isRunning == true else {
            logWarning("Preview layer session not running, using screen center", className: "CodePositionMarker")
            // Fall back to the screen center when the session is not running
            return CGPoint(x: screenSize.width / 2, y: screenSize.height / 2)
        }
-       let metadataObject = code.bounds
+       // Use the detected code's bounds for positioning
+       let metadataObject = code.bounds
+       // Validate the bounds before converting them to layer coordinates
+       guard metadataObject.width > 0 && metadataObject.height > 0 else {
+           logWarning("Invalid metadata bounds: \(metadataObject), using screen center", className: "CodePositionMarker")
+           return CGPoint(x: screenSize.width / 2, y: screenSize.height / 2)
+       }
        let convertedPoint = previewLayer.layerPointConverted(fromCaptureDevicePoint: CGPoint(
            x: metadataObject.midX,
            y: metadataObject.midY
@@ -135,6 +139,7 @@ struct CodePositionMarker: View {
            return CGPoint(x: screenSize.width / 2, y: screenSize.height / 2)
        }

+       // Clamp the marker position so it stays at least 20 pt inside the screen edges
        let clampedX = max(20, min(screenSize.width - 20, convertedPoint.x))
        let clampedY = max(20, min(screenSize.height - 20, convertedPoint.y))

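The conversion and clamping above can be read as a small standalone helper. A minimal sketch, assuming bounds comes from an AVMetadataMachineReadableCodeObject in normalized capture-device coordinates and previewLayer is the layer backing the camera view; the helper name is illustrative, not from the commit.

import AVFoundation
import UIKit

// Sketch of the positioning logic in CodePositionMarker: convert the code's
// normalized bounds to layer coordinates, then clamp to the visible screen.
func markerPosition(for bounds: CGRect,
                    in previewLayer: AVCaptureVideoPreviewLayer,
                    screenSize: CGSize) -> CGPoint {
    // Fall back to the screen center if the bounds are degenerate.
    guard bounds.width > 0, bounds.height > 0 else {
        return CGPoint(x: screenSize.width / 2, y: screenSize.height / 2)
    }
    // Convert the center of the code from capture-device space to layer space.
    let converted = previewLayer.layerPointConverted(
        fromCaptureDevicePoint: CGPoint(x: bounds.midX, y: bounds.midY))
    // Keep the marker at least 20 pt inside the screen edges.
    return CGPoint(x: max(20, min(screenSize.width - 20, converted.x)),
                   y: max(20, min(screenSize.height - 20, converted.y)))
}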
@@ -183,12 +183,19 @@ struct ScannerView: View {
        logInfo("Selected code content: \(selectedCode.content)", className: "ScannerView")
        logInfo("Selected code bounds: \(selectedCode.bounds)", className: "ScannerView")
+       // Stop scanning as soon as a code has been selected
+       scannerViewModel.stopScanning()
+       logInfo("🛑 Scanning stopped", className: "ScannerView")
        // Create a HistoryItem and save it to Core Data
        let historyItem = createHistoryItem(from: selectedCode)
-       selectedHistoryItem = historyItem
-       navigateToDetail = true
+       // Defer navigation slightly so state updates settle before the detail view is pushed
+       DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
+           self.selectedHistoryItem = historyItem
+           self.navigateToDetail = true
+       }
        // Format the result for display
        let formattedResult = "Type: \(selectedCode.type)\nContent: \(selectedCode.content)"
@@ -226,6 +233,8 @@ struct ScannerView: View {
    private func pauseForPreview() {
        showPreviewPause = true
+       // Pause the camera session while the preview overlay is shown
+       scannerViewModel.pauseCamera()
    }

    private func resetToScanning() {
@@ -234,15 +243,11 @@ struct ScannerView: View {
        // Reset the UI state
        showPreviewPause = false

        // Clear detection state and restart scanning
        scannerViewModel.resetDetection()
        scannerViewModel.restartScanning()

-       // Check the session state after a delay
-       DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
-           logInfo("🔍 Checking scan session status", className: "ScannerView")
-           self.scannerViewModel.checkSessionStatus()
-       }
+       // Resume the camera session
+       scannerViewModel.resumeCamera()
        logInfo("✅ ScannerView reset to scanning state", className: "ScannerView")
    }

@@ -14,6 +14,7 @@ class ScannerViewModel: NSObject, ObservableObject, AVCaptureMetadataOutputObjectsDelegate {
    var captureSession: AVCaptureSession!
    private var metadataOutput: AVCaptureMetadataOutput?
    private var videoDevice: AVCaptureDevice?
+   private var isProcessingDetection = false // Prevents duplicate handling of detection callbacks

    override init() {
        super.init()
@@ -155,24 +156,80 @@ class ScannerViewModel: NSObject, ObservableObject, AVCaptureMetadataOutputObjectsDelegate {
    func stopScanning() {
        logInfo("🔄 Stopping scanning", className: "ScannerViewModel")
-       if captureSession?.isRunning == true {
-           DispatchQueue.global(qos: .userInitiated).async { [weak self] in
-               self?.captureSession?.stopRunning()
-               DispatchQueue.main.async {
-                   logInfo("✅ Scan session stopped", className: "ScannerViewModel")
-               }
-           }
-       } else {
-           logInfo("Scan session already stopped", className: "ScannerViewModel")
-       }
-   }
+       // Block detection handling while scanning is stopped
+       isProcessingDetection = true
+       // Stop the session synchronously if it is running
+       if captureSession?.isRunning == true {
+           captureSession?.stopRunning()
+           logInfo("✅ Scan session stopped", className: "ScannerViewModel")
+       } else {
+           logInfo("Scan session already stopped", className: "ScannerViewModel")
+       }
+   }
+
+   /// Pauses the camera while a scan result is being handled
+   func pauseCamera() {
+       logInfo("⏸️ Pausing camera", className: "ScannerViewModel")
+       // Block detection handling while paused
+       isProcessingDetection = true
+       if captureSession?.isRunning == true {
+           captureSession?.stopRunning()
+           logInfo("✅ Camera session paused", className: "ScannerViewModel")
+       } else {
+           logInfo("Camera session already stopped", className: "ScannerViewModel")
+       }
+   }
+
+   /// Resumes the camera after a pause
+   func resumeCamera() {
+       logInfo("▶️ Resuming camera", className: "ScannerViewModel")
+       // Camera permission is required before the session can be resumed
+       guard cameraAuthorizationStatus == .authorized else {
+           logWarning("❌ Camera permission not granted; cannot resume camera", className: "ScannerViewModel")
+           return
+       }
+       // Rebuild the session if it was never configured or has lost its inputs/outputs
+       if captureSession == nil || captureSession.inputs.isEmpty || captureSession.outputs.isEmpty {
+           logInfo("🔄 Re-creating the camera session", className: "ScannerViewModel")
+           setupCaptureSession()
+       }
+       // Allow detection handling again
+       isProcessingDetection = false
+       // Start the session if it is not already running
+       if captureSession?.isRunning != true {
+           logInfo("🚀 Starting the camera session", className: "ScannerViewModel")
+           DispatchQueue.global(qos: .userInitiated).async { [weak self] in
+               self?.captureSession?.startRunning()
+               DispatchQueue.main.async {
+                   if self?.captureSession?.isRunning == true {
+                       logInfo("✅ Camera session started successfully", className: "ScannerViewModel")
+                   } else {
+                       logWarning("⚠️ Camera session failed to start", className: "ScannerViewModel")
+                   }
+               }
+           }
+       }
+       logInfo("✅ Camera resumed", className: "ScannerViewModel")
+   }

    func resetDetection() {
        DispatchQueue.main.async {
            logInfo("🔄 Resetting detection state, clearing detectedCodes", className: "ScannerViewModel")
            self.detectedCodes = []
+           self.isProcessingDetection = false // Allow new detections to be processed
        }
    }
@@ -193,46 +250,29 @@ class ScannerViewModel: NSObject, ObservableObject, AVCaptureMetadataOutputObjectsDelegate {
    func restartScanning() {
        logInfo("🔄 Restarting scanning", className: "ScannerViewModel")
-       if captureSession?.isRunning == true {
-           logInfo("🔄 Stopping the currently running scan session", className: "ScannerViewModel")
-           captureSession?.stopRunning()
-       }
-       resetDetection()
-       DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
-           logInfo("🔄 Preparing to restart the scan session", className: "ScannerViewModel")
-           DispatchQueue.global(qos: .userInitiated).async {
-               self.captureSession?.startRunning()
-               DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
-                   if self.captureSession?.isRunning == true {
-                       logInfo("✅ Scan session restarted successfully", className: "ScannerViewModel")
-                   } else {
-                       logWarning("⚠️ Scan session failed to start; retrying", className: "ScannerViewModel")
-                       DispatchQueue.global(qos: .userInitiated).async {
-                           self.captureSession?.startRunning()
-                           DispatchQueue.main.async {
-                               if self.captureSession?.isRunning == true {
-                                   logInfo("✅ Scan session started on the second attempt", className: "ScannerViewModel")
-                               } else {
-                                   logError("❌ Scan session failed to start", className: "ScannerViewModel")
-                               }
-                           }
-                       }
-                   }
-               }
-           }
-       }
+       // Run the restart on the main thread so state changes and UI updates stay consistent
+       DispatchQueue.main.async { [weak self] in
+           guard let self = self else { return }
+           // Stop the session if it is still running
+           if self.captureSession?.isRunning == true {
+               logInfo("🔄 Stopping the currently running scan session", className: "ScannerViewModel")
+               self.captureSession?.stopRunning()
+           }
+           // Clear previously detected codes
+           self.detectedCodes = []
+           // Give the session a moment to settle before restarting
+           DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) {
+               logInfo("🔄 Preparing to restart the scan session", className: "ScannerViewModel")
+               // startRunning() blocks, so call it off the main thread
+               DispatchQueue.global(qos: .userInitiated).async { [weak self] in
+                   self?.captureSession?.startRunning()
+                   // Verify shortly afterwards that the session actually started
+                   DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
+                       if self?.captureSession?.isRunning == true {
+                           logInfo("✅ Scan session restarted successfully", className: "ScannerViewModel")
+                       } else {
+                           logWarning("⚠️ Scan session failed to start", className: "ScannerViewModel")
+                       }
+                   }
+               }
+           }
+       }
    }
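The restart pattern above (stop if needed, start on a background queue, then verify isRunning) can be condensed into a helper. A minimal sketch under the assumption that the session is already configured; the function name is illustrative, not from the commit.

import AVFoundation

// Illustrative helper: restart an already-configured AVCaptureSession off the main
// thread, then report on the main queue whether it is actually running.
func restartSession(_ session: AVCaptureSession,
                    completion: @escaping (Bool) -> Void) {
    DispatchQueue.global(qos: .userInitiated).async {
        if session.isRunning {
            session.stopRunning()      // blocking, so keep it off the main thread
        }
        session.startRunning()         // also blocking
        DispatchQueue.main.async {
            completion(session.isRunning)
        }
    }
}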
@@ -245,8 +285,17 @@ class ScannerViewModel: NSObject, ObservableObject, AVCaptureMetadataOutputObjectsDelegate {
                        didOutput metadataObjects: [AVMetadataObject],
                        from connection: AVCaptureConnection) {
+       // Ignore callbacks while a previous detection is still being handled
+       guard !isProcessingDetection else {
+           logInfo("⚠️ Already processing a detection result; ignoring new detection", className: "ScannerViewModel")
+           return
+       }
        logInfo("metadataOutput called; detected \(metadataObjects.count) objects", className: "ScannerViewModel")
+       // Mark detection as in progress until resetDetection() clears it
+       isProcessingDetection = true
+       // Vibrate to give feedback for the detection
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))

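Condensed view of the duplicate-detection guard added above; DetectionGate is a trimmed, hypothetical stand-in for the real ScannerViewModel that keeps only the flag logic.

import AVFoundation
import AudioToolbox

// Once a batch of codes is being handled, further delegate callbacks are dropped
// until resetDetection() clears the flag.
final class DetectionGate: NSObject, AVCaptureMetadataOutputObjectsDelegate {
    private var isProcessingDetection = false

    func metadataOutput(_ output: AVCaptureMetadataOutput,
                        didOutput metadataObjects: [AVMetadataObject],
                        from connection: AVCaptureConnection) {
        // Drop callbacks while a previous detection is still being processed.
        guard !isProcessingDetection else { return }
        isProcessingDetection = true
        // Vibrate once per accepted detection, as in the commit.
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
        // ... hand metadataObjects to the UI here ...
    }

    func resetDetection() {
        isProcessingDetection = false
    }
}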