I have a SwiftUI app with three views:
- NewPost (a SwiftUI view containing a TextField)
- RecordViewWrapper (which embeds RecordView, which in turn wraps RecordVideoViewController from the Storyboard)
- VideoPlaybackViewController (RecordVideoViewController segues to this VC)
I want to take the user's input from the TextField in NewPost, pass it along to RecordViewWrapper, and eventually display it in a label on VideoPlaybackViewController.
My code is below. The error I get when I try to navigate from NewPost to RecordViewWrapper is:
"Thread 1: Fatal error: No ObservableObject of type Post2 found. A View.environmentObject(_:) for Post2 may be missing as an ancestor of this view."
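From the error message, my understanding is that an @EnvironmentObject has to be injected by an ancestor view, typically where the root hosting controller is created. For reference, here is a minimal sketch of what I believe that injection looks like, assuming the default UIKit-lifecycle SceneDelegate template (the sketch is illustrative, not my exact file):

import UIKit
import SwiftUI

class SceneDelegate: UIResponder, UIWindowSceneDelegate {
    var window: UIWindow?

    func scene(_ scene: UIScene, willConnectTo session: UISceneSession,
               options connectionOptions: UIScene.ConnectionOptions) {
        // Inject the shared model once at the root so every descendant SwiftUI view
        // (NewPost, RecordViewWrapper, RecordView) can resolve @EnvironmentObject.
        let contentView = NewPost().environmentObject(Post2())

        if let windowScene = scene as? UIWindowScene {
            let window = UIWindow(windowScene: windowScene)
            window.rootViewController = UIHostingController(rootView: contentView)
            self.window = window
            window.makeKeyAndVisible()
        }
    }
}

I'm also aware that @EnvironmentObject only resolves inside SwiftUI views, so I'm not sure it can reach the UIKit view controllers at all.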
NewPost
import SwiftUI
import Combine

struct NewPost: View {
    @State var post: String = ""
    @EnvironmentObject var post2: Post2
    @State var selection: Int? = nil

    var body: some View {
        NavigationView {
            VStack {
                Text("Create New Post")
                TextField("Placeholder", text: $post)
                // continue button
                NavigationLink(destination: RecordViewWrapper().environmentObject(post2), tag: 1, selection: $selection) {
                    Button(action: {
                        self.post2.name = self.post
                        print(self.post2.name)
                        self.selection = 1
                    }) {
                        Text("Next")
                    }
                }
                Spacer()
            }
            .navigationBarTitle("")
            .navigationBarHidden(true)
        }
    }
}
Post2 (model)
import Foundation

class Post2: ObservableObject {
    @Published var name = ""
}
RecordViewWrapper
import SwiftUI
import Combine
import UIKit

struct RecordViewWrapper: View {
    @EnvironmentObject var post2: Post2

    var body: some View {
        NavigationView {
            ZStack {
                RecordView().environmentObject(post2)
            }
            .navigationBarTitle("")
            .navigationBarHidden(true)
        }
    }
}
RecordView
import SwiftUI
import UIKit
import AVFoundation
import Combine

struct RecordView: UIViewControllerRepresentable {
    let camPreview = UIView()
    let cameraButton = UIButton()
    let captureSession = AVCaptureSession()
    let movieOutput = AVCaptureMovieFileOutput()
    var previewLayer: AVCaptureVideoPreviewLayer!
    var activeInput: AVCaptureDeviceInput!
    var outputURL: URL!
    @EnvironmentObject var post2: Post2

    typealias UIViewControllerType = RecordVideoViewController

    func makeUIViewController(context: UIViewControllerRepresentableContext<RecordView>) -> RecordView.UIViewControllerType {
        let mainStoryboard: UIStoryboard = UIStoryboard(name: "Main", bundle: nil)
        let mainViewController: RecordVideoViewController = mainStoryboard.instantiateViewController(withIdentifier: "RecordVideoViewController") as! RecordVideoViewController
        return mainViewController
    }

    func updateUIViewController(_ uiViewController: RecordView.UIViewControllerType, context: UIViewControllerRepresentableContext<RecordView>) {
        //
        print("updateUIViewController \(uiViewController)")
    }
}
RecordVideoViewController
import UIKit
import AVFoundation
import SwiftUI
import Combine

class RecordVideoViewController: UIViewController, AVCaptureFileOutputRecordingDelegate, ObservableObject {
    @IBOutlet weak var camPreview: UIView!
    @IBOutlet weak var cameraButton: UIButton!
    @EnvironmentObject var post2: Post2

    let captureSession = AVCaptureSession()
    let movieOutput = AVCaptureMovieFileOutput()
    var previewLayer: AVCaptureVideoPreviewLayer!
    var activeInput: AVCaptureDeviceInput!
    var outputURL: URL!

    override func viewDidLoad() {
        super.viewDidLoad()
        if setupSession() {
            setupPreview()
            startSession()
        }
        cameraButton.isUserInteractionEnabled = true
        let cameraButtonRecognizer = UITapGestureRecognizer(target: self, action: #selector(RecordVideoViewController.startCapture))
        cameraButton.addGestureRecognizer(cameraButtonRecognizer)
        camPreview.addSubview(cameraButton)
        print(self.post2.name)
    }
    @IBAction func unwindToViewController(_ unwindSegue: UIStoryboardSegue) {
        let sourceViewController = unwindSegue.source
        // Use data from the view controller which initiated the unwind segue
    }

    func setupPreview() {
        // Configure previewLayer
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = camPreview.bounds
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        camPreview.layer.addSublayer(previewLayer)
    }

    // MARK: - Setup Camera
    func setupSession() -> Bool {
        captureSession.sessionPreset = AVCaptureSession.Preset.high

        // Setup Camera
        let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)!
        //AVCaptureDevice.default(for: AVMediaType.video)!
        do {
            let input = try AVCaptureDeviceInput(device: camera)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                activeInput = input
            }
        } catch {
            print("Error setting device video input: \(error)")
            return false
        }

        // Setup Microphone
        let microphone = AVCaptureDevice.default(for: AVMediaType.audio)!
        do {
            let micInput = try AVCaptureDeviceInput(device: microphone)
            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }
        } catch {
            print("Error setting device audio input: \(error)")
            return false
        }

        // Movie output
        if captureSession.canAddOutput(movieOutput) {
            captureSession.addOutput(movieOutput)
        }

        return true
    }

    func setupCaptureMode(_ mode: Int) {
        // Video Mode
    }

    // MARK: - Camera Session
    func startSession() {
        if !captureSession.isRunning {
            videoQueue().async {
                self.captureSession.startRunning()
            }
        }
    }

    func stopSession() {
        if captureSession.isRunning {
            videoQueue().async {
                self.captureSession.stopRunning()
            }
        }
    }

    func videoQueue() -> DispatchQueue {
        return DispatchQueue.main
    }

    func currentVideoOrientation() -> AVCaptureVideoOrientation {
        var orientation: AVCaptureVideoOrientation
        switch UIDevice.current.orientation {
        default:
            orientation = AVCaptureVideoOrientation.portrait
        }
        return orientation
    }

    @objc func startCapture() {
        startRecording()
    }

    func tempURL() -> URL? {
        let directory = NSTemporaryDirectory() as NSString
        if directory != "" {
            let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
            return URL(fileURLWithPath: path)
        }
        return nil
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        let vc = segue.destination as! VideoPlaybackViewController
        vc.videoURL = sender as? URL
    }

    func startRecording() {
        if movieOutput.isRecording == false {
            let connection = movieOutput.connection(with: AVMediaType.video)
            if (connection?.isVideoOrientationSupported)! {
                connection?.videoOrientation = currentVideoOrientation()
            }
            if (connection?.isVideoStabilizationSupported)! {
                connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
            }
            let device = activeInput.device
            if (device.isSmoothAutoFocusSupported) {
                do {
                    try device.lockForConfiguration()
                    device.isSmoothAutoFocusEnabled = false
                    device.unlockForConfiguration()
                } catch {
                    print("Error setting configuration: \(error)")
                }
            }
            outputURL = tempURL()
            movieOutput.startRecording(to: outputURL, recordingDelegate: self)
        } else {
            stopRecording()
        }
    }

    func stopRecording() {
        if movieOutput.isRecording == true {
            movieOutput.stopRecording()
        }
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    }

    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if (error != nil) {
            print("Error recording movie: \(error!.localizedDescription)")
        } else {
            let videoRecorded = outputURL! as URL
            performSegue(withIdentifier: "showVideo", sender: videoRecorded)
        }
    }
}
VideoPlaybackViewController
import UIKit
import AVFoundation
import SwiftUI
import Combine

class VideoPlaybackViewController: UIViewController {
    @EnvironmentObject var post2: Post2

    let avPlayer = AVPlayer()
    let queuePlayer = AVQueuePlayer()
    var avPlayerLayer: AVPlayerLayer!
    var videoURL: URL!

    @IBOutlet weak var videoView: UIView!
    @IBOutlet weak var submitButton: UIButton!
    @IBOutlet weak var postLabel: UILabel!

    override func viewDidLoad() {
        super.viewDidLoad()
        avPlayerLayer = AVPlayerLayer(player: avPlayer)
        avPlayerLayer.frame = view.bounds
        avPlayerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        videoView.layer.insertSublayer(avPlayerLayer, at: 0)
        view.layoutIfNeeded()

        let playerItem = AVPlayerItem(url: videoURL as URL)
        avPlayer.replaceCurrentItem(with: playerItem)
        avPlayer.play()

        print(self.post2.name)
        self.postLabel.text = self.post2.name
    }

    @IBAction func submitButtonPressed(_ sender: Any) {
    }
}
[Storyboard screenshot showing RecordVideoViewController and VideoPlaybackViewController]