构建一个网页浏览量,作为视频的垂直轮播,在视口内自动播放并在视口外暂停。到目前为止,自动播放似乎在起作用(尽管有点老套,欢迎提出改进建议)。Feed/pageview 中的一个重要功能是,当视频结束时,它会自动滚动到下一页/视频。我正在使用的当前逻辑是在视频结束/按下按钮时简单地增加选项卡选择/当前页面。这确实使它移动到下一页,但自动播放变得一团糟,因为应用程序似乎认为它仍在旧视频上,简而言之,选项卡已更改,但子视图的内容在内存中保持不变。附加父视图和子视图的源代码。
ByteFeedView.swift
import SwiftUI
import ActivityIndicators
struct ByteFeedView: View {
var showIndicator=true
@State private var bytes: [ByteFeedQuery.Data.Foryoubyte] = []
@State var currentPage:Int = 0
@State var selection = 1
@State var limit: Int = 10
@State var offset: Int = 10
var body: some View {
ZStack {
Color.black.ignoresSafeArea() //Bacgkround of the whole screen is black and ignored safe area
GeometryReader { proxy in
if(bytes.isEmpty){
VStack(alignment: .leading){
Text("Loading bytes for you").foregroundColor(.white)
} .frame(width: 400, height: 650)
}
else{
TabView(selection: $currentPage) {
ForEach(0..<bytes.count, id: \.self){ index in
VStack{
Button("Next byte"){
withAnimation { self.currentPage = self.currentPage + 1 }} //incrementing currentPage to programmitcaly change current tab in view
Byte(topic_image: bytes[index].video.topic.imageUrl!,topic: bytes[index].video.topic.name, currentPage: $currentPage, byteId: Int(bytes[index].id) ?? 0, video_url:bytes[index].video.url ?? "", videoTitle: bytes[index].video.title ?? "", startTimestamp: bytes[index].startTimestamp, endTimestamp: bytes[index].endTimestamp).tag(Int(bytes[index].id)) .onChange(of: currentPage, perform: { value in
print("\nValue is \(value)")
print("byteid is \(bytes[index].id)")
print("current page is \(currentPage)")
})
}.onAppear(){
if(bytes.count - currentPage == 4){
Network.shared.apollo.fetch(query: ByteFeedQuery(limit:limit, offset: offset)) { result in
switch result {
case .success(let graphQLResult):
if let bytes = graphQLResult.data?.foryoubytes {
DispatchQueue.main.async {
self.bytes += bytes
}
offset = self.bytes.count + 10
}
case .failure(let error):
print(error)
}
}
}
}
}
.rotationEffect(.degrees(-90)) // Rotate content
.frame(
width: proxy.size.width,
height: proxy.size.height
)
}
.frame(
width: proxy.size.height, // Height & width swap
height: proxy.size.width
)
.rotationEffect(.degrees(90), anchor: .topLeading) // Rotate TabView
.offset(x: proxy.size.width) // Offset back into screens bounds
.id(bytes.count)
.tabViewStyle(
PageTabViewStyle(indexDisplayMode: .always)
)
}
}
}
.onAppear(){
Network.shared.apollo.fetch(query: ByteFeedQuery(limit:limit, offset: 0)) { result in
switch result {
case .success(let graphQLResult):
if let bytes = graphQLResult.data?.foryoubytes {
DispatchQueue.main.async {
self.bytes = bytes
}
}
case .failure(let error):
print(error)
}
}
}
}
}
字节.swift
import SwiftUI
import AVKit
import AVFAudio
import GoogleSignIn
struct Byte : View{
@State var seekPos = 0.0
//let analyticsModel = AnalyticsViewModel()
let buildNumber: String = Bundle.main.object(forInfoDictionaryKey: "CFBundleVersion") as! String
@State private var playing: Bool = false
@State private var debouncer = 0
@Binding var thisViewTag : Int
@Binding var currentPage : Int
@State var videoTitle : String
@State var byteId : Int
@State var video_url :String
@State var topic: String
@State var topic_image: String
@State var startTimestamp : String
@State var endTimestamp : String
@State private var embeddedVideoRate: Float = 0.0
@State private var embeddedVideoVolume: Float = 0.0
@State var didAppear = false
@State var appearCount = 0
var body: some View{
VStack(alignment: .leading){
GeometryReader { gp in
VStack{
Text("Build #\(String(buildNumber))").foregroundColor(Color.white).fontWeight(.bold)
Text("\(videoTitle)").foregroundColor(Color.white)
HStack{
Text("Page is \(currentPage)").foregroundColor(Color.white)
Text("Byte is \(byteId)").foregroundColor(Color.white)
}
ZStack{
bytePlayer(video_url: video_url)
if(!playing){
Button(action: {
if(playing){
playing=false
}
else{
playing=true
}
}) {
Image(systemName: "play.circle.fill").resizable()
.frame(width: 45.0, height: 45.0)
}
.foregroundColor(.white)
.padding(.all)
}
}
}
}
}
}
init(topic_image:String, topic:String, currentPage:Binding<Int>, byteId: Int, video_url: String, videoTitle : String, startTimestamp: String, endTimestamp:String) {
self._thisViewTag = currentPage
self._currentPage = currentPage
self.byteId = byteId
self.video_url = video_url
self.videoTitle = videoTitle
self.topic_image = topic_image
self.topic = topic
self.startTimestamp = startTimestamp
self.endTimestamp = endTimestamp
}
private func metaData(title:String, topic_image:String, topic:String)-> some View{
VStack(alignment: .leading){
HStack{
RemoteImage(url:topic_image)
.aspectRatio(contentMode: .fit)
.frame(width: 40).clipShape(Circle())
Text(topic)
.fontWeight(.heavy).foregroundColor(Color.white).multilineTextAlignment(.trailing).frame(maxWidth: .infinity, alignment: .leading).font(.system(size: 15))
}
Text(videoTitle)
.fontWeight(.medium).foregroundColor(Color.white).frame(maxWidth: .infinity, alignment: .leading).font(.system(size: 15))
HStack(spacing: 8){
Text("100 views")
.fontWeight(.semibold).foregroundColor(Color.white).font(.system(size: 15))
Circle().foregroundColor(Color.white).frame(width:4,height:4)
Text("View video")
.fontWeight(.semibold).foregroundColor(Color.red).font(.system(size: 15))
}.padding(.top,1)
}.padding(.bottom,12)
}
private func bytePlayer(video_url: String) -> some View {
VStack {
Spacer()
if #available(iOS 15.0, *) {
BytePlayerView(
videoUrl: video_url,
rate: $embeddedVideoRate,
volume: $embeddedVideoVolume,playing: $playing,startTimestamp: $startTimestamp, endTimestamp: $endTimestamp,currentPage: $currentPage, byteId: $byteId).frame(height:250)
.onAppear(){
if(currentPage == 0){
playing=true
}
}.onDisappear{
print("\nbyteid out of view is \(byteId)")
playing = false
}
.onChange(of: currentPage, perform: { value in
//print("\nvalue is \(value)")
//print("byteid is \(byteId)")
//print("current page is \(currentPage)")
//print("viewtag is \(thisViewTag)")
if value == thisViewTag {
playing = true
debouncer += 1
if debouncer == 1 {
}
} else {
debouncer = 0
}
})
.onTapGesture {
if(playing){
playing=false
}
else{
playing=true
}
}
}
Spacer()
}
}
}
字节播放器.swift
import Foundation
import SwiftUI
import AVFoundation
import AVKit
struct BytePlayerView: UIViewRepresentable {
var analyticsModel = AnalyticsViewModel()
let videoUrl : String
@Binding var rate: Float
@Binding var volume: Float
@Binding var playing: Bool
@Binding var startTimestamp: String
@Binding var endTimestamp: String
@Binding var currentPage: Int
@Binding var byteId: Int
func makeUIView(context: Context) -> BytePlayerUIView {
analyticsModel.trackByteWatching(byteId: String(byteId), videoTitle:"Hello")
let view = BytePlayerUIView(videoUrl: videoUrl,currentPage:$currentPage)
let endTimestampInMiliseconds:Double = Double(endTimestamp)!
let startTimestampInMiliseconds: Double = Double(startTimestamp)!
view.jumpToStartTimestamp(startTimestamp: startTimestampInMiliseconds/1000)
view.addTimer(startTimestamp: startTimestampInMiliseconds/1000, endTimestamp: endTimestampInMiliseconds/1000)
view.overrideRinger()
return view
}
func dismantleUIView(_ uiView: BytePlayerUIView) {
uiView.pause()
print("Should dismantle")
}
func updateUIView(_ uiView: BytePlayerUIView, context: Context) {
if(playing){
uiView.play()
}
else{
uiView.pause()
}
}
final class BytePlayerUIView: UIView{
@Binding var currentPage:Int
public var player: AVPlayer?
var timeObserverToken: Any?
@Published private var playing = true
var currenTime = 0.0
var timertest: Timer?
var currenTimeString = "00:00"
func jumpToStartTimestamp(startTimestamp: Double){
let startTimestamp_CMTime=CMTime(seconds:startTimestamp, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
self.player!.seek(to: startTimestamp_CMTime, toleranceBefore: .zero, toleranceAfter: .zero)
}
var isPlaying: Bool {
if (self.player!.rate != 0 && self.player!.error == nil) {
return true
} else {
return false
}
}
func overrideRinger(){
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback)
//print("AVAudioSession Category Playback OK")
do {
try AVAudioSession.sharedInstance().setActive(true)
//print("AVAudioSession is Active")
} catch _ as NSError {
//print(error.localizedDescription)
}
} catch _ as NSError {
//print(error.localizedDescription)
}
}
func addTimer(startTimestamp: Double,endTimestamp: Double){
timertest=Timer.scheduledTimer(withTimeInterval: 1/60, repeats: true) { timer in
if self.player!.currentItem?.status == .readyToPlay {
let timeElapsed = CMTimeGetSeconds(self.player!.currentTime())
let endTimestamp_CMTime=CMTime(seconds:endTimestamp, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
let secs = Int(timeElapsed)
let endTimeStampSecs = CMTimeGetSeconds(endTimestamp_CMTime)
let startTimestamp_CMTime=CMTime(seconds:startTimestamp, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
self.currenTime = timeElapsed
self.currenTimeString = NSString(format: "%02d:%02d", secs/60, secs%60) as String
if(endTimeStampSecs-timeElapsed<=2){
self.setVolume( 0.3)
}
if(timeElapsed >= endTimeStampSecs){
self.timertest?.invalidate()
self.player?.seek(to: startTimestamp_CMTime, toleranceBefore: .zero, toleranceAfter: .zero)
self.pause()
self.moveToNextPage()
}
}
}
}
func moveToNextPage(){
DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) {
withAnimation { self.currentPage = self.currentPage + 1 }
}
}
fileprivate let seekDuration: Float64 = 5
@IBAction func doForwardJump(startTimestamp:String) {
let moveForword : Float64 = 5
if player == nil { return }
if let duration = player!.currentItem?.duration {
let playerCurrentTime = CMTimeGetSeconds(player!.currentTime())
let newTime = playerCurrentTime + moveForword
if newTime < CMTimeGetSeconds(duration)
{
let selectedTime: CMTime = CMTimeMake(value: Int64(newTime * 1000 as Float64), timescale: 1000)
player!.seek(to: selectedTime)
}
player?.pause()
player?.play()
}
}
func playPause() {
if playing {
self.playing=false
player?.pause()
} else {
self.playing=true
player?.play()
}
}
func play(){
self.playing=true;
player?.playImmediately(atRate: 1)
}
func pause(){
self.playing=false;
player?.pause()
}
func cleanup() {
self.playing=false;
player?.pause()
//player?.removeAllItems()
player = nil
}
func setVolume(_ value: Float) {
player?.volume = value
}
func setRate(_ value: Float) {
player?.rate = value
}
private var token: NSKeyValueObservation?
override class var layerClass: AnyClass {
return AVPlayerLayer.self
}
var playerLayer: AVPlayerLayer {
return layer as! AVPlayerLayer
}
init(videoUrl : String, currentPage:Binding<Int>){
self._currentPage=currentPage
player = AVPlayer(url: URL(string: videoUrl)!)
super.init(frame: .zero)
playerLayer.player = player
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
}
如果这些信息还不够,可以分享任何其他需要的信息。