From 07522b16c32f4188fb69027fcd20be7ad1f9fe00 Mon Sep 17 00:00:00 2001 From: Grimless Date: Tue, 5 Sep 2023 11:33:44 -0400 Subject: [PATCH] Create a new controls UI layer for the DamusVideoPlayer, providing play/pause, volume, content-scale, and playback-rate controls, as well as a seek/timecode bar. Closes: https://github.com/damus-io/damus/pull/1537 Signed-off-by: William Casarin --- damus/Components/ImageCarousel.swift | 2 +- damus/Views/Images/ImageContainerView.swift | 2 +- damus/Views/Video/DamusVideoPlayer.swift | 400 +++++++++++++++++++- damus/Views/Video/VideoPlayer.swift | 116 +++--- 4 files changed, 441 insertions(+), 79 deletions(-) diff --git a/damus/Components/ImageCarousel.swift b/damus/Components/ImageCarousel.swift index f99b672959..774e19fa37 100644 --- a/damus/Components/ImageCarousel.swift +++ b/damus/Components/ImageCarousel.swift @@ -125,7 +125,7 @@ struct ImageCarousel: View { open_sheet = true } case .video(let url): - DamusVideoPlayer(url: url, model: video_model(url), video_size: $video_size) + DamusVideoPlayer(url: url, model: video_model(url), video_size: $video_size, showControls: false) .onChange(of: video_size) { size in guard let size else { return } diff --git a/damus/Views/Images/ImageContainerView.swift b/damus/Views/Images/ImageContainerView.swift index cd4499a793..5429508e56 100644 --- a/damus/Views/Images/ImageContainerView.swift +++ b/damus/Views/Images/ImageContainerView.swift @@ -47,7 +47,7 @@ struct ImageContainerView: View { case .image(let url): Img(url: url) case .video(let url): - DamusVideoPlayer(url: url, model: cache.get_video_player_model(url: url), video_size: .constant(nil)) + DamusVideoPlayer(url: url, model: cache.get_video_player_model(url: url), video_size: .constant(nil), showControls: true) } } } diff --git a/damus/Views/Video/DamusVideoPlayer.swift b/damus/Views/Video/DamusVideoPlayer.swift index ca64e42edd..a2c71fd3c0 100644 --- a/damus/Views/Video/DamusVideoPlayer.swift +++ b/damus/Views/Video/DamusVideoPlayer.swift @@ -8,7 +8,7 @@ import SwiftUI /// get coordinates in Global reference frame given a Local point & geometry -func globalCoordinate(localX x: CGFloat, localY y: CGFloat, +fileprivate func globalCoordinate(localX x: CGFloat, localY y: CGFloat, localGeometry geo: GeometryProxy) -> CGPoint { let localPoint = CGPoint(x: x, y: y) return geo.frame(in: .global).origin.applying( @@ -16,21 +16,45 @@ func globalCoordinate(localX x: CGFloat, localY y: CGFloat, ) } +fileprivate extension Double { + /// Formats a duration in seconds as a human-readable timecode of hours, minutes, and seconds (e.g. "02:41:50"). + var secondsToHumanReadableTimecode: String { + guard self > 0.0 else { return "00:00" } + let totalSeconds = UInt64(self) + let hours = totalSeconds / 3600 + let minutes = totalSeconds % 3600 / 60 + let seconds = totalSeconds % 3600 % 60 + if hours > 0 { + return String(format: "%02d:%02d:%02d", hours, minutes, seconds) + } + else { + return String(format: "%02d:%02d", minutes, seconds) + } + } +} + +/// Damus-specific video player that displays controls for seeking, play/pause, volume control, and more. struct DamusVideoPlayer: View { - var url: URL + let url: URL @ObservedObject var model: VideoPlayerModel @Binding var video_size: CGSize?
@EnvironmentObject private var orientationTracker: OrientationTracker + @State private var isOptionsPopoverPresented = false + @State private var isVolumeSliderPresented = false + @State private var playbackSpeedOption: PlaybackSpeedMenu.PlaybackSpeedOption = .normal - var mute_icon: String { - if model.has_audio == false || model.muted { - return "speaker.slash" - } else { - return "speaker" - } + // Track if the video was playing before seeking so we can intelligently resume playing after changing the seek time + @State private var wasPlayingBeforeSeek = true + + @State var showControls: Bool + + @State var onVideoTapped: (() -> Void)? + + var isMuted: Bool { + return model.has_audio == false || model.muted } - var mute_icon_color: Color { + var muteIconColor: Color { switch self.model.has_audio { case .none: return .white @@ -39,32 +63,195 @@ struct DamusVideoPlayer: View { } } - var MuteIcon: some View { + func hideControls() -> Self { + showControls = false + return self + } + + private func VideoButton(imageName: String) -> some View { ZStack { Circle() .opacity(0.2) .frame(width: 32, height: 32) .foregroundColor(.black) - Image(systemName: mute_icon) - .padding() - .foregroundColor(mute_icon_color) + Image(systemName: imageName) + .padding([.leading, .trailing], 1) + .padding([.top, .bottom], nil) + .foregroundColor(.white) } } + private var VolumeButton: some View { + func imageName() -> String { + if isMuted { + return "speaker.slash" + } + else { + switch model.volume { + case ..<0.01: + return "speaker.slash.fill" + case ..<0.3: + return "speaker.wave.1.fill" + case ..<0.6: + return "speaker.wave.2.fill" + default: + return "speaker.wave.3.fill" + } + } + } + return VideoButton(imageName: imageName()) + .onTapGesture { + isVolumeSliderPresented.toggle() + model.muted.toggle() + } + } + + private var PlayPauseButton: some View { + VideoButton(imageName: model.play ? "pause" : "play") + .onTapGesture { + self.model.play.toggle() + } + } + + private var SettingsButton: some View { + VideoButton(imageName: isOptionsPopoverPresented ? "gearshape.fill" : "gearshape") + .onTapGesture { + isOptionsPopoverPresented.toggle() + } + .popover(isPresented: $isOptionsPopoverPresented, + attachmentAnchor: .point(.top), + arrowEdge: .top) { + if #available(iOS 16.4, macOS 13.3, *) { + OptionsMenu + .presentationCompactAdaptation(.popover) + } + else { + OptionsMenu + } + } + } + + private var FullscreenButton: some View { + VideoButton(imageName: self.model.contentMode == .scaleAspectFill ? 
"arrow.down.right.and.arrow.up.left" : "arrow.up.left.and.arrow.down.right") + .onTapGesture { + switch self.model.contentMode { + case .scaleAspectFit: + self.model.contentMode = .scaleAspectFill + case .scaleAspectFill: + self.model.contentMode = .scaleAspectFit + default: + break + } + } + } + + private var VideoTime: some View { + Text("\(self.model.currentTime.secondsToHumanReadableTimecode) / \(self.model.totalDuration.secondsToHumanReadableTimecode)") + .padding([.leading, .trailing], 1) + .padding([.top, .bottom], nil) + .monospacedDigit() + .foregroundColor(muteIconColor) + .fixedSize(horizontal: true, vertical: true) + .onTapGesture { + self.model.play.toggle() + } + } + + private var OptionsMenu: some View { + PlaybackSpeedMenu(selected: $playbackSpeedOption) + .onChange(of: playbackSpeedOption) { + self.model.playbackRate = $0.rawValue + } + } + var body: some View { GeometryReader { geo in let localFrame = geo.frame(in: .local) let centerY = globalCoordinate(localX: 0, localY: localFrame.midY, localGeometry: geo).y let delta = localFrame.height / 2 + ZStack(alignment: .bottomTrailing) { + VideoPlayer(url: url, model: model) - if model.has_audio == true { - MuteIcon - .zIndex(11.0) - .onTapGesture { - self.model.muted = !self.model.muted + .zIndex(0.0) +// .onAppear { +// self.model.start() +// } + .onTapGesture { + if let tapHandler = onVideoTapped { + tapHandler() + } + else { + showControls.toggle() + } + } + + if showControls { // TODO: Animate controls in/out + Group { // Group for all overlayed controls + VStack(alignment: .trailing) { + // Volume slider + if isVolumeSliderPresented { // TODO: Animate volume slider in/out + VolumeSlider(volume: $model.volume) + { editing in + // Changing the volume value unmutes + if self.model.muted { + _ = self.model.set(muted: false) + } + } + .frame(width: 12, height: 130) + .padding([.leading, .trailing], 8) + .padding([.top, .bottom], 1) + } + + // Seek slider + SeekSlider(time: Binding(get: { self.model.currentTime }, + set: { _ in }), + totalDuration: self.model.totalDuration) + { editing in + if editing { + self.wasPlayingBeforeSeek = model.play + model.stop() + } + } + onTimeFinalized: { time in + _ = model.set(seekSeconds: time) + if self.wasPlayingBeforeSeek { + model.start() + } } + .padding([.leading, .trailing], 8) + .frame(height: 10) + + ZStack(alignment: .bottomTrailing) { // Group bottom elements + PlayPauseButton + .frame(maxWidth: .infinity, alignment: .leading) + .padding([.leading, .trailing], 8) + + VideoTime + .frame(maxWidth: .infinity, alignment: .center) + + // Settings, Fullscreen, and Volume + HStack { + // TODO: Changing playback speed is currently broken for unknown reasons. 
+ SettingsButton + + if model.has_audio == true { + FullscreenButton + + VolumeButton + .padding([.trailing], 8) + } + else { + FullscreenButton + // Keep button spacing even if the VolumeButton is not present + .padding([.trailing], 32 + 16) + } + } + } + } + } + .zIndex(1.0) } } .onChange(of: model.size) { size in @@ -84,12 +271,187 @@ struct DamusVideoPlayer: View { } } } + .onDisappear { + model.stop() + } + } +} +fileprivate extension DamusVideoPlayer { + struct PlaybackSpeedMenu : View { + enum PlaybackSpeedOption : Float, CaseIterable, Identifiable { + case half = 0.5 + case normal = 1.0 + case oneAndQuarter = 1.25 + case oneAndHalf = 1.5 + case double = 2.0 + var id: String { + return String(format: "%1.2f", arguments: [self.rawValue]) + } + var displayName: String { + switch self { + case .half: return "0.5x" + case .normal: return "1x" + case .oneAndQuarter: return "1.25x" + case .oneAndHalf: return "1.5x" + case .double: return "2.0x" + } + } + } + + @Binding var selected: PlaybackSpeedOption + + var body: some View { + VStack { + ForEach(PlaybackSpeedOption.allCases) { option in + HStack { + Text(verbatim: option.displayName) + .frame(minWidth: 150, maxWidth: .infinity, alignment: .leading) + .padding() + Image(systemName: (self.selected == option) ? "checkmark" : "circle") + .frame(maxWidth: .infinity, alignment: .trailing) + .padding() + } + .onTapGesture { + self.selected = option + } + } + } + .frame(minWidth: 200) + } } } +fileprivate extension DamusVideoPlayer { + /// A vertical volume slider ranging from 0.0 (silent) to 1.0 (full volume). + /// `onEditingChanged` is called when the user begins or ends editing the volume value using this slider + /// Inspired by [pratikg29's VerticalVolumeSlider](https://github.com/pratikg29/Custom-Slider-Control/blob/main/AppleMusicSlider/AppleMusicSlider/VerticalVolumeSlider.swift) + struct VolumeSlider : View { + @Binding var volume: Float + @State private var localRealProgress: Float = 0 + @State private var localTempProgress: Float = 0 + @GestureState private var isEditing: Bool = false + let onEditingChanged: (Bool) -> Void + + var body: some View { + GeometryReader { bounds in + ZStack { + GeometryReader { geo in + ZStack(alignment: .bottom) { + RoundedRectangle(cornerRadius: bounds.size.width, style: .continuous) + .fill(Color.black) + RoundedRectangle(cornerRadius: bounds.size.width, style: .continuous) + .foregroundColor(.accentColor) + .mask({ + VStack { + Spacer(minLength: 0) + Rectangle() + .frame(height: max(geo.size.height * CGFloat((localRealProgress + localTempProgress)), 0), + alignment: .leading) + } + }) + } + .clipped() + } + } + .frame(width: bounds.size.width, height: bounds.size.height, alignment: .center) + .gesture(DragGesture(minimumDistance: 8, coordinateSpace: .local) + .updating($isEditing) { value, state, transaction in + state = true + } + .onChanged { gesture in + localTempProgress = Float(-gesture.translation.height / bounds.size.height) + volume = max(min(localRealProgress + localTempProgress, 1.0), 0.0) + }.onEnded { value in + localRealProgress = max(min(localRealProgress + localTempProgress, 1), 0) + localTempProgress = 0 + }) + .onChange(of: isEditing) { editing in + onEditingChanged(editing) + } + .onAppear { + localRealProgress = volume + } + .onChange(of: volume) { newValue in + if !(isEditing) { + localRealProgress = volume + } + } + } + .frame(alignment: .center) + } + } +} +fileprivate extension DamusVideoPlayer { + /// A seek time slider ranging from 0.0 (beginning) to `totalDuration` at the 
end. + /// `onEditingChanged` is called when the user begins or ends editing the current play location using this slider. + /// `onTimeFinalized` is called when the user ends editing and the new time has been calculated, ready for seeking. + /// Inspired by [pratikg29's VerticalVolumeSlider](https://github.com/pratikg29/Custom-Slider-Control/blob/main/AppleMusicSlider/AppleMusicSlider/VerticalVolumeSlider.swift) + struct SeekSlider : View { + @Binding var time: Double + @State private var localRealProgress: Double = 0 + @State private var localTempProgress: Double = 0 + @GestureState private var isEditing: Bool = false + let totalDuration: Double + let onEditingChanged: (Bool) -> Void + let onTimeFinalized: (Double) -> Void + + var body: some View { + GeometryReader { bounds in + ZStack { + GeometryReader { geo in + ZStack(alignment: .leading) { + RoundedRectangle(cornerRadius: bounds.size.height, style: .continuous) + .fill(Color.black) + RoundedRectangle(cornerRadius: bounds.size.height, style: .continuous) + .foregroundColor(.accentColor) + .mask({ + HStack { + Rectangle() + .frame(width: max(geo.size.width * CGFloat((localRealProgress + localTempProgress)), 0), + alignment: .leading) + Spacer(minLength: 0) + } + }) + } + .clipped() + } + } + .frame(width: bounds.size.width, height: bounds.size.height, alignment: .center) + .gesture(DragGesture(minimumDistance: 8, coordinateSpace: .local) + .updating($isEditing) { value, state, transaction in + state = true + } + .onChanged { gesture in + localTempProgress = Double(gesture.translation.width / bounds.size.width) + time = max(min(localRealProgress + localTempProgress, 1.0), 0.0) + }.onEnded { value in + localRealProgress = max(min(localRealProgress + localTempProgress, 1.0), 0) + localTempProgress = 0 + + onTimeFinalized(localRealProgress * totalDuration) + }) + .onChange(of: isEditing) { editing in + onEditingChanged(editing) + } + .onAppear { + localRealProgress = time / totalDuration + } + .onChange(of: time) { newValue in + if !(isEditing) { + localRealProgress = time / totalDuration + } + } + } + .frame(alignment: .center) + } + } +} + struct DamusVideoPlayer_Previews: PreviewProvider { @StateObject static var model: VideoPlayerModel = VideoPlayerModel() static var previews: some View { - DamusVideoPlayer(url: URL(string: "http://cdn.jb55.com/s/zaps-build.mp4")!, model: model, video_size: .constant(nil)) + DamusVideoPlayer(url: URL(string: "http://cdn.jb55.com/s/zaps-build.mp4")!, model: model, video_size: .constant(nil), showControls: true) + .environmentObject(OrientationTracker()) +// .previewInterfaceOrientation(.landscapeLeft) } } diff --git a/damus/Views/Video/VideoPlayer.swift b/damus/Views/Video/VideoPlayer.swift index 53f25323e6..d91eb82f0e 100644 --- a/damus/Views/Video/VideoPlayer.swift +++ b/damus/Views/Video/VideoPlayer.swift @@ -42,13 +42,13 @@ enum VideoHandler { @MainActor public class VideoPlayerModel: ObservableObject { - @Published var autoReplay: Bool = true - @Published var muted: Bool = true - @Published var play: Bool = true + @Published var autoReplay = true + @Published var muted = true + @Published var play = true @Published var size: CGSize? = nil @Published var has_audio: Bool?
= nil @Published var contentMode: UIView.ContentMode = .scaleAspectFill - @Published var currentTime: Double = 0.0 + @Published var currentTime = 0.0 @Published var playbackRate: Float = 1.0 @Published var volume: Float = 1.0 // Split out a deliberate stream for *setting* currentTime manually, that way system-driven updates don't cause infinite loops and strange fighting behavior. @@ -208,13 +208,13 @@ fileprivate extension AVPlayer { @available(iOS 13, *) extension VideoPlayer: UIViewRepresentable { - public func makeUIView(context: Context) -> VideoPlayerView { let uiView = VideoPlayerView() - uiView.playToEndTime = { - if self.model.autoReplay == false { - self.model.play = false + uiView.playToEndTime = { [weak model] in + guard let model else { return } + if model.autoReplay == false { + model.play = false } DispatchQueue.main.async { for handler in model.handlers { @@ -227,7 +227,8 @@ extension VideoPlayer: UIViewRepresentable { uiView.contentMode = self.model.contentMode - uiView.replay = { + uiView.replay = { [weak model] in + guard let model else { return } DispatchQueue.main.async { for handler in model.handlers { if case .onReplay(let cb) = handler { @@ -237,30 +238,29 @@ extension VideoPlayer: UIViewRepresentable { } } - uiView.stateDidChanged = { [unowned uiView] _ in + uiView.stateDidChanged = { [weak model, unowned uiView] _ in + guard let model else { return } let state: VideoState = uiView.videoState if case .playing = uiView.videoState { + model.totalDuration = uiView.totalDuration context.coordinator.startObserver(uiView: uiView) - if let player = uiView.player { Task { let has_audio = await player.hasAudio let size = await player.videoSize Task { @MainActor in if let size { - self.model.size = size + model.size = size } - self.model.has_audio = has_audio + model.has_audio = has_audio } } } - - } else { - context.coordinator.stopObserver(uiView: uiView) } - DispatchQueue.main.async { + DispatchQueue.main.async { [weak model] in + guard let model else { return } for handler in model.handlers { if case .onStateChanged(let cb) = handler { cb(state) @@ -269,6 +269,21 @@ extension VideoPlayer: UIViewRepresentable { } } + // Split this out because calling `uiView.play(for:)` will initialize the coordinator, which will result in an initialization loop. + context.coordinator.disposeSet.insert(context.coordinator.videoPlayer.model.$play.sink { [weak model, unowned uiView] play in + if play { + uiView.resume() + // We have to re-set the AVPlayer.rate property because internally AVPlayer sets this value to 0.0 for pausing. + guard let model else { return } + uiView.player?.rate = model.playbackRate + } + else { + uiView.pause(reason: .userInteraction) + } + }) + + uiView.play(for: self.url) + return uiView } @@ -277,19 +292,7 @@ extension VideoPlayer: UIViewRepresentable { } public func updateUIView(_ uiView: VideoPlayerView, context: Context) { - if context.coordinator.observingURL != url { - context.coordinator.clean() - context.coordinator.observingURL = url - } - - if model.play { - uiView.play(for: url) - } else { - uiView.pause(reason: .userInteraction) - } - - uiView.isMuted = model.muted - uiView.isAutoReplay = model.autoReplay + // This method is called A LOT for this view, so model properties have been moved to publishers to allow for event-driven changes instead of performance heavy checks here. 
} public static func dismantleUIView(_ uiView: VideoPlayerView, coordinator: VideoPlayer.Coordinator) { @@ -301,47 +304,49 @@ extension VideoPlayer: UIViewRepresentable { var observingURL: URL? var observer: Any? var observerBuffer: Double? - var videoModeSub: AnyCancellable? - var playbackRateSub: AnyCancellable? - var volumeSub: AnyCancellable? - var currentTimeSub: AnyCancellable? + var disposeSet: Set<AnyCancellable> = [] init(_ videoPlayer: VideoPlayer) { self.videoPlayer = videoPlayer } + deinit { + print("Coordinator.deinit!") + } + @MainActor func startObserver(uiView: VideoPlayerView) { guard observer == nil else { return } - self.videoModeSub = self.videoPlayer.model.$contentMode.sink { mode in + disposeSet.insert(videoPlayer.model.$muted.sink { [unowned uiView] muted in + uiView.isMuted = muted + }) + + disposeSet.insert(videoPlayer.model.$autoReplay.sink { [unowned uiView] autoReplay in + uiView.isAutoReplay = autoReplay + }) + + disposeSet.insert(videoPlayer.model.$contentMode.sink { [unowned uiView] mode in uiView.contentMode = mode - } + }) - self.playbackRateSub = self.videoPlayer.model.$playbackRate.sink { rate in + disposeSet.insert(videoPlayer.model.$playbackRate.sink { [unowned uiView] rate in uiView.player?.rate = rate - } + }) - self.volumeSub = self.videoPlayer.model.$volume.sink { volume in - uiView.player?.volume = volume - } + disposeSet.insert(videoPlayer.model.$volume.sink { [unowned uiView] volume in + uiView.volume = Double(volume) + }) - self.currentTimeSub = self.videoPlayer.model.currentTimeSubject.sink { seconds in - let currentTimeScale = uiView.player?.currentTime().timescale ?? 1 - let newTime = CMTimeMakeWithSeconds(seconds, preferredTimescale: currentTimeScale) - let halfSecondBefore = CMTimeMakeWithSeconds(seconds - 0.5, preferredTimescale: currentTimeScale) - let halfSecondAfter = CMTimeMakeWithSeconds(seconds + 0.5, preferredTimescale: currentTimeScale) - uiView.player?.seek(to: newTime, - toleranceBefore: halfSecondBefore, - toleranceAfter: halfSecondAfter) - } + disposeSet.insert(videoPlayer.model.currentTimeSubject.sink { [unowned uiView] seconds in + uiView.seek(to: CMTime(seconds: seconds, preferredTimescale: uiView.player?.currentTime().timescale ?? 1000)) + }) observer = uiView.addPeriodicTimeObserver(forInterval: .init(seconds: 0.25, preferredTimescale: 60)) { [weak self, unowned uiView] time in guard let self else { return } - self.videoPlayer.model.totalDuration = uiView.player?.totalDuration ?? 0.0 Task { @MainActor in - self.videoPlayer.model.currentTime = CMTimeGetSeconds(time) + self.videoPlayer.model.currentTime = uiView.currentDuration } self.updateBuffer(uiView: uiView) @@ -354,18 +359,13 @@ extension VideoPlayer: UIViewRepresentable { uiView.removeTimeObserver(observer) self.observer = nil - - // TODO: Should this call `clean()`? } func clean() { self.observingURL = nil self.observer = nil self.observerBuffer = nil - self.videoModeSub = nil - self.playbackRateSub = nil - self.volumeSub = nil - self.currentTimeSub = nil + self.disposeSet = [] } @MainActor