diff --git a/Nicegram/NGEnv/Sources/NGEnv.swift b/Nicegram/NGEnv/Sources/NGEnv.swift
index d610c346832..b6355e8a4c0 100644
--- a/Nicegram/NGEnv/Sources/NGEnv.swift
+++ b/Nicegram/NGEnv/Sources/NGEnv.swift
@@ -15,6 +15,7 @@ public struct NGEnvObj: Decodable {
public let esim_api_key: String
public let referral_bot: String
public let remote_config_cache_duration_seconds: Double
+ public let tapjoy_api_key: String
public let telegram_auth_bot: String
public let google_cloud_api_key: String
public let applovin_api_key: String
diff --git a/Package.resolved b/Package.resolved
index efa38352272..014bc1c45e2 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -42,7 +42,7 @@
"location" : "git@bitbucket.org:mobyrix/nicegram-assistant-ios.git",
"state" : {
"branch" : "develop",
- "revision" : "35b2c2c88c8d2cb3d25cfa95e15e588f90578994"
+ "revision" : "05f2392cbf916a604d9d7e87dc640e7d4651dc12"
}
},
{
@@ -59,8 +59,8 @@
"kind" : "remoteSourceControl",
"location" : "https://github.com/SDWebImage/SDWebImage.git",
"state" : {
- "revision" : "0383fd49fe4d9ae43f150f24693550ebe6ef0d14",
- "version" : "5.18.6"
+ "revision" : "e278c13e46e8d20c895c221e922c6ac6b72aaca9",
+ "version" : "5.18.7"
}
},
{
diff --git a/Telegram/BUILD b/Telegram/BUILD
index e5ab67e13f3..a62a0a95a65 100644
--- a/Telegram/BUILD
+++ b/Telegram/BUILD
@@ -1813,11 +1813,391 @@ nicegram_queries_schemes = """
	<string>plnt</string>
"""
+# MARK: Nicegram
+nicegram_skad_network_items = """
+	<key>SKAdNetworkItems</key>
+	<array>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>ecpz2srf59.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>7ug5zh24hu.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>9t245vhmpl.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>prcb7njmu6.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>5lm9lj6jb7.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>578prtvx9j.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>22mmun2rn5.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>uw77j35x4d.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>c6k4g5qg8m.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>hs6bdukanm.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>yclnxrl5pm.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>3sh42y64q3.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>cj5566h2ga.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>klf5c3l5u5.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>8s468mfl3y.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>2u9pt9hc89.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>7rz58n8ntl.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>ppxm28t8ap.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>5tjdwbrq8w.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>238da6jt44.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>24t9a8vw3c.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>252b5q8x7y.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>3qy4746246.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>3rd42ekr43.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>424m5254lk.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>4468km3ulz.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>44jx6755aq.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>44n7hlldy6.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>488r3q3dtq.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>4fzdc2evr5.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>4pfyvq9l8r.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>523jb4fst2.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>52fl2v3hgk.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>5a6flpkh64.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>5l3tpt7t6e.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>737z793b9f.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>97r2b46745.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>9rd848q2bz.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>9yg77x724h.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>av6w8kgt66.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>cg4yq2srnc.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>cstr6suwn9.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>dzg6xy7pwj.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>ejvt5qm6ak.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>f73kdq92p3.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>g28c52eehv.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>ggvn48r87g.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>glqzh8vgby.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>gvmwg8q7h5.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>hdw39hrw9y.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>kbd757ywx3.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>lr83yxwka7.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>m8dbw4sv7c.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>mlmmfzh3r3.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>mls7yz5dvl.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>mtkv5xtk9e.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>n66cz3y3bx.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>n9x2a789qt.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>nzq8sh4pbs.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>p78axxw29g.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>pu4na253f3.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>s39g8k73mm.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>t38b2kh725.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>tl55sbb4fm.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>u679fj5vs4.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>v72qych5uu.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>v79kvwwj4g.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>w9q455wk68.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>wg4vff78zm.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>wzmmz9fp6w.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>xy9t38ct57.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>y45688jllp.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>ydx93a7ass.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>yrqqpx2mcb.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>z4gj7hsk7h.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>zmvfpc5aq8.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>k674qkevps.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>2fnua5tdw4.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>9nlqeag3gk.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>n6fk4nfna4.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>e5fvkxwrpn.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>kbmxgpxpgc.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>294l99pt4k.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>r45fhb6rf7.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>pwa73g5rt2.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>3qcr597p9d.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>x44k69ngh6.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>a2p9lx4jpn.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>zq492l623r.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>c3frkrj4fj.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>6g9af3uyq4.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>74b6s63p6l.skadnetwork</string>
+		</dict>
+		<dict>
+			<key>SKAdNetworkIdentifier</key>
+			<string>32z4fx6l9h.skadnetwork</string>
+		</dict>
+	</array>
+"""
+
plist_fragment(
name = "TelegramInfoPlist",
extension = "plist",
template =
"""
+	{nicegram_skad_network_items}
	<key>BGTaskSchedulerPermittedIdentifiers</key>
	<array>
		<string>{telegram_bundle_id}.refresh</string>
@@ -1987,6 +2367,7 @@ plist_fragment(
""".format(
nicegram_queries_schemes = nicegram_queries_schemes,
+ nicegram_skad_network_items = nicegram_skad_network_items,
telegram_bundle_id = telegram_bundle_id,
)
)
diff --git a/Telegram/Telegram-iOS/en.lproj/Localizable.strings b/Telegram/Telegram-iOS/en.lproj/Localizable.strings
index 02a04605f80..f6cb9cb204c 100644
--- a/Telegram/Telegram-iOS/en.lproj/Localizable.strings
+++ b/Telegram/Telegram-iOS/en.lproj/Localizable.strings
@@ -5305,6 +5305,7 @@ Sorry for the inconvenience.";
"Stats.LanguagesTitle" = "LANGUAGES";
"Stats.PostsTitle" = "RECENT POSTS";
+"Stats.MessageViews.NoViews" = "No views";
"Stats.MessageViews_0" = "%@ views";
"Stats.MessageViews_1" = "%@ view";
"Stats.MessageViews_2" = "%@ views";
@@ -10575,12 +10576,19 @@ Sorry for the inconvenience.";
"ChannelReactions.UnsavedChangesAlertDiscard" = "Discard";
"ChannelReactions.UnsavedChangesAlertApply" = "Apply";
"ChannelReactions.ToastMaxReactionsReached" = "You can select at most 100 reactions.";
-"ChannelReactions.ToastLevelBoostRequired" = "Your channel needs to reach **Level %1$@** to add **%2$@** custom emoji as reactions.**";
+
+"ChannelReactions.ToastLevelBoostRequiredTemplate" = "Your channel needs to reach **%1$@** to add **%2$@** as reactions.**";
+"ChannelReactions.ToastLevelBoostRequiredTemplateLevel_1" = "Level 1";
+"ChannelReactions.ToastLevelBoostRequiredTemplateLevel_any" = "Level %d";
+"ChannelReactions.ToastLevelBoostRequiredTemplateEmojiCount_1" = "1 custom emoji";
+"ChannelReactions.ToastLevelBoostRequiredTemplateEmojiCount_any" = "%d custom emoji";
+
"ChannelReactions.GeneralInfoLabel" = "You can add emoji from any emoji pack as a reaction.";
"ChannelReactions.ReactionsSectionTitle" = "AVAILABLE REACTIONS";
"ChannelReactions.ReactionsInfoLabel" = "You can also [create your own]() emoji packs and use them.";
"ChannelReactions.SaveAction" = "Update Reactions";
"ChannelReactions.LevelRequiredLabel" = "Level %1$@ Required";
+"ChannelReactions.InputPlaceholder" = "Add Reactions...";
"ProfileColorSetup.ResetAction" = "Reset Profile Color";
"ProfileColorSetup.IconSectionTitle" = "ADD ICON TO PROFILE";
@@ -10596,3 +10604,13 @@ Sorry for the inconvenience.";
"Chat.InputPlaceholderMessageInTopic" = "Message in %1$@";
"Chat.Reactions.MultiplePremiumTooltip" = "You can set multiple reactions with [Telegram Premium]().";
+
+"Notification.Wallpaper.Remove" = "Remove";
+"Chat.RemoveWallpaper.Title" = "Remove Wallpaper";
+"Chat.RemoveWallpaper.Text" = "Are you sure you want to reset the wallpaper?";
+"Chat.RemoveWallpaper.Remove" = "Remove";
+
+"MediaEditor.Shortcut.Image" = "Image";
+"MediaEditor.Shortcut.Location" = "Location";
+"MediaEditor.Shortcut.Reaction" = "Reaction";
+"MediaEditor.Shortcut.Audio" = "Audio";
diff --git a/Tests/CallUITest/Resources/test2.mp4 b/Tests/CallUITest/Resources/test2.mp4
index c0327e91e34..c3361d06176 100644
Binary files a/Tests/CallUITest/Resources/test2.mp4 and b/Tests/CallUITest/Resources/test2.mp4 differ
diff --git a/Tests/CallUITest/Resources/test20.mp4 b/Tests/CallUITest/Resources/test20.mp4
new file mode 100644
index 00000000000..c0327e91e34
Binary files /dev/null and b/Tests/CallUITest/Resources/test20.mp4 differ
diff --git a/Tests/CallUITest/Resources/test3.mp4 b/Tests/CallUITest/Resources/test3.mp4
new file mode 100644
index 00000000000..fc128e106ba
Binary files /dev/null and b/Tests/CallUITest/Resources/test3.mp4 differ
diff --git a/Tests/CallUITest/Resources/test4.mp4 b/Tests/CallUITest/Resources/test4.mp4
new file mode 100644
index 00000000000..e34e7bf854c
Binary files /dev/null and b/Tests/CallUITest/Resources/test4.mp4 differ
diff --git a/Tests/CallUITest/Sources/AppDelegate.swift b/Tests/CallUITest/Sources/AppDelegate.swift
index f40c8894f52..dd7ab8d3379 100644
--- a/Tests/CallUITest/Sources/AppDelegate.swift
+++ b/Tests/CallUITest/Sources/AppDelegate.swift
@@ -16,6 +16,8 @@ public final class AppDelegate: NSObject, UIApplicationDelegate {
window.rootViewController = ViewController()
window.makeKeyAndVisible()
+ application.internalSetStatusBarStyle(.lightContent, animated: false)
+
return true
}
}
diff --git a/Tests/CallUITest/Sources/ViewController.swift b/Tests/CallUITest/Sources/ViewController.swift
index d9103d2dad3..c17d759f733 100644
--- a/Tests/CallUITest/Sources/ViewController.swift
+++ b/Tests/CallUITest/Sources/ViewController.swift
@@ -5,18 +5,41 @@ import Display
import CallScreen
import ComponentFlow
-public final class ViewController: UIViewController {
+private extension UIScreen {
+ private static let cornerRadiusKey: String = {
+ let components = ["Radius", "Corner", "display", "_"]
+ return components.reversed().joined()
+ }()
+
+ var displayCornerRadius: CGFloat {
+ guard let cornerRadius = self.value(forKey: Self.cornerRadiusKey) as? CGFloat else {
+ assertionFailure("Failed to detect screen corner radius")
+ return 0
+ }
+
+ return cornerRadius
+ }
+}
+
+public final class ViewController: UIViewController {
private var callScreenView: PrivateCallScreen?
private var callState: PrivateCallScreen.State = PrivateCallScreen.State(
lifecycleState: .connecting,
name: "Emma Walters",
+ shortName: "Emma",
avatarImage: UIImage(named: "test"),
audioOutput: .internalSpeaker,
isMicrophoneMuted: false,
localVideo: nil,
- remoteVideo: nil
+ remoteVideo: nil,
+ isRemoteBatteryLow: false
)
+ private var currentLayout: (size: CGSize, insets: UIEdgeInsets)?
+ private var viewLayoutTransition: Transition?
+
+ private var audioLevelTimer: Foundation.Timer?
+
override public func viewDidLoad() {
super.viewDidLoad()
@@ -45,7 +68,7 @@ public final class ViewController: UIViewController {
self.callState.lifecycleState = .active(PrivateCallScreen.State.ActiveState(
startTime: Date().timeIntervalSince1970,
signalInfo: PrivateCallScreen.State.SignalInfo(quality: 1.0),
- emojiKey: ["A", "B", "C", "D"]
+ emojiKey: ["😂", "😘", "😍", "😊"]
))
case var .active(activeState):
activeState.signalInfo.quality = activeState.signalInfo.quality == 1.0 ? 0.1 : 1.0
@@ -54,10 +77,33 @@ public final class ViewController: UIViewController {
self.callState.lifecycleState = .active(PrivateCallScreen.State.ActiveState(
startTime: Date().timeIntervalSince1970,
signalInfo: PrivateCallScreen.State.SignalInfo(quality: 1.0),
- emojiKey: ["A", "B", "C", "D"]
+ emojiKey: ["😂", "😘", "😍", "😊"]
))
}
+ switch self.callState.lifecycleState {
+ case .terminated:
+ if let audioLevelTimer = self.audioLevelTimer {
+ self.audioLevelTimer = nil
+ audioLevelTimer.invalidate()
+ }
+ default:
+ if self.audioLevelTimer == nil {
+ let startTime = CFAbsoluteTimeGetCurrent()
+ self.audioLevelTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0 / 60.0, repeats: true, block: { [weak self] _ in
+ guard let self, let callScreenView = self.callScreenView else {
+ return
+ }
+ let timestamp = CFAbsoluteTimeGetCurrent() - startTime
+ let stream1 = sin(timestamp * Double.pi * 2.0)
+ let stream2 = sin(2.0 * timestamp * Double.pi * 2.0)
+ let stream3 = sin(3.0 * timestamp * Double.pi * 2.0)
+ let result = stream1 + stream2 + stream3
+ callScreenView.addIncomingAudioLevel(value: abs(Float(result)))
+ })
+ }
+ }
+
self.update(transition: .spring(duration: 0.4))
}
callScreenView.flipCameraAction = { [weak self] in
@@ -66,6 +112,8 @@ public final class ViewController: UIViewController {
}
if let input = self.callState.localVideo as? FileVideoSource {
input.sourceId = input.sourceId == 0 ? 1 : 0
+ input.fixedRotationAngle = input.sourceId == 0 ? Float.pi * 0.0 : Float.pi * 0.5
+ input.sizeMultiplicator = input.sourceId == 0 ? CGPoint(x: 1.0, y: 1.0) : CGPoint(x: 1.5, y: 1.0)
}
}
callScreenView.videoAction = { [weak self] in
@@ -73,7 +121,7 @@ public final class ViewController: UIViewController {
return
}
if self.callState.localVideo == nil {
- self.callState.localVideo = FileVideoSource(device: MetalEngine.shared.device, url: Bundle.main.url(forResource: "test2", withExtension: "mp4")!)
+ self.callState.localVideo = FileVideoSource(device: MetalEngine.shared.device, url: Bundle.main.url(forResource: "test3", withExtension: "mp4")!, fixedRotationAngle: Float.pi * 0.0)
} else {
self.callState.localVideo = nil
}
@@ -81,7 +129,7 @@ public final class ViewController: UIViewController {
}
callScreenView.microhoneMuteAction = {
if self.callState.remoteVideo == nil {
- self.callState.remoteVideo = FileVideoSource(device: MetalEngine.shared.device, url: Bundle.main.url(forResource: "test2", withExtension: "mp4")!)
+ self.callState.remoteVideo = FileVideoSource(device: MetalEngine.shared.device, url: Bundle.main.url(forResource: "test4", withExtension: "mp4")!, fixedRotationAngle: Float.pi * 0.0)
} else {
self.callState.remoteVideo = nil
}
@@ -96,32 +144,51 @@ public final class ViewController: UIViewController {
self.callState.localVideo = nil
self.update(transition: .spring(duration: 0.4))
}
-
- self.update(transition: .immediate)
+ callScreenView.backAction = { [weak self] in
+ guard let self else {
+ return
+ }
+ self.callState.isMicrophoneMuted = !self.callState.isMicrophoneMuted
+ self.update(transition: .spring(duration: 0.4))
+ }
}
private func update(transition: Transition) {
- self.update(size: self.view.bounds.size, transition: transition)
+ if let (size, insets) = self.currentLayout {
+ self.update(size: size, insets: insets, transition: transition)
+ }
}
- private func update(size: CGSize, transition: Transition) {
+ private func update(size: CGSize, insets: UIEdgeInsets, transition: Transition) {
guard let callScreenView = self.callScreenView else {
return
}
transition.setFrame(view: callScreenView, frame: CGRect(origin: CGPoint(), size: size))
- let insets: UIEdgeInsets
- if size.width < size.height {
- insets = UIEdgeInsets(top: 44.0, left: 0.0, bottom: 0.0, right: 0.0)
+ callScreenView.update(size: size, insets: insets, screenCornerRadius: UIScreen.main.displayCornerRadius, state: self.callState, transition: transition)
+ }
+
+ override public func viewWillLayoutSubviews() {
+ super.viewWillLayoutSubviews()
+
+ let safeAreaLayoutGuide = self.view.safeAreaLayoutGuide
+
+ let size = self.view.bounds.size
+ let insets = UIEdgeInsets(top: safeAreaLayoutGuide.layoutFrame.minY, left: safeAreaLayoutGuide.layoutFrame.minX, bottom: size.height - safeAreaLayoutGuide.layoutFrame.maxY, right: safeAreaLayoutGuide.layoutFrame.minX)
+
+ let transition = self.viewLayoutTransition ?? .immediate
+ self.viewLayoutTransition = nil
+
+ if let currentLayout = self.currentLayout, currentLayout == (size, insets) {
} else {
- insets = UIEdgeInsets(top: 0.0, left: 44.0, bottom: 0.0, right: 44.0)
+ self.currentLayout = (size, insets)
+ self.update(size: size, insets: insets, transition: transition)
}
- callScreenView.update(size: size, insets: insets, screenCornerRadius: 55.0, state: self.callState, transition: transition)
}
override public func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) {
super.viewWillTransition(to: size, with: coordinator)
- self.update(size: size, transition: .easeInOut(duration: 0.3))
+ self.viewLayoutTransition = .easeInOut(duration: 0.3)
}
}
diff --git a/WORKSPACE b/WORKSPACE
index 6cefd0f06e8..455045017ad 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -107,9 +107,9 @@ http_archive(
http_archive(
name = "rules_swift_package_manager",
- sha256 = "5fa4bb1ed4d105ac0d10234b8442ba7c489058697afef7dbf59dbd35bff8892e",
+ sha256 = "9ef780cb621ec2d7e2c494dd0d2c9994089195e82417634ed3fa000313beb151",
urls = [
- "https://github.com/cgrindel/rules_swift_package_manager/releases/download/v0.13.1/rules_swift_package_manager.v0.13.1.tar.gz",
+ "https://github.com/cgrindel/rules_swift_package_manager/releases/download/v0.23.0/rules_swift_package_manager.v0.23.0.tar.gz",
],
)
diff --git a/submodules/Camera/Sources/Camera.swift b/submodules/Camera/Sources/Camera.swift
index ad935ca89c1..08f7aa34bfe 100644
--- a/submodules/Camera/Sources/Camera.swift
+++ b/submodules/Camera/Sources/Camera.swift
@@ -50,11 +50,11 @@ final class CameraDeviceContext {
let input = CameraInput()
let output: CameraOutput
- init(session: CameraSession, exclusive: Bool, additional: Bool) {
+ init(session: CameraSession, exclusive: Bool, additional: Bool, ciContext: CIContext) {
self.session = session
self.exclusive = exclusive
self.additional = additional
- self.output = CameraOutput(exclusive: exclusive)
+ self.output = CameraOutput(exclusive: exclusive, ciContext: ciContext)
}
func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false) {
@@ -114,7 +114,7 @@ private final class CameraContext {
private var mainDeviceContext: CameraDeviceContext?
private var additionalDeviceContext: CameraDeviceContext?
- private let cameraImageContext = CIContext()
+ private let ciContext = CIContext()
private let initialConfiguration: Camera.Configuration
private var invalidated = false
@@ -140,7 +140,7 @@ private final class CameraContext {
ciImage = ciImage.transformed(by: transform)
}
ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: 40.0).cropped(to: CGRect(origin: .zero, size: size))
- if let cgImage = self.cameraImageContext.createCGImage(ciImage, from: ciImage.extent) {
+ if let cgImage = self.ciContext.createCGImage(ciImage, from: ciImage.extent) {
let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
if front {
CameraSimplePreviewView.saveLastFrontImage(uiImage)
@@ -303,10 +303,10 @@ private final class CameraContext {
if enabled {
self.configure {
self.mainDeviceContext?.invalidate()
- self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false)
+ self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false, ciContext: self.ciContext)
self.mainDeviceContext?.configure(position: .back, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
- self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true)
+ self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true, ciContext: self.ciContext)
self.additionalDeviceContext?.configure(position: .front, previewView: self.secondaryPreviewView, audio: false, photo: true, metadata: false)
}
self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
@@ -343,7 +343,7 @@ private final class CameraContext {
self.additionalDeviceContext?.invalidate()
self.additionalDeviceContext = nil
- self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false)
+ self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext)
self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide, preferLowerFramerate: self.initialConfiguration.preferLowerFramerate)
}
self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
diff --git a/submodules/Camera/Sources/CameraOutput.swift b/submodules/Camera/Sources/CameraOutput.swift
index 09e977dfc57..4635a36c519 100644
--- a/submodules/Camera/Sources/CameraOutput.swift
+++ b/submodules/Camera/Sources/CameraOutput.swift
@@ -78,13 +78,14 @@ public struct CameraCode: Equatable {
}
final class CameraOutput: NSObject {
+ let exclusive: Bool
+ let ciContext: CIContext
+
let photoOutput = AVCapturePhotoOutput()
let videoOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()
let metadataOutput = AVCaptureMetadataOutput()
-
- let exclusive: Bool
-
+
private var photoConnection: AVCaptureConnection?
private var videoConnection: AVCaptureConnection?
private var previewConnection: AVCaptureConnection?
@@ -99,8 +100,9 @@ final class CameraOutput: NSObject {
var processAudioBuffer: ((CMSampleBuffer) -> Void)?
var processCodes: (([CameraCode]) -> Void)?
- init(exclusive: Bool) {
+ init(exclusive: Bool, ciContext: CIContext) {
self.exclusive = exclusive
+ self.ciContext = ciContext
super.init()
@@ -266,7 +268,7 @@ final class CameraOutput: NSObject {
}
let uniqueId = settings.uniqueID
- let photoCapture = PhotoCaptureContext(settings: settings, orientation: orientation, mirror: mirror)
+ let photoCapture = PhotoCaptureContext(ciContext: self.ciContext, settings: settings, orientation: orientation, mirror: mirror)
self.photoCaptureRequests[uniqueId] = photoCapture
self.photoOutput.capturePhoto(with: settings, delegate: photoCapture)
@@ -309,7 +311,7 @@ final class CameraOutput: NSObject {
let outputFilePath = NSTemporaryDirectory() + outputFileName + ".mp4"
let outputFileURL = URL(fileURLWithPath: outputFilePath)
- let videoRecorder = VideoRecorder(configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), orientation: orientation, fileUrl: outputFileURL, completion: { [weak self] result in
+ let videoRecorder = VideoRecorder(configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), ciContext: self.ciContext, orientation: orientation, fileUrl: outputFileURL, completion: { [weak self] result in
guard let self else {
return
}
diff --git a/submodules/Camera/Sources/PhotoCaptureContext.swift b/submodules/Camera/Sources/PhotoCaptureContext.swift
index ad7a992a650..2f2e7753887 100644
--- a/submodules/Camera/Sources/PhotoCaptureContext.swift
+++ b/submodules/Camera/Sources/PhotoCaptureContext.swift
@@ -33,11 +33,13 @@ public enum PhotoCaptureResult: Equatable {
}
final class PhotoCaptureContext: NSObject, AVCapturePhotoCaptureDelegate {
+ private let ciContext: CIContext
private let pipe = ValuePipe<PhotoCaptureResult>()
private let orientation: AVCaptureVideoOrientation
private let mirror: Bool
- init(settings: AVCapturePhotoSettings, orientation: AVCaptureVideoOrientation, mirror: Bool) {
+ init(ciContext: CIContext, settings: AVCapturePhotoSettings, orientation: AVCaptureVideoOrientation, mirror: Bool) {
+ self.ciContext = ciContext
self.orientation = orientation
self.mirror = mirror
@@ -70,9 +72,8 @@ final class PhotoCaptureContext: NSObject, AVCapturePhotoCaptureDelegate {
}
let finalPixelBuffer = photoPixelBuffer
- let ciContext = CIContext()
let renderedCIImage = CIImage(cvImageBuffer: finalPixelBuffer)
- if let cgImage = ciContext.createCGImage(renderedCIImage, from: renderedCIImage.extent) {
+ if let cgImage = self.ciContext.createCGImage(renderedCIImage, from: renderedCIImage.extent) {
var image = UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation)
if image.imageOrientation != .up {
UIGraphicsBeginImageContextWithOptions(image.size, true, image.scale)
diff --git a/submodules/Camera/Sources/VideoRecorder.swift b/submodules/Camera/Sources/VideoRecorder.swift
index 67c3cb0661d..78dcff37a8c 100644
--- a/submodules/Camera/Sources/VideoRecorder.swift
+++ b/submodules/Camera/Sources/VideoRecorder.swift
@@ -34,7 +34,7 @@ private final class VideoRecorderImpl {
private var videoInput: AVAssetWriterInput?
private var audioInput: AVAssetWriterInput?
- private let imageContext = CIContext()
+ private let ciContext: CIContext
private var transitionImage: UIImage?
private var savedTransitionImage = false
@@ -63,8 +63,9 @@ private final class VideoRecorderImpl {
private var hasAllVideoBuffers = false
private var hasAllAudioBuffers = false
- public init?(configuration: VideoRecorder.Configuration, orientation: AVCaptureVideoOrientation, fileUrl: URL) {
+ public init?(configuration: VideoRecorder.Configuration, ciContext: CIContext, orientation: AVCaptureVideoOrientation, fileUrl: URL) {
self.configuration = configuration
+ self.ciContext = ciContext
var transform: CGAffineTransform = CGAffineTransform(rotationAngle: .pi / 2.0)
if orientation == .landscapeLeft {
@@ -184,7 +185,7 @@ private final class VideoRecorderImpl {
self.savedTransitionImage = true
Queue.concurrentBackgroundQueue().async {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
- if let cgImage = self.imageContext.createCGImage(ciImage, from: ciImage.extent) {
+ if let cgImage = self.ciContext.createCGImage(ciImage, from: ciImage.extent) {
var orientation: UIImage.Orientation = .right
if self.orientation == .landscapeLeft {
orientation = .down
@@ -479,12 +480,12 @@ public final class VideoRecorder {
return self.impl.isRecording
}
- init?(configuration: Configuration, orientation: AVCaptureVideoOrientation, fileUrl: URL, completion: @escaping (Result) -> Void) {
+ init?(configuration: Configuration, ciContext: CIContext, orientation: AVCaptureVideoOrientation, fileUrl: URL, completion: @escaping (Result) -> Void) {
self.configuration = configuration
self.fileUrl = fileUrl
self.completion = completion
- guard let impl = VideoRecorderImpl(configuration: configuration, orientation: orientation, fileUrl: fileUrl) else {
+ guard let impl = VideoRecorderImpl(configuration: configuration, ciContext: ciContext, orientation: orientation, fileUrl: fileUrl) else {
completion(.initError(.generic))
return nil
}
diff --git a/submodules/ChatListUI/Sources/Node/ChatListItem.swift b/submodules/ChatListUI/Sources/Node/ChatListItem.swift
index 79403326dbd..be68b94b04b 100644
--- a/submodules/ChatListUI/Sources/Node/ChatListItem.swift
+++ b/submodules/ChatListUI/Sources/Node/ChatListItem.swift
@@ -293,6 +293,7 @@ public class ChatListItem: ListViewItem, ChatListSearchItemNeighbour {
if #available(iOS 13.0, *) {
Task { @MainActor in
nicegramItem.select()
+ interaction.clearHighlightAnimated(true)
}
}
return
diff --git a/submodules/ChatListUI/Sources/Node/ChatListNode.swift b/submodules/ChatListUI/Sources/Node/ChatListNode.swift
index 6a28cb116f2..44543b84485 100644
--- a/submodules/ChatListUI/Sources/Node/ChatListNode.swift
+++ b/submodules/ChatListUI/Sources/Node/ChatListNode.swift
@@ -81,6 +81,11 @@ public final class ChatListNodeInteraction {
}
let activateSearch: () -> Void
+
+ // MARK: Nicegram PinnedChats
+ let clearHighlightAnimated: (Bool) -> Void
+ //
+
let peerSelected: (EnginePeer, EnginePeer?, Int64?, ChatListNodeEntryPromoInfo?) -> Void
let disabledPeerSelected: (EnginePeer, Int64?) -> Void
let togglePeerSelected: (EnginePeer, Int64?) -> Void
@@ -131,6 +136,9 @@ public final class ChatListNodeInteraction {
animationCache: AnimationCache,
animationRenderer: MultiAnimationRenderer,
activateSearch: @escaping () -> Void,
+ // MARK: Nicegram PinnedChats
+ clearHighlightAnimated: @escaping (Bool) -> Void = { _ in },
+ //
peerSelected: @escaping (EnginePeer, EnginePeer?, Int64?, ChatListNodeEntryPromoInfo?) -> Void,
disabledPeerSelected: @escaping (EnginePeer, Int64?) -> Void,
togglePeerSelected: @escaping (EnginePeer, Int64?) -> Void,
@@ -166,6 +174,9 @@ public final class ChatListNodeInteraction {
openStories: @escaping (ChatListNode.OpenStoriesSubject, ASDisplayNode?) -> Void
) {
self.activateSearch = activateSearch
+ // MARK: Nicegram PinnedChats
+ self.clearHighlightAnimated = clearHighlightAnimated
+ //
self.peerSelected = peerSelected
self.disabledPeerSelected = disabledPeerSelected
self.togglePeerSelected = togglePeerSelected
@@ -1325,6 +1336,10 @@ public final class ChatListNode: ListView {
if let strongSelf = self, let activateSearch = strongSelf.activateSearch {
activateSearch()
}
+ // MARK: Nicegram PinnedChats
+ }, clearHighlightAnimated: { [weak self] flag in
+ self?.clearHighlightAnimated(flag)
+ //
}, peerSelected: { [weak self] peer, _, threadId, promoInfo in
if let strongSelf = self, let peerSelected = strongSelf.peerSelected {
peerSelected(peer, threadId, true, true, promoInfo)
diff --git a/submodules/ChatPresentationInterfaceState/Sources/ChatTextFormat.swift b/submodules/ChatPresentationInterfaceState/Sources/ChatTextFormat.swift
index fb7d6906348..f123a516d2a 100644
--- a/submodules/ChatPresentationInterfaceState/Sources/ChatTextFormat.swift
+++ b/submodules/ChatPresentationInterfaceState/Sources/ChatTextFormat.swift
@@ -11,8 +11,10 @@ public func chatTextInputAddFormattingAttribute(_ state: ChatTextInputState, att
state.inputText.enumerateAttributes(in: nsRange, options: .longestEffectiveRangeNotRequired) { attributes, range, _ in
for (key, _) in attributes {
if key == attribute {
- addAttribute = false
- attributesToRemove.append(key)
+ if nsRange == range {
+ addAttribute = false
+ attributesToRemove.append(key)
+ }
}
}
}
diff --git a/submodules/ComponentFlow/Source/Base/Transition.swift b/submodules/ComponentFlow/Source/Base/Transition.swift
index 973ec228535..9b1887fbeed 100644
--- a/submodules/ComponentFlow/Source/Base/Transition.swift
+++ b/submodules/ComponentFlow/Source/Base/Transition.swift
@@ -481,8 +481,14 @@ public struct Transition {
}
public func setScale(layer: CALayer, scale: CGFloat, delay: Double = 0.0, completion: ((Bool) -> Void)? = nil) {
- let t = layer.presentation()?.transform ?? layer.transform
- let currentScale = sqrt((t.m11 * t.m11) + (t.m12 * t.m12) + (t.m13 * t.m13))
+ let currentTransform: CATransform3D
+ if layer.animation(forKey: "transform") != nil || layer.animation(forKey: "transform.scale") != nil {
+ currentTransform = layer.presentation()?.transform ?? layer.transform
+ } else {
+ currentTransform = layer.transform
+ }
+
+ let currentScale = sqrt((currentTransform.m11 * currentTransform.m11) + (currentTransform.m12 * currentTransform.m12) + (currentTransform.m13 * currentTransform.m13))
if currentScale == scale {
if let animation = layer.animation(forKey: "transform.scale") as? CABasicAnimation, let toValue = animation.toValue as? NSNumber {
if toValue.doubleValue == scale {
@@ -591,7 +597,7 @@ public struct Transition {
completion?(true)
case let .curve(duration, curve):
let previousValue: CATransform3D
- if let presentation = layer.presentation() {
+ if layer.animation(forKey: "transform") != nil, let presentation = layer.presentation() {
previousValue = presentation.transform
} else {
previousValue = layer.transform
@@ -703,6 +709,33 @@ public struct Transition {
)
}
}
+
+ public func setZPosition(layer: CALayer, zPosition: CGFloat, delay: Double = 0.0, completion: ((Bool) -> Void)? = nil) {
+ if layer.zPosition == zPosition {
+ completion?(true)
+ return
+ }
+ switch self.animation {
+ case .none:
+ layer.zPosition = zPosition
+ layer.removeAnimation(forKey: "zPosition")
+ completion?(true)
+ case let .curve(duration, curve):
+ let previousZPosition = layer.presentation()?.zPosition ?? layer.zPosition
+ layer.zPosition = zPosition
+ layer.animate(
+ from: previousZPosition as NSNumber,
+ to: zPosition as NSNumber,
+ keyPath: "zPosition",
+ duration: duration,
+ delay: delay,
+ curve: curve,
+ removeOnCompletion: true,
+ additive: false,
+ completion: completion
+ )
+ }
+ }
public func animateScale(view: UIView, from fromValue: CGFloat, to toValue: CGFloat, delay: Double = 0.0, additive: Bool = false, completion: ((Bool) -> Void)? = nil) {
switch self.animation {
diff --git a/submodules/Display/Source/CAAnimationUtils.swift b/submodules/Display/Source/CAAnimationUtils.swift
index a20da581bfc..b2673b2b9be 100644
--- a/submodules/Display/Source/CAAnimationUtils.swift
+++ b/submodules/Display/Source/CAAnimationUtils.swift
@@ -203,9 +203,9 @@ public extension CALayer {
}
}
- func animate(from: AnyObject?, to: AnyObject, keyPath: String, timingFunction: String, duration: Double, delay: Double = 0.0, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, completion: ((Bool) -> Void)? = nil) {
+ func animate(from: AnyObject?, to: AnyObject, keyPath: String, timingFunction: String, duration: Double, delay: Double = 0.0, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, completion: ((Bool) -> Void)? = nil, key: String? = nil) {
let animation = self.makeAnimation(from: from, to: to, keyPath: keyPath, timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
- self.add(animation, forKey: additive ? nil : keyPath)
+ self.add(animation, forKey: key ?? (additive ? nil : keyPath))
}
func animateGroup(_ animations: [CAAnimation], key: String, completion: ((Bool) -> Void)? = nil) {
diff --git a/submodules/Display/Source/SimpleLayer.swift b/submodules/Display/Source/SimpleLayer.swift
index 8bc691303b5..7033bd1f85c 100644
--- a/submodules/Display/Source/SimpleLayer.swift
+++ b/submodules/Display/Source/SimpleLayer.swift
@@ -87,3 +87,32 @@ open class SimpleGradientLayer: CAGradientLayer {
fatalError("init(coder:) has not been implemented")
}
}
+
+open class SimpleTransformLayer: CATransformLayer {
+ public var didEnterHierarchy: (() -> Void)?
+ public var didExitHierarchy: (() -> Void)?
+ public private(set) var isInHierarchy: Bool = false
+
+ override open func action(forKey event: String) -> CAAction? {
+ if event == kCAOnOrderIn {
+ self.isInHierarchy = true
+ self.didEnterHierarchy?()
+ } else if event == kCAOnOrderOut {
+ self.isInHierarchy = false
+ self.didExitHierarchy?()
+ }
+ return nullAction
+ }
+
+ override public init() {
+ super.init()
+ }
+
+ override public init(layer: Any) {
+ super.init(layer: layer)
+ }
+
+ required public init?(coder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+}
diff --git a/submodules/DrawingUI/Sources/DrawingScreen.swift b/submodules/DrawingUI/Sources/DrawingScreen.swift
index 385984dbaa4..bfa0b855005 100644
--- a/submodules/DrawingUI/Sources/DrawingScreen.swift
+++ b/submodules/DrawingUI/Sources/DrawingScreen.swift
@@ -834,9 +834,14 @@ private final class DrawingScreenComponent: CombinedComponent {
func updateColor(_ color: DrawingColor, animated: Bool = false) {
self.currentColor = color
- if let selectedEntity = self.selectedEntity {
- selectedEntity.color = color
- self.updateEntityView.invoke((selectedEntity.uuid, false))
+ if let selectedEntity = self.selectedEntity, let selectedEntityView = self.entityViewForEntity(selectedEntity) {
+ if let textEntity = selectedEntity as? DrawingTextEntity, let textEntityView = selectedEntityView as? DrawingTextEntityView {
+ textEntity.setColor(color, range: textEntityView.selectedRange)
+ textEntityView.update(animated: false, keepSelectedRange: true)
+ } else {
+ selectedEntity.color = color
+ selectedEntityView.update(animated: false)
+ }
} else {
self.drawingState = self.drawingState.withUpdatedColor(color)
self.updateToolState.invoke(self.drawingState.currentToolState)
@@ -3077,12 +3082,15 @@ public final class DrawingToolsInteraction {
return
}
+ var isRectangleImage = false
var isVideo = false
var isAdditional = false
if let entity = entityView.entity as? DrawingStickerEntity {
if case let .dualVideoReference(isAdditionalValue) = entity.content {
isVideo = true
isAdditional = isAdditionalValue
+ } else if case let .image(_, type) = entity.content, case .rectangle = type {
+ isRectangleImage = true
}
}
@@ -3139,6 +3147,21 @@ public final class DrawingToolsInteraction {
}))
}
}
+ #if DEBUG
+ if isRectangleImage {
+ actions.append(ContextMenuAction(content: .text(title: "Cut Out", accessibilityLabel: "Cut Out"), action: { [weak self, weak entityView] in
+ if let self, let entityView, let entity = entityView.entity as? DrawingStickerEntity, case let .image(image, _) = entity.content {
+ let _ = (cutoutStickerImage(from: image)
+ |> deliverOnMainQueue).start(next: { result in
+ if let result {
+ let newEntity = DrawingStickerEntity(content: .image(result, .sticker))
+ self.insertEntity(newEntity)
+ }
+ })
+ }
+ }))
+ }
+ #endif
let entityFrame = entityView.convert(entityView.selectionBounds, to: node.view).offsetBy(dx: 0.0, dy: -6.0)
let controller = makeContextMenuController(actions: actions)
let bounds = node.bounds.insetBy(dx: 0.0, dy: 160.0)
@@ -3497,7 +3520,7 @@ public final class DrawingToolsInteraction {
return
}
entityView.suspendEditing()
- self?.presentColorPicker(initialColor: textEntity.color, dismissed: {
+ self?.presentColorPicker(initialColor: textEntity.color(in: entityView.selectedRange), dismissed: {
entityView.resumeEditing()
})
},
diff --git a/submodules/DrawingUI/Sources/DrawingTextEntityView.swift b/submodules/DrawingUI/Sources/DrawingTextEntityView.swift
index 02be4d4ebcc..c062142fa09 100644
--- a/submodules/DrawingUI/Sources/DrawingTextEntityView.swift
+++ b/submodules/DrawingUI/Sources/DrawingTextEntityView.swift
@@ -367,6 +367,10 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
}
}
+ public var selectedRange: NSRange {
+ return self.textView.selectedRange
+ }
+
public func textViewDidChange(_ textView: UITextView) {
guard let updatedText = self.textView.attributedText.mutableCopy() as? NSMutableAttributedString else {
return
@@ -379,7 +383,7 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
self.textEntity.text = updatedText
self.sizeToFit()
- self.update(afterAppendingEmoji: true)
+ self.update(keepSelectedRange: true)
self.textChanged()
}
@@ -398,7 +402,7 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
self.textEntity.text = updatedText
- self.update(animated: false, afterAppendingEmoji: true)
+ self.update(animated: false, keepSelectedRange: true)
self.textView.selectedRange = NSMakeRange(previousSelectedRange.location + previousSelectedRange.length + text.length, 0)
}
@@ -493,6 +497,9 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
if let _ = attributes[ChatTextInputAttributes.customEmoji] {
text.addAttribute(.foregroundColor, value: UIColor.clear, range: subrange)
visualText.addAttribute(.foregroundColor, value: UIColor.clear, range: subrange)
+ } else if let color = attributes[DrawingTextEntity.TextAttributes.color] {
+ text.addAttribute(.foregroundColor, value: color, range: subrange)
+ visualText.addAttribute(.foregroundColor, value: color, range: subrange)
}
}
@@ -565,10 +572,14 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
}
public override func update(animated: Bool = false) {
- self.update(animated: animated, afterAppendingEmoji: false, updateEditingPosition: true)
+ self.update(animated: animated, keepSelectedRange: false, updateEditingPosition: true)
+ }
+
+ public func update(animated: Bool = false, keepSelectedRange: Bool = false) {
+ self.update(animated: animated, keepSelectedRange: keepSelectedRange, updateEditingPosition: true)
}
- func update(animated: Bool = false, afterAppendingEmoji: Bool = false, updateEditingPosition: Bool = true) {
+ func update(animated: Bool = false, keepSelectedRange: Bool = false, updateEditingPosition: Bool = true) {
if !self.isEditing {
self.center = self.textEntity.position
self.transform = CGAffineTransformScale(CGAffineTransformMakeRotation(self.textEntity.rotation), self.textEntity.scale, self.textEntity.scale)
@@ -612,7 +623,7 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
}
self.textView.textAlignment = self.textEntity.alignment.alignment
- self.updateText(keepSelectedRange: afterAppendingEmoji)
+ self.updateText(keepSelectedRange: keepSelectedRange)
self.sizeToFit()
diff --git a/submodules/DrawingUI/Sources/VideoRecorder.swift b/submodules/DrawingUI/Sources/DrawingVideoRecorder.swift
similarity index 100%
rename from submodules/DrawingUI/Sources/VideoRecorder.swift
rename to submodules/DrawingUI/Sources/DrawingVideoRecorder.swift
diff --git a/submodules/DrawingUI/Sources/ImageObjectSeparation.swift b/submodules/DrawingUI/Sources/ImageObjectSeparation.swift
new file mode 100644
index 00000000000..5d7e8b6146e
--- /dev/null
+++ b/submodules/DrawingUI/Sources/ImageObjectSeparation.swift
@@ -0,0 +1,74 @@
+import Foundation
+import UIKit
+import Vision
+import CoreImage
+import SwiftSignalKit
+import VideoToolbox
+
+private let queue = Queue()
+
+func cutoutStickerImage(from image: UIImage) -> Signal<UIImage?, NoError> {
+ if #available(iOS 17.0, *) {
+ guard let cgImage = image.cgImage else {
+ return .single(nil)
+ }
+ return Signal { subscriber in
+ let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
+ let request = VNGenerateForegroundInstanceMaskRequest { [weak handler] request, error in
+ guard let handler, let result = request.results?.first as? VNInstanceMaskObservation else {
+ subscriber.putNext(nil)
+ subscriber.putCompletion()
+ return
+ }
+ let instances = instances(atPoint: nil, inObservation: result)
+ if let mask = try? result.generateScaledMaskForImage(forInstances: instances, from: handler), let image = UIImage(pixelBuffer: mask) {
+ subscriber.putNext(image)
+ subscriber.putCompletion()
+ } else {
+ subscriber.putNext(nil)
+ subscriber.putCompletion()
+ }
+ }
+ try? handler.perform([request])
+ return ActionDisposable {
+ request.cancel()
+ }
+ }
+ |> runOn(queue)
+ } else {
+ return .single(nil)
+ }
+}
+
+@available(iOS 17.0, *)
+private func instances(atPoint maybePoint: CGPoint?, inObservation observation: VNInstanceMaskObservation) -> IndexSet {
+ guard let point = maybePoint else {
+ return observation.allInstances
+ }
+
+ let instanceMap = observation.instanceMask
+ let coords = VNImagePointForNormalizedPoint(point, CVPixelBufferGetWidth(instanceMap) - 1, CVPixelBufferGetHeight(instanceMap) - 1)
+
+ CVPixelBufferLockBaseAddress(instanceMap, .readOnly)
+ guard let pixels = CVPixelBufferGetBaseAddress(instanceMap) else {
+ fatalError()
+ }
+ let bytesPerRow = CVPixelBufferGetBytesPerRow(instanceMap)
+ let instanceLabel = pixels.load(fromByteOffset: Int(coords.y) * bytesPerRow + Int(coords.x), as: UInt8.self)
+ CVPixelBufferUnlockBaseAddress(instanceMap, .readOnly)
+
+ return instanceLabel == 0 ? observation.allInstances : [Int(instanceLabel)]
+}
+
+private extension UIImage {
+ convenience init?(pixelBuffer: CVPixelBuffer) {
+ var cgImage: CGImage?
+ VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
+
+ guard let cgImage = cgImage else {
+ return nil
+ }
+
+ self.init(cgImage: cgImage)
+ }
+}
diff --git a/submodules/MediaPickerUI/Sources/MediaPickerGridItem.swift b/submodules/MediaPickerUI/Sources/MediaPickerGridItem.swift
index a20d645b159..3d829fecb23 100644
--- a/submodules/MediaPickerUI/Sources/MediaPickerGridItem.swift
+++ b/submodules/MediaPickerUI/Sources/MediaPickerGridItem.swift
@@ -260,11 +260,18 @@ final class MediaPickerGridItemNode: GridItemNode {
}
}
+ private var innerIsHidden = false
func updateHiddenMedia() {
- let wasHidden = self.isHidden
- self.isHidden = self.interaction?.hiddenMediaId == self.identifier
- if !self.isHidden && wasHidden {
- self.animateFadeIn(animateCheckNode: true, animateSpoilerNode: true)
+ let wasHidden = self.innerIsHidden
+ if self.identifier == self.interaction?.hiddenMediaId {
+ self.isHidden = true
+ self.innerIsHidden = true
+ } else {
+ self.isHidden = false
+ self.innerIsHidden = false
+ if wasHidden {
+ self.animateFadeIn(animateCheckNode: true, animateSpoilerNode: true)
+ }
}
}
diff --git a/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift b/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift
index bd6ed8159e3..6018c3a8aae 100644
--- a/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift
+++ b/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift
@@ -369,13 +369,11 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
|> deliverOnMainQueue).start(next: { [weak self] id in
if let strongSelf = self {
strongSelf.controller?.interaction?.hiddenMediaId = id
-
strongSelf.gridNode.forEachItemNode { itemNode in
if let itemNode = itemNode as? MediaPickerGridItemNode {
itemNode.updateHiddenMedia()
}
}
-
strongSelf.selectionNode?.updateHiddenMedia()
}
})
diff --git a/submodules/MetalEngine/Sources/MetalEngine.swift b/submodules/MetalEngine/Sources/MetalEngine.swift
index bf25c2ad1db..39356c93196 100644
--- a/submodules/MetalEngine/Sources/MetalEngine.swift
+++ b/submodules/MetalEngine/Sources/MetalEngine.swift
@@ -115,6 +115,32 @@ open class MetalEngineSubjectLayer: SimpleLayer {
fileprivate var internalId: Int = -1
fileprivate var surfaceAllocation: MetalEngine.SurfaceAllocation?
+ #if DEBUG
+ fileprivate var surfaceChangeFrameCount: Int = 0
+ #endif
+
+ public var cloneLayers: [CALayer] = []
+
+ override open var contents: Any? {
+ didSet {
+ if !self.cloneLayers.isEmpty {
+ for cloneLayer in self.cloneLayers {
+ cloneLayer.contents = self.contents
+ }
+ }
+ }
+ }
+
+ override open var contentsRect: CGRect {
+ didSet {
+ if !self.cloneLayers.isEmpty {
+ for cloneLayer in self.cloneLayers {
+ cloneLayer.contentsRect = self.contentsRect
+ }
+ }
+ }
+ }
+
public override init() {
super.init()
@@ -529,10 +555,13 @@ public final class MetalEngine {
let renderingRect: CGRect
let contentsRect: CGRect
- init(baseRect: CGRect, surfaceWidth: Int, surfaceHeight: Int) {
+ init(baseRect: CGRect, edgeSize: CGFloat, surfaceWidth: Int, surfaceHeight: Int) {
self.subRect = CGRect(origin: CGPoint(x: baseRect.minX, y: baseRect.minY), size: CGSize(width: baseRect.width, height: baseRect.height))
self.renderingRect = CGRect(origin: CGPoint(x: self.subRect.minX / CGFloat(surfaceWidth), y: self.subRect.minY / CGFloat(surfaceHeight)), size: CGSize(width: self.subRect.width / CGFloat(surfaceWidth), height: self.subRect.height / CGFloat(surfaceHeight)))
- self.contentsRect = CGRect(origin: CGPoint(x: self.subRect.minX / CGFloat(surfaceWidth), y: 1.0 - self.subRect.minY / CGFloat(surfaceHeight) - self.subRect.height / CGFloat(surfaceHeight)), size: CGSize(width: self.subRect.width / CGFloat(surfaceWidth), height: self.subRect.height / CGFloat(surfaceHeight)))
+
+ let subRectWithInset = self.subRect.insetBy(dx: edgeSize, dy: edgeSize)
+
+ self.contentsRect = CGRect(origin: CGPoint(x: subRectWithInset.minX / CGFloat(surfaceWidth), y: 1.0 - subRectWithInset.minY / CGFloat(surfaceHeight) - subRectWithInset.height / CGFloat(surfaceHeight)), size: CGSize(width: subRectWithInset.width / CGFloat(surfaceWidth), height: subRectWithInset.height / CGFloat(surfaceHeight)))
}
}
@@ -546,11 +575,13 @@ public final class MetalEngine {
if item0.itemId != -1 && item1.itemId != -1 {
let layout0 = AllocationLayout(
baseRect: CGRect(origin: CGPoint(x: CGFloat(item0.x), y: CGFloat(item0.y)), size: CGSize(width: CGFloat(item0.width), height: CGFloat(item0.height))),
+ edgeSize: CGFloat(renderingParameters.edgeInset),
surfaceWidth: self.width,
surfaceHeight: self.height
)
let layout1 = AllocationLayout(
baseRect: CGRect(origin: CGPoint(x: CGFloat(item1.x), y: CGFloat(item1.y)), size: CGSize(width: CGFloat(item1.width), height: CGFloat(item1.height))),
+ edgeSize: CGFloat(renderingParameters.edgeInset),
surfaceWidth: self.width,
surfaceHeight: self.height
)
@@ -780,7 +811,10 @@ public final class MetalEngine {
if previousSurfaceId != nil {
#if DEBUG
- print("Changing surface for layer \(layer) (\(renderSpec.allocationWidth)x\(renderSpec.allocationHeight))")
+ layer.surfaceChangeFrameCount += 1
+ if layer.surfaceChangeFrameCount > 100 {
+ print("Changing surface for layer \(layer) (\(renderSpec.allocationWidth)x\(renderSpec.allocationHeight))")
+ }
#endif
}
} else {
@@ -792,6 +826,10 @@ public final class MetalEngine {
#endif
}
}
+ } else {
+ #if DEBUG
+ layer.surfaceChangeFrameCount = max(0, layer.surfaceChangeFrameCount - 1)
+ #endif
}
}
diff --git a/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift b/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift
index 75328ee483c..9e184bca04a 100644
--- a/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift
+++ b/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift
@@ -602,6 +602,9 @@ private final class VariableBlurView: UIVisualEffectView {
fatalError("init(coder:) has not been implemented")
}
+ override func updateTraitsIfNeeded() {
+ }
+
private func resetEffect() {
let filterClassStringEncoded = "Q0FGaWx0ZXI="
let filterClassString: String = {
@@ -649,6 +652,7 @@ private final class VariableBlurView: UIVisualEffectView {
variableBlur.setValue(self.maxBlurRadius, forKey: "inputRadius")
variableBlur.setValue(gradientImageRef, forKey: "inputMaskImage")
variableBlur.setValue(true, forKey: "inputNormalizeEdges")
+ variableBlur.setValue(UIScreenScale, forKey: "scale")
let backdropLayer = self.subviews.first?.layer
backdropLayer?.filters = [variableBlur]
@@ -671,6 +675,7 @@ public final class PeerAvatarBottomShadowNode: ASDisplayNode {
self.imageView = UIImageView()
self.imageView.contentMode = .scaleToFill
+ self.imageView.alpha = 0.8
super.init()
@@ -704,6 +709,7 @@ public final class PeerAvatarBottomShadowNode: ASDisplayNode {
self.backgroundNode.updateColor(color: UIColor(white: 0.0, alpha: 0.1), enableSaturation: false, forceKeepBlur: true, transition: .immediate)
+ self.view.addSubview(self.imageView)
//self.addSubnode(self.backgroundNode)
}
@@ -1729,7 +1735,7 @@ public final class PeerInfoAvatarListContainerNode: ASDisplayNode {
transition.updateAlpha(node: self.setByYouNode, alpha: 0.7)
self.setByYouNode.attributedText = NSAttributedString(string: photoTitle, font: Font.regular(12.0), textColor: UIColor.white)
let setByYouSize = self.setByYouNode.updateLayout(size)
- self.setByYouNode.frame = CGRect(origin: CGPoint(x: size.width - setByYouSize.width - 14.0, y: size.height - setByYouSize.height - 18.0), size: setByYouSize)
+ self.setByYouNode.frame = CGRect(origin: CGPoint(x: size.width - setByYouSize.width - 14.0, y: size.height - setByYouSize.height - 40.0), size: setByYouSize)
self.setByYouNode.isUserInteractionEnabled = hasLink
} else {
transition.updateAlpha(node: self.setByYouNode, alpha: 0.0)
diff --git a/submodules/PremiumUI/Sources/PremiumBoostScreen.swift b/submodules/PremiumUI/Sources/PremiumBoostScreen.swift
index 1fe350d1024..2e2ef001cc7 100644
--- a/submodules/PremiumUI/Sources/PremiumBoostScreen.swift
+++ b/submodules/PremiumUI/Sources/PremiumBoostScreen.swift
@@ -30,7 +30,17 @@ private struct BoostState {
}
return (
- .storiesChannelBoost(peer: peer, boostSubject: .stories, isCurrent: isCurrent, level: currentLevel, currentLevelBoosts: currentLevelBoosts, nextLevelBoosts: nextLevelBoosts, link: nil, myBoostCount: myBoostCount, canBoostAgain: canBoostAgain),
+ .storiesChannelBoost(
+ peer: peer,
+ boostSubject: .stories,
+ isCurrent: isCurrent,
+ level: currentLevel,
+ currentLevelBoosts: currentLevelBoosts,
+ nextLevelBoosts: nextLevelBoosts,
+ link: nil,
+ myBoostCount: myBoostCount,
+ canBoostAgain: canBoostAgain
+ ),
boosts
)
}
@@ -160,9 +170,10 @@ public func PremiumBoostScreen(
}
let _ = context.engine.peers.applyChannelBoost(peerId: peerId, slots: slots).startStandalone(completed: {
- let _ = combineLatest(queue: Queue.mainQueue(),
- context.engine.peers.getChannelBoostStatus(peerId: peerId),
- context.engine.peers.getMyBoostStatus()
+ let _ = combineLatest(
+ queue: Queue.mainQueue(),
+ context.engine.peers.getChannelBoostStatus(peerId: peerId),
+ context.engine.peers.getMyBoostStatus()
).startStandalone(next: { boostStatus, myBoostStatus in
dismissReplaceImpl?()
PremiumBoostScreen(context: context, contentContext: contentContext, peerId: peerId, isCurrent: isCurrent, status: boostStatus, myBoostStatus: myBoostStatus, replacedBoosts: (Int32(slots.count), Int32(channelIds.count)), forceDark: forceDark, openPeer: openPeer, presentController: presentController, pushController: pushController, dismissed: dismissed)
diff --git a/submodules/SettingsUI/Sources/Themes/ThemeAccentColorController.swift b/submodules/SettingsUI/Sources/Themes/ThemeAccentColorController.swift
index 8260484768e..2624de8dd26 100644
--- a/submodules/SettingsUI/Sources/Themes/ThemeAccentColorController.swift
+++ b/submodules/SettingsUI/Sources/Themes/ThemeAccentColorController.swift
@@ -158,7 +158,7 @@ final class ThemeAccentColorController: ViewController {
if let strongSelf = self {
strongSelf.dismiss()
}
- }, apply: { [weak self] state, serviceBackgroundColor in
+ }, apply: { [weak self] state, serviceBackgroundColor, forBoth in
if let strongSelf = self {
let context = strongSelf.context
let autoNightModeTriggered = strongSelf.presentationData.autoNightModeTriggered
@@ -175,7 +175,7 @@ final class ThemeAccentColorController: ViewController {
}
if case let .peer(peer) = strongSelf.resultMode {
- let _ = strongSelf.context.engine.themes.setChatWallpaper(peerId: peer.id, wallpaper: coloredWallpaper, forBoth: false).start()
+ let _ = strongSelf.context.engine.themes.setChatWallpaper(peerId: peer.id, wallpaper: coloredWallpaper, forBoth: forBoth).start()
strongSelf.completion?()
return
}
diff --git a/submodules/SettingsUI/Sources/Themes/ThemeAccentColorControllerNode.swift b/submodules/SettingsUI/Sources/Themes/ThemeAccentColorControllerNode.swift
index 243bc64bc5d..192e02a402b 100644
--- a/submodules/SettingsUI/Sources/Themes/ThemeAccentColorControllerNode.swift
+++ b/submodules/SettingsUI/Sources/Themes/ThemeAccentColorControllerNode.swift
@@ -225,7 +225,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
}
}
- init(context: AccountContext, mode: ThemeAccentColorControllerMode, resultMode: ThemeAccentColorController.ResultMode, theme: PresentationTheme, wallpaper: TelegramWallpaper, dismiss: @escaping () -> Void, apply: @escaping (ThemeColorState, UIColor?) -> Void, ready: Promise<Bool>) {
+ init(context: AccountContext, mode: ThemeAccentColorControllerMode, resultMode: ThemeAccentColorController.ResultMode, theme: PresentationTheme, wallpaper: TelegramWallpaper, dismiss: @escaping () -> Void, apply: @escaping (ThemeColorState, UIColor?, Bool) -> Void, ready: Promise<Bool>) {
self.context = context
self.mode = mode
self.resultMode = resultMode
@@ -437,7 +437,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
}
}
- self.toolbarNode.done = { [weak self] _ in
+ self.toolbarNode.done = { [weak self] forBoth in
if let strongSelf = self {
if strongSelf.state.displayPatternPanel {
strongSelf.updateState({ current in
@@ -448,7 +448,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
} else {
if !strongSelf.dismissed {
strongSelf.dismissed = true
- apply(strongSelf.state, strongSelf.serviceBackgroundColor)
+ apply(strongSelf.state, strongSelf.serviceBackgroundColor, forBoth)
}
}
}
diff --git a/submodules/SettingsUI/Sources/Themes/WallpaperGalleryToolbarNode.swift b/submodules/SettingsUI/Sources/Themes/WallpaperGalleryToolbarNode.swift
index 0a0f2f41422..876f1325bb3 100644
--- a/submodules/SettingsUI/Sources/Themes/WallpaperGalleryToolbarNode.swift
+++ b/submodules/SettingsUI/Sources/Themes/WallpaperGalleryToolbarNode.swift
@@ -190,7 +190,13 @@ final class WallpaperGalleryToolbarNode: ASDisplayNode, WallpaperGalleryToolbar
}
}
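+ // Debounce: taps arriving within 1.0 second of the previously accepted tap are ignored, preventing accidental double-activation of the toolbar action.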
+ private var previousActionTime: Double?
@objc func pressed() {
+ let currentTime = CACurrentMediaTime()
+ if let previousActionTime = self.previousActionTime, currentTime < previousActionTime + 1.0 {
+ return
+ }
+ self.previousActionTime = currentTime
self.action()
}
}
diff --git a/submodules/StatisticsUI/Sources/ChannelStatsController.swift b/submodules/StatisticsUI/Sources/ChannelStatsController.swift
index cfeacb85eed..d67b8c88797 100644
--- a/submodules/StatisticsUI/Sources/ChannelStatsController.swift
+++ b/submodules/StatisticsUI/Sources/ChannelStatsController.swift
@@ -1446,7 +1446,7 @@ public func channelStatsController(context: AccountContext, updatedPresentationD
return controller
}
-private final class ChannelStatsContextExtractedContentSource: ContextExtractedContentSource {
+final class ChannelStatsContextExtractedContentSource: ContextExtractedContentSource {
var keepInPlace: Bool
let ignoreContentTouches: Bool = true
let blurBackground: Bool = true
diff --git a/submodules/StatisticsUI/Sources/MessageStatsController.swift b/submodules/StatisticsUI/Sources/MessageStatsController.swift
index de7cdbc9cd5..05828dc801a 100644
--- a/submodules/StatisticsUI/Sources/MessageStatsController.swift
+++ b/submodules/StatisticsUI/Sources/MessageStatsController.swift
@@ -16,18 +16,21 @@ import PresentationDataUtils
import AppBundle
import GraphUI
import StoryContainerScreen
+import ContextUI
private final class MessageStatsControllerArguments {
let context: AccountContext
let loadDetailedGraph: (StatsGraph, Int64) -> Signal<StatsGraph?, NoError>
let openMessage: (EngineMessage.Id) -> Void
let openStory: (EnginePeer.Id, EngineStoryItem, UIView) -> Void
+ let storyContextAction: (EnginePeer.Id, ASDisplayNode, ContextGesture?, Bool) -> Void
- init(context: AccountContext, loadDetailedGraph: @escaping (StatsGraph, Int64) -> Signal<StatsGraph?, NoError>, openMessage: @escaping (EngineMessage.Id) -> Void, openStory: @escaping (EnginePeer.Id, EngineStoryItem, UIView) -> Void) {
+ init(context: AccountContext, loadDetailedGraph: @escaping (StatsGraph, Int64) -> Signal<StatsGraph?, NoError>, openMessage: @escaping (EngineMessage.Id) -> Void, openStory: @escaping (EnginePeer.Id, EngineStoryItem, UIView) -> Void, storyContextAction: @escaping (EnginePeer.Id, ASDisplayNode, ContextGesture?, Bool) -> Void) {
self.context = context
self.loadDetailedGraph = loadDetailedGraph
self.openMessage = openMessage
self.openStory = openStory
+ self.storyContextAction = storyContextAction
}
}
@@ -171,6 +174,7 @@ private enum StatsEntry: ItemListNodeEntry {
var forwards: Int32 = 0
var reactions: Int32 = 0
+ var isStory = false
let peer: Peer
switch item {
case let .message(message):
@@ -191,6 +195,7 @@ private enum StatsEntry: ItemListNodeEntry {
views = Int32(story.views?.seenCount ?? 0)
forwards = Int32(story.views?.forwardCount ?? 0)
reactions = Int32(story.views?.reactedCount ?? 0)
+ isStory = true
}
return StatsMessageItem(context: arguments.context, presentationData: presentationData, peer: peer, item: item, views: views, reactions: reactions, forwards: forwards, isPeer: true, sectionId: self.section, style: .blocks, action: {
switch item {
@@ -203,7 +208,9 @@ private enum StatsEntry: ItemListNodeEntry {
if case let .story(peer, story) = item {
arguments.openStory(peer.id, story, view)
}
- }, contextAction: nil)
+ }, contextAction: { node, gesture in
+ arguments.storyContextAction(peer.id, node, gesture, !isStory)
+ })
}
}
}
@@ -305,6 +312,7 @@ public func messageStatsController(context: AccountContext, updatedPresentationD
let dataSignal: Signal
var loadDetailedGraphImpl: ((StatsGraph, Int64) -> Signal<StatsGraph?, NoError>)?
var openStoryImpl: ((EnginePeer.Id, EngineStoryItem, UIView) -> Void)?
+ var storyContextActionImpl: ((EnginePeer.Id, ASDisplayNode, ContextGesture?, Bool) -> Void)?
var forwardsContext: StoryStatsPublicForwardsContext?
let peerId: EnginePeer.Id
@@ -373,6 +381,8 @@ public func messageStatsController(context: AccountContext, updatedPresentationD
navigateToMessageImpl?(messageId)
}, openStory: { peerId, story, view in
openStoryImpl?(peerId, story, view)
+ }, storyContextAction: { peerId, node, gesture, isMessage in
+ storyContextActionImpl?(peerId, node, gesture, isMessage)
})
let longLoadingSignal: Signal<Bool, NoError> = .single(false) |> then(.single(true) |> delay(2.0, queue: Queue.mainQueue()))
@@ -534,5 +544,50 @@ public func messageStatsController(context: AccountContext, updatedPresentationD
controller.push(storyContainerScreen)
})
}
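+ // Context menu for a stats list row: messages get a "View in Channel" action, stories get an open-channel or open-profile action depending on the peer, and selecting the action navigates accordingly.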
+ storyContextActionImpl = { [weak controller] peerId, sourceNode, gesture, isMessage in
+ guard let controller = controller, let sourceNode = sourceNode as? ContextExtractedContentContainingNode else {
+ return
+ }
+
+ let presentationData = updatedPresentationData?.initial ?? context.sharedContext.currentPresentationData.with { $0 }
+
+ var items: [ContextMenuItem] = []
+
+ let title: String
+ let iconName: String
+ if isMessage {
+ title = presentationData.strings.Conversation_ViewInChannel
+ iconName = "Chat/Context Menu/GoToMessage"
+ } else {
+ if peerId.isGroupOrChannel {
+ title = presentationData.strings.ChatList_ContextOpenChannel
+ iconName = "Chat/Context Menu/Channels"
+ } else {
+ title = presentationData.strings.Conversation_ContextMenuOpenProfile
+ iconName = "Chat/Context Menu/User"
+ }
+ }
+
+ items.append(.action(ContextMenuActionItem(text: title, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: iconName), color: theme.contextMenu.primaryColor) }, action: { [weak controller] c, _ in
+ c.dismiss(completion: {
+ let _ = (context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: peerId))
+ |> deliverOnMainQueue).start(next: { peer in
+ guard let peer = peer, let navigationController = controller?.navigationController as? NavigationController else {
+ return
+ }
+ if case .user = peer {
+ if let controller = context.sharedContext.makePeerInfoController(context: context, updatedPresentationData: nil, peer: peer._asPeer(), mode: .generic, avatarInitiallyExpanded: peer.largeProfileImage != nil, fromChat: false, requestsContext: nil) {
+ navigationController.pushViewController(controller)
+ }
+ } else {
+ context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: context, chatLocation: .peer(peer), subject: nil))
+ }
+ })
+ })
+ })))
+
+ let contextController = ContextController(presentationData: presentationData, source: .extracted(ChannelStatsContextExtractedContentSource(controller: controller, sourceNode: sourceNode, keepInPlace: false)), items: .single(ContextController.Items(content: .list(items))), gesture: gesture)
+ controller.presentInGlobalOverlay(contextController)
+ }
return controller
}
diff --git a/submodules/StatisticsUI/Sources/StatsMessageItem.swift b/submodules/StatisticsUI/Sources/StatsMessageItem.swift
index 4691740299d..f3a612ee4c9 100644
--- a/submodules/StatisticsUI/Sources/StatsMessageItem.swift
+++ b/submodules/StatisticsUI/Sources/StatsMessageItem.swift
@@ -381,7 +381,13 @@ final class StatsMessageItemNode: ListViewItemNode, ItemListItemNode {
}
}
- let (viewsLayout, viewsApply) = makeViewsLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Stats_MessageViews(item.views), font: labelFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: 128.0, height: CGFloat.greatestFiniteMagnitude), alignment: .right, cutout: nil, insets: UIEdgeInsets()))
+ let viewsString: String
+ if item.views == 0 {
+ viewsString = item.presentationData.strings.Stats_MessageViews_NoViews
+ } else {
+ viewsString = item.presentationData.strings.Stats_MessageViews(item.views)
+ }
+ let (viewsLayout, viewsApply) = makeViewsLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: viewsString, font: labelFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: 128.0, height: CGFloat.greatestFiniteMagnitude), alignment: .right, cutout: nil, insets: UIEdgeInsets()))
let reactions = item.reactions > 0 ? compactNumericCountString(Int(item.reactions), decimalSeparator: item.presentationData.dateTimeFormat.decimalSeparator) : ""
let (reactionsLayout, reactionsApply) = makeReactionsLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: reactions, font: labelFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: 128.0, height: CGFloat.greatestFiniteMagnitude), alignment: .right, cutout: nil, insets: UIEdgeInsets()))
diff --git a/submodules/StatisticsUI/Sources/StatsOverviewItem.swift b/submodules/StatisticsUI/Sources/StatsOverviewItem.swift
index 4810e266b46..acb8a5da3e2 100644
--- a/submodules/StatisticsUI/Sources/StatsOverviewItem.swift
+++ b/submodules/StatisticsUI/Sources/StatsOverviewItem.swift
@@ -379,7 +379,7 @@ class StatsOverviewItemNode: ListViewItemNode {
middle1RightItemLayoutAndApply = makeMiddle1RightItemLayout(
params.width,
item.presentationData,
- compactNumericCountString(views.forwardCount - Int(item.publicShares ?? 0)),
+ item.publicShares.flatMap { "≈\( compactNumericCountString(max(0, views.forwardCount - Int($0))))" } ?? "–",
item.presentationData.strings.Stats_Message_PrivateShares,
nil
)
diff --git a/submodules/TelegramCallsUI/Sources/CallControllerNode.swift b/submodules/TelegramCallsUI/Sources/CallControllerNode.swift
index d6f9fb13aba..b2a2990e355 100644
--- a/submodules/TelegramCallsUI/Sources/CallControllerNode.swift
+++ b/submodules/TelegramCallsUI/Sources/CallControllerNode.swift
@@ -26,7 +26,7 @@ private func interpolate(from: CGFloat, to: CGFloat, value: CGFloat) -> CGFloat
return (1.0 - value) * from + value * to
}
-private final class CallVideoNode: ASDisplayNode, PreviewVideoNode {
+final class CallVideoNode: ASDisplayNode, PreviewVideoNode {
private let videoTransformContainer: ASDisplayNode
private let videoView: PresentationCallVideoView
diff --git a/submodules/TelegramCallsUI/Sources/CallControllerNodeV2.swift b/submodules/TelegramCallsUI/Sources/CallControllerNodeV2.swift
index 12c772d8b0c..53d8dbbbb5e 100644
--- a/submodules/TelegramCallsUI/Sources/CallControllerNodeV2.swift
+++ b/submodules/TelegramCallsUI/Sources/CallControllerNodeV2.swift
@@ -16,6 +16,7 @@ import TinyThumbnail
import ImageBlur
import TelegramVoip
import MetalEngine
+import DeviceAccess
final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeProtocol {
private let sharedContext: SharedAccountContext
@@ -32,6 +33,7 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
private var callStartTimestamp: Double?
+ private var callState: PresentationCallState?
var isMuted: Bool = false
var toggleMute: (() -> Void)?
@@ -56,6 +58,7 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
private var isMicrophoneMutedDisposable: Disposable?
private var audioLevelDisposable: Disposable?
+ private var localVideo: AdaptedCallVideoSource?
private var remoteVideo: AdaptedCallVideoSource?
init(
@@ -94,7 +97,13 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
guard let self else {
return
}
- let _ = self
+ self.toggleVideo()
+ }
+ self.callScreen.flipCameraAction = { [weak self] in
+ guard let self else {
+ return
+ }
+ self.call.switchVideoCamera()
}
self.callScreen.microhoneMuteAction = { [weak self] in
guard let self else {
@@ -108,15 +117,23 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
}
self.endCall?()
}
+ self.callScreen.backAction = { [weak self] in
+ guard let self else {
+ return
+ }
+ self.back?()
+ }
self.callScreenState = PrivateCallScreen.State(
lifecycleState: .connecting,
name: " ",
+ shortName: " ",
avatarImage: nil,
audioOutput: .internalSpeaker,
isMicrophoneMuted: false,
localVideo: nil,
- remoteVideo: nil
+ remoteVideo: nil,
+ isRemoteBatteryLow: false
)
if let peer = call.peer {
self.updatePeer(peer: peer)
@@ -176,6 +193,93 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
}
}
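+ // Toggles outgoing video: disables an active camera or screencast, or, after requesting camera access, shows a camera preview sheet and calls requestVideo() once sharing is confirmed.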
+ private func toggleVideo() {
+ guard let callState = self.callState else {
+ return
+ }
+ switch callState.state {
+ case .active:
+ switch callState.videoState {
+ case .active(let isScreencast), .paused(let isScreencast):
+ if isScreencast {
+ (self.call as? PresentationCallImpl)?.disableScreencast()
+ } else {
+ self.call.disableVideo()
+ }
+ default:
+ DeviceAccess.authorizeAccess(to: .camera(.videoCall), onlyCheck: true, presentationData: self.presentationData, present: { [weak self] c, a in
+ if let strongSelf = self {
+ strongSelf.present?(c)
+ }
+ }, openSettings: { [weak self] in
+ self?.sharedContext.applicationBindings.openSettings()
+ }, _: { [weak self] ready in
+ guard let self, ready else {
+ return
+ }
+ let proceed = { [weak self] in
+ guard let self else {
+ return
+ }
+ /*switch callState.videoState {
+ case .inactive:
+ self.isRequestingVideo = true
+ self.updateButtonsMode()
+ default:
+ break
+ }*/
+ self.call.requestVideo()
+ }
+
+ self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
+ guard let self else {
+ return
+ }
+
+ if let outgoingVideoView = outgoingVideoView {
+ outgoingVideoView.view.backgroundColor = .black
+ outgoingVideoView.view.clipsToBounds = true
+
+ var updateLayoutImpl: ((ContainerViewLayout, CGFloat) -> Void)?
+
+ let outgoingVideoNode = CallVideoNode(videoView: outgoingVideoView, disabledText: nil, assumeReadyAfterTimeout: true, isReadyUpdated: { [weak self] in
+ guard let self, let (layout, navigationBarHeight) = self.validLayout else {
+ return
+ }
+ updateLayoutImpl?(layout, navigationBarHeight)
+ }, orientationUpdated: { [weak self] in
+ guard let self, let (layout, navigationBarHeight) = self.validLayout else {
+ return
+ }
+ updateLayoutImpl?(layout, navigationBarHeight)
+ }, isFlippedUpdated: { [weak self] _ in
+ guard let self, let (layout, navigationBarHeight) = self.validLayout else {
+ return
+ }
+ updateLayoutImpl?(layout, navigationBarHeight)
+ })
+
+ let controller = VoiceChatCameraPreviewController(sharedContext: self.sharedContext, cameraNode: outgoingVideoNode, shareCamera: { _, _ in
+ proceed()
+ }, switchCamera: { [weak self] in
+ Queue.mainQueue().after(0.1) {
+ self?.call.switchVideoCamera()
+ }
+ })
+ self.present?(controller)
+
+ updateLayoutImpl = { [weak controller] layout, navigationBarHeight in
+ controller?.containerLayoutUpdated(layout, transition: .immediate)
+ }
+ }
+ })
+ })
+ }
+ default:
+ break
+ }
+ }
+
private func resolvedEmojiKey(data: Data) -> [String] {
if let emojiKey = self.emojiKey, emojiKey.data == data {
return emojiKey.resolvedKey
@@ -186,6 +290,8 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
}
func updateCallState(_ callState: PresentationCallState) {
+ self.callState = callState
+
let mappedLifecycleState: PrivateCallScreen.State.LifecycleState
switch callState.state {
case .waiting:
@@ -227,18 +333,45 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
mappedLifecycleState = .terminated(PrivateCallScreen.State.TerminatedState(duration: duration))
}
- switch callState.remoteVideoState {
- case .active, .paused:
- if self.remoteVideo == nil, let call = self.call as? PresentationCallImpl, let videoStreamSignal = call.video(isIncoming: true) {
- self.remoteVideo = AdaptedCallVideoSource(videoStreamSignal: videoStreamSignal)
- }
- case .inactive:
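+ // When the call is ending, drop both video sources; otherwise attach local video for an active camera (not a screencast) and remote video whenever the peer reports it as active or paused.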
+ switch callState.state {
+ case .terminating, .terminated:
+ self.localVideo = nil
self.remoteVideo = nil
+ default:
+ switch callState.videoState {
+ case .active(let isScreencast), .paused(let isScreencast):
+ if isScreencast {
+ self.localVideo = nil
+ } else {
+ if self.localVideo == nil, let call = self.call as? PresentationCallImpl, let videoStreamSignal = call.video(isIncoming: false) {
+ self.localVideo = AdaptedCallVideoSource(videoStreamSignal: videoStreamSignal)
+ }
+ }
+ case .inactive, .notAvailable:
+ self.localVideo = nil
+ }
+
+ switch callState.remoteVideoState {
+ case .active, .paused:
+ if self.remoteVideo == nil, let call = self.call as? PresentationCallImpl, let videoStreamSignal = call.video(isIncoming: true) {
+ self.remoteVideo = AdaptedCallVideoSource(videoStreamSignal: videoStreamSignal)
+ }
+ case .inactive:
+ self.remoteVideo = nil
+ }
}
if var callScreenState = self.callScreenState {
callScreenState.lifecycleState = mappedLifecycleState
callScreenState.remoteVideo = self.remoteVideo
+ callScreenState.localVideo = self.localVideo
+
+ switch callState.remoteBatteryLevel {
+ case .low:
+ callScreenState.isRemoteBatteryLow = true
+ case .normal:
+ callScreenState.isRemoteBatteryLow = false
+ }
if self.callScreenState != callScreenState {
self.callScreenState = callScreenState
@@ -380,7 +513,7 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
private final class AdaptedCallVideoSource: VideoSource {
private static let queue = Queue(name: "AdaptedCallVideoSource")
- var updated: (() -> Void)?
+ private var onUpdatedListeners = Bag<() -> Void>()
private(set) var currentOutput: Output?
private var textureCache: CVMetalTextureCache?
@@ -396,7 +529,7 @@ private final class AdaptedCallVideoSource: VideoSource {
}
let rotationAngle: Float
- switch videoFrameData.orientation {
+ switch videoFrameData.deviceRelativeOrientation ?? videoFrameData.orientation {
case .rotation0:
rotationAngle = 0.0
case .rotation90:
@@ -407,6 +540,47 @@ private final class AdaptedCallVideoSource: VideoSource {
rotationAngle = Float.pi * 3.0 / 2.0
}
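+ // Derive the mirroring axes: when a device-relative orientation differs from the frame orientation, mirror along the axis matching that orientation; otherwise honour the frame's raw mirror flags.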
+ var mirrorDirection: Output.MirrorDirection = []
+
+ var sourceId: Int = 0
+ if videoFrameData.mirrorHorizontally || videoFrameData.mirrorVertically {
+ sourceId = 1
+ }
+
+ if let deviceRelativeOrientation = videoFrameData.deviceRelativeOrientation, deviceRelativeOrientation != videoFrameData.orientation {
+ let shouldMirror = videoFrameData.mirrorHorizontally || videoFrameData.mirrorVertically
+
+ var mirrorHorizontally = false
+ var mirrorVertically = false
+
+ if shouldMirror {
+ switch deviceRelativeOrientation {
+ case .rotation0:
+ mirrorHorizontally = true
+ case .rotation90:
+ mirrorVertically = true
+ case .rotation180:
+ mirrorHorizontally = true
+ case .rotation270:
+ mirrorVertically = true
+ }
+ }
+
+ if mirrorHorizontally {
+ mirrorDirection.insert(.horizontal)
+ }
+ if mirrorVertically {
+ mirrorDirection.insert(.vertical)
+ }
+ } else {
+ if videoFrameData.mirrorHorizontally {
+ mirrorDirection.insert(.horizontal)
+ }
+ if videoFrameData.mirrorVertically {
+ mirrorDirection.insert(.vertical)
+ }
+ }
+
AdaptedCallVideoSource.queue.async { [weak self] in
let output: Output
switch videoFrameData.buffer {
@@ -425,7 +599,14 @@ private final class AdaptedCallVideoSource: VideoSource {
return
}
- output = Output(y: yTexture, uv: uvTexture, rotationAngle: rotationAngle, sourceId: videoFrameData.mirrorHorizontally || videoFrameData.mirrorVertically ? 1 : 0)
+ output = Output(
+ resolution: CGSize(width: CGFloat(yTexture.width), height: CGFloat(yTexture.height)),
+ y: yTexture,
+ uv: uvTexture,
+ rotationAngle: rotationAngle,
+ mirrorDirection: mirrorDirection,
+ sourceId: sourceId
+ )
default:
return
}
@@ -435,12 +616,27 @@ private final class AdaptedCallVideoSource: VideoSource {
return
}
self.currentOutput = output
- self.updated?()
+ for onUpdated in self.onUpdatedListeners.copyItems() {
+ onUpdated()
+ }
}
}
})
}
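+ // Frame updates now support multiple observers; each registration returns a Disposable that removes the listener on the main queue.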
+ func addOnUpdated(_ f: @escaping () -> Void) -> Disposable {
+ let index = self.onUpdatedListeners.add(f)
+
+ return ActionDisposable { [weak self] in
+ DispatchQueue.main.async {
+ guard let self else {
+ return
+ }
+ self.onUpdatedListeners.remove(index)
+ }
+ }
+ }
+
deinit {
self.videoFrameDisposable?.dispose()
}
diff --git a/submodules/TelegramCallsUI/Sources/PresentationCall.swift b/submodules/TelegramCallsUI/Sources/PresentationCall.swift
index 73699c77c60..031a9353b74 100644
--- a/submodules/TelegramCallsUI/Sources/PresentationCall.swift
+++ b/submodules/TelegramCallsUI/Sources/PresentationCall.swift
@@ -896,7 +896,13 @@ public final class PresentationCallImpl: PresentationCall {
}
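+ // Outgoing video is sourced from the local video capturer when one is active; incoming video still comes from the ongoing call context.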
func video(isIncoming: Bool) -> Signal<OngoingGroupCallContext.VideoFrameData, NoError>? {
- return self.ongoingContext?.video(isIncoming: isIncoming)
+ if isIncoming {
+ return self.ongoingContext?.video(isIncoming: isIncoming)
+ } else if let videoCapturer = self.videoCapturer {
+ return videoCapturer.video()
+ } else {
+ return nil
+ }
}
public func makeIncomingVideoView(completion: @escaping (PresentationCallVideoView?) -> Void) {
diff --git a/submodules/TelegramCore/Sources/Statistics/StoryStatistics.swift b/submodules/TelegramCore/Sources/Statistics/StoryStatistics.swift
index 4ef959885a1..da541d7f12a 100644
--- a/submodules/TelegramCore/Sources/Statistics/StoryStatistics.swift
+++ b/submodules/TelegramCore/Sources/Statistics/StoryStatistics.swift
@@ -243,13 +243,30 @@ private final class StoryStatsPublicForwardsContextImpl {
let storyId = self.storyId
let lastOffset = self.lastOffset
- self.disposable.set((self.account.postbox.transaction { transaction -> Api.InputPeer? in
- return transaction.getPeer(peerId).flatMap(apiInputPeer)
+ self.disposable.set((self.account.postbox.transaction { transaction -> (Api.InputPeer, Int32?)? in
+ let statsDatacenterId = (transaction.getPeerCachedData(peerId: peerId) as? CachedChannelData)?.statsDatacenterId
+ guard let inputPeer = transaction.getPeer(peerId).flatMap(apiInputPeer) else {
+ return nil
+ }
+ return (inputPeer, statsDatacenterId)
}
- |> mapToSignal { inputPeer -> Signal<([StoryStatsPublicForwardsContext.State.Forward], Int32, String?), NoError> in
- if let inputPeer = inputPeer {
+ |> mapToSignal { data -> Signal<([StoryStatsPublicForwardsContext.State.Forward], Int32, String?), NoError> in
+ if let (inputPeer, statsDatacenterId) = data {
let offset = lastOffset ?? ""
- let signal = account.network.request(Api.functions.stats.getStoryPublicForwards(peer: inputPeer, id: storyId, offset: offset, limit: 50))
+
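+ // Route the request to the channel's stats datacenter when it differs from the account's primary datacenter.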
+ let request = Api.functions.stats.getStoryPublicForwards(peer: inputPeer, id: storyId, offset: offset, limit: 50)
+ let signal: Signal<Api.stats.PublicForwards, MTRpcError>
+ if let statsDatacenterId = statsDatacenterId, account.network.datacenterId != statsDatacenterId {
+ signal = account.network.download(datacenterId: Int(statsDatacenterId), isMedia: false, tag: nil)
+ |> castError(MTRpcError.self)
+ |> mapToSignal { worker in
+ return worker.request(request)
+ }
+ } else {
+ signal = account.network.request(request, automaticFloodWait: false)
+ }
+
+ return signal
|> map(Optional.init)
|> `catch` { _ -> Signal<Api.stats.PublicForwards?, NoError> in
return .single(nil)
@@ -272,7 +289,7 @@ private final class StoryStatsPublicForwardsContextImpl {
peers[groupOrChannel.id] = groupOrChannel
}
}
- updatePeers(transaction: transaction, accountPeerId: accountPeerId, peers: AccumulatedPeers(users: users))
+ updatePeers(transaction: transaction, accountPeerId: accountPeerId, peers: AccumulatedPeers(peers: Array(peers.values)))
var resultForwards: [StoryStatsPublicForwardsContext.State.Forward] = []
for forward in forwards {
switch forward {
@@ -325,7 +342,6 @@ private final class StoryStatsPublicForwardsContextImpl {
}
}
}
- return signal
} else {
return .single(([], 0, nil))
}
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Metal/CallScreenShaders.metal b/submodules/TelegramUI/Components/Calls/CallScreen/Metal/CallScreenShaders.metal
index 928f1106686..d2bc62de18e 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Metal/CallScreenShaders.metal
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Metal/CallScreenShaders.metal
@@ -232,7 +232,7 @@ vertex BlobVertexOut callBlobVertex(
fragment half4 callBlobFragment(
BlobVertexOut in [[stage_in]]
) {
- half alpha = 0.15;
+ half alpha = 0.35;
return half4(1.0 * alpha, 1.0 * alpha, 1.0 * alpha, alpha);
}
@@ -251,6 +251,7 @@ kernel void videoYUVToRGBA(
vertex QuadVertexOut mainVideoVertex(
const device Rectangle &rect [[ buffer(0) ]],
+ const device uint2 &mirror [[ buffer(1) ]],
unsigned int vid [[ vertex_id ]]
) {
float2 quadVertex = quadVertices[vid];
@@ -262,6 +263,12 @@ vertex QuadVertexOut mainVideoVertex(
out.position.y = -1.0 + out.position.y * 2.0;
out.uv = float2(quadVertex.x, 1.0 - quadVertex.y);
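+ // Mirror the video plane by flipping the texture coordinates along the requested axes.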
+ if (mirror.x == 1) {
+ out.uv.x = 1.0 - out.uv.x;
+ }
+ if (mirror.y == 1) {
+ out.uv.y = 1.0 - out.uv.y;
+ }
return out;
}
@@ -345,3 +352,28 @@ kernel void gaussianBlurVertical(
) {
gaussianBlur(inTexture, outTexture, float2(0, 1), gid);
}
+
+vertex QuadVertexOut edgeTestVertex(
+ const device Rectangle &rect [[ buffer(0) ]],
+ unsigned int vid [[ vertex_id ]]
+) {
+ float2 quadVertex = quadVertices[vid];
+
+ QuadVertexOut out;
+
+ out.position = float4(rect.origin.x + quadVertex.x * rect.size.x, rect.origin.y + quadVertex.y * rect.size.y, 0.0, 1.0);
+ out.position.x = -1.0 + out.position.x * 2.0;
+ out.position.y = -1.0 + out.position.y * 2.0;
+
+ out.uv = quadVertex;
+
+ return out;
+}
+
+fragment half4 edgeTestFragment(
+ QuadVertexOut in [[stage_in]],
+ const device float4 &colorIn
+) {
+ half4 color = half4(colorIn);
+ return color;
+}
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/BackButtonView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/BackButtonView.swift
new file mode 100644
index 00000000000..0fed60cf04a
--- /dev/null
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/BackButtonView.swift
@@ -0,0 +1,56 @@
+import Foundation
+import UIKit
+import Display
+
+final class BackButtonView: HighlightableButton {
+ private let iconView: UIImageView
+ private let textView: TextView
+
+ let size: CGSize
+
+ var pressAction: (() -> Void)?
+
+ init(text: String) {
+ self.iconView = UIImageView(image: NavigationBar.backArrowImage(color: .white))
+ self.iconView.isUserInteractionEnabled = false
+
+ self.textView = TextView()
+ self.textView.isUserInteractionEnabled = false
+
+ let spacing: CGFloat = 8.0
+
+ var iconSize: CGSize = self.iconView.image?.size ?? CGSize(width: 2.0, height: 2.0)
+ let iconScaleFactor: CGFloat = 0.9
+ iconSize.width = floor(iconSize.width * iconScaleFactor)
+ iconSize.height = floor(iconSize.height * iconScaleFactor)
+
+ let textSize = self.textView.update(string: text, fontSize: 17.0, fontWeight: UIFont.Weight.regular.rawValue, color: .white, constrainedWidth: 100.0, transition: .immediate)
+ self.size = CGSize(width: iconSize.width + spacing + textSize.width, height: textSize.height)
+
+ self.iconView.frame = CGRect(origin: CGPoint(x: 0.0, y: floorToScreenPixels((self.size.height - iconSize.height) * 0.5)), size: iconSize)
+ self.textView.frame = CGRect(origin: CGPoint(x: iconSize.width + spacing, y: floorToScreenPixels((self.size.height - textSize.height) * 0.5)), size: textSize)
+
+ super.init(frame: CGRect())
+
+ self.addSubview(self.iconView)
+ self.addSubview(self.textView)
+
+ self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
+ }
+
+ required init?(coder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ @objc private func pressed() {
+ self.pressAction?()
+ }
+
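+ // Accept touches slightly outside the visual bounds so the back button is easier to hit.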
+ override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
+ if self.bounds.insetBy(dx: -8.0, dy: -4.0).contains(point) {
+ return super.hitTest(self.bounds.center, with: event)
+ } else {
+ return nil
+ }
+ }
+}
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ButtonGroupView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ButtonGroupView.swift
index 8eddb2ae024..c1a20242ebf 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ButtonGroupView.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ButtonGroupView.swift
@@ -46,9 +46,21 @@ final class ButtonGroupView: OverlayMaskContainerView {
}
}
+ final class Notice {
+ let id: AnyHashable
+ let text: String
+
+ init(id: AnyHashable, text: String) {
+ self.id = id
+ self.text = text
+ }
+ }
+
private var buttons: [Button]?
private var buttonViews: [Button.Content.Key: ContentOverlayButton] = [:]
+ private var noticeViews: [AnyHashable: NoticeView] = [:]
+
override init(frame: CGRect) {
super.init(frame: frame)
}
@@ -57,13 +69,98 @@ final class ButtonGroupView: OverlayMaskContainerView {
fatalError("init(coder:) has not been implemented")
}
- func update(size: CGSize, buttons: [Button], transition: Transition) {
+ override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
+ guard let result = super.hitTest(point, with: event) else {
+ return nil
+ }
+ if result === self {
+ return nil
+ }
+ return result
+ }
+
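+ // Lays out the bottom call buttons and any stacked notice bubbles, returning the total height they occupy at the bottom edge.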
+ func update(size: CGSize, insets: UIEdgeInsets, controlsHidden: Bool, buttons: [Button], notices: [Notice], transition: Transition) -> CGFloat {
self.buttons = buttons
let buttonSize: CGFloat = 56.0
let buttonSpacing: CGFloat = 36.0
- let buttonY: CGFloat = size.height - 86.0 - buttonSize
+ let buttonNoticeSpacing: CGFloat = 16.0
+ let controlsHiddenNoticeSpacing: CGFloat = 0.0
+ var nextNoticeY: CGFloat
+ if controlsHidden {
+ nextNoticeY = size.height - insets.bottom - 4.0
+ } else {
+ nextNoticeY = size.height - insets.bottom - 52.0 - buttonSize - buttonNoticeSpacing
+ }
+ let noticeSpacing: CGFloat = 8.0
+
+ var validNoticeIds: [AnyHashable] = []
+ var noticesHeight: CGFloat = 0.0
+ for notice in notices {
+ validNoticeIds.append(notice.id)
+
+ let noticeView: NoticeView
+ var noticeTransition = transition
+ var animateIn = false
+ if let current = self.noticeViews[notice.id] {
+ noticeView = current
+ } else {
+ noticeTransition = noticeTransition.withAnimation(.none)
+ animateIn = true
+ noticeView = NoticeView()
+ self.noticeViews[notice.id] = noticeView
+ self.addSubview(noticeView)
+ }
+
+ if noticesHeight != 0.0 {
+ noticesHeight += noticeSpacing
+ } else {
+ if controlsHidden {
+ noticesHeight += controlsHiddenNoticeSpacing
+ } else {
+ noticesHeight += buttonNoticeSpacing
+ }
+ }
+ let noticeSize = noticeView.update(text: notice.text, constrainedWidth: size.width - insets.left * 2.0 - 16.0 * 2.0, transition: noticeTransition)
+ let noticeFrame = CGRect(origin: CGPoint(x: floor((size.width - noticeSize.width) * 0.5), y: nextNoticeY - noticeSize.height), size: noticeSize)
+ noticesHeight += noticeSize.height
+ nextNoticeY -= noticeSize.height + noticeSpacing
+
+ noticeTransition.setFrame(view: noticeView, frame: noticeFrame)
+ if animateIn, !transition.animation.isImmediate {
+ noticeView.animateIn()
+ }
+ }
+ if noticesHeight != 0.0 {
+ noticesHeight += 5.0
+ }
+ var removedNoticeIds: [AnyHashable] = []
+ for (id, noticeView) in self.noticeViews {
+ if !validNoticeIds.contains(id) {
+ removedNoticeIds.append(id)
+ if !transition.animation.isImmediate {
+ noticeView.animateOut(completion: { [weak noticeView] in
+ noticeView?.removeFromSuperview()
+ })
+ } else {
+ noticeView.removeFromSuperview()
+ }
+ }
+ }
+ for id in removedNoticeIds {
+ self.noticeViews.removeValue(forKey: id)
+ }
+
+ let buttonY: CGFloat
+ let resultHeight: CGFloat
+ if controlsHidden {
+ buttonY = size.height + 12.0
+ resultHeight = insets.bottom + 4.0 + noticesHeight
+ } else {
+ buttonY = size.height - insets.bottom - 52.0 - buttonSize
+ resultHeight = size.height - buttonY + noticesHeight
+ }
var buttonX: CGFloat = floor((size.width - buttonSize * CGFloat(buttons.count) - buttonSpacing * CGFloat(buttons.count - 1)) * 0.5)
for button in buttons {
@@ -137,5 +234,7 @@ final class ButtonGroupView: OverlayMaskContainerView {
for key in removeKeys {
self.buttonViews.removeValue(forKey: key)
}
+
+ return resultHeight
}
}
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBackgroundLayer.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBackgroundLayer.swift
index 8174757243f..9c1aace84df 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBackgroundLayer.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBackgroundLayer.swift
@@ -80,6 +80,7 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject {
]
let blurredLayer: MetalEngineSubjectLayer
+ let externalBlurredLayer: MetalEngineSubjectLayer
private var phase: Float = 0.0
@@ -100,6 +101,7 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject {
override init() {
self.blurredLayer = MetalEngineSubjectLayer()
+ self.externalBlurredLayer = MetalEngineSubjectLayer()
self.colorSets = [
ColorSet(colors: [
@@ -125,6 +127,8 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject {
super.init()
+ self.blurredLayer.cloneLayers.append(self.externalBlurredLayer)
+
self.didEnterHierarchy = { [weak self] in
guard let self else {
return
@@ -154,6 +158,7 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject {
override init(layer: Any) {
self.blurredLayer = MetalEngineSubjectLayer()
+ self.externalBlurredLayer = MetalEngineSubjectLayer()
self.colorSets = []
self.colorTransition = AnimatedProperty(ColorSet(colors: []))
@@ -187,7 +192,8 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject {
for i in 0 ..< 2 {
let isBlur = i == 1
context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: i == 0 ? self : self.blurredLayer, commands: { encoder, placement in
- let effectiveRect = placement.effectiveRect
+ var effectiveRect = placement.effectiveRect
+ effectiveRect = effectiveRect.insetBy(dx: -effectiveRect.width * 0.1, dy: -effectiveRect.height * 0.1)
var rect = SIMD4(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBlobsLayer.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBlobsLayer.swift
index 3107c9d0933..4747301e7ac 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBlobsLayer.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBlobsLayer.swift
@@ -36,18 +36,6 @@ final class CallBlobsLayer: MetalEngineSubjectLayer, MetalEngineSubject {
}
final class RenderState: RenderToLayerState {
- final class Input {
- let rect: CGRect
- let blobs: [Blob]
- let phase: Float
-
- init(rect: CGRect, blobs: [Blob], phase: Float) {
- self.rect = rect
- self.blobs = blobs
- self.phase = phase
- }
- }
-
let pipelineState: MTLRenderPipelineState
required init?(device: MTLDevice) {
@@ -133,7 +121,7 @@ final class CallBlobsLayer: MetalEngineSubjectLayer, MetalEngineSubject {
let phase = self.phase
let blobs = self.blobs
- context.renderToLayer(spec: RenderLayerSpec(size: RenderSize(width: Int(self.bounds.width * 3.0), height: Int(self.bounds.height * 3.0))), state: RenderState.self, layer: self, commands: { encoder, placement in
+ context.renderToLayer(spec: RenderLayerSpec(size: RenderSize(width: Int(self.bounds.width * 3.0), height: Int(self.bounds.height * 3.0)), edgeInset: 4), state: RenderState.self, layer: self, commands: { encoder, placement in
let rect = placement.effectiveRect
for i in 0 ..< blobs.count {
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentOverlayButton.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentOverlayButton.swift
index 9347cbf35a1..12d62e51045 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentOverlayButton.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentOverlayButton.swift
@@ -64,7 +64,7 @@ final class ContentOverlayButton: HighlightTrackingButton, OverlayMaskContainerV
if highlighted {
self.layer.removeAnimation(forKey: "opacity")
- self.layer.removeAnimation(forKey: "sublayerTransform")
+ self.layer.removeAnimation(forKey: "transform")
let transition = Transition(animation: .curve(duration: 0.15, curve: .easeInOut))
transition.setScale(layer: self.layer, scale: topScale)
} else {
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/EmojiExpandedInfoView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/EmojiExpandedInfoView.swift
new file mode 100644
index 00000000000..3a0cf981bd2
--- /dev/null
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/EmojiExpandedInfoView.swift
@@ -0,0 +1,175 @@
+import Foundation
+import UIKit
+import Display
+import ComponentFlow
+
+final class EmojiExpandedInfoView: OverlayMaskContainerView {
+ private struct Params: Equatable {
+ var constrainedWidth: CGFloat
+
+ init(constrainedWidth: CGFloat) {
+ self.constrainedWidth = constrainedWidth
+ }
+ }
+
+ private struct Layout: Equatable {
+ var params: Params
+ var size: CGSize
+
+ init(params: Params, size: CGSize) {
+ self.params = params
+ self.size = size
+ }
+ }
+
+ private let title: String
+ private let text: String
+
+ private let backgroundView: UIImageView
+ private let separatorLayer: SimpleLayer
+
+ private let titleView: TextView
+ private let textView: TextView
+
+ private let actionButton: HighlightTrackingButton
+ private let actionTitleView: TextView
+
+ private var currentLayout: Layout?
+
+ var closeAction: (() -> Void)?
+
+ init(title: String, text: String) {
+ self.title = title
+ self.text = text
+
+ self.backgroundView = UIImageView()
+ let cornerRadius: CGFloat = 18.0
+ let buttonHeight: CGFloat = 56.0
+ self.backgroundView.image = generateImage(CGSize(width: cornerRadius * 2.0 + 10.0, height: cornerRadius + 10.0 + buttonHeight), rotatedContext: { size, context in
+ context.clear(CGRect(origin: CGPoint(), size: size))
+ context.addPath(UIBezierPath(roundedRect: CGRect(origin: CGPoint(), size: size), cornerRadius: cornerRadius).cgPath)
+ context.setFillColor(UIColor.white.cgColor)
+ context.fillPath()
+
+ context.setBlendMode(.copy)
+ context.setFillColor(UIColor.clear.cgColor)
+ context.fill(CGRect(origin: CGPoint(x: 0.0, y: size.height - buttonHeight), size: CGSize(width: size.width, height: UIScreenPixel)))
+ })?.stretchableImage(withLeftCapWidth: Int(cornerRadius) + 5, topCapHeight: Int(cornerRadius) + 5)
+
+ self.separatorLayer = SimpleLayer()
+
+ self.titleView = TextView()
+ self.textView = TextView()
+
+ self.actionButton = HighlightTrackingButton()
+ self.actionTitleView = TextView()
+ self.actionTitleView.isUserInteractionEnabled = false
+
+ super.init(frame: CGRect())
+
+ self.maskContents.addSubview(self.backgroundView)
+
+ self.layer.addSublayer(self.separatorLayer)
+
+ self.addSubview(self.titleView)
+ self.addSubview(self.textView)
+
+ self.addSubview(self.actionButton)
+ self.actionButton.addSubview(self.actionTitleView)
+
+ self.actionButton.internalHighligthedChanged = { [weak self] highlighted in
+ if let self, self.bounds.width > 0.0 {
+ let topScale: CGFloat = (self.bounds.width - 8.0) / self.bounds.width
+ let maxScale: CGFloat = (self.bounds.width + 2.0) / self.bounds.width
+
+ if highlighted {
+ self.actionButton.layer.removeAnimation(forKey: "sublayerTransform")
+ let transition = Transition(animation: .curve(duration: 0.15, curve: .easeInOut))
+ transition.setScale(layer: self.actionButton.layer, scale: topScale)
+ } else {
+ let t = self.actionButton.layer.presentation()?.transform ?? layer.transform
+ let currentScale = sqrt((t.m11 * t.m11) + (t.m12 * t.m12) + (t.m13 * t.m13))
+
+ let transition = Transition(animation: .none)
+ transition.setScale(layer: self.actionButton.layer, scale: 1.0)
+
+ self.actionButton.layer.animateScale(from: currentScale, to: maxScale, duration: 0.13, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, removeOnCompletion: false, completion: { [weak self] completed in
+ guard let self, completed else {
+ return
+ }
+
+ self.actionButton.layer.animateScale(from: maxScale, to: 1.0, duration: 0.1, timingFunction: CAMediaTimingFunctionName.easeIn.rawValue)
+ })
+ }
+ }
+ }
+ self.actionButton.addTarget(self, action: #selector(self.actionButtonPressed), for: .touchUpInside)
+
+ self.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))))
+ }
+
+ required init?(coder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ @objc private func actionButtonPressed() {
+ self.closeAction?()
+ }
+
+ @objc private func tapGesture(_ recognizer: UITapGestureRecognizer) {
+ if case .ended = recognizer.state {
+ self.closeAction?()
+ }
+ }
+
+ override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
+ if let result = self.actionButton.hitTest(self.convert(point, to: self.actionButton), with: event) {
+ return result
+ }
+ return nil
+ }
+
+ func update(constrainedWidth: CGFloat, transition: Transition) -> CGSize {
+ let params = Params(constrainedWidth: constrainedWidth)
+ if let currentLayout = self.currentLayout, currentLayout.params == params {
+ return currentLayout.size
+ }
+ let size = self.update(params: params, transition: transition)
+ self.currentLayout = Layout(params: params, size: size)
+ return size
+ }
+
+ private func update(params: Params, transition: Transition) -> CGSize {
+ let buttonHeight: CGFloat = 56.0
+
+ var constrainedWidth = params.constrainedWidth
+ constrainedWidth = min(constrainedWidth, 300.0)
+
+ let titleSize = self.titleView.update(string: self.title, fontSize: 16.0, fontWeight: 0.3, alignment: .center, color: .white, constrainedWidth: constrainedWidth - 16.0 * 2.0, transition: transition)
+ let textSize = self.textView.update(string: self.text, fontSize: 16.0, fontWeight: 0.0, alignment: .center, color: .white, constrainedWidth: constrainedWidth - 16.0 * 2.0, transition: transition)
+
+ let contentWidth: CGFloat = max(titleSize.width, textSize.width) + 26.0 * 2.0
+ let contentHeight = 78.0 + titleSize.height + 10.0 + textSize.height + 22.0 + buttonHeight
+
+ let size = CGSize(width: contentWidth, height: contentHeight)
+
+ transition.setFrame(view: self.backgroundView, frame: CGRect(origin: CGPoint(), size: size))
+
+ let titleFrame = CGRect(origin: CGPoint(x: floor((size.width - titleSize.width) * 0.5), y: 78.0), size: titleSize)
+ transition.setFrame(view: self.titleView, frame: titleFrame)
+
+ let textFrame = CGRect(origin: CGPoint(x: floor((size.width - textSize.width) * 0.5), y: titleFrame.maxY + 10.0), size: textSize)
+ transition.setFrame(view: self.textView, frame: textFrame)
+
+ let buttonFrame = CGRect(origin: CGPoint(x: 0.0, y: size.height - buttonHeight), size: CGSize(width: size.width, height: buttonHeight))
+ transition.setFrame(view: self.actionButton, frame: buttonFrame)
+
+ transition.setFrame(layer: self.separatorLayer, frame: CGRect(origin: CGPoint(x: 0.0, y: size.height - buttonHeight), size: CGSize(width: size.width, height: UIScreenPixel)))
+
+ let actionTitleSize = self.actionTitleView.update(string: "OK", fontSize: 19.0, fontWeight: 0.3, color: .white, constrainedWidth: size.width, transition: transition)
+ let actionTitleFrame = CGRect(origin: CGPoint(x: floor((buttonFrame.width - actionTitleSize.width) * 0.5), y: floor((buttonFrame.height - actionTitleSize.height) * 0.5)), size: actionTitleSize)
+ transition.setFrame(view: self.actionTitleView, frame: actionTitleFrame)
+
+ return size
+ }
+}
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/EmojiTooltipView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/EmojiTooltipView.swift
new file mode 100644
index 00000000000..05afa7b2dcd
--- /dev/null
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/EmojiTooltipView.swift
@@ -0,0 +1,125 @@
+import Foundation
+import UIKit
+import Display
+
+private func addRoundedRectPath(context: CGContext, rect: CGRect, radius: CGFloat) {
+ context.saveGState()
+ context.translateBy(x: rect.minX, y: rect.minY)
+ context.scaleBy(x: radius, y: radius)
+ let fw = rect.width / radius
+ let fh = rect.height / radius
+ context.move(to: CGPoint(x: fw, y: fh / 2.0))
+ context.addArc(tangent1End: CGPoint(x: fw, y: fh), tangent2End: CGPoint(x: fw/2, y: fh), radius: 1.0)
+ context.addArc(tangent1End: CGPoint(x: 0, y: fh), tangent2End: CGPoint(x: 0, y: fh/2), radius: 1)
+ context.addArc(tangent1End: CGPoint(x: 0, y: 0), tangent2End: CGPoint(x: fw/2, y: 0), radius: 1)
+ context.addArc(tangent1End: CGPoint(x: fw, y: 0), tangent2End: CGPoint(x: fw, y: fh/2), radius: 1)
+ context.closePath()
+ context.restoreGState()
+}
+
+final class EmojiTooltipView: OverlayMaskContainerView {
+ private struct Params: Equatable {
+ var constrainedWidth: CGFloat
+ var subjectWidth: CGFloat
+
+ init(constrainedWidth: CGFloat, subjectWidth: CGFloat) {
+ self.constrainedWidth = constrainedWidth
+ self.subjectWidth = subjectWidth
+ }
+ }
+
+ private struct Layout {
+ var params: Params
+ var size: CGSize
+
+ init(params: Params, size: CGSize) {
+ self.params = params
+ self.size = size
+ }
+ }
+
+ private let text: String
+
+ private let backgroundView: UIImageView
+ private let textView: TextView
+
+ private var currentLayout: Layout?
+
+ init(text: String) {
+ self.text = text
+
+ self.backgroundView = UIImageView()
+
+ self.textView = TextView()
+
+ super.init(frame: CGRect())
+
+ self.maskContents.addSubview(self.backgroundView)
+ self.addSubview(self.textView)
+ }
+
+ required init?(coder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ func animateIn() {
+ let anchorPoint = CGPoint(x: self.bounds.width - 46.0, y: 0.0)
+
+ self.layer.animateSpring(from: 0.001 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.5)
+ self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+ self.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: anchorPoint.x - self.bounds.width * 0.5, y: anchorPoint.y - self.bounds.height * 0.5)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: 0.5, additive: true)
+ }
+
+ func animateOut(completion: @escaping () -> Void) {
+ let anchorPoint = CGPoint(x: self.bounds.width - 46.0, y: 0.0)
+ self.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
+ completion()
+ })
+ self.layer.animateScale(from: 1.0, to: 0.4, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false)
+ self.layer.animatePosition(from: CGPoint(), to: CGPoint(x: anchorPoint.x - self.bounds.width * 0.5, y: anchorPoint.y - self.bounds.height * 0.5), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true)
+ }
+
+ func update(constrainedWidth: CGFloat, subjectWidth: CGFloat) -> CGSize {
+ let params = Params(constrainedWidth: constrainedWidth, subjectWidth: subjectWidth)
+ if let currentLayout = self.currentLayout, currentLayout.params == params {
+ return currentLayout.size
+ }
+ let size = self.update(params: params)
+ self.currentLayout = Layout(params: params, size: size)
+ return size
+ }
+
+ private func update(params: Params) -> CGSize {
+ let horizontalInset: CGFloat = 12.0
+ let verticalInset: CGFloat = 10.0
+ let arrowHeight: CGFloat = 8.0
+
+ let textSize = self.textView.update(
+ string: self.text,
+ fontSize: 15.0,
+ fontWeight: 0.0,
+ color: .white,
+ constrainedWidth: params.constrainedWidth - horizontalInset * 2.0,
+ transition: .immediate
+ )
+
+ let size = CGSize(width: textSize.width + horizontalInset * 2.0, height: arrowHeight + textSize.height + verticalInset * 2.0)
+
+ self.textView.frame = CGRect(origin: CGPoint(x: horizontalInset, y: arrowHeight + verticalInset), size: textSize)
+
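+ // Draw the tooltip bubble: a rounded rectangle with an arrow at the top, positioned from the trailing edge using half of the subject's width.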
+ self.backgroundView.image = generateImage(size, rotatedContext: { size, context in
+ context.clear(CGRect(origin: CGPoint(), size: size))
+ context.setFillColor(UIColor.white.cgColor)
+ addRoundedRectPath(context: context, rect: CGRect(origin: CGPoint(x: 0.0, y: arrowHeight), size: CGSize(width: size.width, height: size.height - arrowHeight)), radius: 14.0)
+ context.fillPath()
+
+ context.translateBy(x: size.width - floor(params.subjectWidth * 0.5) - 20.0, y: 0.0)
+ let _ = try? drawSvgPath(context, path: "M9.0981,1.1979 C9.547,0.6431 10.453,0.6431 10.9019,1.1979 C12.4041,3.0542 15.6848,6.5616 20,8 H-0.0002 C4.3151,6.5616 7.5959,3.0542 9.0981,1.1978 Z ")
+ })
+ self.backgroundView.frame = CGRect(origin: CGPoint(), size: size)
+
+ return size
+ }
+}
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/KeyEmojiView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/KeyEmojiView.swift
index 0576e3b30e7..2e50530470e 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/KeyEmojiView.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/KeyEmojiView.swift
@@ -1,17 +1,114 @@
import Foundation
import UIKit
import Display
+import ComponentFlow
-final class KeyEmojiView: UIView {
+final class KeyEmojiView: HighlightTrackingButton {
+ private struct Params: Equatable {
+ var isExpanded: Bool
+
+ init(isExpanded: Bool) {
+ self.isExpanded = isExpanded
+ }
+ }
+
+ private struct Layout: Equatable {
+ var params: Params
+ var size: CGSize
+
+ init(params: Params, size: CGSize) {
+ self.params = params
+ self.size = size
+ }
+ }
+
+ private let emoji: [String]
private let emojiViews: [TextView]
- let size: CGSize
+ var pressAction: (() -> Void)?
+
+ private var currentLayout: Layout?
+
+ var isExpanded: Bool? {
+ return self.currentLayout?.params.isExpanded
+ }
init(emoji: [String]) {
- self.emojiViews = emoji.map { emoji in
+ self.emoji = emoji
+ self.emojiViews = emoji.map { _ in
TextView()
}
+ super.init(frame: CGRect())
+
+ for emojiView in self.emojiViews {
+ emojiView.contentMode = .scaleToFill
+ emojiView.isUserInteractionEnabled = false
+ self.addSubview(emojiView)
+ }
+
+ self.internalHighligthedChanged = { [weak self] highlighted in
+ if let self, self.bounds.width > 0.0 {
+ let topScale: CGFloat = (self.bounds.width - 8.0) / self.bounds.width
+ let maxScale: CGFloat = (self.bounds.width + 2.0) / self.bounds.width
+
+ if highlighted {
+ self.layer.removeAnimation(forKey: "opacity")
+ self.layer.removeAnimation(forKey: "transform")
+ let transition = Transition(animation: .curve(duration: 0.15, curve: .easeInOut))
+ transition.setScale(layer: self.layer, scale: topScale)
+ } else {
+ let t = self.layer.presentation()?.transform ?? layer.transform
+ let currentScale = sqrt((t.m11 * t.m11) + (t.m12 * t.m12) + (t.m13 * t.m13))
+
+ let transition = Transition(animation: .none)
+ transition.setScale(layer: self.layer, scale: 1.0)
+
+ self.layer.animateScale(from: currentScale, to: maxScale, duration: 0.13, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, removeOnCompletion: false, completion: { [weak self] completed in
+ guard let self, completed else {
+ return
+ }
+
+ self.layer.animateScale(from: maxScale, to: 1.0, duration: 0.1, timingFunction: CAMediaTimingFunctionName.easeIn.rawValue)
+ })
+ }
+ }
+ }
+ self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
+ }
+
+ required init?(coder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
+ return super.hitTest(point, with: event)
+ }
+
+ @objc private func pressed() {
+ self.pressAction?()
+ }
+
+ func animateIn() {
+ for i in 0 ..< self.emojiViews.count {
+ let emojiView = self.emojiViews[i]
+ emojiView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
+ emojiView.layer.animatePosition(from: CGPoint(x: -CGFloat(self.emojiViews.count - 1 - i) * 30.0, y: 0.0), to: CGPoint(), duration: 0.5, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
+ }
+ }
+
+ func update(isExpanded: Bool, transition: Transition) -> CGSize {
+ let params = Params(isExpanded: isExpanded)
+ if let currentLayout = self.currentLayout, currentLayout.params == params {
+ return currentLayout.size
+ }
+
+ let size = self.update(params: params, transition: transition)
+ self.currentLayout = Layout(params: params, size: size)
+ return size
+ }
+
+ private func update(params: Params, transition: Transition) -> CGSize {
let itemSpacing: CGFloat = 3.0
var height: CGFloat = 0.0
@@ -21,32 +118,62 @@ final class KeyEmojiView: UIView {
nextX += itemSpacing
}
let emojiView = self.emojiViews[i]
- let itemSize = emojiView.update(string: emoji[i], fontSize: 16.0, fontWeight: 0.0, color: .white, constrainedWidth: 100.0, transition: .immediate)
+ let itemSize = emojiView.update(string: emoji[i], fontSize: params.isExpanded ? 40.0 : 16.0, fontWeight: 0.0, color: .white, constrainedWidth: 100.0, transition: transition)
if height == 0.0 {
height = itemSize.height
}
- emojiView.frame = CGRect(origin: CGPoint(x: nextX, y: 0.0), size: itemSize)
+ let itemFrame = CGRect(origin: CGPoint(x: nextX, y: 0.0), size: itemSize)
+ transition.setFrame(view: emojiView, frame: itemFrame)
nextX += itemSize.width
}
- self.size = CGSize(width: nextX, height: height)
-
- super.init(frame: CGRect())
-
- for emojiView in self.emojiViews {
- self.addSubview(emojiView)
- }
+ return CGSize(width: nextX, height: height)
}
+}
+
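+/// Samples positions along a parabolic arc between two points (falling back to a straight line when the endpoints nearly coincide), producing one keyframe per display frame; `reverse` plays the curve backwards.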
+func generateParabollicMotionKeyframes(from sourcePoint: CGPoint, to targetPosition: CGPoint, elevation: CGFloat, duration: Double, curve: Transition.Animation.Curve, reverse: Bool) -> [CGPoint] {
+ let midPoint = CGPoint(x: (sourcePoint.x + targetPosition.x) / 2.0, y: sourcePoint.y - elevation)
- required init?(coder: NSCoder) {
- fatalError("init(coder:) has not been implemented")
- }
+ let x1 = sourcePoint.x
+ let y1 = sourcePoint.y
+ let x2 = midPoint.x
+ let y2 = midPoint.y
+ let x3 = targetPosition.x
+ let y3 = targetPosition.y
- func animateIn() {
- for i in 0 ..< self.emojiViews.count {
- let emojiView = self.emojiViews[i]
- emojiView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
- emojiView.layer.animatePosition(from: CGPoint(x: -CGFloat(self.emojiViews.count - 1 - i) * 30.0, y: 0.0), to: CGPoint(), duration: 0.5, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
+ let numPoints: Int = Int(ceil(Double(UIScreen.main.maximumFramesPerSecond) * duration))
+
+ var keyframes: [CGPoint] = []
+ if abs(y1 - y3) < 5.0 && abs(x1 - x3) < 5.0 {
+ for rawI in 0 ..< numPoints {
+ let i = reverse ? (numPoints - 1 - rawI) : rawI
+ let ks = CGFloat(i) / CGFloat(numPoints - 1)
+ var k = curve.solve(at: reverse ? (1.0 - ks) : ks)
+ if reverse {
+ k = 1.0 - k
+ }
+ let x = sourcePoint.x * (1.0 - k) + targetPosition.x * k
+ let y = sourcePoint.y * (1.0 - k) + targetPosition.y * k
+ keyframes.append(CGPoint(x: x, y: y))
+ }
+ } else {
+ let a = (x3 * (y2 - y1) + x2 * (y1 - y3) + x1 * (y3 - y2)) / ((x1 - x2) * (x1 - x3) * (x2 - x3))
+ let b = (x1 * x1 * (y2 - y3) + x3 * x3 * (y1 - y2) + x2 * x2 * (y3 - y1)) / ((x1 - x2) * (x1 - x3) * (x2 - x3))
+ let c = (x2 * x2 * (x3 * y1 - x1 * y3) + x2 * (x1 * x1 * y3 - x3 * x3 * y1) + x1 * x3 * (x3 - x1) * y2) / ((x1 - x2) * (x1 - x3) * (x2 - x3))
+
+ for rawI in 0 ..< numPoints {
+ let i = reverse ? (numPoints - 1 - rawI) : rawI
+
+ let ks = CGFloat(i) / CGFloat(numPoints - 1)
+ var k = curve.solve(at: reverse ? (1.0 - ks) : ks)
+ if reverse {
+ k = 1.0 - k
+ }
+ let x = sourcePoint.x * (1.0 - k) + targetPosition.x * k
+ let y = a * x * x + b * x + c
+ keyframes.append(CGPoint(x: x, y: y))
}
}
+
+ return keyframes
}
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/MinimizedVideoContainerView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/MinimizedVideoContainerView.swift
deleted file mode 100644
index 90b9434f431..00000000000
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/MinimizedVideoContainerView.swift
+++ /dev/null
@@ -1,144 +0,0 @@
-import Foundation
-import UIKit
-import Display
-import MetalEngine
-import ComponentFlow
-
-final class MinimizedVideoContainerView: UIView {
- private struct Params: Equatable {
- var size: CGSize
- var insets: UIEdgeInsets
-
- init(size: CGSize, insets: UIEdgeInsets) {
- self.size = size
- self.insets = insets
- }
- }
-
- private struct VideoMetrics: Equatable {
- var resolution: CGSize
- var rotationAngle: Float
- var sourceId: Int
-
- init(resolution: CGSize, rotationAngle: Float, sourceId: Int) {
- self.resolution = resolution
- self.rotationAngle = rotationAngle
- self.sourceId = sourceId
- }
- }
-
- private let videoLayer: PrivateCallVideoLayer
-
- private var params: Params?
- private var videoMetrics: VideoMetrics?
- private var appliedVideoMetrics: VideoMetrics?
-
- var video: VideoSource? {
- didSet {
- self.video?.updated = { [weak self] in
- guard let self else {
- return
- }
- var videoMetrics: VideoMetrics?
- if let currentOutput = self.video?.currentOutput {
- self.videoLayer.video = currentOutput
- videoMetrics = VideoMetrics(resolution: CGSize(width: CGFloat(currentOutput.y.width), height: CGFloat(currentOutput.y.height)), rotationAngle: currentOutput.rotationAngle, sourceId: currentOutput.sourceId)
- } else {
- self.videoLayer.video = nil
- }
- self.videoLayer.setNeedsUpdate()
-
- if self.videoMetrics != videoMetrics {
- self.videoMetrics = videoMetrics
- self.update(transition: .easeInOut(duration: 0.2))
- }
- }
- var videoMetrics: VideoMetrics?
- if let currentOutput = self.video?.currentOutput {
- self.videoLayer.video = currentOutput
- videoMetrics = VideoMetrics(resolution: CGSize(width: CGFloat(currentOutput.y.width), height: CGFloat(currentOutput.y.height)), rotationAngle: currentOutput.rotationAngle, sourceId: currentOutput.sourceId)
- } else {
- self.videoLayer.video = nil
- }
- self.videoLayer.setNeedsUpdate()
-
- if self.videoMetrics != videoMetrics {
- self.videoMetrics = videoMetrics
- self.update(transition: .easeInOut(duration: 0.2))
- }
- }
- }
-
- override init(frame: CGRect) {
- self.videoLayer = PrivateCallVideoLayer()
- self.videoLayer.masksToBounds = true
-
- super.init(frame: frame)
-
- self.layer.addSublayer(self.videoLayer)
- }
-
- required init?(coder: NSCoder) {
- fatalError("init(coder:) has not been implemented")
- }
-
- override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
- return nil
- }
-
- private func update(transition: Transition) {
- guard let params = self.params else {
- return
- }
- self.update(params: params, transition: transition)
- }
-
- func update(size: CGSize, insets: UIEdgeInsets, transition: Transition) {
- let params = Params(size: size, insets: insets)
- if self.params == params {
- return
- }
- self.params = params
-
- self.update(params: params, transition: transition)
- }
-
- private func update(params: Params, transition: Transition) {
- guard let videoMetrics = self.videoMetrics else {
- return
- }
-
- var transition = transition
- if self.appliedVideoMetrics == nil {
- transition = .immediate
- }
- self.appliedVideoMetrics = videoMetrics
-
- var rotatedResolution = videoMetrics.resolution
- var videoIsRotated = false
- if videoMetrics.rotationAngle == Float.pi * 0.5 || videoMetrics.rotationAngle == Float.pi * 3.0 / 2.0 {
- rotatedResolution = CGSize(width: rotatedResolution.height, height: rotatedResolution.width)
- videoIsRotated = true
- }
-
- let videoSize = rotatedResolution.aspectFitted(CGSize(width: 160.0, height: 160.0))
-
- let videoResolution = rotatedResolution.aspectFittedOrSmaller(CGSize(width: 1280, height: 1280)).aspectFittedOrSmaller(CGSize(width: videoSize.width * 3.0, height: videoSize.height * 3.0))
- let rotatedVideoResolution = videoIsRotated ? CGSize(width: videoResolution.height, height: videoResolution.width) : videoResolution
-
- let rotatedVideoSize = videoIsRotated ? CGSize(width: videoSize.height, height: videoSize.width) : videoSize
- let rotatedVideoFrame = CGRect(origin: CGPoint(x: params.size.width - params.insets.right - videoSize.width, y: params.size.height - params.insets.bottom - videoSize.height), size: videoSize)
-
- transition.setPosition(layer: self.videoLayer, position: rotatedVideoFrame.center)
- transition.setBounds(layer: self.videoLayer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoSize))
- transition.setPosition(layer: self.videoLayer.blurredLayer, position: rotatedVideoFrame.center)
- transition.setBounds(layer: self.videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoSize))
-
- transition.setTransform(layer: self.videoLayer, transform: CATransform3DMakeRotation(CGFloat(videoMetrics.rotationAngle), 0.0, 0.0, 1.0))
- transition.setTransform(layer: self.videoLayer.blurredLayer, transform: CATransform3DMakeRotation(CGFloat(videoMetrics.rotationAngle), 0.0, 0.0, 1.0))
-
- transition.setCornerRadius(layer: self.videoLayer, cornerRadius: 10.0)
-
- self.videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(rotatedVideoResolution.width), height: Int(rotatedVideoResolution.height)))
- }
-}
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/NoticeView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/NoticeView.swift
new file mode 100644
index 00000000000..80331e8ad4a
--- /dev/null
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/NoticeView.swift
@@ -0,0 +1,73 @@
+import Foundation
+import UIKit
+import Display
+import ComponentFlow
+
+final class NoticeView: OverlayMaskContainerView {
+ private let backgroundView: RoundedCornersView
+ private let textContainer: UIView
+ private let textView: TextView
+
+ override init(frame: CGRect) {
+ self.backgroundView = RoundedCornersView(color: .white)
+ self.textContainer = UIView()
+ self.textContainer.clipsToBounds = true
+ self.textView = TextView()
+
+ super.init(frame: frame)
+
+ self.clipsToBounds = true
+
+ self.maskContents.addSubview(self.backgroundView)
+
+ self.textContainer.addSubview(self.textView)
+ self.addSubview(self.textContainer)
+ }
+
+ required init?(coder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ func animateIn() {
+ let delay: Double = 0.2
+
+ self.layer.animateScale(from: 0.001, to: 1.0, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
+ self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
+ self.textView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: delay)
+
+ self.backgroundView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
+ self.backgroundView.layer.animateFrame(from: CGRect(origin: CGPoint(x: (self.bounds.width - self.bounds.height) * 0.5, y: 0.0), size: CGSize(width: self.bounds.height, height: self.bounds.height)), to: self.backgroundView.frame, duration: 0.5, delay: delay, timingFunction: kCAMediaTimingFunctionSpring)
+
+ self.textContainer.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, delay: delay)
+ self.textContainer.layer.cornerRadius = self.bounds.height * 0.5
+ self.textContainer.layer.animateFrame(from: CGRect(origin: CGPoint(x: (self.bounds.width - self.bounds.height) * 0.5, y: 0.0), size: CGSize(width: self.bounds.height, height: self.bounds.height)), to: self.textContainer.frame, duration: 0.5, delay: delay, timingFunction: kCAMediaTimingFunctionSpring, completion: { [weak self] completed in
+ guard let self, completed else {
+ return
+ }
+ self.textContainer.layer.cornerRadius = 0.0
+ })
+ }
+
+ func animateOut(completion: @escaping () -> Void) {
+ self.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
+ completion()
+ })
+ self.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false)
+ }
+
+ func update(text: String, constrainedWidth: CGFloat, transition: Transition) -> CGSize {
+ let sideInset: CGFloat = 12.0
+ let verticalInset: CGFloat = 6.0
+
+ let textSize = self.textView.update(string: text, fontSize: 15.0, fontWeight: 0.0, color: .white, constrainedWidth: constrainedWidth - sideInset * 2.0, transition: .immediate)
+ let size = CGSize(width: textSize.width + sideInset * 2.0, height: textSize.height + verticalInset * 2.0)
+
+ transition.setFrame(view: self.backgroundView, frame: CGRect(origin: CGPoint(), size: size))
+ self.backgroundView.update(cornerRadius: floor(size.height * 0.5), transition: transition)
+
+ transition.setFrame(view: self.textContainer, frame: CGRect(origin: CGPoint(), size: size))
+ transition.setFrame(view: self.textView, frame: CGRect(origin: CGPoint(x: sideInset, y: verticalInset), size: textSize))
+
+ return size
+ }
+}
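// Illustrative sketch, not part of the diff: typical driving code for NoticeView. The parent
// view, margins and the 3-second auto-dismiss are assumptions for the example.
import UIKit

func presentNotice(text: String, in parent: UIView) {
    let noticeView = NoticeView(frame: CGRect())
    parent.addSubview(noticeView)

    // Measure first; update(...) returns the text size plus its insets.
    let size = noticeView.update(text: text, constrainedWidth: parent.bounds.width - 32.0, transition: .immediate)
    noticeView.frame = CGRect(origin: CGPoint(x: floor((parent.bounds.width - size.width) * 0.5), y: 64.0), size: size)
    noticeView.animateIn()

    DispatchQueue.main.asyncAfter(deadline: .now() + 3.0) { [weak noticeView] in
        noticeView?.animateOut {
            noticeView?.removeFromSuperview()
        }
    }
}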
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/PrivateCallVideoLayer.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/PrivateCallVideoLayer.swift
index e9098f3032f..53ada011856 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/PrivateCallVideoLayer.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/PrivateCallVideoLayer.swift
@@ -5,38 +5,6 @@ import MetalPerformanceShaders
import Accelerate
import MetalEngine
-func imageToCVPixelBuffer(image: UIImage) -> CVPixelBuffer? {
- guard let cgImage = image.cgImage, let data = cgImage.dataProvider?.data, let bytes = CFDataGetBytePtr(data), let colorSpace = cgImage.colorSpace, case .rgb = colorSpace.model, cgImage.bitsPerPixel / cgImage.bitsPerComponent == 4 else {
- return nil
- }
-
- let width = cgImage.width
- let height = cgImage.width
-
- var pixelBuffer: CVPixelBuffer? = nil
- let _ = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, [
- kCVPixelBufferIOSurfacePropertiesKey: NSDictionary()
- ] as CFDictionary, &pixelBuffer)
- guard let pixelBuffer else {
- return nil
- }
-
- CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
- defer {
- CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
- }
- guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else {
- return nil
- }
-
- var srcBuffer = vImage_Buffer(data: UnsafeMutableRawPointer(mutating: bytes), height: vImagePixelCount(height), width: vImagePixelCount(width), rowBytes: cgImage.bytesPerRow)
- var dstBuffer = vImage_Buffer(data: UnsafeMutableRawPointer(mutating: baseAddress), height: vImagePixelCount(height), width: vImagePixelCount(width), rowBytes: CVPixelBufferGetBytesPerRow(pixelBuffer))
-
- vImageCopyBuffer(&srcBuffer, &dstBuffer, 4, vImage_Flags(kvImageDoNotTile))
-
- return pixelBuffer
-}
-
final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
var internalData: MetalEngineSubjectInternalData?
@@ -221,6 +189,13 @@ final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
var rect = SIMD4(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
+
+ var mirror = SIMD2<UInt32>(
+ videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
+ videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
+ )
+ encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
+
encoder.setFragmentTexture(blurredTexture, index: 0)
var brightness: Float = 1.0
@@ -243,6 +218,13 @@ final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
var rect = SIMD4(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
+
+ var mirror = SIMD2<UInt32>(
+ videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
+ videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
+ )
+ encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
+
encoder.setFragmentTexture(rgbaTexture, index: 0)
var brightness: Float = 1.0
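// Illustrative sketch, not part of the diff: how a VideoSourceOutput.MirrorDirection option set
// collapses into the SIMD2<UInt32> flags handed to the vertex stage above. The shader-side
// interpretation (flipping the normalized texture coordinate on each flagged axis) is an assumption.
func mirrorFlags(for direction: VideoSourceOutput.MirrorDirection) -> SIMD2<UInt32> {
    return SIMD2<UInt32>(
        direction.contains(.horizontal) ? 1 : 0,
        direction.contains(.vertical) ? 1 : 0
    )
}

// mirrorFlags(for: [])                        == SIMD2<UInt32>(0, 0)
// mirrorFlags(for: [.horizontal])             == SIMD2<UInt32>(1, 0)
// mirrorFlags(for: [.horizontal, .vertical])  == SIMD2<UInt32>(1, 1)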
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/RoundedCornersView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/RoundedCornersView.swift
new file mode 100644
index 00000000000..664616101a6
--- /dev/null
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/RoundedCornersView.swift
@@ -0,0 +1,63 @@
+import Foundation
+import UIKit
+import Display
+import ComponentFlow
+
+final class RoundedCornersView: UIImageView {
+ private let color: UIColor
+ private var currentCornerRadius: CGFloat?
+ private var cornerImage: UIImage?
+
+ init(color: UIColor) {
+ self.color = color
+
+ super.init(image: nil)
+
+ if #available(iOS 13.0, *) {
+ self.layer.cornerCurve = .circular
+ }
+ }
+
+ required init?(coder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ private func applyStaticCornerRadius() {
+ guard let cornerRadius = self.currentCornerRadius else {
+ return
+ }
+ if let cornerImage = self.cornerImage, cornerImage.size.height == cornerRadius * 2.0 {
+ } else {
+ let size = CGSize(width: cornerRadius * 2.0, height: cornerRadius * 2.0)
+ self.cornerImage = generateStretchableFilledCircleImage(diameter: size.width, color: self.color)
+ }
+ self.image = self.cornerImage
+ self.clipsToBounds = false
+ self.backgroundColor = nil
+ self.layer.cornerRadius = 0.0
+ }
+
+ func update(cornerRadius: CGFloat, transition: Transition) {
+ if self.currentCornerRadius == cornerRadius {
+ return
+ }
+ let previousCornerRadius = self.currentCornerRadius
+ self.currentCornerRadius = cornerRadius
+ if transition.animation.isImmediate {
+ self.applyStaticCornerRadius()
+ } else {
+ self.image = nil
+ self.clipsToBounds = true
+ self.backgroundColor = self.color
+ if let previousCornerRadius, self.layer.animation(forKey: "cornerRadius") == nil {
+ self.layer.cornerRadius = previousCornerRadius
+ }
+ transition.setCornerRadius(layer: self.layer, cornerRadius: cornerRadius, completion: { [weak self] completed in
+ guard let self, completed else {
+ return
+ }
+ self.applyStaticCornerRadius()
+ })
+ }
+ }
+}
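// Illustrative sketch, not part of the diff: animating a RoundedCornersView between corner radii.
// In the static case it swaps in a stretchable circle image; during an animated transition it
// temporarily falls back to layer.cornerRadius. Frames and timings below are assumptions.
import UIKit

func demoRoundedCorners() {
    let backgroundView = RoundedCornersView(color: .white)
    backgroundView.frame = CGRect(x: 0.0, y: 0.0, width: 240.0, height: 40.0)
    backgroundView.update(cornerRadius: 20.0, transition: .immediate)

    let transition = Transition(animation: .curve(duration: 0.3, curve: .easeInOut))
    transition.setFrame(view: backgroundView, frame: CGRect(x: 0.0, y: 0.0, width: 240.0, height: 64.0))
    backgroundView.update(cornerRadius: 10.0, transition: transition)
}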
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/TitleView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/TitleView.swift
index 0b5e4aad967..59a938b5783 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/TitleView.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/TitleView.swift
@@ -8,6 +8,7 @@ final class TextView: UIView {
var fontSize: CGFloat
var fontWeight: CGFloat
var monospacedDigits: Bool
+ var alignment: NSTextAlignment
var constrainedWidth: CGFloat
}
@@ -25,7 +26,6 @@ final class TextView: UIView {
self.isOpaque = false
self.backgroundColor = nil
- self.contentMode = .center
}
required init?(coder: NSCoder) {
@@ -43,8 +43,8 @@ final class TextView: UIView {
return super.action(for: layer, forKey: event)
}
- func update(string: String, fontSize: CGFloat, fontWeight: CGFloat, monospacedDigits: Bool = false, color: UIColor, constrainedWidth: CGFloat, transition: Transition) -> CGSize {
- let params = Params(string: string, fontSize: fontSize, fontWeight: fontWeight, monospacedDigits: monospacedDigits, constrainedWidth: constrainedWidth)
+ func update(string: String, fontSize: CGFloat, fontWeight: CGFloat, monospacedDigits: Bool = false, alignment: NSTextAlignment = .natural, color: UIColor, constrainedWidth: CGFloat, transition: Transition) -> CGSize {
+ let params = Params(string: string, fontSize: fontSize, fontWeight: fontWeight, monospacedDigits: monospacedDigits, alignment: alignment, constrainedWidth: constrainedWidth)
if let layoutState = self.layoutState, layoutState.params == params {
return layoutState.size
}
@@ -56,9 +56,13 @@ final class TextView: UIView {
font = UIFont.systemFont(ofSize: fontSize, weight: UIFont.Weight(fontWeight))
}
+ let paragraphStyle = NSMutableParagraphStyle()
+ paragraphStyle.alignment = alignment
+ paragraphStyle.lineSpacing = 0.6
let attributedString = NSAttributedString(string: string, attributes: [
.font: font,
.foregroundColor: color,
+ .paragraphStyle: paragraphStyle
])
let stringBounds = attributedString.boundingRect(with: CGSize(width: constrainedWidth, height: 200.0), options: .usesLineFragmentOrigin, context: nil)
let stringSize = CGSize(width: ceil(stringBounds.width), height: ceil(stringBounds.height))
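// Illustrative sketch, not part of the diff: the new alignment parameter in use. alignment
// defaults to .natural, so existing call sites keep their behaviour; the string and width
// below are assumptions for the example.
let statusTextView = TextView()
let statusSize = statusTextView.update(
    string: "Exchanging encryption keys",
    fontSize: 16.0,
    fontWeight: 0.0,
    alignment: .center,   // applied through the NSMutableParagraphStyle added above
    color: .white,
    constrainedWidth: 200.0,
    transition: .immediate
)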
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/VideoContainerView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/VideoContainerView.swift
index 022fae6ebbf..6d321a3a374 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/VideoContainerView.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/VideoContainerView.swift
@@ -3,144 +3,524 @@ import UIKit
import Display
import ComponentFlow
import MetalEngine
+import SwiftSignalKit
private let shadowImage: UIImage? = {
UIImage(named: "Call/VideoGradient")?.precomposed()
}()
-final class VideoContainerView: UIView {
+private final class VideoContainerLayer: SimpleLayer {
+ let contentsLayer: SimpleLayer
+
+ override init() {
+ self.contentsLayer = SimpleLayer()
+
+ super.init()
+
+ self.addSublayer(self.contentsLayer)
+ }
+
+ override init(layer: Any) {
+ self.contentsLayer = SimpleLayer()
+
+ super.init(layer: layer)
+ }
+
+ required init?(coder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ func update(size: CGSize, transition: Transition) {
+ transition.setFrame(layer: self.contentsLayer, frame: CGRect(origin: CGPoint(), size: size))
+ }
+}
+
+final class VideoContainerView: HighlightTrackingButton {
+ enum Key {
+ case background
+ case foreground
+ }
+
private struct Params: Equatable {
var size: CGSize
var insets: UIEdgeInsets
var cornerRadius: CGFloat
+ var controlsHidden: Bool
var isMinimized: Bool
- var isAnimatingOut: Bool
+ var isAnimatedOut: Bool
- init(size: CGSize, insets: UIEdgeInsets, cornerRadius: CGFloat, isMinimized: Bool, isAnimatingOut: Bool) {
+ init(size: CGSize, insets: UIEdgeInsets, cornerRadius: CGFloat, controlsHidden: Bool, isMinimized: Bool, isAnimatedOut: Bool) {
self.size = size
self.insets = insets
self.cornerRadius = cornerRadius
+ self.controlsHidden = controlsHidden
self.isMinimized = isMinimized
- self.isAnimatingOut = isAnimatingOut
+ self.isAnimatedOut = isAnimatedOut
}
}
private struct VideoMetrics: Equatable {
var resolution: CGSize
var rotationAngle: Float
+ var sourceId: Int
- init(resolution: CGSize, rotationAngle: Float) {
+ init(resolution: CGSize, rotationAngle: Float, sourceId: Int) {
self.resolution = resolution
self.rotationAngle = rotationAngle
+ self.sourceId = sourceId
+ }
+ }
+
+ private final class FlipAnimationInfo {
+ let isForward: Bool
+ let previousRotationAngle: Float
+
+ init(isForward: Bool, previousRotationAngle: Float) {
+ self.isForward = isForward
+ self.previousRotationAngle = previousRotationAngle
}
}
- private let videoLayer: PrivateCallVideoLayer
+ private final class DisappearingVideo {
+ let flipAnimationInfo: FlipAnimationInfo?
+ let videoLayer: PrivateCallVideoLayer
+ let videoMetrics: VideoMetrics
+ var isAlphaAnimationInitiated: Bool = false
+
+ init(flipAnimationInfo: FlipAnimationInfo?, videoLayer: PrivateCallVideoLayer, videoMetrics: VideoMetrics) {
+ self.flipAnimationInfo = flipAnimationInfo
+ self.videoLayer = videoLayer
+ self.videoMetrics = videoMetrics
+ }
+ }
+
+ private enum MinimizedPosition: CaseIterable {
+ case topLeft
+ case topRight
+ case bottomLeft
+ case bottomRight
+ }
+
+ let key: Key
+
+ private let videoContainerLayer: VideoContainerLayer
+
+ private var videoLayer: PrivateCallVideoLayer
+ private var disappearingVideoLayer: DisappearingVideo?
+
let blurredContainerLayer: SimpleLayer
- private let topShadowView: UIImageView
- private let bottomShadowView: UIImageView
+ private let shadowContainer: SimpleLayer
+ private let topShadowLayer: SimpleLayer
+ private let bottomShadowLayer: SimpleLayer
private var params: Params?
private var videoMetrics: VideoMetrics?
private var appliedVideoMetrics: VideoMetrics?
+ private var highlightedState: Bool = false
+
+ private(set) var isFillingBounds: Bool = false
+
+ private var minimizedPosition: MinimizedPosition = .bottomRight
+ private var initialDragPosition: CGPoint?
+ private var dragPosition: CGPoint?
+ private var dragVelocity: CGPoint = CGPoint()
+ private var dragPositionAnimatorLink: SharedDisplayLinkDriver.Link?
+
+ private var videoOnUpdatedListener: Disposable?
var video: VideoSource? {
didSet {
- self.video?.updated = { [weak self] in
- guard let self else {
- return
+ if self.video !== oldValue {
+ self.videoOnUpdatedListener?.dispose()
+
+ self.videoOnUpdatedListener = self.video?.addOnUpdated { [weak self] in
+ guard let self else {
+ return
+ }
+ var videoMetrics: VideoMetrics?
+ if let currentOutput = self.video?.currentOutput {
+ if let previousVideo = self.videoLayer.video, previousVideo.sourceId != currentOutput.sourceId {
+ self.initiateVideoSourceSwitch(flipAnimationInfo: FlipAnimationInfo(isForward: previousVideo.sourceId < currentOutput.sourceId, previousRotationAngle: previousVideo.rotationAngle))
+ }
+
+ self.videoLayer.video = currentOutput
+ videoMetrics = VideoMetrics(resolution: currentOutput.resolution, rotationAngle: currentOutput.rotationAngle, sourceId: currentOutput.sourceId)
+ } else {
+ self.videoLayer.video = nil
+ }
+ self.videoLayer.setNeedsUpdate()
+
+ if self.videoMetrics != videoMetrics {
+ self.videoMetrics = videoMetrics
+ self.update(transition: .easeInOut(duration: 0.2))
+ }
}
+
+ if oldValue != nil {
+ self.initiateVideoSourceSwitch(flipAnimationInfo: nil)
+ }
+
var videoMetrics: VideoMetrics?
if let currentOutput = self.video?.currentOutput {
self.videoLayer.video = currentOutput
- videoMetrics = VideoMetrics(resolution: CGSize(width: CGFloat(currentOutput.y.width), height: CGFloat(currentOutput.y.height)), rotationAngle: currentOutput.rotationAngle)
+ videoMetrics = VideoMetrics(resolution: currentOutput.resolution, rotationAngle: currentOutput.rotationAngle, sourceId: currentOutput.sourceId)
} else {
self.videoLayer.video = nil
}
self.videoLayer.setNeedsUpdate()
- if self.videoMetrics != videoMetrics {
+ if self.videoMetrics != videoMetrics || oldValue != nil {
self.videoMetrics = videoMetrics
self.update(transition: .easeInOut(duration: 0.2))
}
}
- var videoMetrics: VideoMetrics?
- if let currentOutput = self.video?.currentOutput {
- self.videoLayer.video = currentOutput
- videoMetrics = VideoMetrics(resolution: CGSize(width: CGFloat(currentOutput.y.width), height: CGFloat(currentOutput.y.height)), rotationAngle: currentOutput.rotationAngle)
- } else {
- self.videoLayer.video = nil
- }
- self.videoLayer.setNeedsUpdate()
-
- if self.videoMetrics != videoMetrics {
- self.videoMetrics = videoMetrics
- self.update(transition: .easeInOut(duration: 0.2))
- }
}
}
- override init(frame: CGRect) {
+ var pressAction: (() -> Void)?
+
+ init(key: Key) {
+ self.key = key
+
+ self.videoContainerLayer = VideoContainerLayer()
+ self.videoContainerLayer.backgroundColor = nil
+ self.videoContainerLayer.isOpaque = false
+ self.videoContainerLayer.contentsLayer.backgroundColor = nil
+ self.videoContainerLayer.contentsLayer.isOpaque = false
+ if #available(iOS 13.0, *) {
+ self.videoContainerLayer.contentsLayer.cornerCurve = .circular
+ }
+
self.videoLayer = PrivateCallVideoLayer()
- self.blurredContainerLayer = SimpleLayer()
+ self.videoLayer.masksToBounds = true
+ self.videoLayer.isDoubleSided = false
+ if #available(iOS 13.0, *) {
+ self.videoLayer.cornerCurve = .circular
+ }
- self.topShadowView = UIImageView()
- self.topShadowView.transform = CGAffineTransformMakeScale(1.0, -1.0)
- self.bottomShadowView = UIImageView()
+ self.blurredContainerLayer = SimpleLayer()
- super.init(frame: frame)
+ self.shadowContainer = SimpleLayer()
+ self.topShadowLayer = SimpleLayer()
+ self.topShadowLayer.transform = CATransform3DMakeScale(1.0, -1.0, 1.0)
+ self.bottomShadowLayer = SimpleLayer()
- self.backgroundColor = UIColor.black
- self.blurredContainerLayer.backgroundColor = UIColor.black.cgColor
+ super.init(frame: CGRect())
- self.layer.addSublayer(self.videoLayer)
+ self.videoContainerLayer.contentsLayer.addSublayer(self.videoLayer)
+ self.layer.addSublayer(self.videoContainerLayer)
self.blurredContainerLayer.addSublayer(self.videoLayer.blurredLayer)
- self.topShadowView.image = shadowImage
- self.bottomShadowView.image = shadowImage
- self.addSubview(self.topShadowView)
- self.addSubview(self.bottomShadowView)
+ self.topShadowLayer.contents = shadowImage?.cgImage
+ self.bottomShadowLayer.contents = shadowImage?.cgImage
+ self.shadowContainer.addSublayer(self.topShadowLayer)
+ self.shadowContainer.addSublayer(self.bottomShadowLayer)
+ self.layer.addSublayer(self.shadowContainer)
+
+ self.highligthedChanged = { [weak self] highlighted in
+ guard let self, let params = self.params, !self.videoContainerLayer.bounds.isEmpty else {
+ return
+ }
+ var highlightedState = false
+ if highlighted {
+ if params.isMinimized {
+ highlightedState = true
+ }
+ } else {
+ highlightedState = false
+ }
+
+ if self.highlightedState == highlightedState {
+ return
+ }
+ self.highlightedState = highlightedState
+
+ let measurementSide = min(self.videoContainerLayer.bounds.width, self.videoContainerLayer.bounds.height)
+ let topScale: CGFloat = (measurementSide - 8.0) / measurementSide
+ let maxScale: CGFloat = (measurementSide + 2.0) / measurementSide
+
+ if highlightedState {
+ self.videoContainerLayer.removeAnimation(forKey: "sublayerTransform")
+ let transition = Transition(animation: .curve(duration: 0.15, curve: .easeInOut))
+ transition.setSublayerTransform(layer: self.videoContainerLayer, transform: CATransform3DMakeScale(topScale, topScale, 1.0))
+ } else {
+ let t = self.videoContainerLayer.presentation()?.sublayerTransform ?? self.videoContainerLayer.sublayerTransform
+ let currentScale = sqrt((t.m11 * t.m11) + (t.m12 * t.m12) + (t.m13 * t.m13))
+
+ let transition = Transition(animation: .none)
+ transition.setSublayerTransform(layer: self.videoContainerLayer, transform: CATransform3DIdentity)
+
+ self.videoContainerLayer.animateSublayerScale(from: currentScale, to: maxScale, duration: 0.13, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, removeOnCompletion: false, completion: { [weak self] completed in
+ guard let self, completed else {
+ return
+ }
+
+ self.videoContainerLayer.animateSublayerScale(from: maxScale, to: 1.0, duration: 0.1, timingFunction: CAMediaTimingFunctionName.easeIn.rawValue)
+ })
+ }
+ }
+ self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
+
+ self.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.panGesture(_:))))
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
+ override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
+ guard let params = self.params else {
+ return nil
+ }
+ if params.isMinimized {
+ let videoContainerPoint = self.layer.convert(point, to: self.videoContainerLayer)
+ if self.videoContainerLayer.bounds.contains(videoContainerPoint) {
+ return self
+ } else {
+ return nil
+ }
+ } else {
+ return nil
+ }
+ }
+
+ @objc private func pressed() {
+ self.pressAction?()
+ }
+
+ @objc private func panGesture(_ recognizer: UIPanGestureRecognizer) {
+ switch recognizer.state {
+ case .began, .changed:
+ self.dragVelocity = CGPoint()
+ if let dragPositionAnimatorLink = self.dragPositionAnimatorLink {
+ self.dragPositionAnimatorLink = nil
+ dragPositionAnimatorLink.invalidate()
+ }
+ let translation = recognizer.translation(in: self)
+
+ let initialDragPosition: CGPoint
+ if let current = self.initialDragPosition {
+ initialDragPosition = current
+ } else {
+ initialDragPosition = self.videoContainerLayer.position
+ self.initialDragPosition = initialDragPosition
+ }
+ self.dragPosition = initialDragPosition.offsetBy(dx: translation.x, dy: translation.y)
+ self.update(transition: .immediate)
+ case .ended, .cancelled:
+ self.initialDragPosition = nil
+ self.dragVelocity = recognizer.velocity(in: self)
+
+ if let params = self.params, let dragPosition = self.dragPosition {
+ let endPosition = CGPoint(
+ x: dragPosition.x - self.dragVelocity.x / (1000.0 * log(0.99)),
+ y: dragPosition.y - self.dragVelocity.y / (1000.0 * log(0.99))
+ )
+
+ var minCornerDistance: (corner: MinimizedPosition, distance: CGFloat)?
+ for corner in MinimizedPosition.allCases {
+ let cornerPosition: CGPoint
+ switch corner {
+ case .topLeft:
+ cornerPosition = CGPoint(x: params.insets.left, y: params.insets.top)
+ case .topRight:
+ cornerPosition = CGPoint(x: params.size.width - params.insets.right, y: params.insets.top)
+ case .bottomLeft:
+ cornerPosition = CGPoint(x: params.insets.left, y: params.size.height - params.insets.bottom)
+ case .bottomRight:
+ cornerPosition = CGPoint(x: params.size.width - params.insets.right, y: params.size.height - params.insets.bottom)
+ }
+
+ let distance = CGPoint(x: endPosition.x - cornerPosition.x, y: endPosition.y - cornerPosition.y)
+ let scalarDistance = sqrt(distance.x * distance.x + distance.y * distance.y)
+ if let (_, minDistance) = minCornerDistance {
+ if scalarDistance < minDistance {
+ minCornerDistance = (corner, scalarDistance)
+ }
+ } else {
+ minCornerDistance = (corner, scalarDistance)
+ }
+ }
+ if let minCornerDistance {
+ self.minimizedPosition = minCornerDistance.corner
+ }
+ }
+
+ self.dragPositionAnimatorLink = SharedDisplayLinkDriver.shared.add(framesPerSecond: .max, { [weak self] deltaTime in
+ guard let self else {
+ return
+ }
+ self.updateDragPositionAnimation(deltaTime: deltaTime)
+ })
+ default:
+ break
+ }
+ }
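// Illustrative sketch, not part of the diff: the two pieces of the snap logic above in isolation.
// The projected end point follows the deceleration projection x_end = x - v / (1000 * ln(0.99)),
// and the nearest of the four inset corners wins. Function names are assumptions.
import UIKit

func projectedEndPoint(position: CGPoint, velocity: CGPoint) -> CGPoint {
    let factor = CGFloat(-1.0 / (1000.0 * log(0.99)))   // ≈ 0.0995 s of travel at the release velocity
    return CGPoint(x: position.x + velocity.x * factor, y: position.y + velocity.y * factor)
}

func nearestCorner(to point: CGPoint, corners: [CGPoint]) -> CGPoint? {
    return corners.min(by: { lhs, rhs in
        let dl = CGPoint(x: point.x - lhs.x, y: point.y - lhs.y)
        let dr = CGPoint(x: point.x - rhs.x, y: point.y - rhs.y)
        return dl.x * dl.x + dl.y * dl.y < dr.x * dr.x + dr.y * dr.y
    })
}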
+
+ private func updateVelocityUsingSpring(currentVelocity: CGPoint, currentPosition: CGPoint, attractor: CGPoint, springConstant: CGFloat, damping: CGFloat, deltaTime: CGFloat) -> CGPoint {
+ let displacement = CGPoint(x: attractor.x - currentPosition.x, y: attractor.y - currentPosition.y)
+ let springForce = CGPoint(x: -springConstant * displacement.x, y: -springConstant * displacement.y)
+ var newVelocity = CGPoint(x: currentVelocity.x + springForce.x * deltaTime, y: currentVelocity.y + springForce.y * deltaTime)
+ newVelocity = CGPoint(x: newVelocity.x * exp(-damping * deltaTime), y: newVelocity.y * exp(-damping * deltaTime))
+ return newVelocity
+ }
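// Illustrative sketch, not part of the diff: one explicit-Euler step of the damped spring above,
// using the same constants the settle animation passes in (springConstant: -130, damping: 17).
// The starting values are assumptions for the example.
var position = CGPoint(x: 40.0, y: 300.0)          // current drag position
var velocity = CGPoint(x: 800.0, y: 0.0)           // pt/s, carried over from the pan gesture
let attractor = CGPoint(x: 16.0, y: 300.0)         // centre of the snapped corner slot
let springConstant: CGFloat = -130.0
let damping: CGFloat = 17.0
let dt: CGFloat = 1.0 / 60.0

let displacement = CGPoint(x: attractor.x - position.x, y: attractor.y - position.y)
let springForce = CGPoint(x: -springConstant * displacement.x, y: -springConstant * displacement.y)
velocity = CGPoint(x: (velocity.x + springForce.x * dt) * exp(-damping * dt),
                   y: (velocity.y + springForce.y * dt) * exp(-damping * dt))
position = CGPoint(x: position.x + velocity.x * dt, y: position.y + velocity.y * dt)
// The driving display link repeats this until |velocity| drops below 0.1 pt/s.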
+
+ private func updateDragPositionAnimation(deltaTime: Double) {
+ guard let params = self.params, let videoMetrics = self.videoMetrics else {
+ self.dragPosition = nil
+ self.dragPositionAnimatorLink = nil
+ return
+ }
+ if !params.isMinimized {
+ self.dragPosition = nil
+ self.dragPositionAnimatorLink = nil
+ return
+ }
+ guard var dragPosition = self.dragPosition else {
+ self.dragPosition = nil
+ self.dragPositionAnimatorLink = nil
+ return
+ }
+ let videoLayout = self.calculateMinimizedLayout(params: params, videoMetrics: videoMetrics, applyDragPosition: false)
+ let targetPosition = videoLayout.rotatedVideoFrame.center
+
+ self.dragVelocity = self.updateVelocityUsingSpring(
+ currentVelocity: self.dragVelocity,
+ currentPosition: dragPosition,
+ attractor: targetPosition,
+ springConstant: -130.0,
+ damping: 17.0,
+ deltaTime: CGFloat(deltaTime)
+ )
+
+ if sqrt(self.dragVelocity.x * self.dragVelocity.x + self.dragVelocity.y * self.dragVelocity.y) <= 0.1 {
+ self.dragVelocity = CGPoint()
+ self.dragPosition = nil
+ self.dragPositionAnimatorLink = nil
+ } else {
+ dragPosition.x += self.dragVelocity.x * CGFloat(deltaTime)
+ dragPosition.y += self.dragVelocity.y * CGFloat(deltaTime)
+
+ self.dragPosition = dragPosition
+ }
+
+ self.update(transition: .immediate)
+ }
+
+ private func initiateVideoSourceSwitch(flipAnimationInfo: FlipAnimationInfo?) {
+ guard let videoMetrics = self.videoMetrics else {
+ return
+ }
+ if let disappearingVideoLayer = self.disappearingVideoLayer {
+ disappearingVideoLayer.videoLayer.removeFromSuperlayer()
+ disappearingVideoLayer.videoLayer.blurredLayer.removeFromSuperlayer()
+ }
+ let previousVideoLayer = self.videoLayer
+ self.disappearingVideoLayer = DisappearingVideo(flipAnimationInfo: flipAnimationInfo, videoLayer: self.videoLayer, videoMetrics: videoMetrics)
+
+ self.videoLayer = PrivateCallVideoLayer()
+ self.videoLayer.opacity = previousVideoLayer.opacity
+ self.videoLayer.masksToBounds = true
+ self.videoLayer.isDoubleSided = false
+ if #available(iOS 13.0, *) {
+ self.videoLayer.cornerCurve = .circular
+ }
+ self.videoLayer.cornerRadius = previousVideoLayer.cornerRadius
+ self.videoLayer.blurredLayer.opacity = previousVideoLayer.blurredLayer.opacity
+
+ self.videoContainerLayer.contentsLayer.addSublayer(self.videoLayer)
+ self.blurredContainerLayer.addSublayer(self.videoLayer.blurredLayer)
+
+ self.dragPosition = nil
+ self.dragPositionAnimatorLink = nil
+ }
+
private func update(transition: Transition) {
guard let params = self.params else {
return
}
- self.update(params: params, transition: transition)
+ self.update(previousParams: params, params: params, transition: transition)
}
- func update(size: CGSize, insets: UIEdgeInsets, cornerRadius: CGFloat, isMinimized: Bool, isAnimatingOut: Bool, transition: Transition) {
- let params = Params(size: size, insets: insets, cornerRadius: cornerRadius, isMinimized: isMinimized, isAnimatingOut: isAnimatingOut)
+ func update(size: CGSize, insets: UIEdgeInsets, cornerRadius: CGFloat, controlsHidden: Bool, isMinimized: Bool, isAnimatedOut: Bool, transition: Transition) {
+ let params = Params(size: size, insets: insets, cornerRadius: cornerRadius, controlsHidden: controlsHidden, isMinimized: isMinimized, isAnimatedOut: isAnimatedOut)
if self.params == params {
return
}
- self.layer.masksToBounds = true
- if self.layer.animation(forKey: "cornerRadius") == nil {
- self.layer.cornerRadius = self.params?.cornerRadius ?? 0.0
+ let previousParams = self.params
+ self.params = params
+
+ if let previousParams, previousParams.controlsHidden != params.controlsHidden {
+ self.dragPosition = nil
+ self.dragPositionAnimatorLink = nil
}
- self.params = params
+ self.update(previousParams: previousParams, params: params, transition: transition)
+ }
+
+ private struct MinimizedLayout {
+ var videoIsRotated: Bool
+ var rotatedVideoSize: CGSize
+ var rotatedVideoResolution: CGSize
+ var rotatedVideoFrame: CGRect
+ var videoTransform: CATransform3D
+ var effectiveVideoFrame: CGRect
+ }
+
+ private func calculateMinimizedLayout(params: Params, videoMetrics: VideoMetrics, applyDragPosition: Bool) -> MinimizedLayout {
+ var rotatedResolution = videoMetrics.resolution
+ var videoIsRotated = false
+ if videoMetrics.rotationAngle == Float.pi * 0.5 || videoMetrics.rotationAngle == Float.pi * 3.0 / 2.0 {
+ rotatedResolution = CGSize(width: rotatedResolution.height, height: rotatedResolution.width)
+ videoIsRotated = true
+ }
- transition.setCornerRadius(layer: self.layer, cornerRadius: params.cornerRadius, completion: { [weak self] completed in
- guard let self, let params = self.params, completed else {
- return
- }
- if !params.isAnimatingOut {
- self.layer.masksToBounds = false
- self.layer.cornerRadius = 0.0
+ let minimizedBoundingSize: CGFloat = params.controlsHidden ? 140.0 : 240.0
+ let videoSize = rotatedResolution.aspectFitted(CGSize(width: minimizedBoundingSize, height: minimizedBoundingSize))
+
+ let videoResolution = rotatedResolution.aspectFittedOrSmaller(CGSize(width: 1280, height: 1280)).aspectFittedOrSmaller(CGSize(width: videoSize.width * 3.0, height: videoSize.height * 3.0))
+ let rotatedVideoResolution = videoIsRotated ? CGSize(width: videoResolution.height, height: videoResolution.width) : videoResolution
+
+ let rotatedVideoSize = videoIsRotated ? CGSize(width: videoSize.height, height: videoSize.width) : videoSize
+
+ let rotatedVideoFrame: CGRect
+ if applyDragPosition, let dragPosition = self.dragPosition {
+ rotatedVideoFrame = videoSize.centered(around: dragPosition)
+ } else {
+ switch self.minimizedPosition {
+ case .topLeft:
+ rotatedVideoFrame = CGRect(origin: CGPoint(x: params.insets.left, y: params.insets.top), size: videoSize)
+ case .topRight:
+ rotatedVideoFrame = CGRect(origin: CGPoint(x: params.size.width - params.insets.right - videoSize.width, y: params.insets.top), size: videoSize)
+ case .bottomLeft:
+ rotatedVideoFrame = CGRect(origin: CGPoint(x: params.insets.left, y: params.size.height - params.insets.bottom - videoSize.height), size: videoSize)
+ case .bottomRight:
+ rotatedVideoFrame = CGRect(origin: CGPoint(x: params.size.width - params.insets.right - videoSize.width, y: params.size.height - params.insets.bottom - videoSize.height), size: videoSize)
}
- })
+ }
+
+ let effectiveVideoFrame = videoSize.centered(around: rotatedVideoFrame.center)
- self.update(params: params, transition: transition)
+ var videoTransform = CATransform3DIdentity
+ videoTransform.m34 = 1.0 / 600.0
+ videoTransform = CATransform3DRotate(videoTransform, CGFloat(videoMetrics.rotationAngle), 0.0, 0.0, 1.0)
+ if params.isAnimatedOut {
+ videoTransform = CATransform3DScale(videoTransform, 0.6, 0.6, 1.0)
+ }
+
+ return MinimizedLayout(
+ videoIsRotated: videoIsRotated,
+ rotatedVideoSize: rotatedVideoSize,
+ rotatedVideoResolution: rotatedVideoResolution,
+ rotatedVideoFrame: rotatedVideoFrame,
+ videoTransform: videoTransform,
+ effectiveVideoFrame: effectiveVideoFrame
+ )
}
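// Illustrative sketch, not part of the diff: the rotation-aware fitting performed above, for a
// concrete 1280x720 frame rotated by .pi / 2 with controls visible (240 pt bounding square).
// The numbers are assumptions for the example.
let resolution = CGSize(width: 1280.0, height: 720.0)      // videoMetrics.resolution

// A 90°/270° rotation swaps the axes before fitting.
let rotatedResolution = CGSize(width: resolution.height, height: resolution.width)   // 720 x 1280
let fittedSize = rotatedResolution.aspectFitted(CGSize(width: 240.0, height: 240.0)) // 135 x 240

// The layer is laid out un-rotated and turned by the container transform, so its bounds swap back.
let rotatedVideoSize = CGSize(width: fittedSize.height, height: fittedSize.width)    // 240 x 135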
- private func update(params: Params, transition: Transition) {
+ private func update(previousParams: Params?, params: Params, transition: Transition) {
guard let videoMetrics = self.videoMetrics else {
return
}
@@ -151,43 +531,124 @@ final class VideoContainerView: UIView {
self.appliedVideoMetrics = videoMetrics
if params.isMinimized {
- var rotatedResolution = videoMetrics.resolution
- var videoIsRotated = false
- if videoMetrics.rotationAngle == Float.pi * 0.5 || videoMetrics.rotationAngle == Float.pi * 3.0 / 2.0 {
- rotatedResolution = CGSize(width: rotatedResolution.height, height: rotatedResolution.width)
- videoIsRotated = true
- }
+ self.isFillingBounds = false
- let videoSize = rotatedResolution.aspectFitted(CGSize(width: 160.0, height: 160.0))
+ let videoLayout = self.calculateMinimizedLayout(params: params, videoMetrics: videoMetrics, applyDragPosition: true)
- let videoResolution = rotatedResolution.aspectFittedOrSmaller(CGSize(width: 1280, height: 1280)).aspectFittedOrSmaller(CGSize(width: videoSize.width * 3.0, height: videoSize.height * 3.0))
- let rotatedVideoResolution = videoIsRotated ? CGSize(width: videoResolution.height, height: videoResolution.width) : videoResolution
+ transition.setPosition(layer: self.videoContainerLayer, position: videoLayout.rotatedVideoFrame.center)
- let rotatedVideoSize = videoIsRotated ? CGSize(width: videoSize.height, height: videoSize.width) : videoSize
- let rotatedVideoFrame = CGRect(origin: CGPoint(x: params.size.width - params.insets.right - videoSize.width, y: params.size.height - params.insets.bottom - videoSize.height), size: videoSize)
- let effectiveVideoFrame = videoSize.centered(around: rotatedVideoFrame.center)
+ self.videoContainerLayer.contentsLayer.masksToBounds = true
+ if self.disappearingVideoLayer != nil {
+ self.videoContainerLayer.contentsLayer.backgroundColor = UIColor.black.cgColor
+ }
+ transition.setBounds(layer: self.videoContainerLayer, bounds: CGRect(origin: CGPoint(), size: videoLayout.rotatedVideoSize), completion: { [weak self] completed in
+ guard let self, completed else {
+ return
+ }
+ self.videoContainerLayer.contentsLayer.masksToBounds = false
+ self.videoContainerLayer.contentsLayer.backgroundColor = nil
+ })
+ self.videoContainerLayer.update(size: videoLayout.rotatedVideoSize, transition: transition)
- transition.setPosition(layer: self.videoLayer, position: rotatedVideoFrame.center)
- transition.setBounds(layer: self.videoLayer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoSize))
- transition.setPosition(layer: self.videoLayer.blurredLayer, position: rotatedVideoFrame.center)
- transition.setBounds(layer: self.videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoSize))
+ var videoTransition = transition
+ if self.videoLayer.bounds.isEmpty {
+ videoTransition = .immediate
+ }
+ var animateFlipDisappearingVideo: DisappearingVideo?
+ if let disappearingVideoLayer = self.disappearingVideoLayer {
+ self.disappearingVideoLayer = nil
+
+ let disappearingVideoLayout = self.calculateMinimizedLayout(params: params, videoMetrics: disappearingVideoLayer.videoMetrics, applyDragPosition: true)
+ let initialDisappearingVideoSize = disappearingVideoLayout.rotatedVideoSize
+
+ if !disappearingVideoLayer.isAlphaAnimationInitiated {
+ disappearingVideoLayer.isAlphaAnimationInitiated = true
+
+ if let flipAnimationInfo = disappearingVideoLayer.flipAnimationInfo {
+ var videoTransform = self.videoContainerLayer.transform
+ var axis: (x: CGFloat, y: CGFloat, z: CGFloat) = (0.0, 0.0, 0.0)
+ let previousVideoScale: CGPoint
+ if flipAnimationInfo.previousRotationAngle == Float.pi * 0.5 {
+ axis.x = -1.0
+ previousVideoScale = CGPoint(x: 1.0, y: -1.0)
+ } else if flipAnimationInfo.previousRotationAngle == Float.pi {
+ axis.y = -1.0
+ previousVideoScale = CGPoint(x: -1.0, y: -1.0)
+ } else if flipAnimationInfo.previousRotationAngle == Float.pi * 3.0 / 2.0 {
+ axis.x = 1.0
+ previousVideoScale = CGPoint(x: 1.0, y: 1.0)
+ } else {
+ axis.y = 1.0
+ previousVideoScale = CGPoint(x: -1.0, y: 1.0)
+ }
+
+ videoTransform = CATransform3DRotate(videoTransform, (flipAnimationInfo.isForward ? 1.0 : -1.0) * CGFloat.pi * 0.9999, axis.x, axis.y, axis.z)
+ self.videoContainerLayer.transform = videoTransform
+
+ disappearingVideoLayer.videoLayer.zPosition = 1.0
+ transition.setZPosition(layer: disappearingVideoLayer.videoLayer, zPosition: -1.0)
+
+ disappearingVideoLayer.videoLayer.transform = CATransform3DMakeScale(previousVideoScale.x, previousVideoScale.y, 1.0)
+
+ animateFlipDisappearingVideo = disappearingVideoLayer
+ disappearingVideoLayer.videoLayer.blurredLayer.removeFromSuperlayer()
+ } else {
+ let alphaTransition: Transition = .easeInOut(duration: 0.2)
+ let disappearingVideoLayerValue = disappearingVideoLayer.videoLayer
+ alphaTransition.setAlpha(layer: disappearingVideoLayerValue, alpha: 0.0, completion: { [weak self, weak disappearingVideoLayerValue] _ in
+ guard let self, let disappearingVideoLayerValue else {
+ return
+ }
+ disappearingVideoLayerValue.removeFromSuperlayer()
+ if self.disappearingVideoLayer?.videoLayer === disappearingVideoLayerValue {
+ self.disappearingVideoLayer = nil
+ self.update(transition: .immediate)
+ }
+ })
+ disappearingVideoLayer.videoLayer.blurredLayer.removeFromSuperlayer()
+
+ self.videoLayer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+ }
+
+ self.videoLayer.position = disappearingVideoLayer.videoLayer.position
+ self.videoLayer.bounds = CGRect(origin: CGPoint(), size: videoLayout.rotatedVideoSize.aspectFilled(initialDisappearingVideoSize))
+ self.videoLayer.blurredLayer.position = disappearingVideoLayer.videoLayer.blurredLayer.position
+ self.videoLayer.blurredLayer.bounds = CGRect(origin: CGPoint(), size: videoLayout.rotatedVideoSize.aspectFilled(initialDisappearingVideoSize))
+ }
+
+ let disappearingVideoSize = initialDisappearingVideoSize.aspectFilled(videoLayout.rotatedVideoSize)
+ transition.setPosition(layer: disappearingVideoLayer.videoLayer, position: CGPoint(x: videoLayout.rotatedVideoSize.width * 0.5, y: videoLayout.rotatedVideoSize.height * 0.5))
+ transition.setBounds(layer: disappearingVideoLayer.videoLayer, bounds: CGRect(origin: CGPoint(), size: disappearingVideoSize))
+ transition.setPosition(layer: disappearingVideoLayer.videoLayer.blurredLayer, position: videoLayout.rotatedVideoFrame.center)
+ transition.setBounds(layer: disappearingVideoLayer.videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: disappearingVideoSize))
+ }
- transition.setTransform(layer: self.videoLayer, transform: CATransform3DMakeRotation(CGFloat(videoMetrics.rotationAngle), 0.0, 0.0, 1.0))
- transition.setTransform(layer: self.videoLayer.blurredLayer, transform: CATransform3DMakeRotation(CGFloat(videoMetrics.rotationAngle), 0.0, 0.0, 1.0))
+ let animateFlipDisappearingVideoLayer = animateFlipDisappearingVideo?.videoLayer
+ transition.setTransform(layer: self.videoContainerLayer, transform: videoLayout.videoTransform, completion: { [weak animateFlipDisappearingVideoLayer] _ in
+ animateFlipDisappearingVideoLayer?.removeFromSuperlayer()
+ })
- transition.setCornerRadius(layer: self.videoLayer, cornerRadius: 10.0)
+ transition.setPosition(layer: self.videoLayer, position: CGPoint(x: videoLayout.rotatedVideoSize.width * 0.5, y: videoLayout.rotatedVideoSize.height * 0.5))
+ transition.setBounds(layer: self.videoLayer, bounds: CGRect(origin: CGPoint(), size: videoLayout.rotatedVideoSize))
- self.videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(rotatedVideoResolution.width), height: Int(rotatedVideoResolution.height)))
+ transition.setPosition(layer: self.videoLayer.blurredLayer, position: videoLayout.rotatedVideoFrame.center)
+ transition.setAlpha(layer: self.videoLayer.blurredLayer, alpha: 0.0)
+ transition.setBounds(layer: self.videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: videoLayout.rotatedVideoSize))
+ videoTransition.setTransform(layer: self.videoLayer.blurredLayer, transform: videoLayout.videoTransform)
- let topShadowHeight: CGFloat = floor(effectiveVideoFrame.height * 0.2)
- let topShadowFrame = CGRect(origin: effectiveVideoFrame.origin, size: CGSize(width: effectiveVideoFrame.width, height: topShadowHeight))
- transition.setPosition(view: self.topShadowView, position: topShadowFrame.center)
- transition.setBounds(view: self.topShadowView, bounds: CGRect(origin: CGPoint(x: effectiveVideoFrame.minX, y: effectiveVideoFrame.maxY - topShadowHeight), size: topShadowFrame.size))
- transition.setAlpha(view: self.topShadowView, alpha: 0.0)
+ if let previousParams, !previousParams.isMinimized {
+ self.videoContainerLayer.contentsLayer.cornerRadius = previousParams.cornerRadius
+ }
+ transition.setCornerRadius(layer: self.videoContainerLayer.contentsLayer, cornerRadius: 18.0, completion: { [weak self] completed in
+ guard let self, completed, let params = self.params else {
+ return
+ }
+ if params.isMinimized {
+ self.videoLayer.cornerRadius = 18.0
+ }
+ })
- let bottomShadowHeight: CGFloat = 200.0
- transition.setFrame(view: self.bottomShadowView, frame: CGRect(origin: CGPoint(x: 0.0, y: params.size.height - bottomShadowHeight), size: CGSize(width: params.size.width, height: bottomShadowHeight)))
- transition.setAlpha(view: self.bottomShadowView, alpha: 0.0)
+ self.videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(videoLayout.rotatedVideoResolution.width), height: Int(videoLayout.rotatedVideoResolution.height)), edgeInset: 2)
} else {
var rotatedResolution = videoMetrics.resolution
var videoIsRotated = false
@@ -196,41 +657,109 @@ final class VideoContainerView: UIView {
videoIsRotated = true
}
- var videoSize = rotatedResolution.aspectFitted(params.size)
- let boundingAspectRatio = params.size.width / params.size.height
- let videoAspectRatio = videoSize.width / videoSize.height
- if abs(boundingAspectRatio - videoAspectRatio) < 0.15 {
+ var videoSize: CGSize
+ if params.isAnimatedOut {
+ self.isFillingBounds = true
videoSize = rotatedResolution.aspectFilled(params.size)
+ } else {
+ videoSize = rotatedResolution.aspectFitted(params.size)
+ let boundingAspectRatio = params.size.width / params.size.height
+ let videoAspectRatio = videoSize.width / videoSize.height
+ self.isFillingBounds = abs(boundingAspectRatio - videoAspectRatio) < 0.15
+ if self.isFillingBounds {
+ videoSize = rotatedResolution.aspectFilled(params.size)
+ }
}
let videoResolution = rotatedResolution.aspectFittedOrSmaller(CGSize(width: 1280, height: 1280)).aspectFittedOrSmaller(CGSize(width: videoSize.width * 3.0, height: videoSize.height * 3.0))
let rotatedVideoResolution = videoIsRotated ? CGSize(width: videoResolution.height, height: videoResolution.width) : videoResolution
+ let rotatedBoundingSize = videoIsRotated ? CGSize(width: params.size.height, height: params.size.width) : params.size
let rotatedVideoSize = videoIsRotated ? CGSize(width: videoSize.height, height: videoSize.width) : videoSize
- let rotatedBoundingSize = params.size
- let rotatedVideoFrame = CGRect(origin: CGPoint(x: floor((rotatedBoundingSize.width - rotatedVideoSize.width) * 0.5), y: floor((rotatedBoundingSize.height - rotatedVideoSize.height) * 0.5)), size: rotatedVideoSize)
+ let rotatedVideoBoundingSize = params.size
+ let rotatedVideoFrame = CGRect(origin: CGPoint(x: floor((rotatedVideoBoundingSize.width - rotatedVideoSize.width) * 0.5), y: floor((rotatedVideoBoundingSize.height - rotatedVideoSize.height) * 0.5)), size: rotatedVideoSize)
+
+ self.videoContainerLayer.contentsLayer.masksToBounds = true
+ if let previousParams, self.videoContainerLayer.contentsLayer.animation(forKey: "cornerRadius") == nil {
+ if previousParams.isMinimized {
+ self.videoContainerLayer.contentsLayer.cornerRadius = self.videoLayer.cornerRadius
+ } else {
+ self.videoContainerLayer.contentsLayer.cornerRadius = previousParams.cornerRadius
+ }
+ }
+ self.videoLayer.cornerRadius = 0.0
+ transition.setCornerRadius(layer: self.videoContainerLayer.contentsLayer, cornerRadius: params.cornerRadius, completion: { [weak self] completed in
+ guard let self, completed, let params = self.params else {
+ return
+ }
+ if !params.isMinimized && !params.isAnimatedOut {
+ self.videoContainerLayer.contentsLayer.cornerRadius = 0.0
+ }
+ })
- transition.setPosition(layer: self.videoLayer, position: rotatedVideoFrame.center)
- transition.setBounds(layer: self.videoLayer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoFrame.size))
- transition.setPosition(layer: self.videoLayer.blurredLayer, position: rotatedVideoFrame.center)
- transition.setBounds(layer: self.videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoFrame.size))
+ transition.setPosition(layer: self.videoContainerLayer, position: CGPoint(x: params.size.width * 0.5, y: params.size.height * 0.5))
+ transition.setBounds(layer: self.videoContainerLayer, bounds: CGRect(origin: CGPoint(), size: rotatedBoundingSize))
+ self.videoContainerLayer.update(size: rotatedBoundingSize, transition: transition)
- transition.setTransform(layer: self.videoLayer, transform: CATransform3DMakeRotation(CGFloat(videoMetrics.rotationAngle), 0.0, 0.0, 1.0))
- transition.setTransform(layer: self.videoLayer.blurredLayer, transform: CATransform3DMakeRotation(CGFloat(videoMetrics.rotationAngle), 0.0, 0.0, 1.0))
+ var videoTransition = transition
+ if self.videoLayer.bounds.isEmpty {
+ videoTransition = .immediate
+ if !transition.animation.isImmediate {
+ self.videoLayer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+ self.videoLayer.blurredLayer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+ }
+ }
- if !params.isAnimatingOut {
- self.videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(rotatedVideoResolution.width), height: Int(rotatedVideoResolution.height)))
+ if let disappearingVideoLayer = self.disappearingVideoLayer {
+ self.disappearingVideoLayer = nil
+
+ if !disappearingVideoLayer.isAlphaAnimationInitiated {
+ disappearingVideoLayer.isAlphaAnimationInitiated = true
+
+ let alphaTransition: Transition = .easeInOut(duration: 0.2)
+ let disappearingVideoLayerValue = disappearingVideoLayer.videoLayer
+ alphaTransition.setAlpha(layer: disappearingVideoLayerValue, alpha: 0.0, completion: { [weak disappearingVideoLayerValue] _ in
+ disappearingVideoLayerValue?.removeFromSuperlayer()
+ })
+ let disappearingVideoLayerBlurredLayerValue = disappearingVideoLayer.videoLayer.blurredLayer
+ alphaTransition.setAlpha(layer: disappearingVideoLayerBlurredLayerValue, alpha: 0.0, completion: { [weak disappearingVideoLayerBlurredLayerValue] _ in
+ disappearingVideoLayerBlurredLayerValue?.removeFromSuperlayer()
+ })
+ }
}
- let topShadowHeight: CGFloat = 200.0
- let topShadowFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: params.size.width, height: topShadowHeight))
- transition.setPosition(view: self.topShadowView, position: topShadowFrame.center)
- transition.setBounds(view: self.topShadowView, bounds: CGRect(origin: CGPoint(), size: topShadowFrame.size))
- transition.setAlpha(view: self.topShadowView, alpha: 1.0)
+ transition.setTransform(layer: self.videoContainerLayer, transform: CATransform3DMakeRotation(CGFloat(videoMetrics.rotationAngle), 0.0, 0.0, 1.0))
+
+ videoTransition.setFrame(layer: self.videoLayer, frame: rotatedVideoSize.centered(around: CGPoint(x: rotatedBoundingSize.width * 0.5, y: rotatedBoundingSize.height * 0.5)))
+ videoTransition.setPosition(layer: self.videoLayer.blurredLayer, position: rotatedVideoFrame.center)
+ videoTransition.setBounds(layer: self.videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoFrame.size))
+ videoTransition.setAlpha(layer: self.videoLayer.blurredLayer, alpha: 1.0)
+ videoTransition.setTransform(layer: self.videoLayer.blurredLayer, transform: CATransform3DMakeRotation(CGFloat(videoMetrics.rotationAngle), 0.0, 0.0, 1.0))
- let bottomShadowHeight: CGFloat = 200.0
- transition.setFrame(view: self.bottomShadowView, frame: CGRect(origin: CGPoint(x: 0.0, y: params.size.height - bottomShadowHeight), size: CGSize(width: params.size.width, height: bottomShadowHeight)))
- transition.setAlpha(view: self.bottomShadowView, alpha: 1.0)
+ if !params.isAnimatedOut {
+ self.videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(rotatedVideoResolution.width), height: Int(rotatedVideoResolution.height)), edgeInset: 2)
+ }
}
+
+ self.shadowContainer.masksToBounds = true
+ transition.setCornerRadius(layer: self.shadowContainer, cornerRadius: params.cornerRadius, completion: { [weak self] completed in
+ guard let self, completed else {
+ return
+ }
+ self.shadowContainer.masksToBounds = false
+ })
+ transition.setFrame(layer: self.shadowContainer, frame: CGRect(origin: CGPoint(), size: params.size))
+
+ let shadowAlpha: CGFloat = (params.controlsHidden || params.isMinimized || params.isAnimatedOut) ? 0.0 : 1.0
+
+ let topShadowHeight: CGFloat = 200.0
+ let topShadowFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: params.size.width, height: topShadowHeight))
+ transition.setPosition(layer: self.topShadowLayer, position: topShadowFrame.center)
+ transition.setBounds(layer: self.topShadowLayer, bounds: CGRect(origin: CGPoint(), size: topShadowFrame.size))
+ transition.setAlpha(layer: self.topShadowLayer, alpha: shadowAlpha)
+
+ let bottomShadowHeight: CGFloat = 200.0
+ transition.setFrame(layer: self.bottomShadowLayer, frame: CGRect(origin: CGPoint(x: 0.0, y: params.size.height - bottomShadowHeight), size: CGSize(width: params.size.width, height: bottomShadowHeight)))
+ transition.setAlpha(layer: self.bottomShadowLayer, alpha: shadowAlpha)
}
}
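// Illustrative sketch, not part of the diff: a typical call into the view from the screen's
// layout pass. The metric values and the owning screen are assumptions for the example.
let videoContainerView = VideoContainerView(key: .foreground)
videoContainerView.pressAction = {
    // e.g. swap the minimized tile with the full-screen video
}
videoContainerView.update(
    size: CGSize(width: 390.0, height: 844.0),
    insets: UIEdgeInsets(top: 64.0, left: 12.0, bottom: 96.0, right: 12.0),
    cornerRadius: 18.0,
    controlsHidden: false,
    isMinimized: true,          // draggable picture-in-picture tile snapped to a corner
    isAnimatedOut: false,
    transition: .easeInOut(duration: 0.2)
)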
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/VideoShadowsView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/VideoShadowsView.swift
new file mode 100644
index 00000000000..11cec5dbdfc
--- /dev/null
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/VideoShadowsView.swift
@@ -0,0 +1,17 @@
+import Foundation
+import UIKit
+import Display
+import ComponentFlow
+
+final class VideoShadowsView: UIView {
+ override init(frame: CGRect) {
+ super.init(frame: frame)
+ }
+
+ required init?(coder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ func update(size: CGSize, transition: Transition) {
+ }
+}
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Media/VideoInput.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Media/VideoInput.swift
index c15a7bc427f..68977a2a5c6 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Media/VideoInput.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Media/VideoInput.swift
@@ -2,17 +2,33 @@ import AVFoundation
import Metal
import CoreVideo
import Display
+import SwiftSignalKit
public final class VideoSourceOutput {
+ public struct MirrorDirection: OptionSet {
+ public var rawValue: Int32
+
+ public init(rawValue: Int32) {
+ self.rawValue = rawValue
+ }
+
+ public static let horizontal = MirrorDirection(rawValue: 1 << 0)
+ public static let vertical = MirrorDirection(rawValue: 1 << 1)
+ }
+
+ public let resolution: CGSize
public let y: MTLTexture
public let uv: MTLTexture
public let rotationAngle: Float
+ public let mirrorDirection: MirrorDirection
public let sourceId: Int
- public init(y: MTLTexture, uv: MTLTexture, rotationAngle: Float, sourceId: Int) {
+ public init(resolution: CGSize, y: MTLTexture, uv: MTLTexture, rotationAngle: Float, mirrorDirection: MirrorDirection, sourceId: Int) {
+ self.resolution = resolution
self.y = y
self.uv = uv
self.rotationAngle = rotationAngle
+ self.mirrorDirection = mirrorDirection
self.sourceId = sourceId
}
}
@@ -20,8 +36,9 @@ public final class VideoSourceOutput {
public protocol VideoSource: AnyObject {
typealias Output = VideoSourceOutput
- var updated: (() -> Void)? { get set }
var currentOutput: Output? { get }
+
+ func addOnUpdated(_ f: @escaping () -> Void) -> Disposable
}
public final class FileVideoSource: VideoSource {
@@ -35,13 +52,17 @@ public final class FileVideoSource: VideoSource {
private var targetItem: AVPlayerItem?
public private(set) var currentOutput: Output?
- public var updated: (() -> Void)?
+ private var onUpdatedListeners = Bag<() -> Void>()
private var displayLink: SharedDisplayLinkDriver.Link?
public var sourceId: Int = 0
+ public var fixedRotationAngle: Float?
+ public var sizeMultiplicator: CGPoint = CGPoint(x: 1.0, y: 1.0)
- public init?(device: MTLDevice, url: URL) {
+ public init?(device: MTLDevice, url: URL, fixedRotationAngle: Float? = nil) {
+ self.fixedRotationAngle = fixedRotationAngle
+
self.device = device
CVMetalTextureCacheCreate(nil, nil, device, nil, &self.textureCache)
@@ -62,11 +83,26 @@ public final class FileVideoSource: VideoSource {
return
}
if self.updateOutput() {
- self.updated?()
+ for onUpdated in self.onUpdatedListeners.copyItems() {
+ onUpdated()
+ }
}
})
}
+ public func addOnUpdated(_ f: @escaping () -> Void) -> Disposable {
+ let index = self.onUpdatedListeners.add(f)
+
+ return ActionDisposable { [weak self] in
+ DispatchQueue.main.async {
+ guard let self else {
+ return
+ }
+ self.onUpdatedListeners.remove(index)
+ }
+ }
+ }
+
private func updateOutput() -> Bool {
if self.targetItem !== self.queuePlayer.currentItem {
self.targetItem?.remove(self.videoOutput)
@@ -117,9 +153,15 @@ public final class FileVideoSource: VideoSource {
return false
}
- rotationAngle = Float.pi * 0.5
+ if let fixedRotationAngle = self.fixedRotationAngle {
+ rotationAngle = fixedRotationAngle
+ }
+
+ var resolution = CGSize(width: CGFloat(yTexture.width), height: CGFloat(yTexture.height))
+ resolution.width = floor(resolution.width * self.sizeMultiplicator.x)
+ resolution.height = floor(resolution.height * self.sizeMultiplicator.y)
- self.currentOutput = Output(y: yTexture, uv: uvTexture, rotationAngle: rotationAngle, sourceId: self.sourceId)
+ self.currentOutput = Output(resolution: resolution, y: yTexture, uv: uvTexture, rotationAngle: rotationAngle, mirrorDirection: [], sourceId: self.sourceId)
return true
}
}
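
With the single updated callback replaced by addOnUpdated, several observers can subscribe to the same VideoSource and unsubscribe independently by disposing their own Disposable. A consumer-side sketch (the observer class is hypothetical):

import SwiftSignalKit

// Hypothetical consumer: keeps its own disposable and releases only its own listener.
final class VideoFrameObserver {
    private var updateDisposable: Disposable?

    func start(source: VideoSource) {
        self.updateDisposable = source.addOnUpdated { [weak source] in
            guard let output = source?.currentOutput else {
                return
            }
            print("new frame: \(output.resolution), rotation \(output.rotationAngle)")
        }
    }

    deinit {
        self.updateDisposable?.dispose()
    }
}
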
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/MirroringLayer.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/MirroringLayer.swift
index 8506d054454..6acf9b7346f 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/MirroringLayer.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/MirroringLayer.swift
@@ -39,6 +39,28 @@ final class MirroringLayer: SimpleLayer {
}
}
+ override var anchorPoint: CGPoint {
+ get {
+ return super.anchorPoint
+ } set(value) {
+ if let targetLayer = self.targetLayer {
+ targetLayer.anchorPoint = value
+ }
+ super.anchorPoint = value
+ }
+ }
+
+ override var anchorPointZ: CGFloat {
+ get {
+ return super.anchorPointZ
+ } set(value) {
+ if let targetLayer = self.targetLayer {
+ targetLayer.anchorPointZ = value
+ }
+ super.anchorPointZ = value
+ }
+ }
+
override var opacity: Float {
get {
return super.opacity
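
MirroringLayer now forwards anchorPoint and anchorPointZ to its target layer, the same override pattern the class already uses for opacity and the other geometry properties. The idiom in isolation (class and property names here are generic, not the patch's types):

import QuartzCore

// Generic forwarding idiom: any change applied to this layer is replayed on a target layer.
final class ForwardingLayer: CALayer {
    weak var targetLayer: CALayer?

    override var anchorPoint: CGPoint {
        get {
            return super.anchorPoint
        }
        set(value) {
            targetLayer?.anchorPoint = value
            super.anchorPoint = value
        }
    }
}
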
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/OverlayMaskContainerView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/OverlayMaskContainerView.swift
index 447704d0dc0..a765cba831d 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/OverlayMaskContainerView.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/OverlayMaskContainerView.swift
@@ -60,7 +60,7 @@ public class OverlayMaskContainerView: UIView, OverlayMaskContainerViewProtocol
super.willRemoveSubview(subview)
if let view = subview as? OverlayMaskContainerViewProtocol {
- if view.maskContents.superview === self {
+ if view.maskContents.superview === self.maskContents {
view.maskContents.removeFromSuperview()
}
}
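
The previous guard compared view.maskContents.superview against self, but the mask contents are parented under self.maskContents, so the check never matched and stale mask subviews were left behind on removal. The corrected check is the usual "only detach what you actually own" guard; in generic form (hypothetical helper):

import UIKit

// Hypothetical helper: detach a child only if it is still parented where this container put it.
func detachIfOwned(_ child: UIView, expectedParent: UIView) {
    if child.superview === expectedParent {
        child.removeFromSuperview()
    }
}
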
diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/PrivateCallScreen.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/PrivateCallScreen.swift
index adfc27257f5..b67bebc28c1 100644
--- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/PrivateCallScreen.swift
+++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/PrivateCallScreen.swift
@@ -5,6 +5,53 @@ import MetalEngine
import ComponentFlow
import SwiftSignalKit
+/*private final class EdgeTestLayer: MetalEngineSubjectLayer, MetalEngineSubject {
+ final class RenderState: RenderToLayerState {
+ let pipelineState: MTLRenderPipelineState
+
+ required init?(device: MTLDevice) {
+ guard let library = metalLibrary(device: device) else {
+ return nil
+ }
+ guard let vertexFunction = library.makeFunction(name: "edgeTestVertex"), let fragmentFunction = library.makeFunction(name: "edgeTestFragment") else {
+ return nil
+ }
+
+ let pipelineDescriptor = MTLRenderPipelineDescriptor()
+ pipelineDescriptor.vertexFunction = vertexFunction
+ pipelineDescriptor.fragmentFunction = fragmentFunction
+ pipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
+ pipelineDescriptor.colorAttachments[0].isBlendingEnabled = true
+ pipelineDescriptor.colorAttachments[0].rgbBlendOperation = .add
+ pipelineDescriptor.colorAttachments[0].alphaBlendOperation = .add
+ pipelineDescriptor.colorAttachments[0].sourceRGBBlendFactor = .one
+ pipelineDescriptor.colorAttachments[0].sourceAlphaBlendFactor = .one
+ pipelineDescriptor.colorAttachments[0].destinationRGBBlendFactor = .oneMinusSourceAlpha
+ pipelineDescriptor.colorAttachments[0].destinationAlphaBlendFactor = .one
+
+ guard let pipelineState = try? device.makeRenderPipelineState(descriptor: pipelineDescriptor) else {
+ return nil
+ }
+ self.pipelineState = pipelineState
+ }
+ }
+
+ var internalData: MetalEngineSubjectInternalData?
+
+ func update(context: MetalEngineSubjectContext) {
+ context.renderToLayer(spec: RenderLayerSpec(size: RenderSize(width: 300, height: 300), edgeInset: 100), state: RenderState.self, layer: self, commands: { encoder, placement in
+ let effectiveRect = placement.effectiveRect
+
+ var rect = SIMD4(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width * 0.5), Float(effectiveRect.height))
+ encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
+
+ var color = SIMD4(1.0, 0.0, 0.0, 1.0)
+ encoder.setFragmentBytes(&color, length: 4 * 4, index: 0)
+ encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
+ })
+ }
+}*/
+
public final class PrivateCallScreen: OverlayMaskContainerView {
public struct State: Equatable {
public struct SignalInfo: Equatable {
@@ -50,28 +97,34 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
public var lifecycleState: LifecycleState
public var name: String
+ public var shortName: String
public var avatarImage: UIImage?
public var audioOutput: AudioOutput
public var isMicrophoneMuted: Bool
public var localVideo: VideoSource?
public var remoteVideo: VideoSource?
+ public var isRemoteBatteryLow: Bool
public init(
lifecycleState: LifecycleState,
name: String,
+ shortName: String,
avatarImage: UIImage?,
audioOutput: AudioOutput,
isMicrophoneMuted: Bool,
localVideo: VideoSource?,
- remoteVideo: VideoSource?
+ remoteVideo: VideoSource?,
+ isRemoteBatteryLow: Bool
) {
self.lifecycleState = lifecycleState
self.name = name
+ self.shortName = shortName
self.avatarImage = avatarImage
self.audioOutput = audioOutput
self.isMicrophoneMuted = isMicrophoneMuted
self.localVideo = localVideo
self.remoteVideo = remoteVideo
+ self.isRemoteBatteryLow = isRemoteBatteryLow
}
public static func ==(lhs: State, rhs: State) -> Bool {
@@ -81,6 +134,9 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
if lhs.name != rhs.name {
return false
}
+ if lhs.shortName != rhs.shortName {
+ return false
+ }
if lhs.avatarImage != rhs.avatarImage {
return false
}
@@ -96,6 +152,9 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
if lhs.remoteVideo !== rhs.remoteVideo {
return false
}
+ if lhs.isRemoteBatteryLow != rhs.isRemoteBatteryLow {
+ return false
+ }
return true
}
}
@@ -119,17 +178,25 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
private let backgroundLayer: CallBackgroundLayer
private let overlayContentsView: UIView
private let buttonGroupView: ButtonGroupView
+ private let blobTransformLayer: SimpleLayer
+ private let blobBackgroundLayer: CALayer
private let blobLayer: CallBlobsLayer
+ private let avatarTransformLayer: SimpleLayer
private let avatarLayer: AvatarLayer
private let titleView: TextView
+ private let backButtonView: BackButtonView
private var statusView: StatusView
private var weakSignalView: WeakSignalView?
private var emojiView: KeyEmojiView?
+ private var emojiTooltipView: EmojiTooltipView?
+ private var emojiExpandedInfoView: EmojiExpandedInfoView?
+
+ private let videoContainerBackgroundView: RoundedCornersView
+ private let overlayContentsVideoContainerBackgroundView: RoundedCornersView
- private var localVideoContainerView: VideoContainerView?
- private var remoteVideoContainerView: VideoContainerView?
+ private var videoContainerViews: [VideoContainerView] = []
private var activeRemoteVideoSource: VideoSource?
private var waitingForFirstRemoteVideoFrameDisposable: Disposable?
@@ -137,9 +204,16 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
private var activeLocalVideoSource: VideoSource?
private var waitingForFirstLocalVideoFrameDisposable: Disposable?
+ private var canAnimateAudioLevel: Bool = false
+ private var displayEmojiTooltip: Bool = false
+ private var isEmojiKeyExpanded: Bool = false
+ private var areControlsHidden: Bool = false
+ private var swapLocalAndRemoteVideo: Bool = false
+
private var processedInitialAudioLevelBump: Bool = false
private var audioLevelBump: Float = 0.0
+ private var currentAvatarAudioScale: CGFloat = 1.0
private var targetAudioLevel: Float = 0.0
private var audioLevel: Float = 0.0
private var audioLevelUpdateSubscription: SharedDisplayLinkDriver.Link?
@@ -149,6 +223,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
public var videoAction: (() -> Void)?
public var microhoneMuteAction: (() -> Void)?
public var endCallAction: (() -> Void)?
+ public var backAction: (() -> Void)?
public override init(frame: CGRect) {
self.overlayContentsView = UIView()
@@ -158,19 +233,42 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
self.buttonGroupView = ButtonGroupView()
+ self.blobTransformLayer = SimpleLayer()
+ self.blobBackgroundLayer = self.backgroundLayer.externalBlurredLayer
self.blobLayer = CallBlobsLayer()
+ self.blobBackgroundLayer.mask = self.blobTransformLayer
+
+ self.avatarTransformLayer = SimpleLayer()
self.avatarLayer = AvatarLayer()
+ self.videoContainerBackgroundView = RoundedCornersView(color: .black)
+ self.overlayContentsVideoContainerBackgroundView = RoundedCornersView(color: UIColor(white: 0.1, alpha: 1.0))
+
self.titleView = TextView()
self.statusView = StatusView()
+ self.backButtonView = BackButtonView(text: "Back")
+
super.init(frame: frame)
+ self.clipsToBounds = true
+
self.layer.addSublayer(self.backgroundLayer)
self.overlayContentsView.layer.addSublayer(self.backgroundLayer.blurredLayer)
- self.layer.addSublayer(self.blobLayer)
- self.layer.addSublayer(self.avatarLayer)
+ self.overlayContentsView.addSubview(self.overlayContentsVideoContainerBackgroundView)
+
+ self.layer.addSublayer(self.blobBackgroundLayer)
+ self.blobTransformLayer.addSublayer(self.blobLayer)
+
+ self.avatarTransformLayer.addSublayer(self.avatarLayer)
+ self.layer.addSublayer(self.avatarTransformLayer)
+
+ /*let edgeTestLayer = EdgeTestLayer()
+ edgeTestLayer.frame = CGRect(origin: CGPoint(x: 20.0, y: 100.0), size: CGSize(width: 100.0, height: 100.0))
+ self.layer.addSublayer(edgeTestLayer)*/
+
+ self.addSubview(self.videoContainerBackgroundView)
self.overlayContentsView.mask = self.maskContents
self.addSubview(self.overlayContentsView)
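
The blob is now built as a small layer tree: the blurred copy of the background (blobBackgroundLayer) is masked by blobTransformLayer, which hosts the actual CallBlobsLayer, so scaling the transform layer grows both the visible blob region and its blurred fill together. The composition in generic form (stand-in layer names, not the patch's types):

import QuartzCore

// Generic form of the composition: a blurred background copy shows through only
// where the blob layer tree (used as its mask) draws.
func makeBlobComposition() -> (blurredBackgroundCopy: CALayer, blobTransform: CALayer) {
    let blurredBackgroundCopy = CALayer()   // stands in for backgroundLayer.externalBlurredLayer
    let blobTransform = CALayer()           // scaled with the incoming audio level
    let blobShape = CAShapeLayer()          // stands in for CallBlobsLayer

    blobTransform.addSublayer(blobShape)
    blurredBackgroundCopy.mask = blobTransform
    return (blurredBackgroundCopy, blobTransform)
}
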
@@ -184,6 +282,8 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
self?.update(transition: .immediate)
}
+ self.addSubview(self.backButtonView)
+
(self.layer as? SimpleLayer)?.didEnterHierarchy = { [weak self] in
guard let self else {
return
@@ -201,6 +301,15 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
}
self.audioLevelUpdateSubscription = nil
}
+
+ self.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))))
+
+ self.backButtonView.pressAction = { [weak self] in
+ guard let self else {
+ return
+ }
+ self.backAction?()
+ }
}
public required init?(coder: NSCoder) {
@@ -217,11 +326,21 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
return nil
}
+ if let emojiExpandedInfoView = self.emojiExpandedInfoView, self.isEmojiKeyExpanded {
+ if !result.isDescendant(of: emojiExpandedInfoView) {
+ return emojiExpandedInfoView
+ }
+ }
+
return result
}
public func addIncomingAudioLevel(value: Float) {
- self.targetAudioLevel = value
+ if self.canAnimateAudioLevel {
+ self.targetAudioLevel = value
+ } else {
+ self.targetAudioLevel = 0.0
+ }
}
private func attenuateAudioLevelStep() {
@@ -233,14 +352,35 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
}
private func updateAudioLevel() {
- if self.activeRemoteVideoSource == nil && self.activeLocalVideoSource == nil {
+ if self.canAnimateAudioLevel {
let additionalAvatarScale = CGFloat(max(0.0, min(self.audioLevel, 5.0)) * 0.05)
- self.avatarLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale, 1.0 + additionalAvatarScale, 1.0)
+ self.currentAvatarAudioScale = 1.0 + additionalAvatarScale
+ self.avatarTransformLayer.transform = CATransform3DMakeScale(self.currentAvatarAudioScale, self.currentAvatarAudioScale, 1.0)
if let params = self.params, case .terminated = params.state.lifecycleState {
} else {
let blobAmplificationFactor: CGFloat = 2.0
- self.blobLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0)
+ self.blobTransformLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0)
+ }
+ }
+ }
+
+ @objc private func tapGesture(_ recognizer: UITapGestureRecognizer) {
+ if case .ended = recognizer.state {
+ var update = false
+
+ if self.displayEmojiTooltip {
+ self.displayEmojiTooltip = false
+ update = true
+ }
+
+ if self.activeRemoteVideoSource != nil || self.activeLocalVideoSource != nil {
+ self.areControlsHidden = !self.areControlsHidden
+ update = true
+ }
+
+ if update {
+ self.update(transition: .spring(duration: 0.4))
}
}
}
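
For reference, the scaling curve used above: the incoming audio level is clamped to [0, 5] and mapped to at most +25% avatar scale, with the blob amplified twice as much. A worked example:

import CoreGraphics

// Worked example: audioLevel == 3.0
let audioLevel: Float = 3.0
let additionalAvatarScale = CGFloat(max(0.0, min(audioLevel, 5.0)) * 0.05)  // 0.15
let avatarScale = 1.0 + additionalAvatarScale                               // 1.15
let blobAmplificationFactor: CGFloat = 2.0
let blobScale = 1.0 + additionalAvatarScale * blobAmplificationFactor       // 1.30
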
@@ -259,7 +399,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
self.activeRemoteVideoSource = remoteVideo
} else {
let firstVideoFrameSignal = Signal { subscriber in
- remoteVideo.updated = { [weak remoteVideo] in
+ return remoteVideo.addOnUpdated { [weak remoteVideo] in
guard let remoteVideo else {
subscriber.putCompletion()
return
@@ -268,12 +408,9 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
subscriber.putCompletion()
}
}
-
- return EmptyDisposable
}
var shouldUpdate = false
self.waitingForFirstRemoteVideoFrameDisposable = (firstVideoFrameSignal
- |> timeout(4.0, queue: .mainQueue(), alternate: .complete())
|> deliverOnMainQueue).startStrict(completed: { [weak self] in
guard let self else {
return
@@ -297,7 +434,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
self.activeLocalVideoSource = localVideo
} else {
let firstVideoFrameSignal = Signal { subscriber in
- localVideo.updated = { [weak localVideo] in
+ return localVideo.addOnUpdated { [weak localVideo] in
guard let localVideo else {
subscriber.putCompletion()
return
@@ -306,12 +443,9 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
subscriber.putCompletion()
}
}
-
- return EmptyDisposable
}
var shouldUpdate = false
self.waitingForFirstLocalVideoFrameDisposable = (firstVideoFrameSignal
- |> timeout(4.0, queue: .mainQueue(), alternate: .complete())
|> deliverOnMainQueue).startStrict(completed: { [weak self] in
guard let self else {
return
@@ -328,6 +462,19 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
}
}
+ if self.activeRemoteVideoSource == nil && self.activeLocalVideoSource == nil {
+ self.areControlsHidden = false
+ }
+
+ if let previousParams = self.params, case .active = params.state.lifecycleState {
+ switch previousParams.state.lifecycleState {
+ case .connecting, .exchangingKeys, .ringing:
+ self.displayEmojiTooltip = true
+ default:
+ break
+ }
+ }
+
self.params = params
self.updateInternal(params: params, transition: transition)
}
@@ -340,36 +487,44 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
}
private func updateInternal(params: Params, transition: Transition) {
+ let genericAlphaTransition: Transition
+ switch transition.animation {
+ case .none:
+ genericAlphaTransition = .immediate
+ case let .curve(duration, _):
+ genericAlphaTransition = .easeInOut(duration: min(0.3, duration))
+ }
+
let backgroundFrame = CGRect(origin: CGPoint(), size: params.size)
- let aspect: CGFloat = params.size.width / params.size.height
- let sizeNorm: CGFloat = 64.0
- let renderingSize = CGSize(width: floor(sizeNorm * aspect), height: sizeNorm)
- let edgeSize: Int = 2
-
- let primaryVideoSource: VideoSource?
- let secondaryVideoSource: VideoSource?
- if let activeRemoteVideoSource = self.activeRemoteVideoSource, let activeLocalVideoSource = self.activeLocalVideoSource {
- primaryVideoSource = activeRemoteVideoSource
- secondaryVideoSource = activeLocalVideoSource
- } else if let activeRemoteVideoSource = self.activeRemoteVideoSource {
- primaryVideoSource = activeRemoteVideoSource
- secondaryVideoSource = nil
- } else if let activeLocalVideoSource = self.activeLocalVideoSource {
- primaryVideoSource = activeLocalVideoSource
- secondaryVideoSource = nil
+ var activeVideoSources: [(VideoContainerView.Key, VideoSource)] = []
+ if self.swapLocalAndRemoteVideo {
+ if let activeLocalVideoSource = self.activeLocalVideoSource {
+ activeVideoSources.append((.background, activeLocalVideoSource))
+ }
+ if let activeRemoteVideoSource = self.activeRemoteVideoSource {
+ activeVideoSources.append((.foreground, activeRemoteVideoSource))
+ }
} else {
- primaryVideoSource = nil
- secondaryVideoSource = nil
+ if let activeRemoteVideoSource = self.activeRemoteVideoSource {
+ activeVideoSources.append((.background, activeRemoteVideoSource))
+ }
+ if let activeLocalVideoSource = self.activeLocalVideoSource {
+ activeVideoSources.append((.foreground, activeLocalVideoSource))
+ }
}
+ let havePrimaryVideo = !activeVideoSources.isEmpty
- let havePrimaryVideo = self.activeRemoteVideoSource != nil || self.activeLocalVideoSource != nil
-
- let visualBackgroundFrame = backgroundFrame.insetBy(dx: -CGFloat(edgeSize) / renderingSize.width * backgroundFrame.width, dy: -CGFloat(edgeSize) / renderingSize.height * backgroundFrame.height)
+ let currentAreControlsHidden = havePrimaryVideo && self.areControlsHidden
- self.backgroundLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(renderingSize.width) + edgeSize * 2, height: Int(renderingSize.height) + edgeSize * 2))
+ let backgroundAspect: CGFloat = params.size.width / params.size.height
+ let backgroundSizeNorm: CGFloat = 64.0
+ let backgroundRenderingSize = CGSize(width: floor(backgroundSizeNorm * backgroundAspect), height: backgroundSizeNorm)
+ let visualBackgroundFrame = backgroundFrame
+ self.backgroundLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(backgroundRenderingSize.width), height: Int(backgroundRenderingSize.height)), edgeInset: 8)
transition.setFrame(layer: self.backgroundLayer, frame: visualBackgroundFrame)
transition.setFrame(layer: self.backgroundLayer.blurredLayer, frame: visualBackgroundFrame)
+ transition.setFrame(layer: self.blobBackgroundLayer, frame: visualBackgroundFrame)
let backgroundStateIndex: Int
switch params.state.lifecycleState {
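
The role assignment above reads more clearly in condensed form: whichever source is currently the background fills the screen, the other becomes the floating tile, and tapping a tile toggles swapLocalAndRemoteVideo. A hypothetical helper with the same logic:

// Hypothetical helper mirroring the assignment above.
func videoRoles(
    remote: VideoSource?,
    local: VideoSource?,
    swapLocalAndRemoteVideo: Bool
) -> [(VideoContainerView.Key, VideoSource)] {
    let background = swapLocalAndRemoteVideo ? local : remote
    let foreground = swapLocalAndRemoteVideo ? remote : local
    var result: [(VideoContainerView.Key, VideoSource)] = []
    if let background {
        result.append((.background, background))
    }
    if let foreground {
        result.append((.foreground, foreground))
    }
    return result
}
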
@@ -427,17 +582,114 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
self.speakerAction?()
}), at: 0)
}
- self.buttonGroupView.update(size: params.size, buttons: buttons, transition: transition)
+
+ var notices: [ButtonGroupView.Notice] = []
+ if params.state.isMicrophoneMuted {
+ notices.append(ButtonGroupView.Notice(id: AnyHashable(0 as Int), text: "Your microphone is turned off"))
+ }
+ if params.state.remoteVideo != nil && params.state.localVideo == nil {
+ notices.append(ButtonGroupView.Notice(id: AnyHashable(1 as Int), text: "Your camera is turned off"))
+ }
+ if params.state.isRemoteBatteryLow {
+ notices.append(ButtonGroupView.Notice(id: AnyHashable(2 as Int), text: "\(params.state.shortName)'s battery is low"))
+ }
+
+ let contentBottomInset = self.buttonGroupView.update(size: params.size, insets: params.insets, controlsHidden: currentAreControlsHidden, buttons: buttons, notices: notices, transition: transition)
+
+ var expandedEmojiKeyRect: CGRect?
+ if self.isEmojiKeyExpanded {
+ let emojiExpandedInfoView: EmojiExpandedInfoView
+ var emojiExpandedInfoTransition = transition
+ let alphaTransition: Transition
+ if let current = self.emojiExpandedInfoView {
+ emojiExpandedInfoView = current
+ alphaTransition = genericAlphaTransition
+ } else {
+ emojiExpandedInfoTransition = emojiExpandedInfoTransition.withAnimation(.none)
+ if !genericAlphaTransition.animation.isImmediate {
+ alphaTransition = genericAlphaTransition.withAnimation(.curve(duration: 0.1, curve: .easeInOut))
+ } else {
+ alphaTransition = genericAlphaTransition
+ }
+
+ emojiExpandedInfoView = EmojiExpandedInfoView(title: "This call is end-to-end encrypted", text: "If the emoji on \(params.state.shortName)'s screen are the same, this call is 100% secure.")
+ self.emojiExpandedInfoView = emojiExpandedInfoView
+ emojiExpandedInfoView.alpha = 0.0
+ Transition.immediate.setScale(view: emojiExpandedInfoView, scale: 0.5)
+ emojiExpandedInfoView.layer.anchorPoint = CGPoint(x: 0.5, y: 0.1)
+ if let emojiView = self.emojiView {
+ self.insertSubview(emojiExpandedInfoView, belowSubview: emojiView)
+ } else {
+ self.addSubview(emojiExpandedInfoView)
+ }
+
+ emojiExpandedInfoView.closeAction = { [weak self] in
+ guard let self else {
+ return
+ }
+ self.isEmojiKeyExpanded = false
+ self.update(transition: .spring(duration: 0.4))
+ }
+ }
+
+ let emojiExpandedInfoSize = emojiExpandedInfoView.update(constrainedWidth: params.size.width - (params.insets.left + 16.0) * 2.0, transition: emojiExpandedInfoTransition)
+ let emojiExpandedInfoFrame = CGRect(origin: CGPoint(x: floor((params.size.width - emojiExpandedInfoSize.width) * 0.5), y: params.insets.top + 73.0), size: emojiExpandedInfoSize)
+ emojiExpandedInfoTransition.setPosition(view: emojiExpandedInfoView, position: CGPoint(x: emojiExpandedInfoFrame.minX + emojiExpandedInfoView.layer.anchorPoint.x * emojiExpandedInfoFrame.width, y: emojiExpandedInfoFrame.minY + emojiExpandedInfoView.layer.anchorPoint.y * emojiExpandedInfoFrame.height))
+ emojiExpandedInfoTransition.setBounds(view: emojiExpandedInfoView, bounds: CGRect(origin: CGPoint(), size: emojiExpandedInfoFrame.size))
+
+ alphaTransition.setAlpha(view: emojiExpandedInfoView, alpha: 1.0)
+ transition.setScale(view: emojiExpandedInfoView, scale: 1.0)
+
+ expandedEmojiKeyRect = emojiExpandedInfoFrame
+ } else {
+ if let emojiExpandedInfoView = self.emojiExpandedInfoView {
+ self.emojiExpandedInfoView = nil
+
+ let alphaTransition: Transition
+ if !genericAlphaTransition.animation.isImmediate {
+ alphaTransition = genericAlphaTransition.withAnimation(.curve(duration: 0.1, curve: .easeInOut))
+ } else {
+ alphaTransition = genericAlphaTransition
+ }
+
+ alphaTransition.setAlpha(view: emojiExpandedInfoView, alpha: 0.0, completion: { [weak emojiExpandedInfoView] _ in
+ emojiExpandedInfoView?.removeFromSuperview()
+ })
+ transition.setScale(view: emojiExpandedInfoView, scale: 0.5)
+ }
+ }
+
+ let backButtonY: CGFloat
+ if currentAreControlsHidden {
+ backButtonY = -self.backButtonView.size.height - 12.0
+ } else {
+ backButtonY = params.insets.top + 12.0
+ }
+ let backButtonFrame = CGRect(origin: CGPoint(x: params.insets.left + 10.0, y: backButtonY), size: self.backButtonView.size)
+ transition.setFrame(view: self.backButtonView, frame: backButtonFrame)
+ transition.setAlpha(view: self.backButtonView, alpha: currentAreControlsHidden ? 0.0 : 1.0)
if case let .active(activeState) = params.state.lifecycleState {
let emojiView: KeyEmojiView
var emojiTransition = transition
+ var emojiAlphaTransition = genericAlphaTransition
if let current = self.emojiView {
emojiView = current
} else {
emojiTransition = transition.withAnimation(.none)
+ emojiAlphaTransition = genericAlphaTransition.withAnimation(.none)
emojiView = KeyEmojiView(emoji: activeState.emojiKey)
self.emojiView = emojiView
+ emojiView.pressAction = { [weak self] in
+ guard let self else {
+ return
+ }
+ if !self.isEmojiKeyExpanded {
+ self.isEmojiKeyExpanded = true
+ self.displayEmojiTooltip = false
+ self.update(transition: .spring(duration: 0.4))
+ }
+ }
}
if emojiView.superview == nil {
self.addSubview(emojiView)
@@ -445,157 +697,342 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
emojiView.animateIn()
}
}
- emojiTransition.setFrame(view: emojiView, frame: CGRect(origin: CGPoint(x: params.size.width - params.insets.right - 12.0 - emojiView.size.width, y: params.insets.top + 27.0), size: emojiView.size))
+ emojiView.isUserInteractionEnabled = !self.isEmojiKeyExpanded
+
+ let emojiViewWasExpanded = emojiView.isExpanded
+ let emojiViewSize = emojiView.update(isExpanded: self.isEmojiKeyExpanded, transition: emojiTransition)
+
+ if self.isEmojiKeyExpanded {
+ let emojiViewFrame = CGRect(origin: CGPoint(x: floor((params.size.width - emojiViewSize.width) * 0.5), y: params.insets.top + 93.0), size: emojiViewSize)
+
+ if case let .curve(duration, curve) = transition.animation, let emojiViewWasExpanded, !emojiViewWasExpanded {
+ let distance = CGPoint(x: emojiViewFrame.midX - emojiView.center.x, y: emojiViewFrame.midY - emojiView.center.y)
+ let positionKeyframes = generateParabollicMotionKeyframes(from: emojiView.center, to: emojiViewFrame.center, elevation: -distance.y * 0.8, duration: duration, curve: curve, reverse: false)
+ emojiView.center = emojiViewFrame.center
+ emojiView.layer.animateKeyframes(values: positionKeyframes.map { NSValue(cgPoint: $0) }, duration: duration, keyPath: "position", additive: false)
+ } else {
+ emojiTransition.setPosition(view: emojiView, position: emojiViewFrame.center)
+ }
+ emojiTransition.setBounds(view: emojiView, bounds: CGRect(origin: CGPoint(), size: emojiViewFrame.size))
+
+ if let emojiTooltipView = self.emojiTooltipView {
+ self.emojiTooltipView = nil
+ emojiTooltipView.animateOut(completion: { [weak emojiTooltipView] in
+ emojiTooltipView?.removeFromSuperview()
+ })
+ }
+ } else {
+ let emojiY: CGFloat
+ if currentAreControlsHidden {
+ emojiY = -8.0 - emojiViewSize.height
+ } else {
+ emojiY = params.insets.top + 12.0
+ }
+ let emojiViewFrame = CGRect(origin: CGPoint(x: params.size.width - params.insets.right - 12.0 - emojiViewSize.width, y: emojiY), size: emojiViewSize)
+
+ if case let .curve(duration, curve) = transition.animation, let emojiViewWasExpanded, emojiViewWasExpanded {
+ let distance = CGPoint(x: emojiViewFrame.midX - emojiView.center.x, y: emojiViewFrame.midY - emojiView.center.y)
+ let positionKeyframes = generateParabollicMotionKeyframes(from: emojiViewFrame.center, to: emojiView.center, elevation: distance.y * 0.8, duration: duration, curve: curve, reverse: true)
+ emojiView.center = emojiViewFrame.center
+ emojiView.layer.animateKeyframes(values: positionKeyframes.map { NSValue(cgPoint: $0) }, duration: duration, keyPath: "position", additive: false)
+ } else {
+ emojiTransition.setPosition(view: emojiView, position: emojiViewFrame.center)
+ }
+ emojiTransition.setBounds(view: emojiView, bounds: CGRect(origin: CGPoint(), size: emojiViewFrame.size))
+ emojiAlphaTransition.setAlpha(view: emojiView, alpha: currentAreControlsHidden ? 0.0 : 1.0)
+
+ if self.displayEmojiTooltip {
+ let emojiTooltipView: EmojiTooltipView
+ var emojiTooltipTransition = transition
+ var animateIn = false
+ if let current = self.emojiTooltipView {
+ emojiTooltipView = current
+ } else {
+ emojiTooltipTransition = emojiTooltipTransition.withAnimation(.none)
+ emojiTooltipView = EmojiTooltipView(text: "Encryption key of this call")
+ animateIn = true
+ self.emojiTooltipView = emojiTooltipView
+ self.addSubview(emojiTooltipView)
+ }
+
+ let emojiTooltipSize = emojiTooltipView.update(constrainedWidth: params.size.width - 32.0 * 2.0, subjectWidth: emojiViewSize.width - 20.0)
+ let emojiTooltipFrame = CGRect(origin: CGPoint(x: emojiViewFrame.maxX - emojiTooltipSize.width, y: emojiViewFrame.maxY + 8.0), size: emojiTooltipSize)
+ emojiTooltipTransition.setFrame(view: emojiTooltipView, frame: emojiTooltipFrame)
+
+ if animateIn && !transition.animation.isImmediate {
+ emojiTooltipView.animateIn()
+ }
+ } else if let emojiTooltipView = self.emojiTooltipView {
+ self.emojiTooltipView = nil
+ emojiTooltipView.animateOut(completion: { [weak emojiTooltipView] in
+ emojiTooltipView?.removeFromSuperview()
+ })
+ }
+ }
+
+ emojiAlphaTransition.setAlpha(view: emojiView, alpha: 1.0)
} else {
if let emojiView = self.emojiView {
self.emojiView = nil
- transition.setAlpha(view: emojiView, alpha: 0.0, completion: { [weak emojiView] _ in
+ genericAlphaTransition.setAlpha(view: emojiView, alpha: 0.0, completion: { [weak emojiView] _ in
emojiView?.removeFromSuperview()
})
}
+ if let emojiTooltipView = self.emojiTooltipView {
+ self.emojiTooltipView = nil
+ emojiTooltipView.animateOut(completion: { [weak emojiTooltipView] in
+ emojiTooltipView?.removeFromSuperview()
+ })
+ }
}
let collapsedAvatarSize: CGFloat = 136.0
let blobSize: CGFloat = collapsedAvatarSize + 40.0
- let collapsedAvatarFrame = CGRect(origin: CGPoint(x: floor((params.size.width - collapsedAvatarSize) * 0.5), y: 222.0), size: CGSize(width: collapsedAvatarSize, height: collapsedAvatarSize))
+ let collapsedAvatarFrame = CGRect(origin: CGPoint(x: floor((params.size.width - collapsedAvatarSize) * 0.5), y: max(params.insets.top + 8.0, floor(params.size.height * 0.49) - 39.0 - collapsedAvatarSize)), size: CGSize(width: collapsedAvatarSize, height: collapsedAvatarSize))
let expandedAvatarFrame = CGRect(origin: CGPoint(), size: params.size)
let expandedVideoFrame = CGRect(origin: CGPoint(), size: params.size)
let avatarFrame = havePrimaryVideo ? expandedAvatarFrame : collapsedAvatarFrame
let avatarCornerRadius = havePrimaryVideo ? params.screenCornerRadius : collapsedAvatarSize * 0.5
- let minimizedVideoInsets = UIEdgeInsets(top: 124.0, left: 12.0, bottom: 178.0, right: 12.0)
+ var minimizedVideoInsets = UIEdgeInsets()
+ minimizedVideoInsets.top = params.insets.top + (currentAreControlsHidden ? 0.0 : 60.0)
+ minimizedVideoInsets.left = params.insets.left + 12.0
+ minimizedVideoInsets.right = params.insets.right + 12.0
+ minimizedVideoInsets.bottom = contentBottomInset + 12.0
- if let primaryVideoSource {
- let remoteVideoContainerView: VideoContainerView
- if let current = self.remoteVideoContainerView {
- remoteVideoContainerView = current
+ var validVideoContainerKeys: [VideoContainerView.Key] = []
+ for i in 0 ..< activeVideoSources.count {
+ let (videoContainerKey, videoSource) = activeVideoSources[i]
+ validVideoContainerKeys.append(videoContainerKey)
+
+ var animateIn = false
+ let videoContainerView: VideoContainerView
+ if let current = self.videoContainerViews.first(where: { $0.key == videoContainerKey }) {
+ videoContainerView = current
} else {
- remoteVideoContainerView = VideoContainerView(frame: CGRect())
- self.remoteVideoContainerView = remoteVideoContainerView
- self.insertSubview(remoteVideoContainerView, belowSubview: self.overlayContentsView)
- self.overlayContentsView.layer.addSublayer(remoteVideoContainerView.blurredContainerLayer)
+ animateIn = true
+ videoContainerView = VideoContainerView(key: videoContainerKey)
+ switch videoContainerKey {
+ case .foreground:
+ self.overlayContentsView.layer.addSublayer(videoContainerView.blurredContainerLayer)
+
+ self.insertSubview(videoContainerView, belowSubview: self.overlayContentsView)
+ self.videoContainerViews.append(videoContainerView)
+ case .background:
+ if !self.videoContainerViews.isEmpty {
+ self.overlayContentsView.layer.insertSublayer(videoContainerView.blurredContainerLayer, below: self.videoContainerViews[0].blurredContainerLayer)
+
+ self.insertSubview(videoContainerView, belowSubview: self.videoContainerViews[0])
+ self.videoContainerViews.insert(videoContainerView, at: 0)
+ } else {
+ self.overlayContentsView.layer.addSublayer(videoContainerView.blurredContainerLayer)
+
+ self.insertSubview(videoContainerView, belowSubview: self.overlayContentsView)
+ self.videoContainerViews.append(videoContainerView)
+ }
+ }
- remoteVideoContainerView.layer.position = self.avatarLayer.position
- remoteVideoContainerView.layer.bounds = self.avatarLayer.bounds
- remoteVideoContainerView.alpha = 0.0
- remoteVideoContainerView.blurredContainerLayer.position = self.avatarLayer.position
- remoteVideoContainerView.blurredContainerLayer.bounds = self.avatarLayer.bounds
- remoteVideoContainerView.blurredContainerLayer.opacity = 0.0
- remoteVideoContainerView.update(size: self.avatarLayer.bounds.size, insets: minimizedVideoInsets, cornerRadius: self.avatarLayer.params?.cornerRadius ?? 0.0, isMinimized: false, isAnimatingOut: false, transition: .immediate)
+ videoContainerView.pressAction = { [weak self] in
+ guard let self else {
+ return
+ }
+ self.swapLocalAndRemoteVideo = !self.swapLocalAndRemoteVideo
+ self.update(transition: .easeInOut(duration: 0.25))
+ }
}
- if remoteVideoContainerView.video !== primaryVideoSource {
- remoteVideoContainerView.video = primaryVideoSource
+ if videoContainerView.video !== videoSource {
+ videoContainerView.video = videoSource
}
- transition.setPosition(view: remoteVideoContainerView, position: expandedVideoFrame.center)
- transition.setBounds(view: remoteVideoContainerView, bounds: CGRect(origin: CGPoint(), size: expandedVideoFrame.size))
- transition.setAlpha(view: remoteVideoContainerView, alpha: 1.0)
- transition.setPosition(layer: remoteVideoContainerView.blurredContainerLayer, position: expandedVideoFrame.center)
- transition.setBounds(layer: remoteVideoContainerView.blurredContainerLayer, bounds: CGRect(origin: CGPoint(), size: expandedVideoFrame.size))
- transition.setAlpha(layer: remoteVideoContainerView.blurredContainerLayer, alpha: 1.0)
- remoteVideoContainerView.update(size: expandedVideoFrame.size, insets: minimizedVideoInsets, cornerRadius: params.screenCornerRadius, isMinimized: false, isAnimatingOut: false, transition: transition)
- } else {
- if let remoteVideoContainerView = self.remoteVideoContainerView {
- remoteVideoContainerView.update(size: avatarFrame.size, insets: minimizedVideoInsets, cornerRadius: avatarCornerRadius, isMinimized: false, isAnimatingOut: true, transition: transition)
- transition.setPosition(layer: remoteVideoContainerView.blurredContainerLayer, position: avatarFrame.center)
- transition.setBounds(layer: remoteVideoContainerView.blurredContainerLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
- transition.setAlpha(layer: remoteVideoContainerView.blurredContainerLayer, alpha: 0.0)
- transition.setPosition(view: remoteVideoContainerView, position: avatarFrame.center)
- transition.setBounds(view: remoteVideoContainerView, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
- if remoteVideoContainerView.alpha != 0.0 {
- transition.setAlpha(view: remoteVideoContainerView, alpha: 0.0, completion: { [weak self, weak remoteVideoContainerView] completed in
- guard let self, let remoteVideoContainerView, completed else {
- return
- }
- remoteVideoContainerView.removeFromSuperview()
- remoteVideoContainerView.blurredContainerLayer.removeFromSuperlayer()
- if self.remoteVideoContainerView === remoteVideoContainerView {
- self.remoteVideoContainerView = nil
+ let videoContainerTransition = transition
+ if animateIn {
+ if i == 0 && self.videoContainerViews.count == 1 {
+ videoContainerView.layer.position = self.avatarTransformLayer.position
+ videoContainerView.layer.bounds = self.avatarTransformLayer.bounds
+ videoContainerView.alpha = 0.0
+ videoContainerView.blurredContainerLayer.position = self.avatarTransformLayer.position
+ videoContainerView.blurredContainerLayer.bounds = self.avatarTransformLayer.bounds
+ videoContainerView.blurredContainerLayer.opacity = 0.0
+ videoContainerView.update(size: self.avatarTransformLayer.bounds.size, insets: minimizedVideoInsets, cornerRadius: self.avatarLayer.params?.cornerRadius ?? 0.0, controlsHidden: currentAreControlsHidden, isMinimized: false, isAnimatedOut: true, transition: .immediate)
+ Transition.immediate.setScale(view: videoContainerView, scale: self.currentAvatarAudioScale)
+ Transition.immediate.setScale(view: self.videoContainerBackgroundView, scale: self.currentAvatarAudioScale)
+ } else {
+ videoContainerView.layer.position = expandedVideoFrame.center
+ videoContainerView.layer.bounds = CGRect(origin: CGPoint(), size: expandedVideoFrame.size)
+ videoContainerView.alpha = 0.0
+ videoContainerView.blurredContainerLayer.position = expandedVideoFrame.center
+ videoContainerView.blurredContainerLayer.bounds = CGRect(origin: CGPoint(), size: expandedVideoFrame.size)
+ videoContainerView.blurredContainerLayer.opacity = 0.0
+ videoContainerView.update(size: self.avatarTransformLayer.bounds.size, insets: minimizedVideoInsets, cornerRadius: params.screenCornerRadius, controlsHidden: currentAreControlsHidden, isMinimized: i != 0, isAnimatedOut: i != 0, transition: .immediate)
+ }
+ }
+
+ videoContainerTransition.setPosition(view: videoContainerView, position: expandedVideoFrame.center)
+ videoContainerTransition.setBounds(view: videoContainerView, bounds: CGRect(origin: CGPoint(), size: expandedVideoFrame.size))
+ videoContainerTransition.setScale(view: videoContainerView, scale: 1.0)
+ videoContainerTransition.setPosition(layer: videoContainerView.blurredContainerLayer, position: expandedVideoFrame.center)
+ videoContainerTransition.setBounds(layer: videoContainerView.blurredContainerLayer, bounds: CGRect(origin: CGPoint(), size: expandedVideoFrame.size))
+ videoContainerTransition.setScale(layer: videoContainerView.blurredContainerLayer, scale: 1.0)
+ videoContainerView.update(size: expandedVideoFrame.size, insets: minimizedVideoInsets, cornerRadius: params.screenCornerRadius, controlsHidden: currentAreControlsHidden, isMinimized: i != 0, isAnimatedOut: false, transition: videoContainerTransition)
+
+ let alphaTransition: Transition
+ switch transition.animation {
+ case .none:
+ alphaTransition = .immediate
+ case let .curve(duration, _):
+ if animateIn {
+ if i == 0 {
+ if self.videoContainerViews.count > 1 && self.videoContainerViews[1].isFillingBounds {
+ alphaTransition = .immediate
+ } else {
+ alphaTransition = transition
}
- })
+ } else {
+ alphaTransition = .easeInOut(duration: min(0.1, duration))
+ }
+ } else {
+ alphaTransition = transition
}
}
+
+ alphaTransition.setAlpha(view: videoContainerView, alpha: 1.0)
+ alphaTransition.setAlpha(layer: videoContainerView.blurredContainerLayer, alpha: 1.0)
}
- if let secondaryVideoSource {
- let localVideoContainerView: VideoContainerView
- if let current = self.localVideoContainerView {
- localVideoContainerView = current
- } else {
- localVideoContainerView = VideoContainerView(frame: CGRect())
- self.localVideoContainerView = localVideoContainerView
- self.insertSubview(localVideoContainerView, belowSubview: self.overlayContentsView)
- self.overlayContentsView.layer.addSublayer(localVideoContainerView.blurredContainerLayer)
+ var removedVideoContainerIndices: [Int] = []
+ for i in 0 ..< self.videoContainerViews.count {
+ let videoContainerView = self.videoContainerViews[i]
+ if !validVideoContainerKeys.contains(videoContainerView.key) {
+ removedVideoContainerIndices.append(i)
- localVideoContainerView.layer.position = self.avatarLayer.position
- localVideoContainerView.layer.bounds = self.avatarLayer.bounds
- localVideoContainerView.alpha = 0.0
- localVideoContainerView.blurredContainerLayer.position = self.avatarLayer.position
- localVideoContainerView.blurredContainerLayer.bounds = self.avatarLayer.bounds
- localVideoContainerView.blurredContainerLayer.opacity = 0.0
- localVideoContainerView.update(size: self.avatarLayer.bounds.size, insets: minimizedVideoInsets, cornerRadius: self.avatarLayer.params?.cornerRadius ?? 0.0, isMinimized: true, isAnimatingOut: false, transition: .immediate)
- }
-
- if localVideoContainerView.video !== secondaryVideoSource {
- localVideoContainerView.video = secondaryVideoSource
- }
-
- transition.setPosition(view: localVideoContainerView, position: expandedVideoFrame.center)
- transition.setBounds(view: localVideoContainerView, bounds: CGRect(origin: CGPoint(), size: expandedVideoFrame.size))
- transition.setAlpha(view: localVideoContainerView, alpha: 1.0)
- transition.setPosition(layer: localVideoContainerView.blurredContainerLayer, position: expandedVideoFrame.center)
- transition.setBounds(layer: localVideoContainerView.blurredContainerLayer, bounds: CGRect(origin: CGPoint(), size: expandedVideoFrame.size))
- transition.setAlpha(layer: localVideoContainerView.blurredContainerLayer, alpha: 1.0)
- localVideoContainerView.update(size: expandedVideoFrame.size, insets: minimizedVideoInsets, cornerRadius: params.screenCornerRadius, isMinimized: true, isAnimatingOut: false, transition: transition)
- } else {
- if let localVideoContainerView = self.localVideoContainerView {
- localVideoContainerView.update(size: avatarFrame.size, insets: minimizedVideoInsets, cornerRadius: avatarCornerRadius, isMinimized: false, isAnimatingOut: true, transition: transition)
- transition.setPosition(layer: localVideoContainerView.blurredContainerLayer, position: avatarFrame.center)
- transition.setBounds(layer: localVideoContainerView.blurredContainerLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
- transition.setAlpha(layer: localVideoContainerView.blurredContainerLayer, alpha: 0.0)
- transition.setPosition(view: localVideoContainerView, position: avatarFrame.center)
- transition.setBounds(view: localVideoContainerView, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
- if localVideoContainerView.alpha != 0.0 {
- transition.setAlpha(view: localVideoContainerView, alpha: 0.0, completion: { [weak self, weak localVideoContainerView] completed in
- guard let self, let localVideoContainerView, completed else {
+ if self.videoContainerViews.count == 1 || (i == 0 && !havePrimaryVideo) {
+ let alphaTransition: Transition = genericAlphaTransition
+
+ videoContainerView.update(size: avatarFrame.size, insets: minimizedVideoInsets, cornerRadius: avatarCornerRadius, controlsHidden: currentAreControlsHidden, isMinimized: false, isAnimatedOut: true, transition: transition)
+ transition.setPosition(layer: videoContainerView.blurredContainerLayer, position: avatarFrame.center)
+ transition.setBounds(layer: videoContainerView.blurredContainerLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
+ transition.setAlpha(layer: videoContainerView.blurredContainerLayer, alpha: 0.0)
+ transition.setPosition(view: videoContainerView, position: avatarFrame.center)
+ transition.setBounds(view: videoContainerView, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
+ if videoContainerView.alpha != 0.0 {
+ alphaTransition.setAlpha(view: videoContainerView, alpha: 0.0, completion: { [weak videoContainerView] _ in
+ guard let videoContainerView else {
+ return
+ }
+ videoContainerView.removeFromSuperview()
+ videoContainerView.blurredContainerLayer.removeFromSuperlayer()
+ })
+ alphaTransition.setAlpha(layer: videoContainerView.blurredContainerLayer, alpha: 0.0)
+ }
+ } else if i == 0 {
+ let alphaTransition = genericAlphaTransition
+
+ alphaTransition.setAlpha(view: videoContainerView, alpha: 0.0, completion: { [weak videoContainerView] _ in
+ guard let videoContainerView else {
return
}
- localVideoContainerView.removeFromSuperview()
- localVideoContainerView.blurredContainerLayer.removeFromSuperlayer()
- if self.localVideoContainerView === localVideoContainerView {
- self.localVideoContainerView = nil
+ videoContainerView.removeFromSuperview()
+ videoContainerView.blurredContainerLayer.removeFromSuperlayer()
+ })
+ alphaTransition.setAlpha(layer: videoContainerView.blurredContainerLayer, alpha: 0.0)
+ } else {
+ let alphaTransition = genericAlphaTransition
+
+ alphaTransition.setAlpha(view: videoContainerView, alpha: 0.0, completion: { [weak videoContainerView] _ in
+ guard let videoContainerView else {
+ return
}
+ videoContainerView.removeFromSuperview()
+ videoContainerView.blurredContainerLayer.removeFromSuperlayer()
})
+ alphaTransition.setAlpha(layer: videoContainerView.blurredContainerLayer, alpha: 0.0)
+
+ videoContainerView.update(size: params.size, insets: minimizedVideoInsets, cornerRadius: params.screenCornerRadius, controlsHidden: currentAreControlsHidden, isMinimized: true, isAnimatedOut: true, transition: transition)
}
}
}
+ for index in removedVideoContainerIndices.reversed() {
+ self.videoContainerViews.remove(at: index)
+ }
if self.avatarLayer.image !== params.state.avatarImage {
self.avatarLayer.image = params.state.avatarImage
}
- transition.setPosition(layer: self.avatarLayer, position: avatarFrame.center)
- transition.setBounds(layer: self.avatarLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
- self.avatarLayer.update(size: collapsedAvatarFrame.size, isExpanded:havePrimaryVideo, cornerRadius: avatarCornerRadius, transition: transition)
+
+ transition.setPosition(layer: self.avatarTransformLayer, position: avatarFrame.center)
+ transition.setBounds(layer: self.avatarTransformLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
+ transition.setPosition(layer: self.avatarLayer, position: CGPoint(x: avatarFrame.width * 0.5, y: avatarFrame.height * 0.5))
+
+ if havePrimaryVideo != self.avatarLayer.params?.isExpanded {
+ if havePrimaryVideo {
+ self.canAnimateAudioLevel = false
+ self.audioLevel = 0.0
+ self.currentAvatarAudioScale = 1.0
+ transition.setScale(layer: self.avatarTransformLayer, scale: 1.0)
+ transition.setScale(layer: self.blobTransformLayer, scale: 1.0)
+ }
+ transition.setBounds(layer: self.avatarLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size), completion: { [weak self] completed in
+ guard let self, let params = self.params, completed else {
+ return
+ }
+ if !havePrimaryVideo {
+ switch params.state.lifecycleState {
+ case .terminated:
+ break
+ default:
+ self.canAnimateAudioLevel = true
+ }
+ }
+ })
+ } else {
+ transition.setBounds(layer: self.avatarLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
+ }
+
+ var expandedEmojiKeyOverlapsAvatar = false
+ if let expandedEmojiKeyRect, collapsedAvatarFrame.insetBy(dx: -40.0, dy: -40.0).intersects(expandedEmojiKeyRect) {
+ expandedEmojiKeyOverlapsAvatar = true
+ }
+
+ self.avatarLayer.update(size: collapsedAvatarFrame.size, isExpanded: havePrimaryVideo, cornerRadius: avatarCornerRadius, transition: transition)
+ transition.setAlpha(layer: self.avatarLayer, alpha: (expandedEmojiKeyOverlapsAvatar && !havePrimaryVideo) ? 0.0 : 1.0)
+ transition.setScale(layer: self.avatarLayer, scale: expandedEmojiKeyOverlapsAvatar ? 0.001 : 1.0)
+
+ transition.setPosition(view: self.videoContainerBackgroundView, position: avatarFrame.center)
+ transition.setBounds(view: self.videoContainerBackgroundView, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
+ transition.setScale(view: self.videoContainerBackgroundView, scale: 1.0)
+ transition.setAlpha(view: self.videoContainerBackgroundView, alpha: havePrimaryVideo ? 1.0 : 0.0)
+ self.videoContainerBackgroundView.update(cornerRadius: havePrimaryVideo ? params.screenCornerRadius : avatarCornerRadius, transition: transition)
+
+ transition.setPosition(view: self.overlayContentsVideoContainerBackgroundView, position: avatarFrame.center)
+ transition.setBounds(view: self.overlayContentsVideoContainerBackgroundView, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
+ transition.setAlpha(view: self.overlayContentsVideoContainerBackgroundView, alpha: havePrimaryVideo ? 1.0 : 0.0)
+ self.overlayContentsVideoContainerBackgroundView.update(cornerRadius: havePrimaryVideo ? params.screenCornerRadius : avatarCornerRadius, transition: transition)
let blobFrame = CGRect(origin: CGPoint(x: floor(avatarFrame.midX - blobSize * 0.5), y: floor(avatarFrame.midY - blobSize * 0.5)), size: CGSize(width: blobSize, height: blobSize))
- transition.setPosition(layer: self.blobLayer, position: CGPoint(x: blobFrame.midX, y: blobFrame.midY))
+ transition.setPosition(layer: self.blobTransformLayer, position: CGPoint(x: blobFrame.midX, y: blobFrame.midY))
+ transition.setBounds(layer: self.blobTransformLayer, bounds: CGRect(origin: CGPoint(), size: blobFrame.size))
+ transition.setPosition(layer: self.blobLayer, position: CGPoint(x: blobFrame.width * 0.5, y: blobFrame.height * 0.5))
transition.setBounds(layer: self.blobLayer, bounds: CGRect(origin: CGPoint(), size: blobFrame.size))
let titleString: String
switch params.state.lifecycleState {
case .terminated:
+ self.titleView.contentMode = .center
titleString = "Call Ended"
- if !transition.animation.isImmediate {
- transition.withAnimation(.curve(duration: 0.3, curve: .easeInOut)).setScale(layer: self.blobLayer, scale: 0.3)
- } else {
- transition.setScale(layer: self.blobLayer, scale: 0.3)
- }
- transition.setAlpha(layer: self.blobLayer, alpha: 0.0)
+ genericAlphaTransition.setScale(layer: self.blobLayer, scale: 0.3)
+ genericAlphaTransition.setAlpha(layer: self.blobLayer, alpha: 0.0)
+ self.canAnimateAudioLevel = false
+ self.audioLevel = 0.0
+ self.currentAvatarAudioScale = 1.0
+ transition.setScale(layer: self.avatarTransformLayer, scale: 1.0)
+ transition.setScale(layer: self.blobTransformLayer, scale: 1.0)
default:
+ self.titleView.contentMode = .scaleToFill
titleString = params.state.name
- transition.setAlpha(layer: self.blobLayer, alpha: 1.0)
+ genericAlphaTransition.setAlpha(layer: self.blobLayer, alpha: (expandedEmojiKeyOverlapsAvatar && !havePrimaryVideo) ? 0.0 : 1.0)
+ transition.setScale(layer: self.blobLayer, scale: expandedEmojiKeyOverlapsAvatar ? 0.001 : 1.0)
}
let titleSize = self.titleView.update(
@@ -606,14 +1043,6 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
constrainedWidth: params.size.width - 16.0 * 2.0,
transition: transition
)
- let titleFrame = CGRect(
- origin: CGPoint(
- x: (params.size.width - titleSize.width) * 0.5,
- y: !havePrimaryVideo ? collapsedAvatarFrame.maxY + 39.0 : params.insets.top + 17.0
- ),
- size: titleSize
- )
- transition.setFrame(view: self.titleView, frame: titleFrame)
let statusState: StatusView.State
switch params.state.lifecycleState {
@@ -661,6 +1090,25 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
}
let statusSize = self.statusView.update(state: statusState, transition: .immediate)
+
+ let titleY: CGFloat
+ if currentAreControlsHidden {
+ titleY = -8.0 - titleSize.height - statusSize.height
+ } else if havePrimaryVideo {
+ titleY = params.insets.top + 2.0
+ } else {
+ titleY = collapsedAvatarFrame.maxY + 39.0
+ }
+ let titleFrame = CGRect(
+ origin: CGPoint(
+ x: (params.size.width - titleSize.width) * 0.5,
+ y: titleY
+ ),
+ size: titleSize
+ )
+ transition.setFrame(view: self.titleView, frame: titleFrame)
+ genericAlphaTransition.setAlpha(view: self.titleView, alpha: currentAreControlsHidden ? 0.0 : 1.0)
+
let statusFrame = CGRect(
origin: CGPoint(
x: (params.size.width - statusSize.width) * 0.5,
@@ -678,9 +1126,10 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
}
} else {
transition.setFrame(view: self.statusView, frame: statusFrame)
+ genericAlphaTransition.setAlpha(view: self.statusView, alpha: currentAreControlsHidden ? 0.0 : 1.0)
}
- if case let .active(activeState) = params.state.lifecycleState, activeState.signalInfo.quality <= 0.2 {
+ if case let .active(activeState) = params.state.lifecycleState, activeState.signalInfo.quality <= 0.2, !self.isEmojiKeyExpanded, (!self.displayEmojiTooltip || !havePrimaryVideo) {
let weakSignalView: WeakSignalView
if let current = self.weakSignalView {
weakSignalView = current
@@ -690,7 +1139,13 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
self.addSubview(weakSignalView)
}
let weakSignalSize = weakSignalView.update(constrainedSize: CGSize(width: params.size.width - 32.0, height: 100.0))
- let weakSignalFrame = CGRect(origin: CGPoint(x: floor((params.size.width - weakSignalSize.width) * 0.5), y: statusFrame.maxY + (havePrimaryVideo ? 12.0 : 12.0)), size: weakSignalSize)
+ let weakSignalY: CGFloat
+ if currentAreControlsHidden {
+ weakSignalY = params.insets.top + 2.0
+ } else {
+ weakSignalY = statusFrame.maxY + (havePrimaryVideo ? 12.0 : 12.0)
+ }
+ let weakSignalFrame = CGRect(origin: CGPoint(x: floor((params.size.width - weakSignalSize.width) * 0.5), y: weakSignalY), size: weakSignalSize)
if weakSignalView.bounds.isEmpty {
weakSignalView.frame = weakSignalFrame
if !transition.animation.isImmediate {
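
Earlier in this file, the emoji key expansion animates along a parabolic arc via generateParabollicMotionKeyframes, feeding the sampled points to animateKeyframes(values:duration:keyPath:). Purely as an illustration of the idea (this is not the actual implementation, and it ignores the curve/reverse parameters):

import UIKit

// Illustrative sampling of a parabolic path between two points; positive
// elevation lifts the midpoint of the arc by that many points.
func sampleParabolicArc(from start: CGPoint, to end: CGPoint, elevation: CGFloat, steps: Int = 10) -> [CGPoint] {
    var points: [CGPoint] = []
    for i in 0 ... steps {
        let t = CGFloat(i) / CGFloat(steps)
        let x = start.x + (end.x - start.x) * t
        let y = start.y + (end.y - start.y) * t - elevation * 4.0 * t * (1.0 - t)
        points.append(CGPoint(x: x, y: y))
    }
    return points
}
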
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift
index cdec27a8cce..6c68b856104 100644
--- a/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift
@@ -956,9 +956,9 @@ final class CaptureControlsComponent: Component {
} else {
galleryButtonFrame = CGRect(origin: CGPoint(x: buttonSideInset, y: floorToScreenPixels((size.height - galleryButtonSize.height) / 2.0)), size: galleryButtonSize)
}
- if let galleryButtonView = self.galleryButtonView.view {
- galleryButtonView.clipsToBounds = true
- galleryButtonView.layer.cornerRadius = galleryCornerRadius
+ if let galleryButtonView = self.galleryButtonView.view as? CameraButton.View {
+ galleryButtonView.contentView.clipsToBounds = true
+ galleryButtonView.contentView.layer.cornerRadius = galleryCornerRadius
if galleryButtonView.superview == nil {
self.addSubview(galleryButtonView)
}
diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageAttachedContentButtonNode/Sources/ChatMessageAttachedContentButtonNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageAttachedContentButtonNode/Sources/ChatMessageAttachedContentButtonNode.swift
index ecb14a0075f..ae5c65eeda7 100644
--- a/submodules/TelegramUI/Components/Chat/ChatMessageAttachedContentButtonNode/Sources/ChatMessageAttachedContentButtonNode.swift
+++ b/submodules/TelegramUI/Components/Chat/ChatMessageAttachedContentButtonNode/Sources/ChatMessageAttachedContentButtonNode.swift
@@ -78,7 +78,7 @@ public final class ChatMessageAttachedContentButtonNode: HighlightTrackingButton
return
}
self.shimmerEffectNode = nil
- shimmerEffectNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, completion: { [weak shimmerEffectNode] _ in
+ shimmerEffectNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak shimmerEffectNode] _ in
shimmerEffectNode?.removeFromSupernode()
})
}
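
Without removeOnCompletion: false, the fade-out animation is stripped as soon as it finishes and the shimmer layer can flash back to full opacity for a frame before removeFromSupernode in the completion takes effect. In raw Core Animation terms the fix corresponds to (illustrative):

import QuartzCore

// Illustrative: keep the final animated opacity on screen until the layer is actually removed.
func fadeOutKeepingFinalValue(_ layer: CALayer, duration: CFTimeInterval = 0.2) {
    let fadeOut = CABasicAnimation(keyPath: "opacity")
    fadeOut.fromValue = 1.0
    fadeOut.toValue = 0.0
    fadeOut.duration = duration
    fadeOut.isRemovedOnCompletion = false
    fadeOut.fillMode = .forwards
    layer.add(fadeOut, forKey: "fadeOut")
}
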
diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift
index 9f949769df1..001fd935a91 100644
--- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift
+++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift
@@ -617,8 +617,11 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
}
if let updatedAudioTranscriptionState = updatedAudioTranscriptionState {
+ let previous = strongSelf.audioTranscriptionState
strongSelf.audioTranscriptionState = updatedAudioTranscriptionState
- strongSelf.updateTranscriptionExpanded?(strongSelf.audioTranscriptionState)
+ if previous != updatedAudioTranscriptionState {
+ strongSelf.updateTranscriptionExpanded?(strongSelf.audioTranscriptionState)
+ }
}
if let updatedTranscriptionText = updatedTranscriptionText {
strongSelf.audioTranscriptionText = updatedTranscriptionText
diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageWallpaperBubbleContentNode/BUILD b/submodules/TelegramUI/Components/Chat/ChatMessageWallpaperBubbleContentNode/BUILD
index be2f71996d4..98d37bb5828 100644
--- a/submodules/TelegramUI/Components/Chat/ChatMessageWallpaperBubbleContentNode/BUILD
+++ b/submodules/TelegramUI/Components/Chat/ChatMessageWallpaperBubbleContentNode/BUILD
@@ -27,6 +27,7 @@ swift_library(
"//submodules/Markdown",
"//submodules/RadialStatusNode",
"//submodules/ComponentFlow",
+ "//submodules/PresentationDataUtils",
"//submodules/TelegramUI/Components/AudioTranscriptionPendingIndicatorComponent",
"//submodules/TelegramUI/Components/Chat/ChatMessageBubbleContentNode",
"//submodules/TelegramUI/Components/Chat/ChatMessageItemCommon",
diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageWallpaperBubbleContentNode/Sources/ChatMessageWallpaperBubbleContentNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageWallpaperBubbleContentNode/Sources/ChatMessageWallpaperBubbleContentNode.swift
index 792c9bbb94d..dc835683d28 100644
--- a/submodules/TelegramUI/Components/Chat/ChatMessageWallpaperBubbleContentNode/Sources/ChatMessageWallpaperBubbleContentNode.swift
+++ b/submodules/TelegramUI/Components/Chat/ChatMessageWallpaperBubbleContentNode/Sources/ChatMessageWallpaperBubbleContentNode.swift
@@ -22,6 +22,7 @@ import ChatMessageBubbleContentNode
import ChatMessageItemCommon
import WallpaperPreviewMedia
import ChatControllerInteraction
+import PresentationDataUtils
public class ChatMessageWallpaperBubbleContentNode: ChatMessageBubbleContentNode {
private var mediaBackgroundContent: WallpaperBubbleBackgroundNode?
@@ -193,7 +194,15 @@ public class ChatMessageWallpaperBubbleContentNode: ChatMessageBubbleContentNode
}
if canRemove {
- let _ = item.context.engine.themes.revertChatWallpaper(peerId: item.message.id.peerId).startStandalone()
+ let controller = textAlertController(context: item.context, title: item.presentationData.strings.Chat_RemoveWallpaper_Title, text: item.presentationData.strings.Chat_RemoveWallpaper_Text, actions: [
+ TextAlertAction(type: .genericAction, title: item.presentationData.strings.Common_Cancel, action: {}),
+ TextAlertAction(type: .destructiveAction, title: item.presentationData.strings.Chat_RemoveWallpaper_Remove, action: { [weak item] in
+ if let item {
+ let _ = item.context.engine.themes.revertChatWallpaper(peerId: item.message.id.peerId).startStandalone()
+ }
+ })
+ ])
+ item.controllerInteraction.presentController(controller, nil)
} else {
let _ = item.controllerInteraction.openMessage(item.message, OpenMessageParams(mode: .default))
}
@@ -291,7 +300,7 @@ public class ChatMessageWallpaperBubbleContentNode: ChatMessageBubbleContentNode
let buttonText: String
if let wallpaper, forBoth && item.presentationData.theme.wallpaper.isBasicallyEqual(to: wallpaper) {
- buttonText = "Remove"
+ buttonText = item.presentationData.strings.Notification_Wallpaper_Remove
} else {
buttonText = item.presentationData.strings.Notification_Wallpaper_View
}
diff --git a/submodules/TelegramUI/Components/DustEffect/Sources/DustEffectLayer.swift b/submodules/TelegramUI/Components/DustEffect/Sources/DustEffectLayer.swift
index c907e93c699..3cc57e5adb7 100644
--- a/submodules/TelegramUI/Components/DustEffect/Sources/DustEffectLayer.swift
+++ b/submodules/TelegramUI/Components/DustEffect/Sources/DustEffectLayer.swift
@@ -110,6 +110,8 @@ public final class DustEffectLayer: MetalEngineSubjectLayer, MetalEngineSubject
private var items: [Item] = []
private var lastTimeStep: Double = 0.0
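+ // Speed multiplier for the dust particle animation; 1.0 keeps the previous fixed rate.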
+ public var animationSpeed: Float = 1.0
+
public var becameEmpty: (() -> Void)?
override public init() {
@@ -164,7 +166,7 @@ public final class DustEffectLayer: MetalEngineSubjectLayer, MetalEngineSubject
var didRemoveItems = false
for i in (0 ..< self.items.count).reversed() {
- self.items[i].phase += Float(deltaTimeValue) / Float(UIView.animationDurationFactor())
+ self.items[i].phase += Float(deltaTimeValue) * self.animationSpeed / Float(UIView.animationDurationFactor())
if self.items[i].phase >= 4.0 {
self.items.remove(at: i)
diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/Drawing/DrawingTextEntity.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/Drawing/DrawingTextEntity.swift
index 9eac4874eff..02407f5e43a 100644
--- a/submodules/TelegramUI/Components/MediaEditor/Sources/Drawing/DrawingTextEntity.swift
+++ b/submodules/TelegramUI/Components/MediaEditor/Sources/Drawing/DrawingTextEntity.swift
@@ -96,6 +96,10 @@ public final class DrawingTextEntity: DrawingEntity, Codable {
return isAnimated
}
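+ // Custom attributed-string key used to carry a per-range text color.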
+ public struct TextAttributes {
+ public static let color = NSAttributedString.Key(rawValue: "Attribute__Color")
+ }
+
public var text: NSAttributedString
public var style: Style
public var animation: Animation
@@ -307,3 +311,33 @@ public final class DrawingTextEntity: DrawingEntity, Codable {
return true
}
}
+
+public extension DrawingTextEntity {
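+ // An empty range recolors the whole entity and clears any per-range color attribute;
+ // a non-empty range stores the color as a custom attribute on that range only.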
+ func setColor(_ color: DrawingColor, range: NSRange) {
+ if range.length == 0 {
+ self.color = color
+
+ let updatedText = self.text.mutableCopy() as! NSMutableAttributedString
+ let range = NSMakeRange(0, updatedText.length)
+ updatedText.removeAttribute(DrawingTextEntity.TextAttributes.color, range: range)
+ self.text = updatedText
+ } else {
+ let updatedText = self.text.mutableCopy() as! NSMutableAttributedString
+ updatedText.removeAttribute(DrawingTextEntity.TextAttributes.color, range: range)
+ updatedText.addAttribute(DrawingTextEntity.TextAttributes.color, value: color.toUIColor(), range: range)
+ self.text = updatedText
+ }
+ }
+
+ func color(in range: NSRange) -> DrawingColor {
+ if range.length == 0 {
+ return self.color
+ } else {
+ if let color = self.text.attribute(DrawingTextEntity.TextAttributes.color, at: range.location, effectiveRange: nil) as? UIColor {
+ return DrawingColor(color: color)
+ } else {
+ return self.color
+ }
+ }
+ }
+}
diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/BUILD b/submodules/TelegramUI/Components/MediaEditorScreen/BUILD
index 1753e68c678..144d6234e59 100644
--- a/submodules/TelegramUI/Components/MediaEditorScreen/BUILD
+++ b/submodules/TelegramUI/Components/MediaEditorScreen/BUILD
@@ -47,7 +47,8 @@ swift_library(
"//submodules/TelegramUI/Components/AudioWaveformComponent",
"//submodules/ReactionSelectionNode",
"//submodules/TelegramUI/Components/VolumeSliderContextItem",
- "//submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent"
+ "//submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent",
+ "//submodules/TelegramUI/Components/ContextReferenceButtonComponent",
],
visibility = [
"//visibility:public",
diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/FlipButtonContentComponent.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/FlipButtonContentComponent.swift
index ea71fcc02d3..ae34bafa026 100644
--- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/FlipButtonContentComponent.swift
+++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/FlipButtonContentComponent.swift
@@ -4,16 +4,29 @@ import Display
import ComponentFlow
final class FlipButtonContentComponent: Component {
- init() {
-
+ let tag: AnyObject?
+
+ init(
+ tag: AnyObject?
+ ) {
+ self.tag = tag
}
static func ==(lhs: FlipButtonContentComponent, rhs: FlipButtonContentComponent) -> Bool {
return lhs === rhs
}
- final class View: UIView {
+ final class View: UIView, ComponentTaggedView {
private var component: FlipButtonContentComponent?
+ public func matches(tag: Any) -> Bool {
+ if let component = self.component, let componentTag = component.tag {
+ let tag = tag as AnyObject
+ if componentTag === tag {
+ return true
+ }
+ }
+ return false
+ }
private let backgroundView: BlurredBackgroundView
private let icon = SimpleLayer()
diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorRecording.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorRecording.swift
index d2d30465c17..19e5a014241 100644
--- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorRecording.swift
+++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorRecording.swift
@@ -61,9 +61,24 @@ extension MediaEditorScreen {
}
func requestDeviceAccess() {
- DeviceAccess.authorizeAccess(to: .camera(.video), { granted in
+ guard let controller = self.controller else {
+ return
+ }
+ let context = controller.context
+ let presentationData = context.sharedContext.currentPresentationData.with { $0 }.withUpdated(theme: defaultDarkColorPresentationTheme)
+ DeviceAccess.authorizeAccess(to: .camera(.video), presentationData: presentationData, present: { c, a in
+ c.presentationArguments = a
+ context.sharedContext.mainWindow?.present(c, on: .root)
+ }, openSettings: {
+ context.sharedContext.applicationBindings.openSettings()
+ }, { granted in
if granted {
- DeviceAccess.authorizeAccess(to: .microphone(.video))
+ DeviceAccess.authorizeAccess(to: .microphone(.video), presentationData: presentationData, present: { c, a in
+ c.presentationArguments = a
+ context.sharedContext.mainWindow?.present(c, on: .root)
+ }, openSettings: {
+ context.sharedContext.applicationBindings.openSettings()
+ })
}
})
}
@@ -119,6 +134,11 @@ extension MediaEditorScreen {
self.cameraIsActive = false
} else {
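+ // Make sure both camera and microphone access were granted before starting a recording.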
+ if self.cameraAuthorizationStatus != .allowed || self.microphoneAuthorizationStatus != .allowed {
+ self.requestDeviceAccess()
+ return
+ }
+
guard self.tooltipController == nil, let sourceView else {
return
}
diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift
index 762e1f8f6bd..6d47bfe1f42 100644
--- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift
+++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift
@@ -38,10 +38,13 @@ import ReactionSelectionNode
import VolumeSliderContextItem
import TelegramStringFormatting
import ForwardInfoPanelComponent
+import ContextReferenceButtonComponent
private let playbackButtonTag = GenericComponentViewTag()
private let muteButtonTag = GenericComponentViewTag()
private let saveButtonTag = GenericComponentViewTag()
+private let switchCameraButtonTag = GenericComponentViewTag()
+private let stickerButtonTag = GenericComponentViewTag()
final class MediaEditorScreenComponent: Component {
typealias EnvironmentType = ViewControllerComponentContainer.Environment
@@ -869,19 +872,25 @@ final class MediaEditorScreenComponent: Component {
let stickerButtonSize = self.stickerButton.update(
transition: transition,
- component: AnyComponent(Button(
+ component: AnyComponent(ContextReferenceButtonComponent(
content: AnyComponent(Image(
image: state.image(.sticker),
size: CGSize(width: 30.0, height: 30.0)
)),
- action: { [weak self] in
+ tag: stickerButtonTag,
+ minSize: CGSize(width: 30.0, height: 30.0),
+ action: { [weak self] view, gesture in
guard let environment = self?.environment, let controller = environment.controller() as? MediaEditorScreen else {
return
}
guard !controller.node.recording.isActive else {
return
}
- openDrawing(.sticker)
+ if let gesture {
+ controller.presentEntityShortcuts(sourceView: view, gesture: gesture)
+ } else {
+ openDrawing(.sticker)
+ }
}
)),
environment: {},
@@ -1168,17 +1177,7 @@ final class MediaEditorScreenComponent: Component {
guard let controller else {
return
}
- let context = controller.context
- let _ = (context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: context.account.peerId))
- |> deliverOnMainQueue).start(next: { [weak controller] peer in
- let hasPremium: Bool
- if case let .user(user) = peer {
- hasPremium = user.isPremium
- } else {
- hasPremium = false
- }
- controller?.presentTimeoutSetup(sourceView: view, gesture: gesture, hasPremium: hasPremium)
- })
+ controller.presentTimeoutSetup(sourceView: view, gesture: gesture)
},
forwardAction: nil,
moreAction: nil,
@@ -1736,11 +1735,15 @@ final class MediaEditorScreenComponent: Component {
transition: transition,
component: AnyComponent(Button(
content: AnyComponent(
- FlipButtonContentComponent()
+ FlipButtonContentComponent(tag: switchCameraButtonTag)
),
action: { [weak self] in
if let self, let environment = self.environment, let controller = environment.controller() as? MediaEditorScreen {
controller.node.recording.togglePosition()
+
+ if let view = self.switchCameraButton.findTaggedView(tag: switchCameraButtonTag) as? FlipButtonContentComponent.View {
+ view.playAnimation()
+ }
}
}
).withIsExclusive(false)),
@@ -1963,6 +1966,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
private var availableReactions: [ReactionItem] = []
private var availableReactionsDisposable: Disposable?
+ private var panGestureRecognizer: UIPanGestureRecognizer?
private var dismissPanGestureRecognizer: UIPanGestureRecognizer?
private var isDisplayingTool = false
@@ -2368,7 +2372,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
panGestureRecognizer.delegate = self
panGestureRecognizer.minimumNumberOfTouches = 1
panGestureRecognizer.maximumNumberOfTouches = 2
- self.previewContainerView.addGestureRecognizer(panGestureRecognizer)
+ self.view.addGestureRecognizer(panGestureRecognizer)
+ self.panGestureRecognizer = panGestureRecognizer
let pinchGestureRecognizer = UIPinchGestureRecognizer(target: self, action: #selector(self.handlePinch(_:)))
pinchGestureRecognizer.delegate = self
@@ -2406,8 +2411,14 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
},
updateColor: { [weak self] color in
if let self, let selectedEntityView = self.entitiesView.selectedEntityView {
- selectedEntityView.entity.color = color
- selectedEntityView.update(animated: false)
+ let selectedEntity = selectedEntityView.entity
+ if let textEntity = selectedEntity as? DrawingTextEntity, let textEntityView = selectedEntityView as? DrawingTextEntityView, textEntityView.isEditing {
+ textEntity.setColor(color, range: textEntityView.selectedRange)
+ textEntityView.update(animated: false, keepSelectedRange: true)
+ } else {
+ selectedEntity.color = color
+ selectedEntityView.update(animated: false)
+ }
}
},
onInteractionUpdated: { [weak self] isInteracting in
@@ -2518,6 +2529,17 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
return false
}
return true
+ } else if gestureRecognizer === self.panGestureRecognizer {
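+ // Don't begin the pan over the bottom-right controls or inside an open reaction picker.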
+ let location = gestureRecognizer.location(in: self.view)
+ if location.x > self.view.frame.width - 44.0 && location.y > self.view.frame.height - 180.0 {
+ return false
+ }
+ if let reactionNode = self.view.subviews.last?.asyncdisplaykit_node as? ReactionContextNode {
+ if let hitTestResult = self.view.hitTest(location, with: nil), hitTestResult.isDescendant(of: reactionNode.view) {
+ return false
+ }
+ }
+ return true
} else {
return true
}
@@ -3495,6 +3517,33 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.controller?.present(contextController, in: .window(.root))
}
+ func addReaction() {
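+ // Inserts a default heart reaction sticker, respecting the suggested-reaction limit.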
+ guard let controller = self.controller else {
+ return
+ }
+ let maxReactionCount = self.context.userLimits.maxStoriesSuggestedReactions
+ var currentReactionCount = 0
+ self.entitiesView.eachView { entityView in
+ if let stickerEntity = entityView.entity as? DrawingStickerEntity, case let .file(_, type) = stickerEntity.content, case .reaction = type {
+ currentReactionCount += 1
+ }
+ }
+ if currentReactionCount >= maxReactionCount {
+ controller.presentReactionPremiumSuggestion()
+ return
+ }
+
+ let heart = "❤️".strippedEmoji
+ if let reaction = self.availableReactions.first(where: { reaction in
+ return reaction.reaction.rawValue == .builtin(heart)
+ }) {
+ let stickerEntity = DrawingStickerEntity(content: .file(reaction.stillAnimation, .reaction(.builtin(heart), .white)))
+ self.interaction?.insertEntity(stickerEntity, scale: 1.175)
+ }
+
+ self.mediaEditor?.play()
+ }
+
func updateModalTransitionFactor(_ value: CGFloat, transition: ContainedViewLayoutTransition) {
guard let layout = self.validLayout, case .compact = layout.metrics.widthClass else {
return
@@ -3684,30 +3733,10 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
controller.addReaction = { [weak self, weak controller] in
if let self {
- let maxReactionCount = self.context.userLimits.maxStoriesSuggestedReactions
- var currentReactionCount = 0
- self.entitiesView.eachView { entityView in
- if let stickerEntity = entityView.entity as? DrawingStickerEntity, case let .file(_, type) = stickerEntity.content, case .reaction = type {
- currentReactionCount += 1
- }
- }
- if currentReactionCount >= maxReactionCount {
- self.controller?.presentReactionPremiumSuggestion()
- return
- }
+ self.addReaction()
self.stickerScreen = nil
controller?.dismiss(animated: true)
-
- let heart = "❤️".strippedEmoji
- if let reaction = self.availableReactions.first(where: { reaction in
- return reaction.reaction.rawValue == .builtin(heart)
- }) {
- let stickerEntity = DrawingStickerEntity(content: .file(reaction.stillAnimation, .reaction(.builtin(heart), .white)))
- self.interaction?.insertEntity(stickerEntity, scale: 1.175)
- }
-
- self.mediaEditor?.play()
}
}
controller.pushController = { [weak self] c in
@@ -4382,11 +4411,55 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
})
}
- func presentTimeoutSetup(sourceView: UIView, gesture: ContextGesture?, hasPremium: Bool) {
+ func presentEntityShortcuts(sourceView: UIView, gesture: ContextGesture) {
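+ // Quick shortcuts for adding an image, location, reaction or audio entity.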
self.hapticFeedback.impact(.light)
+ let presentationData = self.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme)
+
var items: [ContextMenuItem] = []
-
+
+ items.append(.action(ContextMenuActionItem(text: presentationData.strings.MediaEditor_Shortcut_Image, icon: { theme in
+ return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Image"), color: theme.contextMenu.primaryColor)
+ }, action: { [weak self] _, a in
+ a(.default)
+
+ self?.node.presentGallery()
+ })))
+ items.append(.action(ContextMenuActionItem(text: presentationData.strings.MediaEditor_Shortcut_Location, icon: { theme in
+ return generateTintedImage(image: UIImage(bundleImageName: "Media Editor/LocationSmall"), color: theme.contextMenu.primaryColor)
+ }, action: { [weak self] _, a in
+ a(.default)
+
+ self?.node.presentLocationPicker()
+ })))
+ items.append(.action(ContextMenuActionItem(text: presentationData.strings.MediaEditor_Shortcut_Reaction, icon: { theme in
+ return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Reactions"), color: theme.contextMenu.primaryColor)
+ }, action: { [weak self] _, a in
+ a(.default)
+
+ self?.node.addReaction()
+ })))
+ items.append(.action(ContextMenuActionItem(text: presentationData.strings.MediaEditor_Shortcut_Audio, icon: { theme in
+ return generateTintedImage(image: UIImage(bundleImageName: "Media Editor/AudioSmall"), color: theme.contextMenu.primaryColor)
+ }, action: { [weak self] _, a in
+ a(.default)
+
+ self?.node.presentAudioPicker()
+ })))
+
+ let contextController = ContextController(presentationData: presentationData, source: .reference(HeaderContextReferenceContentSource(controller: self, sourceView: sourceView)), items: .single(ContextController.Items(content: .list(items))), gesture: gesture)
+ self.present(contextController, in: .window(.root))
+ }
+
+ func presentTimeoutSetup(sourceView: UIView, gesture: ContextGesture?) {
+ self.hapticFeedback.impact(.light)
+
+ let hasPremium = self.context.isPremium
+ let presentationData = self.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme)
+ let title = presentationData.strings.Story_Editor_ExpirationText
+ let currentValue = self.state.privacy.timeout
+ let emptyAction: ((ContextMenuActionItem.Action) -> Void)? = nil
+
let updateTimeout: (Int?) -> Void = { [weak self] timeout in
guard let self else {
return
@@ -4399,12 +4472,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
pin: self.state.privacy.pin
)
}
-
- let presentationData = self.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme)
- let title = presentationData.strings.Story_Editor_ExpirationText
- let currentValue = self.state.privacy.timeout
- let emptyAction: ((ContextMenuActionItem.Action) -> Void)? = nil
+ var items: [ContextMenuItem] = []
items.append(.action(ContextMenuActionItem(text: title, textLayout: .multiline, textFont: .small, icon: { _ in return nil }, action: emptyAction)))
items.append(.action(ContextMenuActionItem(text: presentationData.strings.Story_Editor_ExpirationValue(6), icon: { theme in
diff --git a/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift b/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift
index 9698490cdf1..962f2aca1cc 100644
--- a/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift
+++ b/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift
@@ -327,68 +327,73 @@ public final class MessageInputActionButtonComponent: Component {
self.containerNode.isUserInteractionEnabled = component.longPressAction != nil
if self.micButton == nil {
- let micButton = ChatTextInputMediaRecordingButton(
- context: component.context,
- theme: defaultDarkPresentationTheme,
- useDarkTheme: true,
- strings: component.strings,
- presentController: component.presentController
- )
- self.micButton = micButton
- micButton.statusBarHost = component.context.sharedContext.mainWindow?.statusBarHost
- self.addSubview(micButton)
-
- micButton.disablesInteractiveKeyboardGestureRecognizer = true
-
- micButton.beginRecording = { [weak self] in
- guard let self, let component = self.component else {
- return
- }
- switch component.mode {
- case .voiceInput, .videoInput:
- component.action(component.mode, .down, false)
- default:
- break
- }
- }
- micButton.stopRecording = { [weak self] in
- guard let self, let component = self.component else {
- return
- }
- component.stopAndPreviewMediaRecording()
- }
- micButton.endRecording = { [weak self] sendMedia in
- guard let self, let component = self.component else {
- return
+ switch component.mode {
+ case .videoInput, .voiceInput, .unavailableVoiceInput, .send:
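+ // Only create the media recording button for modes that can actually use it.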
+ let micButton = ChatTextInputMediaRecordingButton(
+ context: component.context,
+ theme: defaultDarkPresentationTheme,
+ useDarkTheme: true,
+ strings: component.strings,
+ presentController: component.presentController
+ )
+ self.micButton = micButton
+ micButton.statusBarHost = component.context.sharedContext.mainWindow?.statusBarHost
+ self.addSubview(micButton)
+
+ micButton.disablesInteractiveKeyboardGestureRecognizer = true
+
+ micButton.beginRecording = { [weak self] in
+ guard let self, let component = self.component else {
+ return
+ }
+ switch component.mode {
+ case .voiceInput, .videoInput:
+ component.action(component.mode, .down, false)
+ default:
+ break
+ }
}
- switch component.mode {
- case .voiceInput, .videoInput:
- component.action(component.mode, .up, sendMedia)
- default:
- break
+ micButton.stopRecording = { [weak self] in
+ guard let self, let component = self.component else {
+ return
+ }
+ component.stopAndPreviewMediaRecording()
}
- }
- micButton.updateLocked = { [weak self] _ in
- guard let self, let component = self.component else {
- return
+ micButton.endRecording = { [weak self] sendMedia in
+ guard let self, let component = self.component else {
+ return
+ }
+ switch component.mode {
+ case .voiceInput, .videoInput:
+ component.action(component.mode, .up, sendMedia)
+ default:
+ break
+ }
}
- component.lockMediaRecording()
- }
- micButton.switchMode = { [weak self] in
- guard let self, let component = self.component else {
- return
+ micButton.updateLocked = { [weak self] _ in
+ guard let self, let component = self.component else {
+ return
+ }
+ component.lockMediaRecording()
}
- if case .unavailableVoiceInput = component.mode {
- component.action(component.mode, .up, false)
- } else {
- component.switchMediaInputMode()
+ micButton.switchMode = { [weak self] in
+ guard let self, let component = self.component else {
+ return
+ }
+ if case .unavailableVoiceInput = component.mode {
+ component.action(component.mode, .up, false)
+ } else {
+ component.switchMediaInputMode()
+ }
}
- }
- micButton.updateCancelTranslation = { [weak self] in
- guard let self, let micButton = self.micButton, let component = self.component else {
- return
+ micButton.updateCancelTranslation = { [weak self] in
+ guard let self, let micButton = self.micButton, let component = self.component else {
+ return
+ }
+ component.updateMediaCancelFraction(micButton.cancelTranslation)
}
- component.updateMediaCancelFraction(micButton.cancelTranslation)
+ default:
+ break
}
}
@@ -636,6 +641,19 @@ public final class MessageInputActionButtonComponent: Component {
return availableSize
}
+
+ public override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
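+ // Treat touches on any of the inner button's subviews as hits on the button itself.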
+ var result = super.hitTest(point, with: event)
+ if result == nil, !self.isHidden && self.alpha > 0.0 {
+ for view in self.button.view.subviews {
+ if view.point(inside: self.convert(point, to: view), with: event) {
+ result = self.button.view
+ break
+ }
+ }
+ }
+ return result
+ }
}
public func makeView() -> View {
diff --git a/submodules/TelegramUI/Components/PeerAllowedReactionsScreen/Sources/PeerAllowedReactionsScreen.swift b/submodules/TelegramUI/Components/PeerAllowedReactionsScreen/Sources/PeerAllowedReactionsScreen.swift
index 08dfdab46a6..a3c415cb829 100644
--- a/submodules/TelegramUI/Components/PeerAllowedReactionsScreen/Sources/PeerAllowedReactionsScreen.swift
+++ b/submodules/TelegramUI/Components/PeerAllowedReactionsScreen/Sources/PeerAllowedReactionsScreen.swift
@@ -513,7 +513,8 @@ final class PeerAllowedReactionsScreenComponent: Component {
animateAsReplacement = true
}
- let undoController = UndoOverlayController(presentationData: presentationData, content: .customEmoji(context: component.context, file: itemFile, loop: false, title: nil, text: presentationData.strings.ChannelReactions_ToastLevelBoostRequired("\(nextCustomReactionCount)", "\(nextCustomReactionCount)").string, undoText: nil, customAction: nil), elevatedLayout: false, position: .bottom, animateInAsReplacement: animateAsReplacement, action: { _ in return false })
+ let text = presentationData.strings.ChannelReactions_ToastLevelBoostRequiredTemplate(presentationData.strings.ChannelReactions_ToastLevelBoostRequiredTemplateLevel(Int32(nextCustomReactionCount)), presentationData.strings.ChannelReactions_ToastLevelBoostRequiredTemplateEmojiCount(Int32(nextCustomReactionCount))).string
+ let undoController = UndoOverlayController(presentationData: presentationData, content: .customEmoji(context: component.context, file: itemFile, loop: false, title: nil, text: text, undoText: nil, customAction: nil), elevatedLayout: false, position: .bottom, animateInAsReplacement: animateAsReplacement, action: { _ in return false })
self.currentUndoController = undoController
self.environment?.controller()?.present(undoController, in: .current)
}
@@ -724,13 +725,12 @@ final class PeerAllowedReactionsScreenComponent: Component {
self.reactionInput = reactionInput
}
- //TOOD:localize
let reactionInputSize = reactionInput.update(
transition: animateIn ? .immediate : transition,
component: AnyComponent(EmojiListInputComponent(
context: component.context,
theme: environment.theme,
- placeholder: "Add Reactions...",
+ placeholder: environment.strings.ChannelReactions_InputPlaceholder,
reactionItems: enabledReactions,
isInputActive: self.displayInput,
caretPosition: caretPosition,
diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoRecommendedChannelsPane.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoRecommendedChannelsPane.swift
index ce226d65bfe..031406638f9 100644
--- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoRecommendedChannelsPane.swift
+++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoRecommendedChannelsPane.swift
@@ -170,6 +170,12 @@ final class PeerInfoRecommendedChannelsPaneNode: ASDisplayNode, PeerInfoPaneNode
}
return nil
})
+
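+ // Re-layout the unlock panel as the list scrolls so it stays pinned above the bottom inset.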
+ self.listNode.visibleBottomContentOffsetChanged = { [weak self] offset in
+ if let self {
+ self.layoutUnlockPanel(transition: .animated(duration: 0.4, curve: .spring))
+ }
+ }
}
deinit {
@@ -233,18 +239,18 @@ final class PeerInfoRecommendedChannelsPaneNode: ASDisplayNode, PeerInfoPaneNode
}, openPeerContextAction: { [weak self] peer, node, gesture in
self?.openPeerContextAction(true, peer, node, gesture)
})
+
self.currentEntries = entries
self.enqueuedTransactions.append(transaction)
self.dequeueTransaction()
-
- self.layoutUnlockPanel()
}
- private func layoutUnlockPanel() {
+ private func layoutUnlockPanel(transition: ContainedViewLayoutTransition) {
guard let (_, isPremium) = self.currentState, let currentParams = self.currentParams else {
return
}
if !isPremium {
+ var transition = transition
let size = currentParams.size
let sideInset = currentParams.sideInset
let bottomInset = currentParams.bottomInset
@@ -261,6 +267,7 @@ final class PeerInfoRecommendedChannelsPaneNode: ASDisplayNode, PeerInfoPaneNode
} else {
unlockText = ComponentView()
self.unlockText = unlockText
+ transition = .immediate
}
if let current = self.unlockBackground {
@@ -327,14 +334,14 @@ final class PeerInfoRecommendedChannelsPaneNode: ASDisplayNode, PeerInfoPaneNode
view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.unlockPressed)))
self.view.addSubview(view)
}
- view.frame = CGRect(origin: CGPoint(x: floor((size.width - unlockSize.width) / 2.0), y: size.height - bottomInset - unlockSize.height - 13.0 + scrollOffset), size: unlockSize)
+ transition.updateFrame(view: view, frame: CGRect(origin: CGPoint(x: floor((size.width - unlockSize.width) / 2.0), y: size.height - bottomInset - unlockSize.height - 13.0 + scrollOffset), size: unlockSize))
}
- unlockBackground.frame = CGRect(x: 0.0, y: size.height - bottomInset - 170.0 + scrollOffset, width: size.width, height: bottomInset + 170.0)
+ transition.updateFrame(view: unlockBackground, frame: CGRect(x: 0.0, y: size.height - bottomInset - 170.0 + scrollOffset, width: size.width, height: bottomInset + 170.0))
let buttonSideInset = sideInset + 16.0
let buttonSize = CGSize(width: size.width - buttonSideInset * 2.0, height: 50.0)
- unlockButton.frame = CGRect(origin: CGPoint(x: buttonSideInset, y: size.height - bottomInset - unlockSize.height - buttonSize.height - 26.0 + scrollOffset), size: buttonSize)
+ transition.updateFrame(node: unlockButton, frame: CGRect(origin: CGPoint(x: buttonSideInset, y: size.height - bottomInset - unlockSize.height - buttonSize.height - 26.0 + scrollOffset), size: buttonSize))
let _ = unlockButton.updateLayout(width: buttonSize.width, transition: .immediate)
} else {
self.unlockBackground?.removeFromSuperview()
diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoHeaderButtonNode.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoHeaderButtonNode.swift
index 38474a74ee6..9c8a96680c6 100644
--- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoHeaderButtonNode.swift
+++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoHeaderButtonNode.swift
@@ -42,7 +42,7 @@ final class PeerInfoHeaderButtonNode: HighlightableButtonNode {
private let action: (PeerInfoHeaderButtonNode, ContextGesture?) -> Void
let referenceNode: ContextReferenceContentNode
let containerNode: ContextControllerSourceNode
- private let backgroundNode: NavigationBackgroundNode
+ //private let backgroundNode: NavigationBackgroundNode
private let contentNode: ASDisplayNode
private let iconNode: ASImageNode
private let textNode: ImmediateTextNode
@@ -52,6 +52,9 @@ final class PeerInfoHeaderButtonNode: HighlightableButtonNode {
private var icon: PeerInfoHeaderButtonIcon?
private var isActive: Bool?
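+ // Plain background views that are added to the header's shared blur mask instead of using a per-button blur node.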
+ let backgroundContainerView: UIView
+ let backgroundView: UIView
+
init(key: PeerInfoHeaderButtonKey, action: @escaping (PeerInfoHeaderButtonNode, ContextGesture?) -> Void) {
self.key = key
self.action = action
@@ -60,8 +63,13 @@ final class PeerInfoHeaderButtonNode: HighlightableButtonNode {
self.containerNode = ContextControllerSourceNode()
self.containerNode.animateScale = false
- self.backgroundNode = NavigationBackgroundNode(color: UIColor(white: 1.0, alpha: 0.2), enableBlur: true, enableSaturation: false)
- self.backgroundNode.isUserInteractionEnabled = false
+ self.backgroundContainerView = UIView()
+ self.backgroundView = UIView()
+ self.backgroundView.backgroundColor = .white
+ self.backgroundContainerView.addSubview(self.backgroundView)
+
+ /*self.backgroundNode = NavigationBackgroundNode(color: UIColor(white: 1.0, alpha: 0.2), enableBlur: true, enableSaturation: false)
+ self.backgroundNode.isUserInteractionEnabled = false*/
self.contentNode = ASDisplayNode()
self.contentNode.isUserInteractionEnabled = false
@@ -80,7 +88,7 @@ final class PeerInfoHeaderButtonNode: HighlightableButtonNode {
self.accessibilityTraits = .button
self.containerNode.addSubnode(self.referenceNode)
- self.referenceNode.addSubnode(self.backgroundNode)
+ //self.referenceNode.addSubnode(self.backgroundNode)
self.referenceNode.addSubnode(self.contentNode)
self.contentNode.addSubnode(self.iconNode)
self.addSubnode(self.containerNode)
@@ -259,12 +267,14 @@ final class PeerInfoHeaderButtonNode: HighlightableButtonNode {
let backgroundY: CGFloat = size.height * (1.0 - fraction)
let backgroundFrame = CGRect(origin: CGPoint(x: 0.0, y: backgroundY), size: CGSize(width: size.width, height: max(0.0, size.height - backgroundY)))
- transition.updateFrame(node: self.backgroundNode, frame: backgroundFrame)
+ //transition.updateFrame(node: self.backgroundNode, frame: backgroundFrame)
+ transition.updateFrame(view: self.backgroundView, frame: backgroundFrame)
transition.updateSublayerTransformScale(node: self.contentNode, scale: 1.0 * fraction + 0.001 * (1.0 - fraction))
- self.backgroundNode.update(size: backgroundFrame.size, cornerRadius: min(11.0, backgroundFrame.height * 0.5), transition: transition)
- self.backgroundNode.updateColor(color: backgroundColor, transition: transition)
+ transition.updateCornerRadius(layer: self.backgroundView.layer, cornerRadius: min(11.0, backgroundFrame.height * 0.5))
+ //self.backgroundNode.update(size: backgroundFrame.size, cornerRadius: min(11.0, backgroundFrame.height * 0.5), transition: transition)
+ //self.backgroundNode.updateColor(color: backgroundColor, transition: transition)
transition.updateFrame(node: self.iconNode, frame: CGRect(origin: CGPoint(x: floor((size.width - iconSize.width) / 2.0), y: 1.0), size: iconSize))
if let animatedIconView = self.animatedIcon?.view {
transition.updateFrame(view: animatedIconView, frame: CGRect(origin: CGPoint(x: floor((size.width - iconSize.width) / 2.0), y: 1.0), size: iconSize))
diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoHeaderNode.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoHeaderNode.swift
index 2483a8bd414..2a9dcd5fe19 100644
--- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoHeaderNode.swift
+++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoHeaderNode.swift
@@ -102,6 +102,8 @@ final class PeerInfoHeaderNode: ASDisplayNode {
let backgroundBannerView: UIView
let backgroundCover = ComponentView()
let buttonsContainerNode: SparseNode
+ let buttonsBackgroundNode: NavigationBackgroundNode
+ let buttonsMaskView: UIView
let regularContentNode: PeerInfoHeaderRegularContentNode
let editingContentNode: PeerInfoHeaderEditingContentNode
let avatarOverlayNode: PeerInfoEditingAvatarOverlayNode
@@ -209,6 +211,11 @@ final class PeerInfoHeaderNode: ASDisplayNode {
self.buttonsContainerNode = SparseNode()
self.buttonsContainerNode.clipsToBounds = true
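+ // A single blurred background shared by all header buttons, shaped by per-button mask views.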
+ self.buttonsBackgroundNode = NavigationBackgroundNode(color: .clear, enableBlur: true, enableSaturation: false)
+ self.buttonsContainerNode.addSubnode(self.buttonsBackgroundNode)
+ self.buttonsMaskView = UIView()
+ self.buttonsBackgroundNode.view.mask = self.buttonsMaskView
+
self.regularContentNode = PeerInfoHeaderRegularContentNode()
var requestUpdateLayoutImpl: (() -> Void)?
self.editingContentNode = PeerInfoHeaderEditingContentNode(context: context, requestUpdateLayout: {
@@ -823,8 +830,7 @@ final class PeerInfoHeaderNode: ASDisplayNode {
if self.isSettings {
expandedAvatarListHeight = expandedAvatarListHeight + 60.0
} else {
- let avatarEnlargementFactor: CGFloat = 1.35
- expandedAvatarListHeight = floor(expandedAvatarListHeight * avatarEnlargementFactor)
+ expandedAvatarListHeight = expandedAvatarListHeight + 98.0
}
let expandedAvatarListSize = CGSize(width: width, height: expandedAvatarListHeight)
@@ -850,7 +856,7 @@ final class PeerInfoHeaderNode: ASDisplayNode {
isFake = peer.isFake || peer.isScam
}
- let titleShadowColor = UIColor(white: 0.0, alpha: 0.1)
+ let titleShadowColor: UIColor? = nil
if let peer = peer {
var title: String
@@ -1120,7 +1126,7 @@ final class PeerInfoHeaderNode: ASDisplayNode {
var bottomShadowHeight: CGFloat = 88.0
if !self.isSettings {
- bottomShadowHeight += 110.0
+ bottomShadowHeight += 100.0
}
let bottomShadowFrame = CGRect(origin: CGPoint(x: 0.0, y: expandedAvatarHeight - bottomShadowHeight), size: CGSize(width: width, height: bottomShadowHeight))
transition.updateFrame(node: self.avatarListNode.listContainerNode.bottomShadowNode, frame: bottomShadowFrame, beginWithCurrentState: true)
@@ -1648,6 +1654,10 @@ final class PeerInfoHeaderNode: ASDisplayNode {
buttonRightOrigin.y += actionButtonSize.height + 24.0
}
+ transition.updateFrameAdditive(node: self.buttonsBackgroundNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonRightOrigin.y), size: CGSize(width: width, height: buttonSize.height)))
+ self.buttonsBackgroundNode.update(size: self.buttonsBackgroundNode.bounds.size, transition: transition)
+ self.buttonsBackgroundNode.updateColor(color: contentButtonBackgroundColor, enableBlur: true, transition: transition)
+
for buttonKey in buttonKeys.reversed() {
let buttonNode: PeerInfoHeaderButtonNode
var wasAdded = false
@@ -1660,6 +1670,7 @@ final class PeerInfoHeaderNode: ASDisplayNode {
})
self.buttonNodes[buttonKey] = buttonNode
self.buttonsContainerNode.addSubnode(buttonNode)
+ self.buttonsMaskView.addSubview(buttonNode.backgroundContainerView)
}
let buttonFrame = CGRect(origin: CGPoint(x: buttonRightOrigin.x - buttonSize.width, y: buttonRightOrigin.y), size: buttonSize)
@@ -1670,6 +1681,8 @@ final class PeerInfoHeaderNode: ASDisplayNode {
} else {
buttonTransition.updateFrame(node: buttonNode, frame: buttonFrame)
}
+ buttonTransition.updateFrame(view: buttonNode.backgroundContainerView, frame: buttonFrame.offsetBy(dx: 0.0, dy: -buttonFrame.minY))
+
let buttonText: String
let buttonIcon: PeerInfoHeaderButtonIcon
switch buttonKey {
@@ -1729,8 +1742,10 @@ final class PeerInfoHeaderNode: ASDisplayNode {
if wasAdded {
buttonNode.alpha = 0.0
+ buttonNode.backgroundContainerView.alpha = 0.0
}
transition.updateAlpha(node: buttonNode, alpha: buttonsTransitionFraction)
+ transition.updateAlpha(layer: buttonNode.backgroundContainerView.layer, alpha: buttonsTransitionFraction)
if case .mute = buttonKey, buttonNode.containerNode.alpha.isZero, additive {
if case let .animated(duration, curve) = transition {
@@ -1748,7 +1763,9 @@ final class PeerInfoHeaderNode: ASDisplayNode {
if !buttonKeys.contains(key) {
if let buttonNode = self.buttonNodes[key] {
self.buttonNodes.removeValue(forKey: key)
+ transition.updateAlpha(layer: buttonNode.backgroundContainerView.layer, alpha: 0.0)
transition.updateAlpha(node: buttonNode, alpha: 0.0) { [weak buttonNode] _ in
+ buttonNode?.backgroundContainerView.removeFromSuperview()
buttonNode?.removeFromSupernode()
}
}
diff --git a/submodules/TelegramUI/Components/ShareWithPeersScreen/Sources/ShareWithPeersScreenState.swift b/submodules/TelegramUI/Components/ShareWithPeersScreen/Sources/ShareWithPeersScreenState.swift
index 241ee1d82d0..89d1bdf7a26 100644
--- a/submodules/TelegramUI/Components/ShareWithPeersScreen/Sources/ShareWithPeersScreenState.swift
+++ b/submodules/TelegramUI/Components/ShareWithPeersScreen/Sources/ShareWithPeersScreenState.swift
@@ -335,6 +335,9 @@ public extension ShareWithPeersScreen {
var peers: [EnginePeer] = []
peers = chatList.items.filter { peer in
if let peer = peer.renderedPeer.peer {
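+ // Secret chats are excluded from the selectable peers.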
+ if case .secretChat = peer {
+ return false
+ }
if self.initialPeerIds.contains(peer.id) {
return false
}
@@ -478,7 +481,9 @@ public extension ShareWithPeersScreen {
let state = State(
peers: peers.compactMap { $0.peer }.filter { peer in
- if case let .user(user) = peer {
+ if case .secretChat = peer {
+ return false
+ } else if case let .user(user) = peer {
if user.id == context.account.peerId {
return false
} else if user.botInfo != nil {
diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift
index 3c1abd9c369..fca016a27d6 100644
--- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift
+++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift
@@ -880,14 +880,26 @@ public final class StoryContentContextImpl: StoryContentContext {
var possibleItems: [(EnginePeer, EngineStoryItem)] = []
var pollItems: [StoryKey] = []
if let slice = currentState.centralPeerContext.sliceValue {
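+ // Only the current user's own stories and channel stories need to be polled.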
+ var shouldPollItem = false
if slice.peer.id == self.context.account.peerId {
+ shouldPollItem = true
+ } else if case .channel = slice.peer {
+ shouldPollItem = true
+ }
+ if shouldPollItem {
pollItems.append(StoryKey(peerId: slice.peer.id, id: slice.item.storyItem.id))
}
for item in currentState.centralPeerContext.nextItems {
possibleItems.append((slice.peer, item))
+ var shouldPollNextItem = false
if slice.peer.id == self.context.account.peerId {
+ shouldPollNextItem = true
+ } else if case .channel = slice.peer {
+ shouldPollNextItem = true
+ }
+ if shouldPollNextItem {
pollItems.append(StoryKey(peerId: slice.peer.id, id: item.id))
}
}
diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift
index 9bcf6235c7a..120b8608f9b 100644
--- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift
+++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift
@@ -715,6 +715,7 @@ public final class StoryItemSetContainerComponent: Component {
self.audioRecorderDisposable?.dispose()
self.audioRecorderStatusDisposable?.dispose()
self.videoRecorderDisposable?.dispose()
+ self.updateDisposable.dispose()
}
func allowsExternalGestures(point: CGPoint) -> Bool {
@@ -5161,6 +5162,7 @@ public final class StoryItemSetContainerComponent: Component {
StoryContainerScreen.openPeerStories(context: component.context, peerId: peer.id, parentController: controller, avatarNode: avatarNode)
}
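+ // Stored on the view so the edit request outlives the local scope and is disposed in deinit.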
+ private let updateDisposable = MetaDisposable()
func openStoryEditing(repost: Bool = false) {
guard let component = self.component, let peerReference = PeerReference(component.slice.peer._asPeer()) else {
return
@@ -5234,7 +5236,6 @@ public final class StoryItemSetContainerComponent: Component {
transitionOut: nil
)
- let updateDisposable = MetaDisposable()
var updateProgressImpl: ((Float) -> Void)?
let controller = MediaEditorScreen(
context: context,
@@ -5324,7 +5325,7 @@ public final class StoryItemSetContainerComponent: Component {
TempBox.shared.dispose(tempFile)
}
if let imageData = compressImageToJPEG(image, quality: 0.7, tempFilePath: tempFile.path) {
- updateDisposable.set((context.engine.messages.editStory(peerId: peerId, id: id, media: .image(dimensions: dimensions, data: imageData, stickers: result.stickers), mediaAreas: result.mediaAreas, text: updatedText, entities: updatedEntities, privacy: nil)
+ self.updateDisposable.set((context.engine.messages.editStory(peerId: peerId, id: id, media: .image(dimensions: dimensions, data: imageData, stickers: result.stickers), mediaAreas: result.mediaAreas, text: updatedText, entities: updatedEntities, privacy: nil)
|> deliverOnMainQueue).startStrict(next: { [weak self] result in
guard let self else {
return
@@ -5378,7 +5379,7 @@ public final class StoryItemSetContainerComponent: Component {
}
}
- updateDisposable.set((context.engine.messages.editStory(peerId: peerId, id: id, media: .video(dimensions: dimensions, duration: duration, resource: resource, firstFrameFile: firstFrameFile, stickers: result.stickers), mediaAreas: result.mediaAreas, text: updatedText, entities: updatedEntities, privacy: nil)
+ self.updateDisposable.set((context.engine.messages.editStory(peerId: peerId, id: id, media: .video(dimensions: dimensions, duration: duration, resource: resource, firstFrameFile: firstFrameFile, stickers: result.stickers), mediaAreas: result.mediaAreas, text: updatedText, entities: updatedEntities, privacy: nil)
|> deliverOnMainQueue).startStrict(next: { [weak self] result in
guard let self else {
return
@@ -5441,9 +5442,9 @@ public final class StoryItemSetContainerComponent: Component {
self?.state?.updated(transition: .easeInOut(duration: 0.2))
}
self.component?.controller()?.push(controller)
- updateProgressImpl = { [weak controller] progress in
- controller?.updateEditProgress(progress, cancel: {
- updateDisposable.dispose()
+ updateProgressImpl = { [weak controller, weak self] progress in
+ controller?.updateEditProgress(progress, cancel: { [weak self] in
+ self?.updateDisposable.set(nil)
})
}
}
diff --git a/submodules/TelegramUI/Components/Stories/StoryFooterPanelComponent/Sources/StoryFooterPanelComponent.swift b/submodules/TelegramUI/Components/Stories/StoryFooterPanelComponent/Sources/StoryFooterPanelComponent.swift
index 2ff0324c261..1af28ee73c0 100644
--- a/submodules/TelegramUI/Components/Stories/StoryFooterPanelComponent/Sources/StoryFooterPanelComponent.swift
+++ b/submodules/TelegramUI/Components/Stories/StoryFooterPanelComponent/Sources/StoryFooterPanelComponent.swift
@@ -404,7 +404,6 @@ public final class StoryFooterPanelComponent: Component {
likeStatsText = AnimatedCountLabelView(frame: CGRect())
likeStatsText.isUserInteractionEnabled = false
self.likeStatsText = likeStatsText
- self.externalContainerView.addSubview(likeStatsText)
}
let reactionStatsLayout = likeStatsText.update(
@@ -472,7 +471,7 @@ public final class StoryFooterPanelComponent: Component {
environment: {},
containerSize: CGSize(width: 33.0, height: 33.0)
)
- if let likeButtonView = likeButton.view {
+ if let likeButtonView = likeButton.view as? MessageInputActionButtonComponent.View {
if likeButtonView.superview == nil {
self.addSubview(likeButtonView)
}
@@ -491,6 +490,15 @@ public final class StoryFooterPanelComponent: Component {
likeStatsTransition.setAlpha(view: likeButtonView, alpha: 1.0 - component.expandFraction)
rightContentOffset -= likeButtonSize.width + 14.0
+
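+ // Parent the like count label to the like button and convert its frame into the button's coordinates.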
+ if likeStatsText.superview == nil {
+ likeButtonView.button.view.addSubview(likeStatsText)
+ }
+
+ likeStatsFrame.origin.x -= likeButtonFrame.minX
+ likeStatsFrame.origin.y -= likeButtonFrame.minY
+ likeStatsTransition.setPosition(view: likeStatsText, position: likeStatsFrame.center)
+ likeStatsTransition.setBounds(view: likeStatsText, bounds: CGRect(origin: CGPoint(), size: likeStatsFrame.size))
}
if component.canShare {
@@ -502,7 +510,6 @@ public final class StoryFooterPanelComponent: Component {
forwardStatsText = AnimatedCountLabelView(frame: CGRect())
forwardStatsText.isUserInteractionEnabled = false
self.forwardStatsText = forwardStatsText
- self.externalContainerView.addSubview(forwardStatsText)
}
let forwardStatsLayout = forwardStatsText.update(
@@ -515,8 +522,6 @@ public final class StoryFooterPanelComponent: Component {
var forwardStatsFrame = CGRect(origin: CGPoint(x: rightContentOffset - forwardStatsLayout.size.width, y: floor((size.height - forwardStatsLayout.size.height) * 0.5)), size: forwardStatsLayout.size)
forwardStatsFrame.origin.y += component.expandFraction * 45.0
- forwardStatsTransition.setPosition(view: forwardStatsText, position: forwardStatsFrame.center)
- forwardStatsTransition.setBounds(view: forwardStatsText, bounds: CGRect(origin: CGPoint(), size: forwardStatsFrame.size))
var forwardStatsAlpha: CGFloat = (1.0 - component.expandFraction)
if forwardCount == 0 {
forwardStatsAlpha = 0.0
@@ -578,18 +583,27 @@ public final class StoryFooterPanelComponent: Component {
environment: {},
containerSize: CGSize(width: 33.0, height: 33.0)
)
- if let repostButtonView = repostButton.view {
+ if let repostButtonView = repostButton.view as? MessageInputActionButtonComponent.View {
if repostButtonView.superview == nil {
self.addSubview(repostButtonView)
}
var repostButtonFrame = CGRect(origin: CGPoint(x: rightContentOffset - repostButtonSize.width, y: floor((size.height - repostButtonSize.height) * 0.5)), size: repostButtonSize)
repostButtonFrame.origin.y += component.expandFraction * 45.0
- likeStatsTransition.setPosition(view: repostButtonView, position: repostButtonFrame.center)
- likeStatsTransition.setBounds(view: repostButtonView, bounds: CGRect(origin: CGPoint(), size: repostButtonFrame.size))
- likeStatsTransition.setAlpha(view: repostButtonView, alpha: 1.0 - component.expandFraction)
+ forwardStatsTransition.setPosition(view: repostButtonView, position: repostButtonFrame.center)
+ forwardStatsTransition.setBounds(view: repostButtonView, bounds: CGRect(origin: CGPoint(), size: repostButtonFrame.size))
+ forwardStatsTransition.setAlpha(view: repostButtonView, alpha: 1.0 - component.expandFraction)
rightContentOffset -= repostButtonSize.width + 14.0
+
+ if forwardStatsText.superview == nil {
+ repostButtonView.button.view.addSubview(forwardStatsText)
+ }
+
+ forwardStatsFrame.origin.x -= repostButtonFrame.minX
+ forwardStatsFrame.origin.y -= repostButtonFrame.minY
+ forwardStatsTransition.setPosition(view: forwardStatsText, position: forwardStatsFrame.center)
+ forwardStatsTransition.setBounds(view: forwardStatsText, bounds: CGRect(origin: CGPoint(), size: forwardStatsFrame.size))
}
let forwardButtonSize = forwardButton.update(
diff --git a/submodules/TelegramUI/Images.xcassets/Media Editor/AudioSmall.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Media Editor/AudioSmall.imageset/Contents.json
new file mode 100644
index 00000000000..76269b1a815
--- /dev/null
+++ b/submodules/TelegramUI/Images.xcassets/Media Editor/AudioSmall.imageset/Contents.json
@@ -0,0 +1,12 @@
+{
+ "images" : [
+ {
+ "filename" : "Type=Add.pdf",
+ "idiom" : "universal"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/submodules/TelegramUI/Images.xcassets/Media Editor/AudioSmall.imageset/Type=Add.pdf b/submodules/TelegramUI/Images.xcassets/Media Editor/AudioSmall.imageset/Type=Add.pdf
new file mode 100644
index 00000000000..72ca28578f8
Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Media Editor/AudioSmall.imageset/Type=Add.pdf differ
diff --git a/submodules/TelegramUI/Images.xcassets/Media Editor/LocationSmall.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Media Editor/LocationSmall.imageset/Contents.json
new file mode 100644
index 00000000000..7e6c72220f4
--- /dev/null
+++ b/submodules/TelegramUI/Images.xcassets/Media Editor/LocationSmall.imageset/Contents.json
@@ -0,0 +1,12 @@
+{
+ "images" : [
+ {
+ "filename" : "location_24.pdf",
+ "idiom" : "universal"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/submodules/TelegramUI/Images.xcassets/Media Editor/LocationSmall.imageset/location_24.pdf b/submodules/TelegramUI/Images.xcassets/Media Editor/LocationSmall.imageset/location_24.pdf
new file mode 100644
index 00000000000..ed4e98fcd99
Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Media Editor/LocationSmall.imageset/location_24.pdf differ
diff --git a/submodules/TelegramUI/Sources/AppDelegate.swift b/submodules/TelegramUI/Sources/AppDelegate.swift
index d456a1f0278..90b4f178ffb 100644
--- a/submodules/TelegramUI/Sources/AppDelegate.swift
+++ b/submodules/TelegramUI/Sources/AppDelegate.swift
@@ -398,6 +398,7 @@ private class UserInterfaceStyleObserverWindow: UIWindow {
premiumProductId: NGENV.premium_bundle,
privacyUrl: URL(string: NGENV.privacy_url)!,
referralBot: NGENV.referral_bot,
+ tapjoyApiKey: NGENV.tapjoy_api_key,
telegramAuthBot: NGENV.telegram_auth_bot,
termsUrl: URL(string: NGENV.terms_url)!,
webSocketUrl: NGENV.websocket_url
@@ -416,11 +417,11 @@ private class UserInterfaceStyleObserverWindow: UIWindow {
firebaseAnalyticsSender: {
FirebaseAnalyticsSender()
},
- remoteConfig: {
- RemoteConfigServiceImpl.shared
- },
lottieView: {
LottieViewImpl()
+ },
+ remoteConfig: {
+ RemoteConfigServiceImpl.shared
}
)
diff --git a/submodules/TelegramUI/Sources/ChatHistoryListNode.swift b/submodules/TelegramUI/Sources/ChatHistoryListNode.swift
index 72ec6eafa91..1f4552e6824 100644
--- a/submodules/TelegramUI/Sources/ChatHistoryListNode.swift
+++ b/submodules/TelegramUI/Sources/ChatHistoryListNode.swift
@@ -921,6 +921,48 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto
strongSelf.maybeUpdateOverscrollAction(offset: offsetFromBottom)
}
+
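+ // Find the newest message whose node is visible below the top inset (allowing 100pt of overlap)
+ // and report it as the maximum read incoming message.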
+ var maxMessage: MessageIndex?
+ strongSelf.forEachVisibleMessageItemNode { itemNode in
+ if let item = itemNode.item {
+ var matches = false
+ if itemNode.frame.maxY < strongSelf.insets.top {
+ return
+ }
+ if itemNode.frame.minY >= strongSelf.insets.top {
+ matches = true
+ } else if itemNode.frame.minY >= strongSelf.insets.top - 100.0 {
+ matches = true
+ }
+
+ if matches {
+ var maxItemIndex: MessageIndex?
+ for (message, _) in item.content {
+ if let maxItemIndexValue = maxItemIndex {
+ if maxItemIndexValue < message.index {
+ maxItemIndex = message.index
+ }
+ } else {
+ maxItemIndex = message.index
+ }
+ }
+
+ if let maxItemIndex {
+ if let maxMessageValue = maxMessage {
+ if maxMessageValue < maxItemIndex {
+ maxMessage = maxItemIndex
+ }
+ } else {
+ maxMessage = maxItemIndex
+ }
+ }
+ }
+ }
+ }
+ if let maxMessage {
+ //print("read \(maxMessage.text)")
+ strongSelf.updateMaxVisibleReadIncomingMessageIndex(maxMessage)
+ }
}
}
@@ -2551,7 +2593,8 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto
}
if let messageIndex = messageIndex {
- self.updateMaxVisibleReadIncomingMessageIndex(messageIndex)
+ let _ = messageIndex
+ //self.updateMaxVisibleReadIncomingMessageIndex(messageIndex)
}
if let maxOverallIndex = maxOverallIndex, maxOverallIndex != self.maxVisibleMessageIndexReported {
@@ -3262,7 +3305,8 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto
}
if let messageIndex = messageIndex {
- strongSelf.updateMaxVisibleReadIncomingMessageIndex(messageIndex)
+ let _ = messageIndex
+ //strongSelf.updateMaxVisibleReadIncomingMessageIndex(messageIndex)
}
}
}
diff --git a/submodules/TelegramUI/Sources/ChatThemeScreen.swift b/submodules/TelegramUI/Sources/ChatThemeScreen.swift
index bc794e0aa37..8b2e570ad43 100644
--- a/submodules/TelegramUI/Sources/ChatThemeScreen.swift
+++ b/submodules/TelegramUI/Sources/ChatThemeScreen.swift
@@ -415,7 +415,7 @@ private final class ThemeSettingsThemeItemIconNode : ListViewItemNode {
}
}
if item.themeReference == nil {
- strongSelf.imageNode.backgroundColor = item.theme.actionSheet.opaqueItemBackgroundColor
+ strongSelf.imageNode.backgroundColor = item.theme.list.plainBackgroundColor
}
if updatedTheme || updatedSelected {
diff --git a/submodules/TelegramVoip/Sources/GroupCallContext.swift b/submodules/TelegramVoip/Sources/GroupCallContext.swift
index 3569cf77085..1860d6caeee 100644
--- a/submodules/TelegramVoip/Sources/GroupCallContext.swift
+++ b/submodules/TelegramVoip/Sources/GroupCallContext.swift
@@ -375,6 +375,7 @@ public final class OngoingGroupCallContext {
public let width: Int
public let height: Int
public let orientation: OngoingCallVideoOrientation
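+ // Orientation relative to the device, when the frame provides one.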
+ public let deviceRelativeOrientation: OngoingCallVideoOrientation?
public let mirrorHorizontally: Bool
public let mirrorVertically: Bool
@@ -392,6 +393,11 @@ public final class OngoingGroupCallContext {
self.width = Int(frameData.width)
self.height = Int(frameData.height)
self.orientation = OngoingCallVideoOrientation(frameData.orientation)
+ if frameData.hasDeviceRelativeOrientation {
+ self.deviceRelativeOrientation = OngoingCallVideoOrientation(frameData.deviceRelativeOrientation)
+ } else {
+ self.deviceRelativeOrientation = nil
+ }
self.mirrorHorizontally = frameData.mirrorHorizontally
self.mirrorVertically = frameData.mirrorVertically
}
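
deviceRelativeOrientation is declared optional because only frames whose producer recorded a device-relative rotation carry one (see hasDeviceRelativeOrientation below). A minimal sketch of how a consumer might fall back to the plain stream orientation, with simplified stand-in types rather than the real OngoingGroupCallContext types:

// Stand-ins for the real orientation and frame types.
enum VideoOrientation { case rotation0, rotation90, rotation180, rotation270 }

struct VideoFrameInfo {
    let orientation: VideoOrientation
    let deviceRelativeOrientation: VideoOrientation?
}

// Prefer the device-relative orientation when the producer supplied one,
// otherwise fall back to the plain stream orientation.
func effectiveOrientation(of frame: VideoFrameInfo) -> VideoOrientation {
    return frame.deviceRelativeOrientation ?? frame.orientation
}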
diff --git a/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h b/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h
index daf08754987..ae8a5336752 100644
--- a/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h
+++ b/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h
@@ -185,6 +185,8 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
@property (nonatomic, readonly) int width;
@property (nonatomic, readonly) int height;
@property (nonatomic, readonly) OngoingCallVideoOrientationWebrtc orientation;
+@property (nonatomic, readonly) bool hasDeviceRelativeOrientation;
+@property (nonatomic, readonly) OngoingCallVideoOrientationWebrtc deviceRelativeOrientation;
@property (nonatomic, readonly) bool mirrorHorizontally;
@property (nonatomic, readonly) bool mirrorVertically;
diff --git a/submodules/TgVoipWebrtc/Sources/MediaStreaming.mm b/submodules/TgVoipWebrtc/Sources/MediaStreaming.mm
index 843cd1048f6..becd6a0484d 100644
--- a/submodules/TgVoipWebrtc/Sources/MediaStreaming.mm
+++ b/submodules/TgVoipWebrtc/Sources/MediaStreaming.mm
@@ -106,7 +106,7 @@ - (instancetype)initWithSink:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))si
}
if (storedSink && mappedBuffer) {
- storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame mirrorHorizontally:mirrorHorizontally mirrorVertically:mirrorVertically]);
+ storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame mirrorHorizontally:mirrorHorizontally mirrorVertically:mirrorVertically hasDeviceRelativeVideoRotation:false deviceRelativeVideoRotation:OngoingCallVideoOrientation0]);
}
}));
}
diff --git a/submodules/TgVoipWebrtc/Sources/MediaUtils.h b/submodules/TgVoipWebrtc/Sources/MediaUtils.h
index 060651018f2..1bde53a0f20 100644
--- a/submodules/TgVoipWebrtc/Sources/MediaUtils.h
+++ b/submodules/TgVoipWebrtc/Sources/MediaUtils.h
@@ -53,6 +53,6 @@
@interface CallVideoFrameData (Initialization)
-- (instancetype _Nonnull)initWithBuffer:(id _Nonnull)buffer frame:(webrtc::VideoFrame const &)frame mirrorHorizontally:(bool)mirrorHorizontally mirrorVertically:(bool)mirrorVertically;
+- (instancetype _Nonnull)initWithBuffer:(id _Nonnull)buffer frame:(webrtc::VideoFrame const &)frame mirrorHorizontally:(bool)mirrorHorizontally mirrorVertically:(bool)mirrorVertically hasDeviceRelativeVideoRotation:(bool)hasDeviceRelativeVideoRotation deviceRelativeVideoRotation:(OngoingCallVideoOrientationWebrtc)deviceRelativeVideoRotation;
@end
diff --git a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
index 110536a4edd..06a79241885 100644
--- a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
+++ b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
@@ -476,7 +476,7 @@ @interface CallVideoFrameData () {
@implementation CallVideoFrameData
-- (instancetype)initWithBuffer:(id)buffer frame:(webrtc::VideoFrame const &)frame mirrorHorizontally:(bool)mirrorHorizontally mirrorVertically:(bool)mirrorVertically {
+- (instancetype)initWithBuffer:(id)buffer frame:(webrtc::VideoFrame const &)frame mirrorHorizontally:(bool)mirrorHorizontally mirrorVertically:(bool)mirrorVertically hasDeviceRelativeVideoRotation:(bool)hasDeviceRelativeVideoRotation deviceRelativeVideoRotation:(OngoingCallVideoOrientationWebrtc)deviceRelativeVideoRotation {
self = [super init];
if (self != nil) {
_buffer = buffer;
@@ -506,6 +506,9 @@ - (instancetype)initWithBuffer:(id)buffer frame:(webrtc::V
break;
}
}
+
+ _hasDeviceRelativeOrientation = hasDeviceRelativeVideoRotation;
+ _deviceRelativeOrientation = deviceRelativeVideoRotation;
_mirrorHorizontally = mirrorHorizontally;
_mirrorVertically = mirrorVertically;
@@ -586,6 +589,9 @@ - (instancetype)initWithSink:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))si
bool mirrorHorizontally = false;
bool mirrorVertically = false;
+
+ bool hasDeviceRelativeVideoRotation = false;
+ OngoingCallVideoOrientationWebrtc deviceRelativeVideoRotation = OngoingCallVideoOrientation0;
if (videoFrame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNative) {
id nativeBuffer = static_cast<webrtc::ObjCFrameBuffer *>(videoFrame.video_frame_buffer().get())->wrapped_frame_buffer();
@@ -594,7 +600,8 @@ - (instancetype)initWithSink:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))si
mappedBuffer = [[CallVideoFrameNativePixelBuffer alloc] initWithPixelBuffer:pixelBuffer.pixelBuffer];
}
if ([nativeBuffer isKindOfClass:[TGRTCCVPixelBuffer class]]) {
- if (((TGRTCCVPixelBuffer *)nativeBuffer).shouldBeMirrored) {
+ TGRTCCVPixelBuffer *tgNativeBuffer = (TGRTCCVPixelBuffer *)nativeBuffer;
+ if (tgNativeBuffer.shouldBeMirrored) {
switch (videoFrame.rotation()) {
case webrtc::kVideoRotation_0:
case webrtc::kVideoRotation_180:
@@ -608,6 +615,26 @@ - (instancetype)initWithSink:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))si
break;
}
}
+ if (tgNativeBuffer.deviceRelativeVideoRotation != -1) {
+ hasDeviceRelativeVideoRotation = true;
+ switch (tgNativeBuffer.deviceRelativeVideoRotation) {
+ case webrtc::kVideoRotation_0:
+ deviceRelativeVideoRotation = OngoingCallVideoOrientation0;
+ break;
+ case webrtc::kVideoRotation_90:
+ deviceRelativeVideoRotation = OngoingCallVideoOrientation90;
+ break;
+ case webrtc::kVideoRotation_180:
+ deviceRelativeVideoRotation = OngoingCallVideoOrientation180;
+ break;
+ case webrtc::kVideoRotation_270:
+ deviceRelativeVideoRotation = OngoingCallVideoOrientation270;
+ break;
+ default:
+ deviceRelativeVideoRotation = OngoingCallVideoOrientation0;
+ break;
+ }
+ }
}
} else if (videoFrame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNV12) {
rtc::scoped_refptr<webrtc::NV12BufferInterface> nv12Buffer(static_cast<webrtc::NV12BufferInterface *>(videoFrame.video_frame_buffer().get()));
@@ -618,7 +645,7 @@ - (instancetype)initWithSink:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))si
}
if (storedSink && mappedBuffer) {
- storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame mirrorHorizontally:mirrorHorizontally mirrorVertically:mirrorVertically]);
+ storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame mirrorHorizontally:mirrorHorizontally mirrorVertically:mirrorVertically hasDeviceRelativeVideoRotation:hasDeviceRelativeVideoRotation deviceRelativeVideoRotation:deviceRelativeVideoRotation]);
}
}));
}
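
The Objective-C++ change above treats -1 on deviceRelativeVideoRotation as a "not set" sentinel and maps the remaining webrtc rotation values onto the OngoingCallVideoOrientationWebrtc cases, degrading unknown values to the 0-degree case. A Swift sketch of the same sentinel-plus-switch pattern, with hypothetical raw degree values standing in for the webrtc enum:

// Stand-in for the Objective-C orientation enum.
enum CallVideoOrientation {
    case rotation0, rotation90, rotation180, rotation270
}

// -1 means "no device-relative rotation was recorded for this frame".
func mapDeviceRelativeRotation(_ raw: Int) -> CallVideoOrientation? {
    guard raw != -1 else { return nil }
    switch raw {
    case 0:   return .rotation0
    case 90:  return .rotation90
    case 180: return .rotation180
    case 270: return .rotation270
    default:  return .rotation0 // unknown values degrade to the default, as in the diff
    }
}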
diff --git a/submodules/TgVoipWebrtc/tgcalls b/submodules/TgVoipWebrtc/tgcalls
index fe91ca12ae6..fa2e53f5da9 160000
--- a/submodules/TgVoipWebrtc/tgcalls
+++ b/submodules/TgVoipWebrtc/tgcalls
@@ -1 +1 @@
-Subproject commit fe91ca12ae602fb4685a87ac0955fbb37589e3cb
+Subproject commit fa2e53f5da9b9653ab47169a922fb6c82847134a
diff --git a/swift_deps.bzl b/swift_deps.bzl
index f85c834671a..a6061575d5d 100644
--- a/swift_deps.bzl
+++ b/swift_deps.bzl
@@ -36,7 +36,7 @@ def swift_dependencies():
# branch: develop
swift_package(
name = "swiftpkg_nicegram_assistant_ios",
- commit = "35b2c2c88c8d2cb3d25cfa95e15e588f90578994",
+ commit = "05f2392cbf916a604d9d7e87dc640e7d4651dc12",
dependencies_index = "@//:swift_deps_index.json",
remote = "git@bitbucket.org:mobyrix/nicegram-assistant-ios.git",
)
@@ -52,7 +52,7 @@ def swift_dependencies():
# version: 5.15.5
swift_package(
name = "swiftpkg_sdwebimage",
- commit = "0383fd49fe4d9ae43f150f24693550ebe6ef0d14",
+ commit = "e278c13e46e8d20c895c221e922c6ac6b72aaca9",
dependencies_index = "@//:swift_deps_index.json",
remote = "https://github.com/SDWebImage/SDWebImage.git",
)
diff --git a/swift_deps_index.json b/swift_deps_index.json
index c0997b5221f..b96aba3232f 100644
--- a/swift_deps_index.json
+++ b/swift_deps_index.json
@@ -47,6 +47,16 @@
"LNExtensionExecutor-Static"
]
},
+ {
+ "name": "Tapjoy",
+ "c99name": "Tapjoy",
+ "src_type": "binary",
+ "label": "@swiftpkg_nicegram_assistant_ios//:Frameworks_Tapjoy.xcframework_Tapjoy",
+ "package_identity": "nicegram-assistant-ios",
+ "product_memberships": [
+ "nicegram-assistant"
+ ]
+ },
{
"name": "CoreSwiftUI",
"c99name": "CoreSwiftUI",
@@ -798,7 +808,7 @@
"name": "swiftpkg_nicegram_assistant_ios",
"identity": "nicegram-assistant-ios",
"remote": {
- "commit": "35b2c2c88c8d2cb3d25cfa95e15e588f90578994",
+ "commit": "05f2392cbf916a604d9d7e87dc640e7d4651dc12",
"remote": "git@bitbucket.org:mobyrix/nicegram-assistant-ios.git",
"branch": "develop"
}
@@ -816,9 +826,9 @@
"name": "swiftpkg_sdwebimage",
"identity": "sdwebimage",
"remote": {
- "commit": "0383fd49fe4d9ae43f150f24693550ebe6ef0d14",
+ "commit": "e278c13e46e8d20c895c221e922c6ac6b72aaca9",
"remote": "https://github.com/SDWebImage/SDWebImage.git",
- "version": "5.18.6"
+ "version": "5.18.7"
}
},
{
diff --git a/versions.json b/versions.json
index 597ea8a72fb..bcdcc881630 100644
--- a/versions.json
+++ b/versions.json
@@ -1,5 +1,5 @@
{
- "app": "1.4.8",
+ "app": "1.4.9",
"bazel": "6.4.0",
"xcode": "15.0.1"
}