diff --git a/Nicegram/NGUI/Sources/NicegramSettingsController.swift b/Nicegram/NGUI/Sources/NicegramSettingsController.swift
index 658040e2d80..071928c9a7e 100644
--- a/Nicegram/NGUI/Sources/NicegramSettingsController.swift
+++ b/Nicegram/NGUI/Sources/NicegramSettingsController.swift
@@ -81,7 +81,6 @@ private enum EasyToggleType {
     case showRegDate
     case hideReactions
     case hideStories
-    case showDeletedMessages
 }
@@ -509,8 +508,6 @@ private enum NicegramSettingsControllerEntry: ItemListNodeEntry {
                     VarSystemNGSettings.hideReactions = value
                 case .hideStories:
                     NGSettings.hideStories = value
-                case .showDeletedMessages:
-                    NGDeletedMessages.showDeletedMessages = value
                 }
             })
         case let .unblockHeader(text):
@@ -669,9 +666,6 @@ private func nicegramSettingsControllerEntries(presentationData: PresentationDat
     entries.append(.easyToggle(toggleIndex, .hideStories, l("NicegramSettings.HideStories"), NGSettings.hideStories))
     toggleIndex += 1
 
-    entries.append(.easyToggle(toggleIndex, .showDeletedMessages, l("ShowDeletedMessages"), NGDeletedMessages.showDeletedMessages))
-    toggleIndex += 1
-
     entries.append(.shareChannelsInfoToggle(l("NicegramSettings.ShareChannelsInfoToggle"), isShareChannelsInfoEnabled()))
     entries.append(.shareChannelsInfoNote(l("NicegramSettings.ShareChannelsInfoToggle.Note")))
diff --git a/Package.resolved b/Package.resolved
index 2a6d603fdda..b8923f74b21 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -42,7 +42,7 @@
       "location" : "git@bitbucket.org:mobyrix/nicegram-assistant-ios.git",
      "state" : {
        "branch" : "develop",
-        "revision" : "7d2b8a50729adb8666dec011494892cef8bc176d"
+        "revision" : "f1b83c87b7c0da94adad36a17d36b124e98b86c7"
      }
    },
    {
diff --git a/Telegram/Telegram-iOS/en.lproj/NiceLocalizable.strings b/Telegram/Telegram-iOS/en.lproj/NiceLocalizable.strings
index 7f7b38497c6..771b089e502 100644
--- a/Telegram/Telegram-iOS/en.lproj/NiceLocalizable.strings
+++ b/Telegram/Telegram-iOS/en.lproj/NiceLocalizable.strings
@@ -217,13 +217,6 @@
 /*Confirm Call*/
 "ConfirmCall.Desc" = "Are you sure you want to make a call?";
 
-/* Deleted Messages */
-"ShowDeletedMessages" = "Show Deleted Messages";
-
-/* Edited Messages */
-"ShowOriginalText" = "Show Original Text";
-"UndoShowOriginalText" = "Undo Show Original Text";
-
 /*Hidden Chats*/
 "ChatContextMenu.Hide" = "Hide";
 "ChatContextMenu.Unhide" = "Unhide";
diff --git a/submodules/AuthorizationUI/Sources/AuthorizationSequenceController.swift b/submodules/AuthorizationUI/Sources/AuthorizationSequenceController.swift
index 478d1922630..7d1c2c6ad3a 100644
--- a/submodules/AuthorizationUI/Sources/AuthorizationSequenceController.swift
+++ b/submodules/AuthorizationUI/Sources/AuthorizationSequenceController.swift
@@ -1031,6 +1031,12 @@ public final class AuthorizationSequenceController: NavigationController, ASAuth
                 let _ = TelegramEngineUnauthorized(account: strongSelf.account).auth.setState(state: UnauthorizedAccountState(isTestingEnvironment: strongSelf.account.testingEnvironment, masterDatacenterId: strongSelf.account.masterDatacenterId, contents: .phoneEntry(countryCode: countryCode, number: ""))).startStandalone()
             }, displayCancel: displayCancel)
+            controller.openUrl = { [weak self] url in
+                guard let self else {
+                    return
+                }
+                self.openUrl(url)
+            }
             controller.signUpWithName = { [weak self, weak controller] firstName, lastName, avatarData, avatarAsset, avatarAdjustments in
                 if let strongSelf = self {
                     controller?.inProgress = true
diff --git a/submodules/AuthorizationUI/Sources/AuthorizationSequenceSignUpController.swift b/submodules/AuthorizationUI/Sources/AuthorizationSequenceSignUpController.swift
index 1bb8bf05215..02154b68c29 100644
--- a/submodules/AuthorizationUI/Sources/AuthorizationSequenceSignUpController.swift
+++ b/submodules/AuthorizationUI/Sources/AuthorizationSequenceSignUpController.swift
@@ -10,6 +10,7 @@ import ProgressNavigationButtonNode
 import ImageCompression
 import LegacyMediaPickerUI
 import Postbox
+import TextFormat
 
 final class AuthorizationSequenceSignUpController: ViewController {
     private var controllerNode: AuthorizationSequenceSignUpControllerNode {
@@ -25,6 +26,7 @@ final class AuthorizationSequenceSignUpController: ViewController {
     private var termsOfService: UnauthorizedAccountTermsOfService?
     
     var signUpWithName: ((String, String, Data?, Any?, TGVideoEditAdjustments?) -> Void)?
+    var openUrl: ((String) -> Void)?
     
     var avatarAsset: Any?
     var avatarAdjustments: TGVideoEditAdjustments?
@@ -122,7 +124,32 @@ final class AuthorizationSequenceSignUpController: ViewController {
                 return
             }
             strongSelf.view.endEditing(true)
-            strongSelf.present(standardTextAlertController(theme: AlertControllerTheme(presentationData: strongSelf.presentationData), title: strongSelf.presentationData.strings.Login_TermsOfServiceHeader, text: termsOfService.text, actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {})]), in: .window(.root))
+            
+            let presentAlertImpl: () -> Void = {
+                guard let strongSelf = self else {
+                    return
+                }
+                var dismissImpl: (() -> Void)?
+                let alertTheme = AlertControllerTheme(presentationData: strongSelf.presentationData)
+                let attributedText = stringWithAppliedEntities(termsOfService.text, entities: termsOfService.entities, baseColor: alertTheme.primaryColor, linkColor: alertTheme.accentColor, baseFont: Font.regular(13.0), linkFont: Font.regular(13.0), boldFont: Font.semibold(13.0), italicFont: Font.italic(13.0), boldItalicFont: Font.semiboldItalic(13.0), fixedFont: Font.regular(13.0), blockQuoteFont: Font.regular(13.0), message: nil)
+                let contentNode = TextAlertContentNode(theme: alertTheme, title: NSAttributedString(string: strongSelf.presentationData.strings.Login_TermsOfServiceHeader, font: Font.medium(17.0), textColor: alertTheme.primaryColor, paragraphAlignment: .center), text: attributedText, actions: [
+                    TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {
+                        dismissImpl?()
+                    })
+                ], actionLayout: .vertical, dismissOnOutsideTap: true)
+                contentNode.textAttributeAction = (NSAttributedString.Key(rawValue: TelegramTextAttributes.URL), { value in
+                    if let value = value as? String {
+                        strongSelf.openUrl?(value)
+                    }
+                })
+                let controller = AlertController(theme: alertTheme, contentNode: contentNode)
+                dismissImpl = { [weak controller] in
+                    controller?.dismissAnimated()
+                }
+                strongSelf.view.endEditing(true)
+                strongSelf.present(controller, in: .window(.root))
+            }
+            presentAlertImpl()
         }
         self.controllerNode.updateData(firstName: self.initialName.0, lastName: self.initialName.1, hasTermsOfService: self.termsOfService != nil)
diff --git a/submodules/Camera/Sources/Camera.swift b/submodules/Camera/Sources/Camera.swift
index c966c7d1f54..609f9c2608d 100644
--- a/submodules/Camera/Sources/Camera.swift
+++ b/submodules/Camera/Sources/Camera.swift
@@ -45,16 +45,18 @@ final class CameraDeviceContext {
     private let exclusive: Bool
     private let additional: Bool
+    private let isRoundVideo: Bool
     
     let device = CameraDevice()
     let input = CameraInput()
     let output: CameraOutput
     
-    init(session: CameraSession, exclusive: Bool, additional: Bool, ciContext: CIContext, use32BGRA: Bool = false) {
+    init(session: CameraSession, exclusive: Bool, additional: Bool, ciContext: CIContext, colorSpace: CGColorSpace, isRoundVideo: Bool = false) {
         self.session = session
         self.exclusive = exclusive
         self.additional = additional
-        self.output = CameraOutput(exclusive: exclusive, ciContext: ciContext, use32BGRA: use32BGRA)
+        self.isRoundVideo = isRoundVideo
+        self.output = CameraOutput(exclusive: exclusive, ciContext: ciContext, colorSpace: colorSpace, use32BGRA: isRoundVideo)
     }
     
     func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false, switchAudio: Bool = true) {
@@ -63,7 +65,7 @@ final class CameraDeviceContext {
         }
         
         self.previewView = previewView
-        
+
         self.device.configure(for: session, position: position, dual: !self.exclusive || self.additional, switchAudio: switchAudio)
         self.device.configureDeviceFormat(maxDimensions: self.maxDimensions(additional: self.additional, preferWide: preferWide), maxFramerate: self.preferredMaxFrameRate(useLower: preferLowerFramerate))
         self.input.configure(for: session, device: self.device, audio: audio && switchAudio)
@@ -83,18 +85,19 @@ final class CameraDeviceContext {
     }
     
     private func maxDimensions(additional: Bool, preferWide: Bool) -> CMVideoDimensions {
-        if additional || preferWide {
-            return CMVideoDimensions(width: 1920, height: 1440)
+        if self.isRoundVideo && !Camera.isDualCameraSupported {
+            return CMVideoDimensions(width: 640, height: 480)
         } else {
-            return CMVideoDimensions(width: 1920, height: 1080)
+            if additional || preferWide {
+                return CMVideoDimensions(width: 1920, height: 1440)
+            } else {
+                return CMVideoDimensions(width: 1920, height: 1080)
+            }
         }
     }
     
     private func preferredMaxFrameRate(useLower: Bool) -> Double {
-        if !self.exclusive {
-            return 30.0
-        }
-        if useLower {
+        if !self.exclusive || self.isRoundVideo || useLower {
             return 30.0
         }
         switch DeviceModel.current {
@@ -108,14 +111,13 @@
 private final class CameraContext {
     private let queue: Queue
     private let session: CameraSession
+    private let ciContext: CIContext
+    private let colorSpace: CGColorSpace
     
     private var mainDeviceContext: CameraDeviceContext?
     private var additionalDeviceContext: CameraDeviceContext?
     
-    private let ciContext = CIContext()
-    
     private let initialConfiguration: Camera.Configuration
     private var invalidated = false
@@ -129,7 +131,10 @@ private final class CameraContext {
     var secondaryPreviewView: CameraSimplePreviewView?
     private var lastSnapshotTimestamp: Double = CACurrentMediaTime()
+    private var savedSnapshot = false
     private var lastAdditionalSnapshotTimestamp: Double = CACurrentMediaTime()
+    private var savedAdditionalSnapshot = false
+    
     private func savePreviewSnapshot(pixelBuffer: CVPixelBuffer, front: Bool) {
         Queue.concurrentDefaultQueue().async {
             var ciImage = CIImage(cvImageBuffer: pixelBuffer)
@@ -139,7 +144,7 @@
                 transform = CGAffineTransformTranslate(transform, 0.0, -size.height)
                 ciImage = ciImage.transformed(by: transform)
             }
-            ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: 40.0).cropped(to: CGRect(origin: .zero, size: size))
+            ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: Camera.isDualCameraSupported ? 100.0 : 40.0).cropped(to: CGRect(origin: .zero, size: size))
             if let cgImage = self.ciContext.createCGImage(ciImage, from: ciImage.extent) {
                 let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
                 if front {
@@ -156,6 +161,10 @@
         self.queue = queue
         self.session = session
+        
+        self.colorSpace = CGColorSpaceCreateDeviceRGB()
+        self.ciContext = CIContext(options: [.workingColorSpace : self.colorSpace])
+        
         self.initialConfiguration = configuration
         self.simplePreviewView = previewView
         self.secondaryPreviewView = secondaryPreviewView
@@ -313,10 +322,10 @@
         if enabled {
             self.configure {
                 self.mainDeviceContext?.invalidate()
-                self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo)
+                self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
                 self.mainDeviceContext?.configure(position: .back, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
                 
-                self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo)
+                self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
                 self.additionalDeviceContext?.configure(position: .front, previewView: self.secondaryPreviewView, audio: false, photo: true, metadata: false)
             }
             self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
@@ -324,13 +333,14 @@
                     return
                 }
                 let timestamp = CACurrentMediaTime()
-                if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
+                if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot {
                     var front = false
                     if #available(iOS 13.0, *) {
                         front = connection.inputPorts.first?.sourceDevicePosition == .front
                    }
                     self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                     self.lastSnapshotTimestamp = timestamp
+                    self.savedSnapshot = true
                 }
             }
             self.additionalDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
@@ -338,13 +348,14 @@
                     return
                 }
                 let timestamp = CACurrentMediaTime()
-                if timestamp > self.lastAdditionalSnapshotTimestamp + 2.5, !additionalDeviceContext.output.isRecording {
+                if timestamp > self.lastAdditionalSnapshotTimestamp + 2.5, !additionalDeviceContext.output.isRecording || !self.savedAdditionalSnapshot {
                     var front = false
                     if #available(iOS 13.0, *) {
                         front = connection.inputPorts.first?.sourceDevicePosition == .front
                     }
                     self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                     self.lastAdditionalSnapshotTimestamp = timestamp
+                    self.savedAdditionalSnapshot = true
                 }
             }
         } else {
@@ -356,7 +367,7 @@
             
             let preferWide = self.initialConfiguration.preferWide || self.initialConfiguration.isRoundVideo
             let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo
-            self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo)
+            self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
             self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
         }
         self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
@@ -364,13 +375,14 @@
                 return
             }
             let timestamp = CACurrentMediaTime()
-            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
+            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot {
                 var front = false
                 if #available(iOS 13.0, *) {
                     front = connection.inputPorts.first?.sourceDevicePosition == .front
                 }
                 self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                 self.lastSnapshotTimestamp = timestamp
+                self.savedSnapshot = true
             }
         }
         if self.initialConfiguration.reportAudioLevel {
@@ -551,7 +563,7 @@
         let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? .portrait
         
         if self.initialConfiguration.isRoundVideo {
-            return mainDeviceContext.output.startRecording(mode: .roundVideo, orientation: .portrait, additionalOutput: self.additionalDeviceContext?.output)
+            return mainDeviceContext.output.startRecording(mode: .roundVideo, orientation: DeviceModel.current.isIpad ? orientation : .portrait, additionalOutput: self.additionalDeviceContext?.output)
         } else {
             if let additionalDeviceContext = self.additionalDeviceContext {
                 return combineLatest(
diff --git a/submodules/Camera/Sources/CameraOutput.swift b/submodules/Camera/Sources/CameraOutput.swift
index f07f8cbee78..ab889bf248b 100644
--- a/submodules/Camera/Sources/CameraOutput.swift
+++ b/submodules/Camera/Sources/CameraOutput.swift
@@ -80,6 +80,7 @@ public struct CameraCode: Equatable {
 final class CameraOutput: NSObject {
     let exclusive: Bool
     let ciContext: CIContext
+    let colorSpace: CGColorSpace
     let isVideoMessage: Bool
     
     let photoOutput = AVCapturePhotoOutput()
@@ -99,14 +100,17 @@ final class CameraOutput: NSObject {
     private var photoCaptureRequests: [Int64: PhotoCaptureContext] = [:]
     private var videoRecorder: VideoRecorder?
+ + private var captureOrientation: AVCaptureVideoOrientation = .portrait var processSampleBuffer: ((CMSampleBuffer, CVImageBuffer, AVCaptureConnection) -> Void)? var processAudioBuffer: ((CMSampleBuffer) -> Void)? var processCodes: (([CameraCode]) -> Void)? - init(exclusive: Bool, ciContext: CIContext, use32BGRA: Bool = false) { + init(exclusive: Bool, ciContext: CIContext, colorSpace: CGColorSpace, use32BGRA: Bool = false) { self.exclusive = exclusive self.ciContext = ciContext + self.colorSpace = colorSpace self.isVideoMessage = use32BGRA super.init() @@ -303,6 +307,7 @@ final class CameraOutput: NSObject { self.currentMode = mode self.lastSampleTimestamp = nil + self.captureOrientation = orientation var orientation = orientation let dimensions: CGSize @@ -530,13 +535,13 @@ final class CameraOutput: NSObject { if let current = self.roundVideoFilter { filter = current } else { - filter = CameraRoundVideoFilter(ciContext: self.ciContext) + filter = CameraRoundVideoFilter(ciContext: self.ciContext, colorSpace: self.colorSpace, simple: self.exclusive) self.roundVideoFilter = filter } if !filter.isPrepared { filter.prepare(with: newFormatDescription, outputRetainedBufferCountHint: 3) } - guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, additional: additional, transitionFactor: transitionFactor) else { + guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, additional: additional, captureOrientation: self.captureOrientation, transitionFactor: transitionFactor) else { self.semaphore.signal() return nil } diff --git a/submodules/Camera/Sources/CameraRoundVideoFilter.swift b/submodules/Camera/Sources/CameraRoundVideoFilter.swift index 6e0a4252fde..3ce2d80c05d 100644 --- a/submodules/Camera/Sources/CameraRoundVideoFilter.swift +++ b/submodules/Camera/Sources/CameraRoundVideoFilter.swift @@ -1,5 +1,6 @@ import Foundation import UIKit +import AVFoundation import CoreImage import CoreMedia import CoreVideo @@ -89,8 +90,10 @@ private func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: In pixelBuffers.removeAll() } -class CameraRoundVideoFilter { +final class CameraRoundVideoFilter { private let ciContext: CIContext + private let colorSpace: CGColorSpace + private let simple: Bool private var resizeFilter: CIFilter? private var overlayFilter: CIFilter? @@ -104,8 +107,10 @@ class CameraRoundVideoFilter { private(set) var isPrepared = false - init(ciContext: CIContext) { + init(ciContext: CIContext, colorSpace: CGColorSpace, simple: Bool) { self.ciContext = ciContext + self.colorSpace = colorSpace + self.simple = simple } func prepare(with formatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) { @@ -153,25 +158,51 @@ class CameraRoundVideoFilter { private var lastMainSourceImage: CIImage? private var lastAdditionalSourceImage: CIImage? - func render(pixelBuffer: CVPixelBuffer, additional: Bool, transitionFactor: CGFloat) -> CVPixelBuffer? { + func render(pixelBuffer: CVPixelBuffer, additional: Bool, captureOrientation: AVCaptureVideoOrientation, transitionFactor: CGFloat) -> CVPixelBuffer? { guard let resizeFilter = self.resizeFilter, let overlayFilter = self.overlayFilter, let compositeFilter = self.compositeFilter, let borderFilter = self.borderFilter, self.isPrepared else { return nil } - var sourceImage = CIImage(cvImageBuffer: pixelBuffer) - sourceImage = sourceImage.oriented(additional ? 
.leftMirrored : .right) + var sourceImage = CIImage(cvImageBuffer: pixelBuffer, options: [.colorSpace: self.colorSpace]) + var sourceOrientation: CGImagePropertyOrientation + var sourceIsLandscape = false + switch captureOrientation { + case .portrait: + sourceOrientation = additional ? .leftMirrored : .right + case .landscapeLeft: + sourceOrientation = additional ? .upMirrored : .down + sourceIsLandscape = true + case .landscapeRight: + sourceOrientation = additional ? .downMirrored : .up + sourceIsLandscape = true + case .portraitUpsideDown: + sourceOrientation = additional ? .rightMirrored : .left + @unknown default: + sourceOrientation = additional ? .leftMirrored : .right + } + sourceImage = sourceImage.oriented(sourceOrientation) let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height) - resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey) - resizeFilter.setValue(scale, forKey: kCIInputScaleKey) - - if let resizedImage = resizeFilter.outputImage { - sourceImage = resizedImage + if !self.simple { + resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey) + resizeFilter.setValue(scale, forKey: kCIInputScaleKey) + + if let resizedImage = resizeFilter.outputImage { + sourceImage = resizedImage + } else { + sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true) + } } else { sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true) } - sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0)) - sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width)) + + if sourceIsLandscape { + sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(-(sourceImage.extent.width - sourceImage.extent.height) / 2.0, 0.0)) + sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.height, height: sourceImage.extent.height)) + } else { + sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0)) + sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width)) + } if additional { self.lastAdditionalSourceImage = sourceImage diff --git a/submodules/Camera/Sources/VideoRecorder.swift b/submodules/Camera/Sources/VideoRecorder.swift index 1b1d11900fd..1d5f9352cc4 100644 --- a/submodules/Camera/Sources/VideoRecorder.swift +++ b/submodules/Camera/Sources/VideoRecorder.swift @@ -112,7 +112,7 @@ private final class VideoRecorderImpl { } } } - + public func appendVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer) { if let _ = self.hasError() { return @@ -129,6 +129,8 @@ private final class VideoRecorderImpl { } var failed = false if self.videoInput == nil { + Logger.shared.log("VideoRecorder", "Try adding video input") + let videoSettings = self.configuration.videoSettings if self.assetWriter.canApply(outputSettings: videoSettings, forMediaType: .video) { let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings, sourceFormatHint: formatDescription) @@ -137,6 +139,8 @@ private final class VideoRecorderImpl { if self.assetWriter.canAdd(videoInput) { self.assetWriter.add(videoInput) self.videoInput = videoInput + + Logger.shared.log("VideoRecorder", "Successfully added 
video input") } else { failed = true } @@ -146,26 +150,32 @@ private final class VideoRecorderImpl { } if failed { - print("append video error") + Logger.shared.log("VideoRecorder", "Failed to append video buffer") return } - + if self.assetWriter.status == .unknown { if sampleBuffer.presentationTimestamp < self.recordingStartSampleTime { return } - if !self.assetWriter.startWriting() { - if let error = self.assetWriter.error { - self.transitionToFailedStatus(error: .avError(error)) - return + if self.videoInput != nil && (self.audioInput != nil || !self.configuration.hasAudio) { + if !self.assetWriter.startWriting() { + if let error = self.assetWriter.error { + self.transitionToFailedStatus(error: .avError(error)) + return + } } + + self.assetWriter.startSession(atSourceTime: presentationTime) + self.recordingStartSampleTime = presentationTime + self.lastVideoSampleTime = presentationTime } - - self.assetWriter.startSession(atSourceTime: presentationTime) - self.recordingStartSampleTime = presentationTime - self.lastVideoSampleTime = presentationTime } + if self.recordingStartSampleTime == .invalid || sampleBuffer.presentationTimestamp < self.recordingStartSampleTime { + return + } + if self.assetWriter.status == .writing { if self.recordingStopSampleTime != .invalid && sampleBuffer.presentationTimestamp > self.recordingStopSampleTime { self.hasAllVideoBuffers = true @@ -225,6 +235,8 @@ private final class VideoRecorderImpl { var failed = false if self.audioInput == nil { + Logger.shared.log("VideoRecorder", "Try adding audio input") + var audioSettings = self.configuration.audioSettings if let currentAudioStreamBasicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription) { audioSettings[AVSampleRateKey] = currentAudioStreamBasicDescription.pointee.mSampleRate @@ -247,6 +259,8 @@ private final class VideoRecorderImpl { if self.assetWriter.canAdd(audioInput) { self.assetWriter.add(audioInput) self.audioInput = audioInput + + Logger.shared.log("VideoRecorder", "Successfully added audio input") } else { failed = true } @@ -256,11 +270,11 @@ private final class VideoRecorderImpl { } if failed { - print("append audio error") + Logger.shared.log("VideoRecorder", "Failed to append audio buffer") return } - if self.assetWriter.status == .writing { + if self.recordingStartSampleTime != .invalid { //self.assetWriter.status == .writing { if sampleBuffer.presentationTimestamp < self.recordingStartSampleTime { return } diff --git a/submodules/Postbox/Sources/MessageHistoryTable.swift b/submodules/Postbox/Sources/MessageHistoryTable.swift index eeec4949ade..52c13bfdc14 100644 --- a/submodules/Postbox/Sources/MessageHistoryTable.swift +++ b/submodules/Postbox/Sources/MessageHistoryTable.swift @@ -3020,30 +3020,6 @@ final class MessageHistoryTable: Table { } // - // MARK: Nicegram DeletedMessages - func allMessageIds(whereAttributes predicate: ([MessageAttribute]) -> Bool) -> [MessageId] { - var result: [MessageId] = [] - self.valueBox.range( - self.table, - start: self.key(MessageIndex.absoluteLowerBound()), - end: self.key(MessageIndex.absoluteUpperBound()), - values: { key, value in - let entry = self.readIntermediateEntry(key, value: value) - let message = entry.message - let attributes = MessageHistoryTable.renderMessageAttributes(message) - - if predicate(attributes) { - result.append(message.id) - } - - return true - }, - limit: 0 - ) - return result - } - // - func allIndicesWithGlobalTag(tag: GlobalMessageTags) -> [GlobalMessageHistoryTagsTableEntry] { return 
self.globalTagsTable.getAll() } diff --git a/submodules/Postbox/Sources/Postbox.swift b/submodules/Postbox/Sources/Postbox.swift index e12098d554f..dec5f3808bb 100644 --- a/submodules/Postbox/Sources/Postbox.swift +++ b/submodules/Postbox/Sources/Postbox.swift @@ -176,12 +176,6 @@ public final class Transaction { } // - // MARK: Nicegram DeletedMessages - public func allMessageIds(whereAttributes predicate: ([MessageAttribute]) -> Bool) -> [MessageId] { - self.postbox?.allMessageIds(whereAttributes: predicate) ?? [] - } - // - public func removeAllMessagesWithGlobalTag(tag: GlobalMessageTags) { assert(!self.disposed) self.postbox?.removeAllMessagesWithGlobalTag(tag: tag) @@ -2223,12 +2217,6 @@ final class PostboxImpl { } // - // MARK: Nicegram DeletedMessages - fileprivate func allMessageIds(whereAttributes predicate: ([MessageAttribute]) -> Bool) -> [MessageId] { - self.messageHistoryTable.allMessageIds(whereAttributes: predicate) - } - // - fileprivate func removeAllMessagesWithGlobalTag(tag: GlobalMessageTags) { self.messageHistoryTable.removeAllMessagesWithGlobalTag(tag: tag, operationsByPeerId: &self.currentOperationsByPeerId, updatedMedia: &self.currentUpdatedMedia, unsentMessageOperations: ¤tUnsentOperations, updatedPeerReadStateOperations: &self.currentUpdatedSynchronizeReadStateOperations, globalTagsOperations: &self.currentGlobalTagsOperations, pendingActionsOperations: &self.currentPendingMessageActionsOperations, updatedMessageActionsSummaries: &self.currentUpdatedMessageActionsSummaries, updatedMessageTagSummaries: &self.currentUpdatedMessageTagSummaries, invalidateMessageTagSummaries: &self.currentInvalidateMessageTagSummaries, localTagsOperations: &self.currentLocalTagsOperations, timestampBasedMessageAttributesOperations: &self.currentTimestampBasedMessageAttributesOperations, forEachMedia: { _ in }) } diff --git a/submodules/TelegramCore/Sources/Account/AccountManager.swift b/submodules/TelegramCore/Sources/Account/AccountManager.swift index 1123dce66ab..6519c2dc1ba 100644 --- a/submodules/TelegramCore/Sources/Account/AccountManager.swift +++ b/submodules/TelegramCore/Sources/Account/AccountManager.swift @@ -160,9 +160,6 @@ public final class TelegramAccountManagerTypes: AccountManagerTypes { } private var declaredEncodables: Void = { - // MARK: Nicegram - declareEncodable(NicegramMessageAttribute.self, f: { NicegramMessageAttribute(decoder: $0) }) - // declareEncodable(UnauthorizedAccountState.self, f: { UnauthorizedAccountState(decoder: $0) }) declareEncodable(AuthorizedAccountState.self, f: { AuthorizedAccountState(decoder: $0) }) declareEncodable(TelegramUser.self, f: { TelegramUser(decoder: $0) }) diff --git a/submodules/TelegramCore/Sources/Nicegram/NGDeletedMessages.swift b/submodules/TelegramCore/Sources/Nicegram/NGDeletedMessages.swift deleted file mode 100644 index 9311cdd4dbb..00000000000 --- a/submodules/TelegramCore/Sources/Nicegram/NGDeletedMessages.swift +++ /dev/null @@ -1,91 +0,0 @@ -import Foundation -import Postbox -import SwiftSignalKit - -public struct NGDeletedMessages {} - -public extension NGDeletedMessages { - static var showDeletedMessages: Bool { - get { - UserDefaults.standard.bool(forKey: "ng_showDeletedMessages") - } set { - UserDefaults.standard.setValue(newValue, forKey: "ng_showDeletedMessages") - } - } - - static func actuallyDeleteMarkedMessages( - postbox: Postbox - ) -> Signal { - postbox.transaction { transaction in - let ids = transaction.allMessageIds { attributes in - for attribute in attributes { - if let nicegramAttribute = 
attribute as? NicegramMessageAttribute, - nicegramAttribute.isDeleted { - return true - } - } - return false - } - - _internal_deleteMessages( - transaction: transaction, - mediaBox: postbox.mediaBox, - ids: ids - ) - } - } -} - -extension NGDeletedMessages { - static func markMessagesAsDeleted( - globalIds: [Int32], - transaction: Transaction - ) -> [Int32] { - guard showDeletedMessages else { - return globalIds - } - - var markedIds: [Int32] = [] - - for globalId in globalIds { - if let id = transaction.messageIdsForGlobalIds([globalId]).first { - transaction.updateNicegramAttribute(messageId: id) { - if !$0.isDeleted { - $0.isDeleted = true - markedIds.append(globalId) - } - } - } - } - - let unmarkedIds = Set(globalIds).subtracting(markedIds) - - return Array(unmarkedIds) - } - - static func markMessagesAsDeleted( - ids: [MessageId], - transaction: Transaction - ) -> [MessageId] { - guard showDeletedMessages else { - return ids - } - - var markedIds: [MessageId] = [] - - for id in ids { - transaction.updateNicegramAttribute(messageId: id) { - if !$0.isDeleted { - $0.isDeleted = true - markedIds.append(id) - } - } - } - - let unmarkedIds = Set(ids).subtracting(markedIds) - - return Array(unmarkedIds) - } -} - - diff --git a/submodules/TelegramCore/Sources/Nicegram/NicegramMessageAttribute.swift b/submodules/TelegramCore/Sources/Nicegram/NicegramMessageAttribute.swift deleted file mode 100644 index 25332d11ffe..00000000000 --- a/submodules/TelegramCore/Sources/Nicegram/NicegramMessageAttribute.swift +++ /dev/null @@ -1,86 +0,0 @@ -import Postbox - -public class NicegramMessageAttribute: MessageAttribute { - public var isDeleted: Bool - public var originalText: String? - - public init( - isDeleted: Bool = false, - originalText: String? = nil - ) { - self.isDeleted = isDeleted - self.originalText = originalText - } - - public required init(decoder: PostboxDecoder) { - self.isDeleted = decoder.decodeBoolForKey("isDeleted", orElse: false) - self.originalText = decoder.decodeOptionalStringForKey("originalText") - } - - public func encode(_ encoder: PostboxEncoder) { - encoder.encodeBool(isDeleted, forKey: "isDeleted") - if let originalText { - encoder.encodeString(originalText, forKey: "originalText") - } - } -} - -public extension Message { - var nicegramAttribute: NicegramMessageAttribute { - for attribute in self.attributes { - if let nicegramAttribute = attribute as? NicegramMessageAttribute { - return nicegramAttribute - } - } - return NicegramMessageAttribute() - } -} - -public extension Transaction { - func updateNicegramAttribute(messageId: MessageId, _ block: (inout NicegramMessageAttribute) -> Void) { - self.updateMessage(messageId) { message in - var attributes = message.attributes - attributes.updateNicegramAttribute(block) - - return .update(StoreMessage(id: message.id, globallyUniqueId: message.globallyUniqueId, groupingKey: message.groupingKey, threadId: message.threadId, timestamp: message.timestamp, flags: StoreMessageFlags(message.flags), tags: message.tags, globalTags: message.globalTags, localTags: message.localTags, forwardInfo: message.forwardInfo.map(StoreMessageForwardInfo.init), authorId: message.author?.id, text: message.text, attributes: attributes, media: message.media)) - } - } -} - -public extension StoreMessage { - func updatingNicegramAttributeOnEdit( - previousMessage: Message - ) -> StoreMessage { - let newAttr = self.attributes.compactMap { $0 as? NicegramMessageAttribute }.first - let attr = newAttr ?? 
previousMessage.nicegramAttribute - - if attr.originalText == nil { - attr.originalText = previousMessage.text - } - - var attributes = self.attributes - attributes.updateNicegramAttribute { - $0 = attr - } - - return self.withUpdatedAttributes(attributes) - } -} - -private extension Array { - mutating func updateNicegramAttribute( - _ block: (inout NicegramMessageAttribute) -> Void - ) { - for (index, attribute) in self.enumerated() { - if var nicegramAttribute = attribute as? NicegramMessageAttribute { - block(&nicegramAttribute) - self[index] = nicegramAttribute - return - } - } - - var nicegramAttribute = NicegramMessageAttribute() - block(&nicegramAttribute) - self.append(nicegramAttribute) - } -} diff --git a/submodules/TelegramCore/Sources/PendingMessages/RequestEditMessage.swift b/submodules/TelegramCore/Sources/PendingMessages/RequestEditMessage.swift index 79d9ee98769..2532a673a46 100644 --- a/submodules/TelegramCore/Sources/PendingMessages/RequestEditMessage.swift +++ b/submodules/TelegramCore/Sources/PendingMessages/RequestEditMessage.swift @@ -229,8 +229,7 @@ private func requestEditMessageInternal(accountPeerId: PeerId, postbox: Postbox, } else { updatedFlags.remove(.Incoming) } - // MARK: Nicegram, add updatingNicegramAttributeOnEdit - return .update(message.withUpdatedLocalTags(updatedLocalTags).withUpdatedFlags(updatedFlags).updatingNicegramAttributeOnEdit(previousMessage: previousMessage)) + return .update(message.withUpdatedLocalTags(updatedLocalTags).withUpdatedFlags(updatedFlags)) }) } default: diff --git a/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift b/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift index bb87fe30337..5690d6bd4b9 100644 --- a/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift +++ b/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift @@ -3745,15 +3745,7 @@ func replayFinalState( let _ = transaction.addMessages(messages, location: .Random) } } - // MARK: Nicegram DeletedMessages, change ids to allIds - case let .DeleteMessagesWithGlobalIds(allIds): - // MARK: Nicegram DeletedMessages - let ids = NGDeletedMessages.markMessagesAsDeleted( - globalIds: allIds, - transaction: transaction - ) - // - + case let .DeleteMessagesWithGlobalIds(ids): var resourceIds: [MediaResourceId] = [] transaction.deleteMessagesWithGlobalIds(ids, forEachMedia: { media in addMessageMediaResourceIdsToRemove(media: media, resourceIds: &resourceIds) @@ -3761,8 +3753,7 @@ func replayFinalState( if !resourceIds.isEmpty { let _ = mediaBox.removeCachedResources(Array(Set(resourceIds)), force: true).start() } - // MARK: Nicegram DeletedMessages, change ids to allIds - deletedMessageIds.append(contentsOf: allIds.map { .global($0) }) + deletedMessageIds.append(contentsOf: ids.map { .global($0) }) case let .DeleteMessages(ids): _internal_deleteMessages(transaction: transaction, mediaBox: mediaBox, ids: ids, manualAddMessageThreadStatsDifference: { id, add, remove in addMessageThreadStatsDifference(threadKey: id, remove: remove, addedMessagePeer: nil, addedMessageId: nil, isOutgoing: false) @@ -3830,8 +3821,7 @@ func replayFinalState( if let message = locallyRenderedMessage(message: message, peers: peers) { generatedEvent = reactionGeneratedEvent(previousMessage.reactionsAttribute, message.reactionsAttribute, message: message, transaction: transaction) } - // MARK: Nicegram, add updatingNicegramAttributeOnEdit - return 
.update(message.withUpdatedLocalTags(updatedLocalTags).withUpdatedFlags(updatedFlags).withUpdatedAttributes(updatedAttributes).updatingNicegramAttributeOnEdit(previousMessage: previousMessage)) + return .update(message.withUpdatedLocalTags(updatedLocalTags).withUpdatedFlags(updatedFlags).withUpdatedAttributes(updatedAttributes)) }) if let generatedEvent = generatedEvent { addedReactionEvents.append(generatedEvent) diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Messages/DeleteMessages.swift b/submodules/TelegramCore/Sources/TelegramEngine/Messages/DeleteMessages.swift index 7ef86e6ad35..fd85bd5d4b1 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Messages/DeleteMessages.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Messages/DeleteMessages.swift @@ -23,13 +23,6 @@ func addMessageMediaResourceIdsToRemove(message: Message, resourceIds: inout [Me } public func _internal_deleteMessages(transaction: Transaction, mediaBox: MediaBox, ids: [MessageId], deleteMedia: Bool = true, manualAddMessageThreadStatsDifference: ((MessageThreadKey, Int, Int) -> Void)? = nil) { - // MARK: Nicegram DeletedMessages - let ids = NGDeletedMessages.markMessagesAsDeleted( - ids: ids, - transaction: transaction - ) - // - var resourceIds: [MediaResourceId] = [] if deleteMedia { for id in ids { diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageItemCommon/Sources/ChatMessageItemCommon.swift b/submodules/TelegramUI/Components/Chat/ChatMessageItemCommon/Sources/ChatMessageItemCommon.swift index 7e213959965..b2ef550c96a 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageItemCommon/Sources/ChatMessageItemCommon.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageItemCommon/Sources/ChatMessageItemCommon.swift @@ -273,11 +273,6 @@ public func messageIsElligibleForLargeCustomEmoji(_ message: Message) -> Bool { } public func canAddMessageReactions(message: Message) -> Bool { - // MARK: Nicegram DeletedMessages - if message.nicegramAttribute.isDeleted { - return false - } - // if message.id.namespace != Namespaces.Message.Cloud { return false } diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageItemView/Sources/ChatMessageItemView.swift b/submodules/TelegramUI/Components/Chat/ChatMessageItemView/Sources/ChatMessageItemView.swift index 1774d65dbb8..c4356309f2e 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageItemView/Sources/ChatMessageItemView.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageItemView/Sources/ChatMessageItemView.swift @@ -677,12 +677,6 @@ open class ChatMessageItemView: ListViewItemNode, ChatMessageItemNodeProtocol { open func setupItem(_ item: ChatMessageItem, synchronousLoad: Bool) { self.item = item - - // MARK: Nicegram DeletedMessages - if item.message.nicegramAttribute.isDeleted { - self.alpha = 0.5 - } - // } open func updateAccessibilityData(_ accessibilityData: ChatMessageAccessibilityData) { diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift index f4f1d33165b..83deaebbefa 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift @@ -9048,13 +9048,6 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro case .watch: push(watchSettingsController(context: self.context)) case .support: - // MARK: Nicegram 
GermanSupport - if self.presentationData.strings.baseLanguageCode.lowercased().contains("de"), - let url = URL(string: "https://t.me/EinleitungHilfeTelegram") { - CoreContainer.shared.urlOpener().open(url) - break - } - // let supportPeer = Promise() supportPeer.set(context.engine.peers.supportPeerId()) diff --git a/submodules/TelegramUI/Components/StorageUsageScreen/Sources/StorageUsageScreen.swift b/submodules/TelegramUI/Components/StorageUsageScreen/Sources/StorageUsageScreen.swift index 2cc80d66847..3e2661e6f35 100644 --- a/submodules/TelegramUI/Components/StorageUsageScreen/Sources/StorageUsageScreen.swift +++ b/submodules/TelegramUI/Components/StorageUsageScreen/Sources/StorageUsageScreen.swift @@ -2865,12 +2865,6 @@ final class StorageUsageScreenComponent: Component { return } - // MARK: Nicegram DeletedMessages - _ = NGDeletedMessages.actuallyDeleteMarkedMessages( - postbox: component.context.account.postbox - ).start() - // - if let _ = aggregatedData.peerId { var mappedCategories: [StorageUsageStats.CategoryKey] = [] for category in aggregatedData.selectedCategories { diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift index 858b18a56b4..6d83d22d84e 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift @@ -6598,14 +6598,9 @@ public final class StoryItemSetContainerComponent: Component { } if !component.slice.item.storyItem.isForwardingDisabled { - // MARK: Nicegram SaveStories - let hasPremium = "".isEmpty - // - let saveText: String = component.strings.Story_Context_SaveToGallery items.append(.action(ContextMenuActionItem(text: saveText, icon: { theme in - // MARK: Nicegram SaveStories, change accountUser.isPremium to hasPremium - return generateTintedImage(image: UIImage(bundleImageName: hasPremium ? "Chat/Context Menu/Download" : "Chat/Context Menu/DownloadLocked"), color: theme.contextMenu.primaryColor) + return generateTintedImage(image: UIImage(bundleImageName: accountUser.isPremium ? 
"Chat/Context Menu/Download" : "Chat/Context Menu/DownloadLocked"), color: theme.contextMenu.primaryColor) }, action: { [weak self] _, a in a(.default) @@ -6613,8 +6608,7 @@ public final class StoryItemSetContainerComponent: Component { return } - // MARK: Nicegram SaveStories, change accountUser.isPremium to hasPremium - if hasPremium { + if accountUser.isPremium { self.requestSave() } else { self.presentSaveUpgradeScreen() diff --git a/submodules/TelegramUI/Components/VideoMessageCameraScreen/BUILD b/submodules/TelegramUI/Components/VideoMessageCameraScreen/BUILD index c872752a69b..9044bfadc1f 100644 --- a/submodules/TelegramUI/Components/VideoMessageCameraScreen/BUILD +++ b/submodules/TelegramUI/Components/VideoMessageCameraScreen/BUILD @@ -38,6 +38,7 @@ swift_library( "//submodules/DeviceAccess", "//submodules/TelegramUI/Components/MediaEditor", "//submodules/LegacyMediaPickerUI", + "//submodules/TelegramAudio", ], visibility = [ "//visibility:public", diff --git a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift index 0bcd43ba739..ad68560a448 100644 --- a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift +++ b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift @@ -26,6 +26,7 @@ import MediaResources import LocalMediaResources import ImageCompression import LegacyMediaPickerUI +import TelegramAudio struct CameraState: Equatable { enum Recording: Equatable { @@ -584,7 +585,7 @@ public class VideoMessageCameraScreen: ViewController { if self.cameraState.isViewOnceEnabled != oldValue.isViewOnceEnabled { if self.cameraState.isViewOnceEnabled { let presentationData = self.context.sharedContext.currentPresentationData.with { $0 } - self.displayViewOnceTooltip(text: presentationData.strings.Chat_PlayVideoMessageOnceTooltip, hasIcon: false) + self.displayViewOnceTooltip(text: presentationData.strings.Chat_PlayVideoMessageOnceTooltip, hasIcon: true) let _ = ApplicationSpecificNotice.incrementVideoMessagesPlayOnceSuggestion(accountManager: self.context.sharedContext.accountManager, count: 3).startStandalone() } else { @@ -694,7 +695,7 @@ public class VideoMessageCameraScreen: ViewController { func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) { let previewReady: Signal if #available(iOS 13.0, *) { - previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing + previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing |> delay(0.2, queue: Queue.mainQueue()) } else { previewReady = .single(true) |> delay(0.35, queue: Queue.mainQueue()) } @@ -1116,9 +1117,22 @@ public class VideoMessageCameraScreen: ViewController { let previewSide = min(369.0, layout.size.width - 24.0) let previewFrame: CGRect if layout.metrics.isTablet { - previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.2 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide)) + let statusBarOrientation: UIInterfaceOrientation + if #available(iOS 13.0, *) { + statusBarOrientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation ?? 
.portrait + } else { + statusBarOrientation = UIApplication.shared.statusBarOrientation + } + + if statusBarOrientation == .landscapeLeft { + previewFrame = CGRect(origin: CGPoint(x: layout.size.width - 44.0 - previewSide, y: floorToScreenPixels((layout.size.height - previewSide) / 2.0)), size: CGSize(width: previewSide, height: previewSide)) + } else if statusBarOrientation == .landscapeRight { + previewFrame = CGRect(origin: CGPoint(x: 44.0, y: floorToScreenPixels((layout.size.height - previewSide) / 2.0)), size: CGSize(width: previewSide, height: previewSide)) + } else { + previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.2 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide)) + } } else { - previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 16.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide)) + previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide)) } if !self.animatingIn { transition.setFrame(view: self.previewContainerView, frame: previewFrame) @@ -1321,7 +1335,7 @@ public class VideoMessageCameraScreen: ViewController { public func takenRecordedData() -> Signal { let previewState = self.node.previewStatePromise.get() - let count = 12 + let count = 13 let initialPlaceholder: Signal if let firstResult = self.node.results.first { @@ -1439,9 +1453,10 @@ public class VideoMessageCameraScreen: ViewController { return } + var skipAction = false let currentTimestamp = CACurrentMediaTime() if let lastActionTimestamp = self.lastActionTimestamp, currentTimestamp - lastActionTimestamp < 0.5 { - return + skipAction = true } if case .none = self.cameraState.recording, self.node.results.isEmpty { @@ -1451,9 +1466,21 @@ public class VideoMessageCameraScreen: ViewController { if case .none = self.cameraState.recording { } else { - self.isSendingImmediately = true - self.waitingForNextResult = true - self.node.stopRecording.invoke(Void()) + if self.cameraState.duration > 0.5 { + if skipAction { + return + } + self.isSendingImmediately = true + self.waitingForNextResult = true + self.node.stopRecording.invoke(Void()) + } else { + self.completion(nil, nil, nil) + return + } + } + + guard !skipAction else { + return } self.didSend = true @@ -1495,78 +1522,97 @@ public class VideoMessageCameraScreen: ViewController { let dimensions = PixelDimensions(width: 400, height: 400) - var thumbnailImage = video.thumbnail + let thumbnailImage: Signal if startTime > 0.0 { - let composition = composition(with: results) - let imageGenerator = AVAssetImageGenerator(asset: composition) - imageGenerator.maximumSize = dimensions.cgSize - imageGenerator.appliesPreferredTrackTransform = true - - if let cgImage = try? 
imageGenerator.copyCGImage(at: CMTime(seconds: startTime, preferredTimescale: composition.duration.timescale), actualTime: nil) { - thumbnailImage = UIImage(cgImage: cgImage) + thumbnailImage = Signal { subscriber in + let composition = composition(with: results) + let imageGenerator = AVAssetImageGenerator(asset: composition) + imageGenerator.maximumSize = dimensions.cgSize + imageGenerator.appliesPreferredTrackTransform = true + + imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: startTime, preferredTimescale: composition.duration.timescale))], completionHandler: { _, image, _, _, _ in + if let image { + subscriber.putNext(UIImage(cgImage: image)) + } else { + subscriber.putNext(video.thumbnail) + } + subscriber.putCompletion() + }) + + return ActionDisposable { + imageGenerator.cancelAllCGImageGeneration() + } } - } - - let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage) - - var resourceAdjustments: VideoMediaResourceAdjustments? = nil - if let valuesData = try? JSONEncoder().encode(values) { - let data = MemoryBuffer(data: valuesData) - let digest = MemoryBuffer(data: data.md5Digest()) - resourceAdjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: false) - } - - let resource: TelegramMediaResource - let liveUploadData: LegacyLiveUploadInterfaceResult? - if let current = self.node.currentLiveUploadData { - liveUploadData = current } else { - liveUploadData = self.node.liveUploadInterface?.fileUpdated(true) as? LegacyLiveUploadInterfaceResult - } - if !hasAdjustments, let liveUploadData, let data = try? Data(contentsOf: URL(fileURLWithPath: video.videoPath)) { - resource = LocalFileMediaResource(fileId: liveUploadData.id) - self.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true) - } else { - resource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), paths: videoPaths, adjustments: resourceAdjustments) - } - - var previewRepresentations: [TelegramMediaImageRepresentation] = [] - - let thumbnailResource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... 
Int64.max)) - let thumbnailSize = video.dimensions.cgSize.aspectFitted(CGSize(width: 320.0, height: 320.0)) - if let thumbnailData = scaleImageToPixelSize(image: thumbnailImage, size: thumbnailSize)?.jpegData(compressionQuality: 0.4) { - self.context.account.postbox.mediaBox.storeResourceData(thumbnailResource.id, data: thumbnailData) - previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(thumbnailSize), resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false)) + thumbnailImage = .single(video.thumbnail) } - let tempFile = TempBox.shared.tempFile(fileName: "file") - defer { - TempBox.shared.dispose(tempFile) - } - if let data = compressImageToJPEG(thumbnailImage, quality: 0.7, tempFilePath: tempFile.path) { - context.account.postbox.mediaBox.storeCachedResourceRepresentation(resource, representation: CachedVideoFirstFrameRepresentation(), data: data) - } + let _ = (thumbnailImage + |> deliverOnMainQueue).startStandalone(next: { [weak self] thumbnailImage in + guard let self else { + return + } + let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage) + + var resourceAdjustments: VideoMediaResourceAdjustments? = nil + if let valuesData = try? JSONEncoder().encode(values) { + let data = MemoryBuffer(data: valuesData) + let digest = MemoryBuffer(data: data.md5Digest()) + resourceAdjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: false) + } + + let resource: TelegramMediaResource + let liveUploadData: LegacyLiveUploadInterfaceResult? + if let current = self.node.currentLiveUploadData { + liveUploadData = current + } else { + liveUploadData = self.node.liveUploadInterface?.fileUpdated(true) as? LegacyLiveUploadInterfaceResult + } + if !hasAdjustments, let liveUploadData, let data = try? Data(contentsOf: URL(fileURLWithPath: video.videoPath)) { + resource = LocalFileMediaResource(fileId: liveUploadData.id) + self.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true) + } else { + resource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), paths: videoPaths, adjustments: resourceAdjustments) + } + + var previewRepresentations: [TelegramMediaImageRepresentation] = [] + + let thumbnailResource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... 
Int64.max)) + let thumbnailSize = video.dimensions.cgSize.aspectFitted(CGSize(width: 320.0, height: 320.0)) + if let thumbnailData = scaleImageToPixelSize(image: thumbnailImage, size: thumbnailSize)?.jpegData(compressionQuality: 0.4) { + self.context.account.postbox.mediaBox.storeResourceData(thumbnailResource.id, data: thumbnailData) + previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(thumbnailSize), resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false)) + } + + let tempFile = TempBox.shared.tempFile(fileName: "file") + defer { + TempBox.shared.dispose(tempFile) + } + if let data = compressImageToJPEG(thumbnailImage, quality: 0.7, tempFilePath: tempFile.path) { + context.account.postbox.mediaBox.storeCachedResourceRepresentation(resource, representation: CachedVideoFirstFrameRepresentation(), data: data) + } - let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.FileName(fileName: "video.mp4"), .Video(duration: finalDuration, size: video.dimensions, flags: [.instantRoundVideo], preloadSize: nil)]) - - - var attributes: [MessageAttribute] = [] - if self.cameraState.isViewOnceEnabled { - attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil)) - } - - self.completion(.message( - text: "", - attributes: attributes, - inlineStickers: [:], - mediaReference: .standalone(media: media), - threadId: nil, - replyToMessageId: nil, - replyToStoryId: nil, - localGroupingKey: nil, - correlationId: nil, - bubbleUpEmojiOrStickersets: [] - ), silentPosting, scheduleTime) + let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... 
diff --git a/submodules/TelegramUI/Sources/ChatControllerForwardMessages.swift b/submodules/TelegramUI/Sources/ChatControllerForwardMessages.swift
index 1e2d1cce7a6..8a124f32ba5 100644
--- a/submodules/TelegramUI/Sources/ChatControllerForwardMessages.swift
+++ b/submodules/TelegramUI/Sources/ChatControllerForwardMessages.swift
@@ -29,15 +29,6 @@ extension ChatControllerImpl {
     // MARK: Nicegram (cloud + asCopy)
     func forwardMessages(messages: [Message], options: ChatInterfaceForwardOptionsState? = nil, resetCurrent: Bool, cloud: Bool = false, asCopy: Bool = false) {
-        // MARK: Nicegram DeletedMessages
-        var asCopy = asCopy
-        
-        let hasDeletedMessage = messages.contains(where: { $0.nicegramAttribute.isDeleted })
-        if hasDeletedMessage {
-            asCopy = true
-        }
-        //
-        
         let _ = self.presentVoiceMessageDiscardAlert(action: {
             // MARK: Nicegram
diff --git a/submodules/TelegramUI/Sources/ChatHistoryEntriesForView.swift b/submodules/TelegramUI/Sources/ChatHistoryEntriesForView.swift
index 24db62277ff..d7cd4e20db8 100644
--- a/submodules/TelegramUI/Sources/ChatHistoryEntriesForView.swift
+++ b/submodules/TelegramUI/Sources/ChatHistoryEntriesForView.swift
@@ -122,13 +122,6 @@ func chatHistoryEntriesForView(
             var message = entry.message
             var isRead = entry.isRead
             
-            // MARK: Nicegram DeletedMessages
-            if !NGDeletedMessages.showDeletedMessages,
-               message.nicegramAttribute.isDeleted {
-                continue
-            }
-            //
-            
             if pendingRemovedMessages.contains(message.id) {
                 continue
             }
diff --git a/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift b/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift
index ae1379ef9ca..014d3d57d7c 100644
--- a/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift
+++ b/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift
@@ -1452,11 +1452,6 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
         if isMigrated {
             canPin = false
         }
-        // MARK: Nicegram DeletedMessages
-        if message.nicegramAttribute.isDeleted {
-            canPin = false
-        }
-        //
         
         if canPin {
             var pinnedSelectedMessageId: MessageId?
@@ -1926,52 +1921,6 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
             }
         }
         
-        // MARK: Nicegram EditedMessages
-        if !isSecretChat {
-            let nicegramAttribute = message.nicegramAttribute
-            let originalText = nicegramAttribute.originalText
-            
-            let image: UIImage? = if #available(iOS 13.0, *) {
-                UIImage(systemName: "eye")
-            } else {
-                nil
-            }
-            
-            if message.hasTextBlock(.ngOriginalText) {
-                let action = ContextMenuActionItem(
-                    text: l("UndoShowOriginalText"),
-                    icon: { theme in
-                        generateTintedImage(image: image, color: theme.actionSheet.primaryTextColor)
-                    },
-                    action: { _, f in
-                        message.removeTextBlock(
-                            block: .ngOriginalText,
-                            context: context
-                        )
-                        f(.dismissWithoutContent)
-                    }
-                )
-                ngContextItems.append(.action(action))
-            } else if let originalText, originalText != message.text {
-                let action = ContextMenuActionItem(
-                    text: l("ShowOriginalText"),
-                    icon: { theme in
-                        generateTintedImage(image: image, color: theme.actionSheet.primaryTextColor)
-                    },
-                    action: { _, f in
-                        message.addTextBlock(
-                            text: originalText,
-                            block: .ngOriginalText,
-                            context: context
-                        )
-                        f(.dismissWithoutContent)
-                    }
-                )
-                ngContextItems.append(.action(action))
-            }
-        }
-        
-        //
-        
         // MARK: Nicegram MessageMetadata
         if !isSecretChat, #available(iOS 15.0, *) {
             let messageMetadataAction = ContextMenuActionItem(
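Together with the forwardMessages and chatHistoryEntriesForView hunks above, this removes the last call sites of the Nicegram DeletedMessages and EditedMessages features: the forced forward-as-copy for deleted messages, the history filter, the pin guard, and the Show Original Text / Undo Show Original Text context actions.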
diff --git a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
index 8a6c84f6302..b29d44420a6 100644
--- a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
+++ b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
@@ -337,19 +337,22 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                         ),
                         environment: {},
                         forceUpdate: false,
-                        containerSize: CGSize(width: width - leftInset - rightInset - 45.0 * 2.0, height: 33.0)
+                        containerSize: CGSize(width: min(424, width - leftInset - rightInset - 45.0 * 2.0), height: 33.0)
                     )
-                    
+                    
                     if let view = self.scrubber.view {
                         if view.superview == nil {
                             self.view.addSubview(view)
                         }
-                        
-                        view.frame = CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: scrubberSize)
+                        view.bounds = CGRect(origin: .zero, size: scrubberSize)
                     }
                 }
             }
         }
+        
+        if let view = self.scrubber.view {
+            view.frame = CGRect(origin: CGPoint(x: max(leftInset + 45.0, floorToScreenPixels((width - view.bounds.width) / 2.0)), y: 7.0 - UIScreenPixel), size: view.bounds.size)
+        }
 
         let panelHeight = defaultHeight(metrics: metrics)
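The scrubber is now capped at 424 points wide and centered: the component update only sets bounds, and a single follow-up pass positions the frame, so placement stays correct even on layout passes that skip the update closure. The resulting geometry, restated under the same inputs (width, leftInset, rightInset) the panel receives; a sketch, not the literal code:

    // Sketch: the scrubber geometry implied by the hunk above.
    let scrubberWidth = min(424.0, width - leftInset - rightInset - 45.0 * 2.0)
    let scrubberSize = CGSize(width: scrubberWidth, height: 33.0)
    // Center horizontally, but never intrude on the 45pt control strip.
    let originX = max(leftInset + 45.0, floorToScreenPixels((width - scrubberSize.width) / 2.0))
    view.frame = CGRect(origin: CGPoint(x: originX, y: 7.0 - UIScreenPixel), size: scrubberSize)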
- "commit": "7d2b8a50729adb8666dec011494892cef8bc176d", + "commit": "f1b83c87b7c0da94adad36a17d36b124e98b86c7", "remote": "git@bitbucket.org:mobyrix/nicegram-assistant-ios.git", "branch": "develop" } diff --git a/versions.json b/versions.json index 539763f63e2..783812bf6bf 100644 --- a/versions.json +++ b/versions.json @@ -1,5 +1,5 @@ { - "app": "1.5.3", + "app": "1.5.4", "bazel": "6.4.0", "xcode": "15.1", "macos": "13.0"