Skip to content

Commit

Permalink
Use vt for manually decoding frames. Fixes #533
Browse files Browse the repository at this point in the history
use pts from moonlight server to schedule frame display

use the decompression callback's unused frameRef field to propagate frameType information

Use an Objective-C callback for the decode session

Revert to direct decode, use PTS correctly
  • Loading branch information
felipejfc committed Nov 27, 2022
1 parent 3494962 commit 31560a0
Show file tree
Hide file tree
Showing 3 changed files with 118 additions and 66 deletions.
15 changes: 5 additions & 10 deletions Limelight/Stream/Connection.m
Original file line number Diff line number Diff line change
Expand Up @@ -55,14 +55,9 @@ int DrDecoderSetup(int videoFormat, int width, int height, int redrawRate, void*
return 0;
}

void DrStart(void)
void DrCleanup(void)
{
[renderer start];
}

void DrStop(void)
{
[renderer stop];
[renderer cleanup];
}

-(BOOL) getVideoStats:(video_stats_t*)stats
Expand Down Expand Up @@ -433,9 +428,9 @@ -(id) initWithConfig:(StreamConfiguration*)config renderer:(VideoDecoderRenderer

LiInitializeVideoCallbacks(&_drCallbacks);
_drCallbacks.setup = DrDecoderSetup;
_drCallbacks.start = DrStart;
_drCallbacks.stop = DrStop;
_drCallbacks.capabilities = CAPABILITY_PULL_RENDERER | CAPABILITY_REFERENCE_FRAME_INVALIDATION_HEVC;
_drCallbacks.cleanup = DrCleanup;
_drCallbacks.submitDecodeUnit = DrSubmitDecodeUnit;
_drCallbacks.capabilities = CAPABILITY_DIRECT_SUBMIT | CAPABILITY_REFERENCE_FRAME_INVALIDATION_HEVC;

LiInitializeAudioCallbacks(&_arCallbacks);
_arCallbacks.init = ArInit;
Expand Down
5 changes: 3 additions & 2 deletions Limelight/Stream/VideoDecoderRenderer.h
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,10 @@
- (id)initWithView:(UIView*)view callbacks:(id<ConnectionCallbacks>)callbacks streamAspectRatio:(float)aspectRatio useFramePacing:(BOOL)useFramePacing;

- (void)setupWithVideoFormat:(int)videoFormat frameRate:(int)frameRate;
- (void)start;
- (void)stop;
- (void)cleanup;

- (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(int)bufferType frameType:(int)frameType pts:(unsigned int)pts;

- (OSStatus)decodeFrameWithSampleBuffer:(CMSampleBufferRef)sampleBuffer frameType:(int)frameType;

@end
164 changes: 110 additions & 54 deletions Limelight/Stream/VideoDecoderRenderer.m
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
// Copyright (c) 2014 Moonlight Stream. All rights reserved.
//

@import VideoToolbox;

#import "VideoDecoderRenderer.h"
#import "StreamView.h"

Expand All @@ -23,6 +25,7 @@ @implementation VideoDecoderRenderer {

NSData *spsData, *ppsData, *vpsData;
CMVideoFormatDescriptionRef formatDesc;
VTDecompressionSessionRef decompressionSession;

CADisplayLink* _displayLink;
BOOL framePacing;
Expand Down Expand Up @@ -74,6 +77,12 @@ - (void)reinitializeDisplayLayer
CFRelease(formatDesc);
formatDesc = nil;
}

if (decompressionSession != nil){
VTDecompressionSessionInvalidate(decompressionSession);
CFRelease(decompressionSession);
decompressionSession = nil;
}
}

- (id)initWithView:(StreamView*)view callbacks:(id<ConnectionCallbacks>)callbacks streamAspectRatio:(float)aspectRatio useFramePacing:(BOOL)useFramePacing
Expand All @@ -94,10 +103,7 @@ - (void)setupWithVideoFormat:(int)videoFormat frameRate:(int)frameRate
{
self->videoFormat = videoFormat;
self->frameRate = frameRate;
}

- (void)start
{

_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
if (@available(iOS 15.0, tvOS 15.0, *)) {
_displayLink.preferredFrameRateRange = CAFrameRateRangeMake(self->frameRate, self->frameRate, self->frameRate);
Expand All @@ -106,38 +112,37 @@ - (void)start
_displayLink.preferredFramesPerSecond = self->frameRate;
}
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];

}

// Creates (or recreates) the VTDecompressionSession used to manually decode
// incoming frames. Any existing session is invalidated and released first so
// a stream format change (new SPS/PPS on an IDR) never leaks the old session.
// Requires formatDesc to have been created from the stream's parameter sets.
- (void) setupDecompressionSession {
    if (decompressionSession != NULL) {
        VTDecompressionSessionInvalidate(decompressionSession);
        CFRelease(decompressionSession);
        decompressionSession = NULL;
    }

    if (formatDesc == NULL) {
        // No format description yet; we'll be invoked again once the
        // parameter sets have arrived and formatDesc has been created.
        return;
    }

    // VTDecompressionSessionCreate returns an OSStatus, not an int.
    // Passing NULL for the output callback record because frames are
    // delivered via the per-frame output handler in
    // decodeFrameWithSampleBuffer:frameType:.
    OSStatus status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                                   formatDesc,
                                                   NULL,
                                                   NULL,
                                                   NULL,
                                                   &decompressionSession);
    if (status != noErr) {
        NSLog(@"Failed to create VTDecompressionSession, status %d", (int)status);
    }
}

// TODO: Refactor this
int DrSubmitDecodeUnit(PDECODE_UNIT decodeUnit);

- (void)displayLinkCallback:(CADisplayLink *)sender
{
VIDEO_FRAME_HANDLE handle;
PDECODE_UNIT du;

while (LiPollNextVideoFrame(&handle, &du)) {
LiCompleteVideoFrame(handle, DrSubmitDecodeUnit(du));

if (framePacing) {
// Calculate the actual display refresh rate
double displayRefreshRate = 1 / (_displayLink.targetTimestamp - _displayLink.timestamp);

// Only pace frames if the display refresh rate is >= 90% of our stream frame rate.
// Battery saver, accessibility settings, or device thermals can cause the actual
// refresh rate of the display to drop below the physical maximum.
if (displayRefreshRate >= frameRate * 0.9f) {
// Keep one pending frame to smooth out gaps due to
// network jitter at the cost of 1 frame of latency
if (LiGetPendingVideoFrames() == 1) {
break;
}
}
}
}
// Do Nothing
}

- (void)stop
- (void)cleanup
{
[_displayLink invalidate];
}
Expand Down Expand Up @@ -262,6 +267,8 @@ - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i
formatDesc = NULL;
}
}

[self setupDecompressionSession];
}

// Data is NOT to be freed here. It's a direct usage of the caller's buffer.
Expand Down Expand Up @@ -330,46 +337,34 @@ - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i

CMSampleBufferRef sampleBuffer;

status = CMSampleBufferCreate(kCFAllocatorDefault,
CMSampleTimingInfo sampleTiming = {kCMTimeInvalid, CMTimeMake(pts, 1000), kCMTimeInvalid};

status = CMSampleBufferCreateReady(kCFAllocatorDefault,
frameBlockBuffer,
true, NULL,
NULL, formatDesc, 1, 0,
NULL, 0, NULL,
formatDesc, 1, 1,
&sampleTiming, 0, NULL,
&sampleBuffer);
if (status != noErr) {
Log(LOG_E, @"CMSampleBufferCreate failed: %d", (int)status);
CFRelease(dataBlockBuffer);
CFRelease(frameBlockBuffer);
return DR_NEED_IDR;
}

OSStatus decodeStatus = [self decodeFrameWithSampleBuffer: sampleBuffer frameType: frameType];

CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);

CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
CFDictionarySetValue(dict, kCMSampleAttachmentKey_IsDependedOnByOthers, kCFBooleanTrue);

if (frameType == FRAME_TYPE_PFRAME) {
// P-frame
CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanTrue);
CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanTrue);
if (decodeStatus != noErr){
NSLog(@"Failed to decompress frame");
} else {
// I-frame
CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanFalse);
CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanFalse);

}

// Enqueue the next frame
[self->displayLayer enqueueSampleBuffer:sampleBuffer];

if (frameType == FRAME_TYPE_IDR) {
// Ensure the layer is visible now
self->displayLayer.hidden = NO;

// Tell our parent VC to hide the progress indicator
[self->_callbacks videoContentShown];
}
/* Flush in-process frames. */
//VTDecompressionSessionFinishDelayedFrames(decompressionSession);

/* Block until our callback has been called with the last frame. */
//VTDecompressionSessionWaitForAsynchronousFrames(decompressionSession);

// Dereference the buffers
CFRelease(dataBlockBuffer);
CFRelease(frameBlockBuffer);
Expand All @@ -378,4 +373,65 @@ - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i
return DR_OK;
}

// Submits a compressed sample buffer to the VTDecompressionSession for
// asynchronous decoding, then wraps each decoded image in a new sample
// buffer (preserving the original presentation timestamp) and enqueues it
// on the display layer so it is presented at the correct PTS.
//
// frameType drives the sync/dependency sample attachments (IDR vs P-frame)
// and, on an IDR, unhides the layer and notifies the callbacks that video
// content is now visible.
//
// Returns the status of the decode *submission*; per-frame decode errors
// are reported asynchronously inside the output handler.
- (OSStatus) decodeFrameWithSampleBuffer:(CMSampleBufferRef)sampleBuffer frameType:(int)frameType{
    VTDecodeFrameFlags flags = kVTDecodeFrame_EnableAsynchronousDecompression;
    VTDecodeInfoFlags flagOut = 0;

    return VTDecompressionSessionDecodeFrameWithOutputHandler(decompressionSession, sampleBuffer, flags, &flagOut, ^(OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef _Nullable imageBuffer, CMTime presentationTimestamp, CMTime presentationDuration) {
        if (status != noErr)
        {
            NSError *error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
            NSLog(@"Decompression session error: %@", error);
            // Bail out: imageBuffer is not valid on failure, and the
            // original code crashed here by dereferencing it anyway.
            return;
        }

        if (imageBuffer == NULL) {
            // The decoder dropped this frame (see kVTDecodeInfo_FrameDropped);
            // nothing to display.
            return;
        }

        CMVideoFormatDescriptionRef formatDescriptionRef;

        OSStatus res = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, imageBuffer, &formatDescriptionRef);
        if (res != noErr){
            NSLog(@"Failed to create video format description from imageBuffer");
            return;
        }

        // Use a distinct name so we don't shadow the method's sampleBuffer
        // parameter. Only the PTS matters; the layer schedules display by it.
        CMSampleBufferRef displayBuffer;
        CMSampleTimingInfo sampleTiming = {kCMTimeInvalid, presentationTimestamp, kCMTimeInvalid};

        OSStatus err = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, imageBuffer, formatDescriptionRef, &sampleTiming, &displayBuffer);

        if (err != noErr){
            NSLog(@"Error creating sample buffer for decompressed image buffer %d", (int)err);
            // Don't leak the format description on this early return.
            CFRelease(formatDescriptionRef);
            return;
        }

        CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(displayBuffer, YES);
        CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);

        CFDictionarySetValue(dict, kCMSampleAttachmentKey_IsDependedOnByOthers, kCFBooleanTrue);

        if (frameType == FRAME_TYPE_PFRAME) {
            // P-frame: depends on earlier frames and is not a sync point.
            CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanTrue);
            CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanTrue);
        } else {
            // I-frame: independently decodable sync point.
            CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanFalse);
            CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanFalse);
        }

        // Hand the decoded frame to the display layer for PTS-scheduled display.
        [self->displayLayer enqueueSampleBuffer:displayBuffer];

        // UI mutations must happen on the main thread; the decompression
        // output handler may run on a VideoToolbox-internal queue.
        dispatch_async(dispatch_get_main_queue(), ^{
            if (frameType == FRAME_TYPE_IDR) {
                // Ensure the layer is visible now
                self->displayLayer.hidden = NO;

                // Tell our parent VC to hide the progress indicator
                [self->_callbacks videoContentShown];
            }
        });

        CFRelease(displayBuffer);
        CFRelease(formatDescriptionRef);
    });
}

@end

0 comments on commit 31560a0

Please sign in to comment.