From e7f59e2e049903b3e4e200d8dce3f2a11c5e29a1 Mon Sep 17 00:00:00 2001 From: olzzon Date: Mon, 11 Mar 2024 11:14:54 +0100 Subject: [PATCH 01/81] wip: hotStandby - implement in device settings --- packages/mos-gateway/src/$schemas/devices.json | 6 ++++++ packages/mos-gateway/src/generated/devices.ts | 1 + 2 files changed, 7 insertions(+) diff --git a/packages/mos-gateway/src/$schemas/devices.json b/packages/mos-gateway/src/$schemas/devices.json index 1281fa38e51..1bf9b35c5d1 100644 --- a/packages/mos-gateway/src/$schemas/devices.json +++ b/packages/mos-gateway/src/$schemas/devices.json @@ -105,6 +105,12 @@ "ui:description": "How often to ping NRCS to determine connection status", "default": 30000 }, + "hotStandby": { + "type": "boolean", + "ui:title": "Secondary: Hot Standby", + "ui:description": "Is the secondary connection a hot standby for the primary", + "default": false + }, "ports": { "type": "object", "ui:title": "Ports", diff --git a/packages/mos-gateway/src/generated/devices.ts b/packages/mos-gateway/src/generated/devices.ts index 4585db5efd7..59bf8a31b20 100644 --- a/packages/mos-gateway/src/generated/devices.ts +++ b/packages/mos-gateway/src/generated/devices.ts @@ -24,6 +24,7 @@ export interface MosDeviceConfig { dontUseQueryPort?: boolean timeout?: number heartbeatInterval?: number + hotStandby?: boolean ports?: { lower: number upper: number From cef3236a1c5b741bb33abf4f13de2b616145c490 Mon Sep 17 00:00:00 2001 From: olzzon Date: Mon, 11 Mar 2024 11:19:29 +0100 Subject: [PATCH 02/81] wip: hotStandby - implement hotStandby error handling and messages --- .../mos-gateway/src/CoreMosDeviceHandler.ts | 42 +++++++++++++------ packages/mos-gateway/src/coreHandler.ts | 8 +++- packages/mos-gateway/src/mosHandler.ts | 9 ++-- 3 files changed, 40 insertions(+), 19 deletions(-) diff --git a/packages/mos-gateway/src/CoreMosDeviceHandler.ts b/packages/mos-gateway/src/CoreMosDeviceHandler.ts index a5da2de82cd..2eeefdd34cd 100644 --- 
a/packages/mos-gateway/src/CoreMosDeviceHandler.ts +++ b/packages/mos-gateway/src/CoreMosDeviceHandler.ts @@ -75,13 +75,15 @@ export class CoreMosDeviceHandler { private _pendingStoryItemChanges: Array = [] private _pendingChangeTimeout: number = 60 * 1000 private mosTypes: MosTypes + private _hotStandby: boolean private _messageQueue: Queue - constructor(parent: CoreHandler, mosDevice: IMOSDevice, mosHandler: MosHandler) { + constructor(parent: CoreHandler, mosDevice: IMOSDevice, mosHandler: MosHandler, hotStandby: boolean) { this._coreParentHandler = parent this._mosDevice = mosDevice this._mosHandler = mosHandler + this._hotStandby = hotStandby this._messageQueue = new Queue() @@ -138,25 +140,39 @@ export class CoreMosDeviceHandler { let statusCode: StatusCode const messages: Array = [] - if (connectionStatus.PrimaryConnected) { - if (connectionStatus.SecondaryConnected || !this._mosDevice.idSecondary) { + if (this._hotStandby) { + if (connectionStatus.PrimaryConnected) { statusCode = StatusCode.GOOD } else { - statusCode = StatusCode.WARNING_MINOR + if (connectionStatus.SecondaryConnected) { + statusCode = StatusCode.GOOD + messages.push(connectionStatus.SecondaryStatus || 'Running NRCS on hot standby') + } else { + statusCode = StatusCode.BAD + messages.push(connectionStatus.SecondaryStatus || 'Primary and hot standby are not connected') + } } } else { - if (connectionStatus.SecondaryConnected) { - statusCode = StatusCode.WARNING_MAJOR + if (connectionStatus.PrimaryConnected) { + if (connectionStatus.SecondaryConnected || !this._mosDevice.idSecondary) { + statusCode = StatusCode.GOOD + } else { + statusCode = StatusCode.WARNING_MINOR + } } else { - statusCode = StatusCode.BAD + if (connectionStatus.SecondaryConnected) { + statusCode = StatusCode.WARNING_MAJOR + } else { + statusCode = StatusCode.BAD + } } - } - if (!connectionStatus.PrimaryConnected) { - messages.push(connectionStatus.PrimaryStatus || 'Primary not connected') - } - if 
(this._mosDevice.idSecondary && !connectionStatus.SecondaryConnected) { - messages.push(connectionStatus.SecondaryStatus || 'Fallback not connected') + if (!connectionStatus.PrimaryConnected) { + messages.push(connectionStatus.PrimaryStatus || 'Primary not connected') + } + if (this._mosDevice.idSecondary && !connectionStatus.SecondaryConnected) { + messages.push(connectionStatus.SecondaryStatus || 'Fallback not connected') + } } this.core diff --git a/packages/mos-gateway/src/coreHandler.ts b/packages/mos-gateway/src/coreHandler.ts index dee6f39bf6a..502aa756c4d 100644 --- a/packages/mos-gateway/src/coreHandler.ts +++ b/packages/mos-gateway/src/coreHandler.ts @@ -142,9 +142,13 @@ export class CoreHandler { return options } - async registerMosDevice(mosDevice: IMOSDevice, mosHandler: MosHandler): Promise { + async registerMosDevice( + mosDevice: IMOSDevice, + mosHandler: MosHandler, + hotStandby: boolean + ): Promise { this.logger.info('registerMosDevice -------------') - const coreMos = new CoreMosDeviceHandler(this, mosDevice, mosHandler) + const coreMos = new CoreMosDeviceHandler(this, mosDevice, mosHandler, hotStandby) this._coreMosHandlers.push(coreMos) return coreMos.init().then(() => { diff --git a/packages/mos-gateway/src/mosHandler.ts b/packages/mos-gateway/src/mosHandler.ts index c06429124d5..365a674c672 100644 --- a/packages/mos-gateway/src/mosHandler.ts +++ b/packages/mos-gateway/src/mosHandler.ts @@ -59,6 +59,7 @@ export class MosHandler { private _logger: Winston.Logger private _disposed = false private _settings?: MosGatewayConfig + private _hotStandby: boolean private _coreHandler: CoreHandler | undefined private _observers: Array> = [] private _triggerupdateDevicesTimeout: any = null @@ -66,6 +67,7 @@ export class MosHandler { constructor(logger: Winston.Logger) { this._logger = logger + this._hotStandby = false this.mosTypes = getMosTypes(this.strict) // temporary, another will be set upon init() } async init(config: MosConfig, coreHandler: 
CoreHandler): Promise { @@ -101,7 +103,7 @@ export class MosHandler { this.mosTypes = getMosTypes(this.strict) - await this._initMosConnection() + await this._updateDevices() if (!this._coreHandler) throw Error('_coreHandler is undefined!') this._coreHandler.onConnected(() => { @@ -110,8 +112,6 @@ export class MosHandler { this.sendStatusOfAllMosDevices() }) this.setupObservers() - - return this._updateDevices() } async dispose(): Promise { this._disposed = true @@ -243,7 +243,7 @@ export class MosHandler { if (!this._coreHandler) throw Error('_coreHandler is undefined!') - const coreMosHandler = await this._coreHandler.registerMosDevice(mosDevice, this) + const coreMosHandler = await this._coreHandler.registerMosDevice(mosDevice, this, this._hotStandby) // this._logger.info('mosDevice registered -------------') // Setup message flow between the devices: @@ -420,6 +420,7 @@ export class MosHandler { for (const [deviceId, device] of Object.entries<{ options: MosDeviceConfig }>(devices)) { if (device) { if (device.options.secondary) { + this._hotStandby = device.options.secondary?.hotStandby || false // If the host isn't set, don't use secondary: if (!device.options.secondary.host || !device.options.secondary.id) delete device.options.secondary From 484b0d78e78ab1469986eec170a62d4428ebdc44 Mon Sep 17 00:00:00 2001 From: olzzon Date: Thu, 21 Mar 2024 11:42:17 +0100 Subject: [PATCH 03/81] wip: hotStandby - comment error message behavior --- packages/mos-gateway/src/CoreMosDeviceHandler.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/packages/mos-gateway/src/CoreMosDeviceHandler.ts b/packages/mos-gateway/src/CoreMosDeviceHandler.ts index 2eeefdd34cd..ed300178684 100644 --- a/packages/mos-gateway/src/CoreMosDeviceHandler.ts +++ b/packages/mos-gateway/src/CoreMosDeviceHandler.ts @@ -141,9 +141,12 @@ export class CoreMosDeviceHandler { const messages: Array = [] if (this._hotStandby) { + // OpenMedia treats secondary server as hot-standby + // And thus is not 
considered as a warning if it's not connected if (connectionStatus.PrimaryConnected) { statusCode = StatusCode.GOOD } else { + // Primary not connected is only bad if there is no secondary: if (connectionStatus.SecondaryConnected) { statusCode = StatusCode.GOOD messages.push(connectionStatus.SecondaryStatus || 'Running NRCS on hot standby') @@ -154,6 +157,7 @@ export class CoreMosDeviceHandler { } } else { if (connectionStatus.PrimaryConnected) { + // ENPS expect both Primary and Secondary to be connected if both of them are configured if (connectionStatus.SecondaryConnected || !this._mosDevice.idSecondary) { statusCode = StatusCode.GOOD } else { @@ -161,8 +165,10 @@ export class CoreMosDeviceHandler { } } else { if (connectionStatus.SecondaryConnected) { + // Primary not connected should give a warning if Secondary is used. statusCode = StatusCode.WARNING_MAJOR } else { + // If neither Primary nor Secondary is connected, it's a bad state. statusCode = StatusCode.BAD } } From 783bedf6fbfa6e74e153873c629636d513c86105 Mon Sep 17 00:00:00 2001 From: ianshade Date: Thu, 16 May 2024 11:46:32 +0200 Subject: [PATCH 04/81] wip: countdownType --- .../lib/__tests__/rundownTiming.test.ts | 34 +++++------------ meteor/client/lib/rundownTiming.ts | 38 +++++++------------ .../client/ui/ClockView/PresenterScreen.tsx | 12 +++--- .../RundownTiming/SegmentDuration.tsx | 10 ++--- .../SegmentContainer/withResolvedSegment.ts | 7 +--- .../ui/SegmentList/SegmentListHeader.tsx | 2 +- .../SegmentScratchpad/SegmentScratchpad.tsx | 1 - .../SegmentScratchpadContainer.tsx | 1 - .../SegmentStoryboard/SegmentStoryboard.tsx | 2 +- .../SegmentStoryboardContainer.tsx | 1 - .../ui/SegmentTimeline/SegmentTimeline.tsx | 22 +++++++---- .../SegmentTimelineContainer.tsx | 14 +------ meteor/client/ui/Shelf/SegmentTimingPanel.tsx | 2 +- .../src/documents/part.ts | 3 -- .../src/documents/segment.ts | 13 +++++++ .../job-worker/src/blueprints/context/lib.ts | 1 - .../src/topics/activePlaylistTopic.ts | 4 +- 
.../src/topics/helpers/segmentTiming.ts | 10 ++--- 18 files changed, 73 insertions(+), 104 deletions(-) diff --git a/meteor/client/lib/__tests__/rundownTiming.test.ts b/meteor/client/lib/__tests__/rundownTiming.test.ts index 4557beec470..bb7e943834a 100644 --- a/meteor/client/lib/__tests__/rundownTiming.test.ts +++ b/meteor/client/lib/__tests__/rundownTiming.test.ts @@ -62,6 +62,7 @@ function makeMockSegment( timing?: { expectedStart?: number expectedEnd?: number + budgetDuration?: number } ): DBSegment { return literal({ @@ -143,7 +144,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentBudgetDurations: {}, segmentStartedPlayback: {}, }) ) @@ -246,7 +246,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentBudgetDurations: {}, segmentStartedPlayback: {}, }) ) @@ -349,7 +348,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentBudgetDurations: {}, segmentStartedPlayback: {}, }) ) @@ -456,7 +454,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentBudgetDurations: {}, segmentStartedPlayback: {}, }) ) @@ -584,7 +581,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentBudgetDurations: {}, segmentStartedPlayback: {}, }) ) @@ -740,7 +736,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: 2500, rundownsBeforeNextBreak: [], - segmentBudgetDurations: {}, segmentStartedPlayback: {}, nextRundownAnchor: undefined, }) @@ -897,7 +892,6 @@ describe('rundown Timing 
Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: -4000, rundownsBeforeNextBreak: [], - segmentBudgetDurations: {}, segmentStartedPlayback: {}, nextRundownAnchor: undefined, }) @@ -1003,7 +997,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentBudgetDurations: {}, segmentStartedPlayback: {}, }) ) @@ -1142,7 +1135,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentBudgetDurations: {}, segmentStartedPlayback: {}, }) ) @@ -1161,24 +1153,27 @@ describe('rundown Timing Calculator', () => { const segmentId1 = 'segment1' const segmentId2 = 'segment2' const segmentsMap: Map = new Map() - segmentsMap.set(protectString(segmentId1), makeMockSegment(segmentId1, 0, rundownId1)) - segmentsMap.set(protectString(segmentId2), makeMockSegment(segmentId2, 0, rundownId1)) + segmentsMap.set( + protectString(segmentId1), + makeMockSegment(segmentId1, 0, rundownId1, { budgetDuration: 5000 }) + ) + segmentsMap.set( + protectString(segmentId2), + makeMockSegment(segmentId2, 0, rundownId1, { budgetDuration: 3000 }) + ) const parts: DBPart[] = [] parts.push( makeMockPart('part1', 0, rundownId1, segmentId1, { - budgetDuration: 2000, expectedDuration: 1000, }) ) parts.push( makeMockPart('part2', 0, rundownId1, segmentId1, { - budgetDuration: 3000, expectedDuration: 1000, }) ) parts.push( makeMockPart('part3', 0, rundownId1, segmentId2, { - budgetDuration: 3000, expectedDuration: 1000, }) ) @@ -1261,10 +1256,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentBudgetDurations: { - [segmentId1]: 5000, - [segmentId2]: 3000, - }, segmentStartedPlayback: {}, }) ) @@ -1386,7 +1377,6 @@ describe('rundown Timing Calculator', () => { 
breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentBudgetDurations: {}, segmentStartedPlayback: {}, nextRundownAnchor: undefined, }) @@ -1536,7 +1526,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: 500, rundownsBeforeNextBreak: [], - segmentBudgetDurations: {}, segmentStartedPlayback: {}, nextRundownAnchor: 2000, }) @@ -1686,7 +1675,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: -1500, rundownsBeforeNextBreak: [], - segmentBudgetDurations: {}, segmentStartedPlayback: {}, nextRundownAnchor: 4000, }) @@ -1842,7 +1830,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: 500, rundownsBeforeNextBreak: [], - segmentBudgetDurations: {}, segmentStartedPlayback: {}, nextRundownAnchor: 3000, }) @@ -1992,7 +1979,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: 500, rundownsBeforeNextBreak: [], - segmentBudgetDurations: {}, segmentStartedPlayback: {}, nextRundownAnchor: 2000, }) @@ -2142,7 +2128,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: -1500, rundownsBeforeNextBreak: [], - segmentBudgetDurations: {}, segmentStartedPlayback: {}, nextRundownAnchor: 4000, }) @@ -2298,7 +2283,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: 500, rundownsBeforeNextBreak: [], - segmentBudgetDurations: {}, segmentStartedPlayback: {}, nextRundownAnchor: 3000, }) diff --git a/meteor/client/lib/rundownTiming.ts b/meteor/client/lib/rundownTiming.ts index 164556c0e4c..cefdaa27200 100644 --- a/meteor/client/lib/rundownTiming.ts +++ b/meteor/client/lib/rundownTiming.ts @@ -66,7 +66,6 @@ export class RundownTimingCalculator { private partDisplayDurations: Record = {} private 
partDisplayDurationsNoPlayback: Record = {} private displayDurationGroups: Record = {} - private segmentBudgetDurations: Record = {} private segmentStartedPlayback: Record = {} private segmentAsPlayedDurations: Record = {} private breakProps: { @@ -123,6 +122,7 @@ export class RundownTimingCalculator { let displayStartsAtAccumulator = 0 let segmentDisplayDuration = 0 let segmentBudgetDurationLeft = 0 + let remainingBudgetOnCurrentSegment: undefined | number const rundownExpectedDurations: Record = {} const rundownAsPlayedDurations: Record = {} @@ -132,7 +132,6 @@ export class RundownTimingCalculator { let liveSegmentId: SegmentId | undefined Object.keys(this.displayDurationGroups).forEach((key) => delete this.displayDurationGroups[key]) - Object.keys(this.segmentBudgetDurations).forEach((key) => delete this.segmentBudgetDurations[key]) Object.keys(this.segmentStartedPlayback).forEach((key) => delete this.segmentStartedPlayback[key]) Object.keys(this.segmentAsPlayedDurations).forEach((key) => delete this.segmentAsPlayedDurations[key]) this.untimedSegments.clear() @@ -156,18 +155,6 @@ export class RundownTimingCalculator { this.nextSegmentId = undefined } - partInstances.forEach((partInstance) => { - const origPart = partInstance.part - if (origPart.budgetDuration !== undefined) { - const segmentId = unprotectString(origPart.segmentId) - if (this.segmentBudgetDurations[segmentId] !== undefined) { - this.segmentBudgetDurations[unprotectString(origPart.segmentId)] += origPart.budgetDuration - } else { - this.segmentBudgetDurations[unprotectString(origPart.segmentId)] = origPart.budgetDuration - } - } - }) - segmentEntryPartInstances.forEach((partInstance) => { if (partInstance.timings?.reportedStartedPlayback !== undefined) this.segmentStartedPlayback[unprotectString(partInstance.segmentId)] = @@ -179,6 +166,9 @@ export class RundownTimingCalculator { const partInstanceId = !partInstance.isTemporary ? 
partInstance._id : null const partInstanceOrPartId = unprotectString(partInstanceId ?? partId) const piecesForPart = pieces.get(partId) ?? [] + const partsSegment = segmentsMap.get(partInstance.segmentId) + const segmentBudget = partsSegment?.segmentTiming?.budgetDuration + const segmentUsesBudget = segmentBudget !== undefined if (partInstance.segmentId !== lastSegmentId) { this.untimedSegments.add(partInstance.segmentId) @@ -187,7 +177,10 @@ export class RundownTimingCalculator { if (segmentBudgetDurationLeft > 0) { waitAccumulator += segmentBudgetDurationLeft } - segmentBudgetDurationLeft = this.segmentBudgetDurations[unprotectString(partInstance.segmentId)] + if (lastSegmentId === liveSegmentId) { + remainingBudgetOnCurrentSegment = segmentBudgetDurationLeft + } + segmentBudgetDurationLeft = segmentBudget ?? 0 } // add piece to accumulator @@ -208,9 +201,6 @@ export class RundownTimingCalculator { (itIndex >= currentAIndex && currentAIndex >= 0) || (itIndex >= nextAIndex && nextAIndex >= 0 && currentAIndex === -1) - const segmentUsesBudget = - this.segmentBudgetDurations[unprotectString(partInstance.segmentId)] !== undefined - const partIsUntimed = partInstance.part.untimed || false if (!partIsUntimed) { @@ -307,9 +297,7 @@ export class RundownTimingCalculator { if (segmentUsesBudget) { currentRemaining = Math.max( 0, - this.segmentBudgetDurations[unprotectString(partInstance.segmentId)] - - segmentDisplayDuration - - (now - segmentStartedPlayback) + segmentBudget - segmentDisplayDuration - (now - segmentStartedPlayback) ) segmentBudgetDurationLeft = 0 } else { @@ -570,7 +558,7 @@ export class RundownTimingCalculator { if (segment._id === this.nextSegmentId) { nextSegmentIndex = itIndex } - const segmentBudgetDuration = this.segmentBudgetDurations[unprotectString(segment._id)] + const segmentBudgetDuration = segment.segmentTiming?.budgetDuration // If all of the Parts in a Segment are untimed, do not consider the Segment for // Playlist Remaining and As-Played 
durations. @@ -649,10 +637,10 @@ export class RundownTimingCalculator { partDisplayStartsAt: this.partDisplayStartsAt, partExpectedDurations: this.partExpectedDurations, partDisplayDurations: this.partDisplayDurations, - segmentBudgetDurations: this.segmentBudgetDurations, segmentStartedPlayback: this.segmentStartedPlayback, currentTime: now, remainingTimeOnCurrentPart, + remainingBudgetOnCurrentSegment, currentPartWillAutoNext, rundownsBeforeNextBreak, breakIsLastRundown, @@ -739,12 +727,12 @@ export interface RundownTimingContext { * if the Part does not have an expected duration. */ partExpectedDurations?: Record - /** Budget durations of segments (sum of parts budget durations). */ - segmentBudgetDurations?: Record /** Time when selected segments started playback. Contains only the current segment and the segment before, if we've just entered a new one */ segmentStartedPlayback?: Record /** Remaining time on current part */ remainingTimeOnCurrentPart?: number + /** Remaining budget on current segment */ + remainingBudgetOnCurrentSegment?: number /** Current part will autoNext */ currentPartWillAutoNext?: boolean /** Current time of this calculation */ diff --git a/meteor/client/ui/ClockView/PresenterScreen.tsx b/meteor/client/ui/ClockView/PresenterScreen.tsx index 5a3529083e9..d40d0ef41df 100644 --- a/meteor/client/ui/ClockView/PresenterScreen.tsx +++ b/meteor/client/ui/ClockView/PresenterScreen.tsx @@ -13,7 +13,7 @@ import { PieceIconContainer } from '../PieceIcons/PieceIcon' import { PieceNameContainer } from '../PieceIcons/PieceName' import { Timediff } from './Timediff' import { RundownUtils } from '../../lib/rundown' -import { PieceLifespan } from '@sofie-automation/blueprints-integration' +import { CountdownType, PieceLifespan } from '@sofie-automation/blueprints-integration' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { PieceCountdownContainer } from '../PieceIcons/PieceCountdown' import { PlaylistTiming } from 
'@sofie-automation/corelib/dist/playout/rundownTiming' @@ -433,9 +433,11 @@ function PresenterScreenContentDefaultLayout({ rundownIds, }: Readonly>) { if (playlist && playlistId && segments) { - let currentPartCountdown = 0 - if (currentPartInstance) { - currentPartCountdown = timingDurations.remainingTimeOnCurrentPart || 0 + let currentPartOrSegmentCountdown = 0 + if (currentSegment?.segmentTiming?.countdownType === CountdownType.SEGMENT_BUDGET_DURATION) { + currentPartOrSegmentCountdown = timingDurations.remainingBudgetOnCurrentSegment ?? 0 + } else if (currentPartInstance) { + currentPartOrSegmentCountdown = timingDurations.remainingTimeOnCurrentPart || 0 } const expectedStart = PlaylistTiming.getExpectedStart(playlist.timing) @@ -485,7 +487,7 @@ function PresenterScreenContentDefaultLayout({ />
- +
) : expectedStart ? ( diff --git a/meteor/client/ui/RundownView/RundownTiming/SegmentDuration.tsx b/meteor/client/ui/RundownView/RundownTiming/SegmentDuration.tsx index 5beb44330e6..bbc16a534de 100644 --- a/meteor/client/ui/RundownView/RundownTiming/SegmentDuration.tsx +++ b/meteor/client/ui/RundownView/RundownTiming/SegmentDuration.tsx @@ -1,18 +1,18 @@ import classNames from 'classnames' import React, { ReactNode } from 'react' import { withTiming, WithTiming } from './withTiming' -import { unprotectString } from '../../../../lib/lib' import { RundownUtils } from '../../../lib/rundown' import { PartUi } from '../../SegmentTimeline/SegmentTimelineContainer' import { calculatePartInstanceExpectedDurationWithPreroll, CalculateTimingsPiece, } from '@sofie-automation/corelib/dist/playout/timings' -import { PartId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PartId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { getPartInstanceTimingId } from '../../../lib/rundownTiming' +import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' interface ISegmentDurationProps { - segmentId: SegmentId + segment: DBSegment parts: PartUi[] pieces: Map label?: ReactNode @@ -36,9 +36,7 @@ export const SegmentDuration = withTiming()(function let budget = 0 let playedOut = 0 - const segmentBudgetDuration = - props.timingDurations.segmentBudgetDurations && - props.timingDurations.segmentBudgetDurations[unprotectString(props.segmentId)] + const segmentBudgetDuration = props.segment.segmentTiming?.budgetDuration if (segmentBudgetDuration !== undefined) { budget = segmentBudgetDuration diff --git a/meteor/client/ui/SegmentContainer/withResolvedSegment.ts b/meteor/client/ui/SegmentContainer/withResolvedSegment.ts index a0f56852a11..35b6fb720ad 100644 --- a/meteor/client/ui/SegmentContainer/withResolvedSegment.ts +++ b/meteor/client/ui/SegmentContainer/withResolvedSegment.ts @@ -269,12 +269,7 @@ export function withResolvedSegment 
{playlist && parts && parts.length > 0 && ( {t('Duration')}} diff --git a/meteor/client/ui/SegmentScratchpad/SegmentScratchpad.tsx b/meteor/client/ui/SegmentScratchpad/SegmentScratchpad.tsx index 197a9a18f9a..08d2d40ee4d 100644 --- a/meteor/client/ui/SegmentScratchpad/SegmentScratchpad.tsx +++ b/meteor/client/ui/SegmentScratchpad/SegmentScratchpad.tsx @@ -55,7 +55,6 @@ interface IProps { onHeaderNoteClick?: (segmentId: SegmentId, level: NoteSeverity) => void isLastSegment: boolean lastValidPartIndex: number | undefined - budgetDuration?: number showCountdownToSegment: boolean fixedSegmentDuration: boolean | undefined subscriptionsReady: boolean diff --git a/meteor/client/ui/SegmentScratchpad/SegmentScratchpadContainer.tsx b/meteor/client/ui/SegmentScratchpad/SegmentScratchpadContainer.tsx index faf192ae463..ef0e23cea50 100644 --- a/meteor/client/ui/SegmentScratchpad/SegmentScratchpadContainer.tsx +++ b/meteor/client/ui/SegmentScratchpad/SegmentScratchpadContainer.tsx @@ -227,7 +227,6 @@ export const SegmentScratchpadContainer = withResolvedSegment(function S isLastSegment={props.isLastSegment} lastValidPartIndex={props.lastValidPartIndex} onHeaderNoteClick={props.onHeaderNoteClick} - budgetDuration={props.budgetDuration} showCountdownToSegment={props.showCountdownToSegment} fixedSegmentDuration={props.fixedSegmentDuration} subscriptionsReady={initialSubscriptionsReady} diff --git a/meteor/client/ui/SegmentStoryboard/SegmentStoryboard.tsx b/meteor/client/ui/SegmentStoryboard/SegmentStoryboard.tsx index 34274c549cb..6e0ace56f64 100644 --- a/meteor/client/ui/SegmentStoryboard/SegmentStoryboard.tsx +++ b/meteor/client/ui/SegmentStoryboard/SegmentStoryboard.tsx @@ -614,7 +614,7 @@ export const SegmentStoryboard = React.memo( props.parts.length > 0 && (!props.hasAlreadyPlayed || props.isNextSegment || props.isLiveSegment) && ( {t('Duration')}} diff --git a/meteor/client/ui/SegmentStoryboard/SegmentStoryboardContainer.tsx 
b/meteor/client/ui/SegmentStoryboard/SegmentStoryboardContainer.tsx index 157951927dc..8945d7c072d 100644 --- a/meteor/client/ui/SegmentStoryboard/SegmentStoryboardContainer.tsx +++ b/meteor/client/ui/SegmentStoryboard/SegmentStoryboardContainer.tsx @@ -228,7 +228,6 @@ export const SegmentStoryboardContainer = withResolvedSegment(function S lastValidPartIndex={props.lastValidPartIndex} onHeaderNoteClick={props.onHeaderNoteClick} onSwitchViewMode={props.onSwitchViewMode} - budgetDuration={props.budgetDuration} showCountdownToSegment={props.showCountdownToSegment} fixedSegmentDuration={props.fixedSegmentDuration} subscriptionsReady={initialSubscriptionsReady} diff --git a/meteor/client/ui/SegmentTimeline/SegmentTimeline.tsx b/meteor/client/ui/SegmentTimeline/SegmentTimeline.tsx index c374166a892..a2e90790d19 100644 --- a/meteor/client/ui/SegmentTimeline/SegmentTimeline.tsx +++ b/meteor/client/ui/SegmentTimeline/SegmentTimeline.tsx @@ -95,7 +95,6 @@ interface IProps { segmentRef?: (el: SegmentTimelineClass, segmentId: SegmentId) => void isLastSegment: boolean lastValidPartIndex: number | undefined - budgetDuration?: number showCountdownToSegment: boolean showDurationSourceLayers?: Set fixedSegmentDuration: boolean | undefined @@ -621,6 +620,7 @@ export class SegmentTimelineClass extends React.Component part.instance._id === this.props.playlist.currentPartInfo?.partInstanceId @@ -871,7 +876,7 @@ export class SegmentTimelineClass extends React.Component ) } @@ -950,12 +955,13 @@ export class SegmentTimelineClass extends React.Component } @@ -1106,7 +1112,7 @@ export class SegmentTimelineClass extends React.Component 0 && (!this.props.hasAlreadyPlayed || this.props.isNextSegment || this.props.isLiveSegment) && ( {t('Duration')}} diff --git a/meteor/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx b/meteor/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx index cef1b2ab025..4ca5cedcbf7 100644 --- a/meteor/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx 
+++ b/meteor/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx @@ -370,18 +370,7 @@ const SegmentTimelineContainerContent = withResolvedSegment( } private getSegmentBudgetDuration(): number | undefined { - let duration = 0 - let anyBudgetDurations = false - for (const part of this.props.parts) { - if (part.instance.part.budgetDuration !== undefined) { - anyBudgetDurations = true - duration += part.instance.part.budgetDuration - } - } - if (anyBudgetDurations) { - return duration - } - return undefined + return this.props.segmentui?.segmentTiming?.budgetDuration } onWindowResize = _.throttle(() => { @@ -708,7 +697,6 @@ const SegmentTimelineContainerContent = withResolvedSegment( isLastSegment={this.props.isLastSegment} lastValidPartIndex={this.props.lastValidPartIndex} onHeaderNoteClick={this.props.onHeaderNoteClick} - budgetDuration={this.props.budgetDuration} showCountdownToSegment={this.props.showCountdownToSegment} fixedSegmentDuration={this.props.fixedSegmentDuration} showDurationSourceLayers={this.props.showDurationSourceLayers} diff --git a/meteor/client/ui/Shelf/SegmentTimingPanel.tsx b/meteor/client/ui/Shelf/SegmentTimingPanel.tsx index 0b7fc44011f..d3cb7aeee26 100644 --- a/meteor/client/ui/Shelf/SegmentTimingPanel.tsx +++ b/meteor/client/ui/Shelf/SegmentTimingPanel.tsx @@ -65,7 +65,7 @@ class SegmentTimingPanelInner extends React.Component< )} {this.props.active && this.props.liveSegment && this.props.parts && this.props.pieces && ( ): IBlueprintP disableNextInTransition: part.disableNextInTransition, outTransition: clone(part.outTransition), expectedDuration: part.expectedDuration, - budgetDuration: part.budgetDuration, holdMode: part.holdMode, shouldNotifyCurrentPlayingPart: part.shouldNotifyCurrentPlayingPart, classes: clone(part.classes), diff --git a/packages/live-status-gateway/src/topics/activePlaylistTopic.ts b/packages/live-status-gateway/src/topics/activePlaylistTopic.ts index e511a074df2..d592436b667 100644 --- 
a/packages/live-status-gateway/src/topics/activePlaylistTopic.ts +++ b/packages/live-status-gateway/src/topics/activePlaylistTopic.ts @@ -15,6 +15,7 @@ import _ = require('underscore') import { PartTiming, calculateCurrentPartTiming } from './helpers/partTiming' import { SelectedPieceInstances, PieceInstancesHandler, PieceInstanceMin } from '../collections/pieceInstancesHandler' import { PieceStatus, toPieceStatus } from './helpers/pieceStatus' +import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' const THROTTLE_PERIOD_MS = 100 @@ -55,7 +56,8 @@ export class ActivePlaylistTopic CollectionObserver, CollectionObserver, CollectionObserver, - CollectionObserver + CollectionObserver, + CollectionObserver { public observerName = ActivePlaylistTopic.name private _activePlaylist: DBRundownPlaylist | undefined diff --git a/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts b/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts index 966354e6d95..413ca9c4fab 100644 --- a/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts +++ b/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts @@ -1,5 +1,6 @@ import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' +import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' export interface SegmentTiming { budgetDurationMs?: number @@ -11,12 +12,13 @@ export interface CurrentSegmentTiming extends SegmentTiming { } export function calculateCurrentSegmentTiming( + segment: DBSegment, currentPartInstance: DBPartInstance, firstInstanceInSegmentPlayout: DBPartInstance | undefined, segmentPartInstances: DBPartInstance[], segmentParts: DBPart[] ): CurrentSegmentTiming { - const segmentTiming = calculateSegmentTiming(segmentParts) + const segmentTiming = calculateSegmentTiming(segment, segmentParts) const playedDurations = segmentPartInstances.reduce((sum, 
partInstance) => { return (partInstance.timings?.duration ?? 0) + sum }, 0) @@ -36,11 +38,9 @@ export function calculateCurrentSegmentTiming( } } -export function calculateSegmentTiming(segmentParts: DBPart[]): SegmentTiming { +export function calculateSegmentTiming(segment: DBSegment, segmentParts: DBPart[]): SegmentTiming { return { - budgetDurationMs: segmentParts.reduce((sum, part): number | undefined => { - return part.budgetDuration != null && !part.untimed ? (sum ?? 0) + part.budgetDuration : sum - }, undefined), + budgetDurationMs: segment.segmentTiming?.budgetDuration, expectedDurationMs: segmentParts.reduce((sum, part): number => { return part.expectedDurationWithPreroll != null && !part.untimed ? sum + part.expectedDurationWithPreroll From 0ba4314d7b82f8cd7545206b7a15061d965352f9 Mon Sep 17 00:00:00 2001 From: ianshade Date: Tue, 28 May 2024 12:02:17 +0200 Subject: [PATCH 05/81] feat(SUPERFLY-7): add countdownType to LSG --- .../job-worker/src/blueprints/context/lib.ts | 1 - .../api/schemas/activePlaylist.yaml | 7 ++++ .../api/schemas/segments.yaml | 7 ++++ .../src/collections/segmentHandler.ts | 18 ++++----- .../src/liveStatusServer.ts | 1 + .../topics/__tests__/activePlaylist.spec.ts | 30 ++++++++++---- .../topics/__tests__/segmentsTopic.spec.ts | 39 +++++++------------ .../src/topics/activePlaylistTopic.ts | 15 ++++++- .../src/topics/helpers/segmentTiming.ts | 4 +- .../src/topics/segmentsTopic.ts | 2 +- 10 files changed, 76 insertions(+), 48 deletions(-) diff --git a/packages/job-worker/src/blueprints/context/lib.ts b/packages/job-worker/src/blueprints/context/lib.ts index aa81c0ab660..27eb348c71c 100644 --- a/packages/job-worker/src/blueprints/context/lib.ts +++ b/packages/job-worker/src/blueprints/context/lib.ts @@ -94,7 +94,6 @@ export const IBlueprintMutatablePartSampleKeys = allKeysOfObject { - const previousSegmentId = this._currentSegmentId const previousRundownIds = this._rundownIds switch (source) { @@ -91,11 +96,6 @@ export class 
SegmentHandler const allSegments = collection.find(undefined) await this._segmentsHandler.setSegments(allSegments) } - if (previousSegmentId !== this._currentSegmentId) { - if (this._currentSegmentId) { - this._collectionData = collection.findOne(this._currentSegmentId) - await this.notify(this._collectionData) - } - } + await this.updateAndNotify() } } diff --git a/packages/live-status-gateway/src/liveStatusServer.ts b/packages/live-status-gateway/src/liveStatusServer.ts index addc8c39340..9e0c5268250 100644 --- a/packages/live-status-gateway/src/liveStatusServer.ts +++ b/packages/live-status-gateway/src/liveStatusServer.ts @@ -106,6 +106,7 @@ export class LiveStatusServer { await partInstancesHandler.subscribe(activePlaylistTopic) await partsHandler.subscribe(activePlaylistTopic) await pieceInstancesHandler.subscribe(activePlaylistTopic) + await segmentHandler.subscribe(activePlaylistTopic) await playlistHandler.subscribe(activePiecesTopic) await showStyleBaseHandler.subscribe(activePiecesTopic) diff --git a/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts b/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts index 91f28d6d08f..3516b9970e6 100644 --- a/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts +++ b/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts @@ -9,6 +9,9 @@ import { literal } from '@sofie-automation/corelib/dist/lib' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { PartsHandler } from '../../collections/partsHandler' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' +import { SegmentHandler } from '../../collections/segmentHandler' +import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' +import { CountdownType } from '@sofie-automation/blueprints-integration' function makeEmptyTestPartInstances(): SelectedPartInstances { return { @@ -72,21 +75,25 @@ 
describe('ActivePlaylistTopic', () => { const testShowStyleBase = makeTestShowStyleBase() await topic.update(ShowStyleBaseHandler.name, testShowStyleBase as ShowStyleBaseExt) + + const segment1id = protectString('SEGMENT_1') const part1: Partial = { _id: protectString('PART_1'), title: 'Test Part', - segmentId: protectString('SEGMENT_1'), + segmentId: segment1id, expectedDurationWithPreroll: 10000, expectedDuration: 10000, publicData: { b: 'c' }, } + const currentPartInstance = { + _id: currentPartInstanceId, + part: part1, + timings: { plannedStartedPlayback: 1600000060000 }, + segmentId: segment1id, + } const testPartInstances: PartialDeep = { - current: { - _id: currentPartInstanceId, - part: part1, - timings: { plannedStartedPlayback: 1600000060000 }, - }, - firstInSegmentPlayout: {}, + current: currentPartInstance, + firstInSegmentPlayout: currentPartInstance, inCurrentSegment: [ literal>({ _id: protectString(currentPartInstanceId), @@ -99,6 +106,11 @@ describe('ActivePlaylistTopic', () => { await topic.update(PartsHandler.name, [part1] as DBPart[]) + await topic.update(SegmentHandler.name, { + _id: segment1id, + segmentTiming: { budgetDuration: 12300, countdownType: CountdownType.SEGMENT_BUDGET_DURATION }, + } as DBSegment) + topic.addSubscriber(mockSubscriber) const expectedStatus: ActivePlaylistStatus = { @@ -119,7 +131,9 @@ describe('ActivePlaylistTopic', () => { id: 'SEGMENT_1', timing: { expectedDurationMs: 10000, - projectedEndTime: 1600000070000, + budgetDurationMs: 12300, + projectedEndTime: 1600000072300, + countdownType: 'segment_budget_duration', }, }, rundownIds: unprotectStringArray(playlist.rundownIdsInOrder), diff --git a/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts b/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts index 05d7a1649fa..c7a9ecb733f 100644 --- a/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts +++ 
b/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts @@ -11,7 +11,7 @@ const RUNDOWN_1_ID = 'RUNDOWN_1' const RUNDOWN_2_ID = 'RUNDOWN_2' const THROTTLE_PERIOD_MS = 205 -function makeTestSegment(id: string, rank: number, rundownId: string): DBSegment { +function makeTestSegment(id: string, rank: number, rundownId: string, segmentProps?: Partial): DBSegment { return { _id: protectString(id), externalId: `NCS_SEGMENT_${id}`, @@ -19,6 +19,7 @@ function makeTestSegment(id: string, rank: number, rundownId: string): DBSegment _rank: rank, rundownId: protectString(rundownId), externalModified: 1695799420147, + ...segmentProps, } } @@ -27,7 +28,7 @@ function makeTestPart( rank: number, rundownId: string, segmentId: string, - partProps: Partial + partProps?: Partial ): DBPart { return { _id: protectString(id), @@ -260,33 +261,19 @@ describe('SegmentsTopic', () => { const segment_2_2_id = '2_2' await topic.update(SegmentsHandler.name, [ makeTestSegment('2_1', 1, RUNDOWN_2_ID), - makeTestSegment(segment_2_2_id, 2, RUNDOWN_2_ID), - makeTestSegment(segment_1_2_id, 2, RUNDOWN_1_ID), - makeTestSegment(segment_1_1_id, 1, RUNDOWN_1_ID), + makeTestSegment(segment_2_2_id, 2, RUNDOWN_2_ID, { segmentTiming: { budgetDuration: 51000 } }), + makeTestSegment(segment_1_2_id, 2, RUNDOWN_1_ID, { segmentTiming: { budgetDuration: 15000 } }), + makeTestSegment(segment_1_1_id, 1, RUNDOWN_1_ID, { segmentTiming: { budgetDuration: 5000 } }), ]) mockSubscriber.send.mockClear() await topic.update(PartsHandler.name, [ - makeTestPart('1_2_1', 1, RUNDOWN_1_ID, segment_1_2_id, { - budgetDuration: 10000, - }), - makeTestPart('2_2_1', 1, RUNDOWN_1_ID, segment_2_2_id, { - budgetDuration: 40000, - }), - makeTestPart('1_2_2', 2, RUNDOWN_1_ID, segment_1_2_id, { - budgetDuration: 5000, - }), - makeTestPart('1_1_2', 2, RUNDOWN_1_ID, segment_1_1_id, { - budgetDuration: 1000, - }), - makeTestPart('1_1_1', 1, RUNDOWN_1_ID, segment_1_1_id, { - budgetDuration: 3000, - }), - makeTestPart('2_2_2', 2, 
RUNDOWN_1_ID, segment_2_2_id, { - budgetDuration: 11000, - }), - makeTestPart('1_1_2', 2, RUNDOWN_1_ID, segment_1_1_id, { - budgetDuration: 1000, - }), + makeTestPart('1_2_1', 1, RUNDOWN_1_ID, segment_1_2_id), + makeTestPart('2_2_1', 1, RUNDOWN_1_ID, segment_2_2_id), + makeTestPart('1_2_2', 2, RUNDOWN_1_ID, segment_1_2_id), + makeTestPart('1_1_2', 2, RUNDOWN_1_ID, segment_1_1_id), + makeTestPart('1_1_1', 1, RUNDOWN_1_ID, segment_1_1_id), + makeTestPart('2_2_2', 2, RUNDOWN_1_ID, segment_2_2_id), + makeTestPart('1_1_2', 2, RUNDOWN_1_ID, segment_1_1_id), ]) jest.advanceTimersByTime(THROTTLE_PERIOD_MS) diff --git a/packages/live-status-gateway/src/topics/activePlaylistTopic.ts b/packages/live-status-gateway/src/topics/activePlaylistTopic.ts index d592436b667..66e86dac439 100644 --- a/packages/live-status-gateway/src/topics/activePlaylistTopic.ts +++ b/packages/live-status-gateway/src/topics/activePlaylistTopic.ts @@ -16,6 +16,7 @@ import { PartTiming, calculateCurrentPartTiming } from './helpers/partTiming' import { SelectedPieceInstances, PieceInstancesHandler, PieceInstanceMin } from '../collections/pieceInstancesHandler' import { PieceStatus, toPieceStatus } from './helpers/pieceStatus' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' +import { SegmentHandler } from '../collections/segmentHandler' const THROTTLE_PERIOD_MS = 100 @@ -57,7 +58,7 @@ export class ActivePlaylistTopic CollectionObserver, CollectionObserver, CollectionObserver, - CollectionObserver + CollectionObserver { public observerName = ActivePlaylistTopic.name private _activePlaylist: DBRundownPlaylist | undefined @@ -69,6 +70,7 @@ export class ActivePlaylistTopic private _pieceInstancesInCurrentPartInstance: PieceInstanceMin[] | undefined private _pieceInstancesInNextPartInstance: PieceInstanceMin[] | undefined private _showStyleBaseExt: ShowStyleBaseExt | undefined + private _currentSegment: DBSegment | undefined private throttledSendStatusToAll: () => void 
constructor(logger: Logger) { @@ -118,10 +120,11 @@ export class ActivePlaylistTopic }) : null, currentSegment: - this._currentPartInstance && currentPart + this._currentPartInstance && currentPart && this._currentSegment ? literal({ id: unprotectString(currentPart.segmentId), timing: calculateCurrentSegmentTiming( + this._currentSegment, this._currentPartInstance, this._firstInstanceInSegmentPlayout, this._partInstancesInCurrentSegment, @@ -163,6 +166,7 @@ export class ActivePlaylistTopic private isDataInconsistent() { return ( this._currentPartInstance?._id !== this._activePlaylist?.currentPartInfo?.partInstanceId || + this._currentPartInstance?.segmentId !== this._currentSegment?._id || this._nextPartInstance?._id !== this._activePlaylist?.nextPartInfo?.partInstanceId || (this._pieceInstancesInCurrentPartInstance?.[0] && this._pieceInstancesInCurrentPartInstance?.[0].partInstanceId !== this._currentPartInstance?._id) || @@ -179,6 +183,7 @@ export class ActivePlaylistTopic | SelectedPartInstances | DBPart[] | SelectedPieceInstances + | DBSegment | undefined ): Promise { let hasAnythingChanged = false @@ -234,6 +239,12 @@ export class ActivePlaylistTopic this._pieceInstancesInNextPartInstance = pieceInstances.nextPartInstance break } + case SegmentHandler.name: { + this._currentSegment = data as DBSegment + this.logUpdateReceived('segment', source) + hasAnythingChanged = true + break + } default: throw new Error(`${this._name} received unsupported update from ${source}}`) } diff --git a/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts b/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts index 413ca9c4fab..7284a1c9fe5 100644 --- a/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts +++ b/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts @@ -5,6 +5,7 @@ import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' export interface SegmentTiming { budgetDurationMs?: number expectedDurationMs: 
number + countdownType?: 'part_expected_duration' | 'segment_budget_duration' } export interface CurrentSegmentTiming extends SegmentTiming { @@ -31,7 +32,7 @@ export function calculateCurrentSegmentTiming( const projectedBudgetEndTime = (firstInstanceInSegmentPlayout?.timings?.reportedStartedPlayback ?? firstInstanceInSegmentPlayout?.timings?.plannedStartedPlayback ?? - 0) + (segmentTiming.budgetDurationMs ?? 0) + Date.now()) + (segmentTiming.budgetDurationMs ?? 0) return { ...segmentTiming, projectedEndTime: segmentTiming.budgetDurationMs != null ? projectedBudgetEndTime : projectedEndTime, @@ -46,5 +47,6 @@ export function calculateSegmentTiming(segment: DBSegment, segmentParts: DBPart[ ? sum + part.expectedDurationWithPreroll : sum }, 0), + countdownType: segment.segmentTiming?.countdownType, } } diff --git a/packages/live-status-gateway/src/topics/segmentsTopic.ts b/packages/live-status-gateway/src/topics/segmentsTopic.ts index 22f2dd523bc..a80aecf1103 100644 --- a/packages/live-status-gateway/src/topics/segmentsTopic.ts +++ b/packages/live-status-gateway/src/topics/segmentsTopic.ts @@ -68,7 +68,7 @@ export class SegmentsTopic id: segmentId, rundownId: unprotectString(segment.rundownId), name: segment.name, - timing: calculateSegmentTiming(this._partsBySegment[segmentId] ?? []), + timing: calculateSegmentTiming(segment, this._partsBySegment[segmentId] ?? 
[]), identifier: segment.identifier, publicData: segment.publicData, } From cf5515167173d38c6064bfb8ad2bf234329ea3ee Mon Sep 17 00:00:00 2001 From: ianshade Date: Tue, 28 May 2024 13:11:50 +0200 Subject: [PATCH 06/81] feat(SUPERFLY-7): use countdownType in GUI --- meteor/client/lib/rundownTiming.ts | 22 ++++++++++++++----- .../client/ui/ClockView/CameraScreen/Part.tsx | 4 ++-- .../client/ui/ClockView/PresenterScreen.tsx | 10 +++------ meteor/client/ui/RundownView.tsx | 4 ++-- ....tsx => CurrentPartOrSegmentRemaining.tsx} | 12 ++++++---- meteor/client/ui/SegmentList/LinePart.tsx | 4 ++-- meteor/client/ui/SegmentList/OnAirLine.tsx | 4 ++-- .../ui/SegmentStoryboard/StoryboardPart.tsx | 4 ++-- .../ui/SegmentTimeline/SegmentTimeline.tsx | 4 ++-- meteor/client/ui/Shelf/PartTimingPanel.tsx | 4 ++-- 10 files changed, 41 insertions(+), 31 deletions(-) rename meteor/client/ui/RundownView/RundownTiming/{CurrentPartRemaining.tsx => CurrentPartOrSegmentRemaining.tsx} (89%) diff --git a/meteor/client/lib/rundownTiming.ts b/meteor/client/lib/rundownTiming.ts index cefdaa27200..37f8496ec66 100644 --- a/meteor/client/lib/rundownTiming.ts +++ b/meteor/client/lib/rundownTiming.ts @@ -26,6 +26,7 @@ import { getCurrentTime, objectFromEntries } from '../../lib/lib' import { Settings } from '../../lib/Settings' import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' +import { CountdownType } from '@sofie-automation/blueprints-integration' // Minimum duration that a part can be assigned. Used by gap parts to allow them to "compress" to indicate time running out. 
const MINIMAL_NONZERO_DURATION = 1 @@ -169,18 +170,28 @@ export class RundownTimingCalculator { const partsSegment = segmentsMap.get(partInstance.segmentId) const segmentBudget = partsSegment?.segmentTiming?.budgetDuration const segmentUsesBudget = segmentBudget !== undefined + const lastStartedPlayback = partInstance.timings?.plannedStartedPlayback if (partInstance.segmentId !== lastSegmentId) { this.untimedSegments.add(partInstance.segmentId) - lastSegmentId = partInstance.segmentId + if (liveSegmentId && lastSegmentId === liveSegmentId) { + const liveSegment = segmentsMap.get(liveSegmentId) + + if (liveSegment?.segmentTiming?.countdownType === CountdownType.SEGMENT_BUDGET_DURATION) { + remainingBudgetOnCurrentSegment = + (this.segmentStartedPlayback[unprotectString(liveSegmentId)] ?? + lastStartedPlayback ?? + now) + + (liveSegment.segmentTiming.budgetDuration ?? 0) - + now + } + } segmentDisplayDuration = 0 if (segmentBudgetDurationLeft > 0) { waitAccumulator += segmentBudgetDurationLeft } - if (lastSegmentId === liveSegmentId) { - remainingBudgetOnCurrentSegment = segmentBudgetDurationLeft - } segmentBudgetDurationLeft = segmentBudget ?? 
0 + lastSegmentId = partInstance.segmentId } // add piece to accumulator @@ -215,7 +226,6 @@ export class RundownTimingCalculator { calculatePartInstanceExpectedDurationWithPreroll(partInstance, piecesForPart) || 0 } - const lastStartedPlayback = partInstance.timings?.plannedStartedPlayback const playOffset = partInstance.timings?.playOffset || 0 let partDuration = 0 @@ -731,7 +741,7 @@ export interface RundownTimingContext { segmentStartedPlayback?: Record /** Remaining time on current part */ remainingTimeOnCurrentPart?: number - /** Remaining budget on current segment */ + /** Remaining budget on current segment, if its countdownType === CountdownType.SEGMENT_BUDGET_DURATION */ remainingBudgetOnCurrentSegment?: number /** Current part will autoNext */ currentPartWillAutoNext?: boolean diff --git a/meteor/client/ui/ClockView/CameraScreen/Part.tsx b/meteor/client/ui/ClockView/CameraScreen/Part.tsx index 7533af701bf..af7058cd49f 100644 --- a/meteor/client/ui/ClockView/CameraScreen/Part.tsx +++ b/meteor/client/ui/ClockView/CameraScreen/Part.tsx @@ -7,7 +7,7 @@ import { PieceExtended } from '../../../../lib/Rundown' import { getAllowSpeaking, getAllowVibrating } from '../../../lib/localStorage' import { getPartInstanceTimingValue } from '../../../lib/rundownTiming' import { AutoNextStatus } from '../../RundownView/RundownTiming/AutoNextStatus' -import { CurrentPartRemaining } from '../../RundownView/RundownTiming/CurrentPartRemaining' +import { CurrentPartOrSegmentRemaining } from '../../RundownView/RundownTiming/CurrentPartOrSegmentRemaining' import { PartCountdown } from '../../RundownView/RundownTiming/PartCountdown' import { PartDisplayDuration } from '../../RundownView/RundownTiming/PartDuration' import { TimingDataResolution, TimingTickResolution, withTiming } from '../../RundownView/RundownTiming/withTiming' @@ -70,7 +70,7 @@ export const Part = withTiming({ - >) { if (playlist && playlistId && segments) { - let currentPartOrSegmentCountdown = 0 - if 
(currentSegment?.segmentTiming?.countdownType === CountdownType.SEGMENT_BUDGET_DURATION) { - currentPartOrSegmentCountdown = timingDurations.remainingBudgetOnCurrentSegment ?? 0 - } else if (currentPartInstance) { - currentPartOrSegmentCountdown = timingDurations.remainingTimeOnCurrentPart || 0 - } + const currentPartOrSegmentCountdown = + timingDurations.remainingBudgetOnCurrentSegment ?? timingDurations.remainingTimeOnCurrentPart ?? 0 const expectedStart = PlaylistTiming.getExpectedStart(playlist.timing) const overUnderClock = getPlaylistTimingDiff(playlist, timingDurations) ?? 0 diff --git a/meteor/client/ui/RundownView.tsx b/meteor/client/ui/RundownView.tsx index da60fdb3b99..32b2c11d65d 100644 --- a/meteor/client/ui/RundownView.tsx +++ b/meteor/client/ui/RundownView.tsx @@ -30,7 +30,7 @@ import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { ContextMenu, MenuItem, ContextMenuTrigger } from '@jstarpl/react-contextmenu' import { RundownTimingProvider } from './RundownView/RundownTiming/RundownTimingProvider' import { withTiming, WithTiming } from './RundownView/RundownTiming/withTiming' -import { CurrentPartRemaining } from './RundownView/RundownTiming/CurrentPartRemaining' +import { CurrentPartOrSegmentRemaining } from './RundownView/RundownTiming/CurrentPartOrSegmentRemaining' import { AutoNextStatus } from './RundownView/RundownTiming/AutoNextStatus' import { SegmentTimelineContainer, PieceUi, PartUi, SegmentUi } from './SegmentTimeline/SegmentTimelineContainer' import { SegmentContextMenu } from './SegmentTimeline/SegmentContextMenu' @@ -299,7 +299,7 @@ const TimingDisplay = withTranslation()( {rundownPlaylist.currentPartInfo && ( - diff --git a/meteor/client/ui/RundownView/RundownTiming/CurrentPartRemaining.tsx b/meteor/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx similarity index 89% rename from meteor/client/ui/RundownView/RundownTiming/CurrentPartRemaining.tsx rename to 
meteor/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx index 2e987c7ea80..a62ca027598 100644 --- a/meteor/client/ui/RundownView/RundownTiming/CurrentPartRemaining.tsx +++ b/meteor/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx @@ -20,11 +20,13 @@ interface IPartRemainingProps { let prevDisplayTime: number | undefined = undefined /** - * A presentational component that will render a countdown to the end of the current part - * @class CurrentPartRemaining + * A presentational component that will render a countdown to the end of the current part or segment, + * depending on the value of segmentTiming.countdownType + * + * @class CurrentPartOrSegmentRemaining * @extends React.Component> */ -export const CurrentPartRemaining = withTiming({ +export const CurrentPartOrSegmentRemaining = withTiming({ tickResolution: TimingTickResolution.Synced, dataResolution: TimingDataResolution.Synced, })( @@ -32,7 +34,9 @@ export const CurrentPartRemaining = withTiming({ render(): JSX.Element | null { if (!this.props.timingDurations || !this.props.timingDurations.currentTime) return null if (this.props.timingDurations.currentPartInstanceId !== this.props.currentPartInstanceId) return null - let displayTimecode = this.props.timingDurations.remainingTimeOnCurrentPart + let displayTimecode = + this.props.timingDurations.remainingBudgetOnCurrentSegment ?? 
+ this.props.timingDurations.remainingTimeOnCurrentPart if (displayTimecode === undefined) return null displayTimecode *= -1 return ( diff --git a/meteor/client/ui/SegmentList/LinePart.tsx b/meteor/client/ui/SegmentList/LinePart.tsx index 0420bf63bb7..de34de81f69 100644 --- a/meteor/client/ui/SegmentList/LinePart.tsx +++ b/meteor/client/ui/SegmentList/LinePart.tsx @@ -7,7 +7,7 @@ import { contextMenuHoldToDisplayTime } from '../../lib/lib' import { RundownUtils } from '../../lib/rundown' import { getElementDocumentOffset } from '../../utils/positions' import { IContextMenuContext } from '../RundownView' -import { CurrentPartRemaining } from '../RundownView/RundownTiming/CurrentPartRemaining' +import { CurrentPartOrSegmentRemaining } from '../RundownView/RundownTiming/CurrentPartOrSegmentRemaining' import { PieceUi, SegmentUi } from '../SegmentContainer/withResolvedSegment' import { SegmentTimelinePartElementId } from '../SegmentTimeline/Parts/SegmentTimelinePart' import { LinePartIdentifier } from './LinePartIdentifier' @@ -131,7 +131,7 @@ export const LinePart: React.FC = function LinePart({
{/* */} {isLivePart && ( - ({
- -
{this.props.displayLiveLineCounter && ( - Date: Wed, 28 Aug 2024 12:33:58 +0200 Subject: [PATCH 07/81] fix: MosGateway hotstandby - send message for both servers if both connections are offline --- packages/mos-gateway/src/CoreMosDeviceHandler.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/mos-gateway/src/CoreMosDeviceHandler.ts b/packages/mos-gateway/src/CoreMosDeviceHandler.ts index 1bbcb4a8df8..cd0d29d6a37 100644 --- a/packages/mos-gateway/src/CoreMosDeviceHandler.ts +++ b/packages/mos-gateway/src/CoreMosDeviceHandler.ts @@ -152,6 +152,8 @@ export class CoreMosDeviceHandler { messages.push(connectionStatus.SecondaryStatus || 'Running NRCS on hot standby') } else { statusCode = StatusCode.BAD + // Send messages for both connections + messages.push(connectionStatus.PrimaryStatus || 'Primary and hot standby are not connected') messages.push(connectionStatus.SecondaryStatus || 'Primary and hot standby are not connected') } } From be5ebf22dfae21d1d4272247e09e1532a596cd2f Mon Sep 17 00:00:00 2001 From: Mint de Wit Date: Mon, 9 Sep 2024 15:09:43 +0000 Subject: [PATCH 08/81] chore: wip --- packages/playout-gateway/package.json | 2 +- packages/playout-gateway/src/coreHandler.ts | 2 +- packages/playout-gateway/src/tsrHandler.ts | 174 ++++++++++++++++++++ packages/yarn.lock | 25 ++- 4 files changed, 193 insertions(+), 10 deletions(-) diff --git a/packages/playout-gateway/package.json b/packages/playout-gateway/package.json index d884d638d1f..54382f65f82 100644 --- a/packages/playout-gateway/package.json +++ b/packages/playout-gateway/package.json @@ -60,7 +60,7 @@ "@sofie-automation/shared-lib": "1.52.0-in-development", "debug": "^4.3.4", "influx": "^5.9.3", - "timeline-state-resolver": "9.2.0-nightly-release52-20240909-111856-517f0ee37.0", + "timeline-state-resolver": "9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0", "tslib": "^2.6.2", "underscore": "^1.13.6", "winston": "^3.11.0" diff --git 
a/packages/playout-gateway/src/coreHandler.ts b/packages/playout-gateway/src/coreHandler.ts index dd316424009..b375155b71b 100644
--- a/packages/playout-gateway/src/coreHandler.ts
+++ b/packages/playout-gateway/src/coreHandler.ts
@@ -545,7 +545,7 @@ export class CoreTSRDeviceHandler {
 	async dispose(subdevice: 'keepSubDevice' | 'removeSubDevice' = 'keepSubDevice'): Promise {
 		this._observers.forEach((obs) => obs.stop())
-		await this._tsrHandler.tsr.removeDevice(this._deviceId)
+		// await this._tsrHandler.tsr.removeDevice(this._deviceId)
 		await this.core.setStatus({
 			statusCode: StatusCode.BAD,
 			messages: ['Uninitialized'],
diff --git a/packages/playout-gateway/src/tsrHandler.ts b/packages/playout-gateway/src/tsrHandler.ts
index d3245bd3839..ce42ad62deb 100644
--- a/packages/playout-gateway/src/tsrHandler.ts
+++ b/packages/playout-gateway/src/tsrHandler.ts
@@ -223,6 +223,8 @@ export class TSRHandler {
 		})
 		this.tsr.on('timeTrace', (trace: FinishedTrace) => sendTrace(trace))
 
+		this.attachTSRConnectionEvents()
+
 		this.logger.debug('tsr init')
 		await this.tsr.init()
@@ -234,6 +236,178 @@ export class TSRHandler {
 		this.logger.debug('tsr init done')
 	}
 
+	private attachTSRConnectionEvents () {
+		this.tsr.connectionManager.on('connectionAdded', (id, container) => {
+			const coreTsrHandler = new CoreTSRDeviceHandler(this._coreHandler, Promise.resolve(container), id, this)
+			this._coreTsrHandlers[id] = coreTsrHandler
+
+			// set the status to uninitialized for now:
+			coreTsrHandler.statusChanged({
+				statusCode: StatusCode.BAD,
+				messages: ['Device initialising...'],
+			})
+		})
+
+		this.tsr.connectionManager.on('connectionInitialised', (id) => {
+			const coreTsrHandler = this._coreTsrHandlers[id]
+
+			if (!coreTsrHandler) {
+				this.logger.error('TSR Connection initialised when there was no CoreTSRHandler for it')
+				return
+			}
+
+			coreTsrHandler.init().catch(e => this.logger.error('CoreTSRHandler failed to initialise', e)) // todo - is this the right way to log this?
+		})
+
+		this.tsr.connectionManager.on('connectionRemoved', (id) => {
+			const coreTsrHandler = this._coreTsrHandlers[id]
+
+			if (!coreTsrHandler) {
+				this.logger.error('TSR Connection was removed, but there was no CoreTSRHandler to handle that')
+				return
+			}
+
+			coreTsrHandler.dispose('removeSubDevice')
+			delete this._coreTsrHandlers[id]
+		})
+
+		const fixLog = (id: string, e: string): string => {
+			const device = this._coreTsrHandlers[id]?._device
+
+			return `Device "${device?.deviceName ?? id}" (${device?.instanceId ?? 'instance unknown'})` + e
+		}
+		const fixError = (id: string, e: Error): any => {
+			const device = this._coreTsrHandlers[id]?._device
+			const name = `Device "${device?.deviceName ?? id}" (${device?.instanceId ?? 'instance unknown'})`
+
+			return {
+				message: e.message && name + ': ' + e.message,
+				name: e.name && name + ': ' + e.name,
+				stack: e.stack && e.stack + '\nAt device' + name,
+			}
+		}
+		const fixContext = (...context: any[]): any => {
+			return {
+				context,
+			}
+		}
+
+		this.tsr.connectionManager.on('connectionEvent:connectionChanged', (id, status) => {
+			const coreTsrHandler = this._coreTsrHandlers[id]
+			if (!coreTsrHandler) return
+
+			coreTsrHandler.statusChanged(status)
+
+			// When the status has changed, the deviceName might have changed:
+			coreTsrHandler._device.reloadProps().catch((err) => {
+				this.logger.error(`Error in reloadProps: ${stringifyError(err)}`)
+			})
+			// hack to make sure atem has media after restart
+			if (
+				(status.statusCode === StatusCode.GOOD ||
+					status.statusCode === StatusCode.WARNING_MINOR ||
+					status.statusCode === StatusCode.WARNING_MAJOR) &&
+				coreTsrHandler._device.deviceType === DeviceType.ATEM &&
+				!disableAtemUpload
+			) {
+				const assets = (coreTsrHandler._device.deviceOptions as DeviceOptionsAtem).options?.mediaPoolAssets
+				if (assets && assets.length > 0) {
+					try {
+						this.uploadFilesToAtem(
+							coreTsrHandler._device,
+							assets.filter((asset) => _.isNumber(asset.position) && asset.path)
+						)
+					} catch (e)
{ + // don't worry about it. + } + } + } + }) + this.tsr.connectionManager.on('connectionEvent:slowSentCommand', (id, info) => { + // If the internalDelay is too large, it should be logged as an error, + // since something took too long internally. + + if (info.internalDelay > 100) { + this.logger.error('slowSentCommand', { + id, + ...info, + }) + } else { + this.logger.warn('slowSentCommand', { + id, + ...info, + }) + } + }) + this.tsr.connectionManager.on('connectionEvent:slowFulfilledCommand', (id, info) => { + // Note: we don't emit slow fulfilled commands as error, since + // the fulfillment of them lies on the device being controlled, not on us. + + this.logger.warn('slowFulfilledCommand', { + id, + ...info, + }) + }) + this.tsr.connectionManager.on('connectionEvent:commandError', (id, error, context) => { + // todo: handle this better + this.logger.error(fixError(id, error), { context }) + }) + this.tsr.connectionManager.on('connectionEvent:commandReport', (_id, commandReport) => { + if (this._reportAllCommands) { + // Todo: send these to Core + this.logger.info('commandReport', { + commandReport: commandReport, + }) + } + }) + this.tsr.connectionManager.on('connectionEvent:updateMediaObject', (id, collectionId, docId, doc) => { + const coreTsrHandler = this._coreTsrHandlers[id] + if (!coreTsrHandler) return + + coreTsrHandler.onUpdateMediaObject(collectionId, docId, doc) + }) + this.tsr.connectionManager.on('connectionEvent:clearMediaObjects', (id, collectionId) => { + const coreTsrHandler = this._coreTsrHandlers[id] + if (!coreTsrHandler) return + + coreTsrHandler.onClearMediaObjectCollection(collectionId) + }) + this.tsr.connectionManager.on('connectionEvent:info', (id, info) => { + this.logger.info(fixLog(id, info)) + }) + this.tsr.connectionManager.on('connectionEvent:warning', (id, warning) => { + this.logger.warn(fixLog(id, warning)) + }) + this.tsr.connectionManager.on('connectionEvent:error', (id, context, error) => { + 
this.logger.error(fixError(id, error), fixContext(context)) + }) + this.tsr.connectionManager.on('connectionEvent:debug', (id, ...args) => { + const device = this._coreTsrHandlers[id]?._device + + if (!device?.debugLogging && !this._coreHandler.logDebug) { + return + } + if (args.length === 0) { + this.logger.debug('>empty message<') + return + } + const data = args.map((arg) => (typeof arg === 'object' ? JSON.stringify(arg) : arg)) + this.logger.debug(`Device "${device?.deviceName || id}" (${device?.instanceId})`, { data }) + }) + this.tsr.connectionManager.on('connectionEvent:debugState', (id, state) => { + const device = this._coreTsrHandlers[id]?._device + + if (device?.debugState && this._coreHandler.logDebug) { + // Fetch the Id that core knows this device by + const coreId = this._coreTsrHandlers[device.deviceId].core.deviceId + this._debugStates.set(unprotectString(coreId), state) + } + }) + this.tsr.connectionManager.on('connectionEvent:timeTrace', (_id, trace) => { + sendTrace(trace) + }) + } + private loadSubdeviceConfigurations(): { [deviceType: string]: Record } { const defaultDeviceOptions: { [deviceType: string]: Record } = {} diff --git a/packages/yarn.lock b/packages/yarn.lock index ed4403af09d..219c4260db1 100644 --- a/packages/yarn.lock +++ b/packages/yarn.lock @@ -21739,7 +21739,7 @@ asn1@evs-broadcast/node-asn1: "@sofie-automation/shared-lib": 1.52.0-in-development debug: ^4.3.4 influx: ^5.9.3 - timeline-state-resolver: 9.2.0-nightly-release52-20240909-111856-517f0ee37.0 + timeline-state-resolver: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 tslib: ^2.6.2 underscore: ^1.13.6 winston: ^3.11.0 @@ -25655,7 +25655,7 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"superfly-timeline@npm:9.0.1, superfly-timeline@npm:^9.0.1": +"superfly-timeline@npm:9.0.1, superfly-timeline@npm:^9.0.0": version: 9.0.1 resolution: "superfly-timeline@npm:9.0.1" dependencies: @@ -26046,6 +26046,15 @@ 
asn1@evs-broadcast/node-asn1: languageName: node linkType: hard +"timeline-state-resolver-types@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0": + version: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 + resolution: "timeline-state-resolver-types@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0" + dependencies: + tslib: ^2.6.3 + checksum: 08fb51f32cae7ad790ad6a8ec56518e54b280268c83b9614f75c43656b4d0f4f1d00c5637cf5d3a03e8c3e17c93d37ece9a50b07eb4928816f22e1fed25bc2dc + languageName: node + linkType: hard + "timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240909-111856-517f0ee37.0": version: 9.2.0-nightly-release52-20240909-111856-517f0ee37.0 resolution: "timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240909-111856-517f0ee37.0" @@ -26055,9 +26064,9 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"timeline-state-resolver@npm:9.2.0-nightly-release52-20240909-111856-517f0ee37.0": - version: 9.2.0-nightly-release52-20240909-111856-517f0ee37.0 - resolution: "timeline-state-resolver@npm:9.2.0-nightly-release52-20240909-111856-517f0ee37.0" +"timeline-state-resolver@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0": + version: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 + resolution: "timeline-state-resolver@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0" dependencies: "@tv2media/v-connection": ^7.3.4 atem-connection: 3.5.0 @@ -26080,9 +26089,9 @@ asn1@evs-broadcast/node-asn1: p-timeout: ^3.2.0 simple-oauth2: ^5.1.0 sprintf-js: ^1.1.3 - superfly-timeline: ^9.0.1 + superfly-timeline: ^9.0.0 threadedclass: ^1.2.2 - timeline-state-resolver-types: 9.2.0-nightly-release52-20240909-111856-517f0ee37.0 + timeline-state-resolver-types: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 tslib: ^2.6.3 tv-automation-quantel-gateway-client: ^3.1.7 
type-fest: ^3.13.1 @@ -26090,7 +26099,7 @@ asn1@evs-broadcast/node-asn1: utf-8-validate: ^6.0.4 ws: ^8.18.0 xml-js: ^1.6.11 - checksum: 65e4b7ad24f414efd940fda3987900123aea8bd5ec41cadeca7763c13e98e24ceff4384f0103f8079d2e823986de56f3dd28bc2afeb3e6632d74c0e29f88ad6b + checksum: 9f1bfb8e43ec26b610ec21f25d1f28d39da32458b2209a96ee48863f27fa5cd218fea2a2802cbf7e810762dec185a1b9e25057edda9dedd53ccbdbd59b776e95 languageName: node linkType: hard From 3d75720698d02c54e82795397777c695296cb735 Mon Sep 17 00:00:00 2001 From: Mint de Wit Date: Tue, 10 Sep 2024 12:38:34 +0000 Subject: [PATCH 09/81] chore: wip --- meteor/yarn.lock | 10 +- packages/playout-gateway/src/coreHandler.ts | 22 +- packages/playout-gateway/src/tsrHandler.ts | 571 ++------------------ packages/shared-lib/package.json | 2 +- packages/yarn.lock | 11 +- 5 files changed, 55 insertions(+), 561 deletions(-) diff --git a/meteor/yarn.lock b/meteor/yarn.lock index d0a1a2f5b2d..3f7844e680b 100644 --- a/meteor/yarn.lock +++ b/meteor/yarn.lock @@ -1242,7 +1242,7 @@ __metadata: resolution: "@sofie-automation/shared-lib@portal:../packages/shared-lib::locator=automation-core%40workspace%3A." 
dependencies: "@mos-connection/model": v4.2.0-alpha.1 - timeline-state-resolver-types: 9.2.0-nightly-release52-20240909-111856-517f0ee37.0 + timeline-state-resolver-types: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 tslib: ^2.6.2 type-fest: ^3.13.1 languageName: node @@ -10196,12 +10196,12 @@ __metadata: languageName: node linkType: hard -"timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240909-111856-517f0ee37.0": - version: 9.2.0-nightly-release52-20240909-111856-517f0ee37.0 - resolution: "timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240909-111856-517f0ee37.0" +"timeline-state-resolver-types@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0": + version: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 + resolution: "timeline-state-resolver-types@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0" dependencies: tslib: ^2.6.3 - checksum: 6f9526e3e60021b722fd152272a7697a2b78517fb800a9e0879170388dcfeaaa2d386f80b9868b20bffe058033c9158077f93294cff7907c5d8a6d7b27e186f6 + checksum: 08fb51f32cae7ad790ad6a8ec56518e54b280268c83b9614f75c43656b4d0f4f1d00c5637cf5d3a03e8c3e17c93d37ece9a50b07eb4928816f22e1fed25bc2dc languageName: node linkType: hard diff --git a/packages/playout-gateway/src/coreHandler.ts b/packages/playout-gateway/src/coreHandler.ts index b375155b71b..1f73222973f 100644 --- a/packages/playout-gateway/src/coreHandler.ts +++ b/packages/playout-gateway/src/coreHandler.ts @@ -346,7 +346,7 @@ export class CoreHandler { const devices: any[] = [] if (this._tsrHandler) { - for (const device of this._tsrHandler.tsr.getDevices()) { + for (const device of this._tsrHandler.tsr.connectionManager.getConnections()) { devices.push({ instanceId: device.instanceId, deviceId: device.deviceId, @@ -416,7 +416,6 @@ export class CoreTSRDeviceHandler { public _deviceId: string public _device!: BaseRemoteDeviceIntegration private _coreParentHandler: 
CoreHandler - private _tsrHandler: TSRHandler private _hasGottenStatusChange = false private _deviceStatus: PeripheralDeviceAPI.PeripheralDeviceStatusObject = { statusCode: StatusCode.BAD, @@ -424,16 +423,10 @@ export class CoreTSRDeviceHandler { } private disposed = false - constructor( - parent: CoreHandler, - device: Promise>, - deviceId: string, - tsrHandler: TSRHandler - ) { + constructor(parent: CoreHandler, device: Promise>, deviceId: string) { this._coreParentHandler = parent this._devicePr = device this._deviceId = deviceId - this._tsrHandler = tsrHandler } async init(): Promise { this._device = await this._devicePr @@ -455,10 +448,11 @@ export class CoreTSRDeviceHandler { ) }) + console.log('has got status? ' + this._hasGottenStatusChange) if (!this._hasGottenStatusChange) { this._deviceStatus = await this._device.device.getStatus() - this.sendStatus() } + this.sendStatus() if (this.disposed) throw new Error('CoreTSRDeviceHandler cant init, is disposed') await this.setupSubscriptionsAndObservers() if (this.disposed) throw new Error('CoreTSRDeviceHandler cant init, is disposed') @@ -490,8 +484,9 @@ export class CoreTSRDeviceHandler { // setup observers this._coreParentHandler.setupObserverForPeripheralDeviceCommands(this) } - statusChanged(deviceStatus: Partial): void { - this._hasGottenStatusChange = true + statusChanged(deviceStatus: Partial, fromDevice = true): void { + console.log('device ' + this._deviceId + ' status set to ' + deviceStatus.statusCode) + if (fromDevice) this._hasGottenStatusChange = true this._deviceStatus = { ...this._deviceStatus, @@ -545,7 +540,8 @@ export class CoreTSRDeviceHandler { async dispose(subdevice: 'keepSubDevice' | 'removeSubDevice' = 'keepSubDevice'): Promise { this._observers.forEach((obs) => obs.stop()) - // await this._tsrHandler.tsr.removeDevice(this._deviceId) + if (!this.core) return + await this.core.setStatus({ statusCode: StatusCode.BAD, messages: ['Uninitialized'], diff --git 
a/packages/playout-gateway/src/tsrHandler.ts b/packages/playout-gateway/src/tsrHandler.ts index ce42ad62deb..40def381031 100644 --- a/packages/playout-gateway/src/tsrHandler.ts +++ b/packages/playout-gateway/src/tsrHandler.ts @@ -7,15 +7,10 @@ import { TSRTimelineObj, TSRTimeline, TSRTimelineContent, - CommandReport, DeviceOptionsAtem, AtemMediaPoolAsset, - MediaObject, ExpectedPlayoutItem, ExpectedPlayoutItemContent, - SlowSentCommandInfo, - SlowFulfilledCommandInfo, - DeviceStatus, StatusCode, Datastore, } from 'timeline-state-resolver' @@ -55,8 +50,6 @@ import { unprotectString, } from '@sofie-automation/server-core-integration' import { BaseRemoteDeviceIntegration } from 'timeline-state-resolver/dist/service/remoteDeviceInstance' -import { DeviceEvents } from 'timeline-state-resolver/dist/service/device' -import EventEmitter = require('eventemitter3') const debug = Debug('playout-gateway') @@ -69,31 +62,6 @@ export interface TimelineContentObjectTmp { inGroup?: string } -/** Max time for initializing devices */ -const INIT_TIMEOUT = 10000 - -enum DeviceAction { - ADD = 'add', - READD = 'readd', - REMOVE = 'remove', -} - -type DeviceActionResult = { - success: boolean - deviceId: string - action: DeviceAction -} - -type UpdateDeviceOperationsResult = - | { - success: true - results: DeviceActionResult[] - } - | { - success: false - reason: 'timeout' | 'error' - details: string[] - } /** * Represents a connection between Gateway and TSR @@ -236,16 +204,31 @@ export class TSRHandler { this.logger.debug('tsr init done') } - private attachTSRConnectionEvents () { + private attachTSRConnectionEvents() { + this.tsr.connectionManager.on('info', (info) => this.logger.info('TSR ConnectionManager: ' + info)) + this.tsr.connectionManager.on('warning', (warning) => this.logger.warn('TSR ConnectionManager: ' + warning)) + this.tsr.connectionManager.on('debug', (...args) => { + if (!this._coreHandler.logDebug) { + return + } + const data = args.map((arg) => (typeof arg === 
'object' ? JSON.stringify(arg) : arg)) + this.logger.debug(`TSR ConnectionManager debug (${args.length})`, { data }) + }) + this.tsr.connectionManager.on('connectionAdded', (id, container) => { - const coreTsrHandler = new CoreTSRDeviceHandler(this._coreHandler, Promise.resolve(container), id, this) + const coreTsrHandler = new CoreTSRDeviceHandler(this._coreHandler, Promise.resolve(container), id) this._coreTsrHandlers[id] = coreTsrHandler // set the status to uninitialized for now: - coreTsrHandler.statusChanged({ - statusCode: StatusCode.BAD, - messages: ['Device initialising...'], - }) + coreTsrHandler.statusChanged( + { + statusCode: StatusCode.BAD, + messages: ['Device initialising...'], + }, + false + ) + + this._triggerupdateExpectedPlayoutItems() // So that any recently created devices will get all the ExpectedPlayoutItems }) this.tsr.connectionManager.on('connectionInitialised', (id) => { @@ -256,7 +239,7 @@ export class TSRHandler { return } - coreTsrHandler.init().catch(e => this.logger.error('CoreTSRHandler failed to initialise', e)) // todo - is this the right way to log this? + coreTsrHandler.init().catch((e) => this.logger.error('CoreTSRHandler failed to initialise', e)) // todo - is this the right way to log this? }) this.tsr.connectionManager.on('connectionRemoved', (id) => { @@ -267,18 +250,20 @@ export class TSRHandler { return } - coreTsrHandler.dispose('removeSubDevice') + coreTsrHandler.dispose('removeSubDevice').catch((e) => { + this.logger.error('Failed to dispose of coreTsrHandler for ' + id + ': ' + e) + }) delete this._coreTsrHandlers[id] }) const fixLog = (id: string, e: string): string => { const device = this._coreTsrHandlers[id]?._device - return `Device "${device?.deviceName ?? id}" (${device?.instanceId ?? 'instance unknown'})` + e + return `Device "${device?.deviceName ?? id}" (${device?.instanceId ?? 
'instance unknown'}): ` + e } const fixError = (id: string, e: Error): any => { const device = this._coreTsrHandlers[id]?._device - const name = `Device "${device?.deviceName ?? id}" (${device?.instanceId ?? 'instance unknown'})` + const name = `Device "${device?.deviceName ?? id}" (${device?.instanceId ?? 'instance unknown'})` return { message: e.message && name + ': ' + e.message, @@ -295,7 +280,7 @@ export class TSRHandler { this.tsr.connectionManager.on('connectionEvent:connectionChanged', (id, status) => { const coreTsrHandler = this._coreTsrHandlers[id] if (!coreTsrHandler) return - + coreTsrHandler.statusChanged(status) // When the status has changed, the deviceName might have changed: @@ -341,12 +326,12 @@ export class TSRHandler { }) this.tsr.connectionManager.on('connectionEvent:slowFulfilledCommand', (id, info) => { // Note: we don't emit slow fulfilled commands as error, since - // the fulfillment of them lies on the device being controlled, not on us. + // the fulfillment of them lies on the device being controlled, not on us. 
- this.logger.warn('slowFulfilledCommand', { - id, - ...info, - }) + this.logger.warn('slowFulfilledCommand', { + id, + ...info, + }) }) this.tsr.connectionManager.on('connectionEvent:commandError', (id, error, context) => { // todo: handle this better @@ -670,40 +655,14 @@ export class TSRHandler { } private async _updateDevices(): Promise { - this.logger.debug('updateDevices start') - const peripheralDevice = this._getPeripheralDevice() - const ps: Promise[] = [] - const promiseOperations: { [id: string]: { deviceId: string; operation: DeviceAction } } = {} - const keepTrack = async (p: Promise, deviceId: string, operation: DeviceAction) => { - const name = `${operation}_${deviceId}` - promiseOperations[name] = { - deviceId, - operation, - } - return p.then((result) => { - delete promiseOperations[name] - return result - }) - } - const deviceOptions = new Map() - if (peripheralDevice) { + const connections: Record = {} const devices = peripheralDevice.playoutDevices for (const [deviceId, device0] of Object.entries(devices)) { - const device = device0 - if (!device.disable) { - deviceOptions.set(deviceId, device) - } - } - - for (const [deviceId, orgDeviceOptions] of deviceOptions.entries()) { - const oldDevice: BaseRemoteDeviceIntegration | undefined = this.tsr.getDevice( - deviceId, - true - ) + if (device0.disable) continue const deviceOptions = _.extend( { @@ -712,9 +671,8 @@ export class TSRHandler { limitSlowFulfilledCommand: 100, options: {}, }, - this.populateDefaultValuesIfMissing(orgDeviceOptions) + this.populateDefaultValuesIfMissing(device0) ) - if (this._multiThreaded !== null && deviceOptions.isMultiThreaded === undefined) { deviceOptions.isMultiThreaded = this._multiThreaded } @@ -722,130 +680,11 @@ export class TSRHandler { deviceOptions.reportAllCommands = this._reportAllCommands } - if (!oldDevice) { - if (deviceOptions.options) { - this.logger.info('Initializing device: ' + deviceId) - this.logger.info('new', deviceOptions) - 
ps.push(keepTrack(this._addDevice(deviceId, deviceOptions), deviceId, DeviceAction.ADD)) - } - } else { - if (deviceOptions.options) { - let anyChanged = false - - if ( - // Changing the debug flag shouldn't restart the device: - !_.isEqual(_.omit(oldDevice.deviceOptions, 'debug'), _.omit(deviceOptions, 'debug')) - ) { - anyChanged = true - } - - if (anyChanged) { - deviceOptions.debug = this.getDeviceDebug(orgDeviceOptions) - - this.logger.info('Re-initializing device: ' + deviceId) - this.logger.info('old', oldDevice.deviceOptions) - this.logger.info('new', deviceOptions) - ps.push( - keepTrack(this._removeDevice(deviceId), deviceId, DeviceAction.REMOVE).then(async () => - keepTrack(this._addDevice(deviceId, deviceOptions), deviceId, DeviceAction.READD) - ) - ) - } - } - } + connections[deviceId] = deviceOptions } - for (const oldDevice of this.tsr.getDevices()) { - const deviceId = oldDevice.deviceId - if (!deviceOptions.has(deviceId)) { - this.logger.info('Un-initializing device: ' + deviceId) - ps.push(keepTrack(this._removeDevice(deviceId), deviceId, DeviceAction.REMOVE)) - } - } + this.tsr.connectionManager.setConnections(connections) } - - const resultsOrTimeout = await Promise.race([ - Promise.all(ps).then((results) => ({ - success: true, - results, - })), - new Promise((resolve) => - setTimeout(() => { - const keys = Object.keys(promiseOperations) - if (keys.length) { - this.logger.warn( - `Timeout in _updateDevices: ${Object.values<{ deviceId: string; operation: DeviceAction }>( - promiseOperations - ) - .map((op) => op.deviceId) - .join(',')}` - ) - } - - Promise.all( - // At this point in time, promiseOperations contains the promises that have timed out. - // If we tried to add or re-add a device, that apparently failed so we should remove the device in order to - // give it another chance next time _updateDevices() is called. 
- Object.values<{ deviceId: string; operation: DeviceAction }>(promiseOperations) - .filter((op) => op.operation === DeviceAction.ADD || op.operation === DeviceAction.READD) - .map(async (op) => - // the device was never added, should retry next round - this._removeDevice(op.deviceId) - ) - ) - .catch((e) => { - this.logger.error( - `Error when trying to remove unsuccessfully initialized devices: ${stringifyIds( - Object.values<{ deviceId: string; operation: DeviceAction }>(promiseOperations).map( - (op) => op.deviceId - ) - )}`, - e - ) - }) - .finally(() => { - resolve({ - success: false, - reason: 'error', - details: keys, - }) - }) - }, INIT_TIMEOUT) - ), // Timeout if not all are resolved within INIT_TIMEOUT - ]) - - await this._reportResult(resultsOrTimeout) - - const debugLoggingPs: Promise[] = [] - // Set logDebug on the devices: - for (const device of this.tsr.getDevices()) { - const options: DeviceOptionsAny | undefined = deviceOptions.get(device.deviceId) - if (!options) { - continue - } - const debug: boolean = this.getDeviceDebug(options) - if (device.debugLogging !== debug) { - this.logger.info(`Setting logDebug of device ${device.deviceId} to ${debug}`) - debugLoggingPs.push(device.setDebugLogging(debug)) - } - } - // Set debugState on devices: - for (const device of this.tsr.getDevices()) { - const options: DeviceOptionsAny | undefined = deviceOptions.get(device.deviceId) - if (!options) { - continue - } - - const debug: boolean = this.getDeviceDebugState(options) - if (device.debugState !== debug) { - this.logger.info(`Setting debugState of device ${device.deviceId} to ${debug}`) - debugLoggingPs.push(device.setDebugState(debug)) - } - } - await Promise.all(debugLoggingPs) - - this._triggerupdateExpectedPlayoutItems() // So that any recently created devices will get all the ExpectedPlayoutItems - this.logger.debug('updateDevices end') } private populateDefaultValuesIfMissing(deviceOptions: DeviceOptionsAny): DeviceOptionsAny { @@ -855,303 +694,6 @@ 
export class TSRHandler { deviceOptions.options = { ...this.defaultDeviceOptions[deviceOptions.type], ...options } return deviceOptions } - - private getDeviceDebug(deviceOptions: DeviceOptionsAny): boolean { - return deviceOptions.debug || this._coreHandler.logDebug || false - } - private getDeviceDebugState(deviceOptions: DeviceOptionsAny): boolean { - return (deviceOptions.debugState && this._coreHandler.logState) || false - } - private async _reportResult(resultsOrTimeout: UpdateDeviceOperationsResult): Promise { - this.logger.warn(JSON.stringify(resultsOrTimeout)) - // Check if the updateDevice operation failed before completing - if (!resultsOrTimeout.success) { - // It failed because there was a global timeout (not a device-specific failure) - if (resultsOrTimeout.reason === 'timeout') { - await this._coreHandler.core.setStatus({ - statusCode: StatusCode.FATAL, - messages: [ - `Time-out during device update. Timed-out on devices: ${stringifyIds( - resultsOrTimeout.details - )}`, - ], - }) - // It failed for an unknown reason - } else { - await this._coreHandler.core.setStatus({ - statusCode: StatusCode.BAD, - messages: [ - `Unknown error during device update: ${resultsOrTimeout.reason}. 
Devices: ${stringifyIds( - resultsOrTimeout.details - )}`, - ], - }) - } - - return - } - - // updateDevice finished successfully, let's see if any of the individual devices failed - const failures = resultsOrTimeout.results.filter((result) => !result.success) - // Group the failures according to what sort of an operation was executed - const addFailureDeviceIds = failures - .filter((failure) => failure.action === DeviceAction.ADD) - .map((failure) => failure.deviceId) - const removeFailureDeviceIds = failures - .filter((failure) => failure.action === DeviceAction.REMOVE) - .map((failure) => failure.deviceId) - - // There were no failures, good - if (failures.length === 0) { - await this._coreHandler.core.setStatus({ - statusCode: StatusCode.GOOD, - messages: [], - }) - return - } - // Something did fail, let's report it as the status - await this._coreHandler.core.setStatus({ - statusCode: StatusCode.BAD, - messages: [ - addFailureDeviceIds.length > 0 - ? `Unable to initialize devices, check configuration: ${stringifyIds(addFailureDeviceIds)}` - : null, - removeFailureDeviceIds.length > 0 - ? 
`Failed to remove devices: ${stringifyIds(removeFailureDeviceIds)}` - : null, - ].filter(Boolean) as string[], - }) - } - - private async _addDevice(deviceId: string, options: DeviceOptionsAny): Promise { - this.logger.debug('Adding device ' + deviceId) - - try { - if (this._coreTsrHandlers[deviceId]) { - throw new Error(`There is already a _coreTsrHandlers for deviceId "${deviceId}"!`) - } - - const devicePr: Promise> = this.tsr.createDevice( - deviceId, - options - ) - - const coreTsrHandler = new CoreTSRDeviceHandler(this._coreHandler, devicePr, deviceId, this) - - this._coreTsrHandlers[deviceId] = coreTsrHandler - - // set the status to uninitialized for now: - coreTsrHandler.statusChanged({ - statusCode: StatusCode.BAD, - messages: ['Device initialising...'], - }) - - const device = await devicePr - - // Set up device status - const deviceType = device.deviceType - - const onDeviceStatusChanged = (connectedOrStatus: Partial) => { - let deviceStatus: Partial - if (_.isBoolean(connectedOrStatus)) { - // for backwards compability, to be removed later - if (connectedOrStatus) { - deviceStatus = { - statusCode: StatusCode.GOOD, - } - } else { - deviceStatus = { - statusCode: StatusCode.BAD, - messages: ['Disconnected'], - } - } - } else { - deviceStatus = connectedOrStatus - } - coreTsrHandler.statusChanged(deviceStatus) - - // When the status has changed, the deviceName might have changed: - device.reloadProps().catch((err) => { - this.logger.error(`Error in reloadProps: ${stringifyError(err)}`) - }) - // hack to make sure atem has media after restart - if ( - (deviceStatus.statusCode === StatusCode.GOOD || - deviceStatus.statusCode === StatusCode.WARNING_MINOR || - deviceStatus.statusCode === StatusCode.WARNING_MAJOR) && - deviceType === DeviceType.ATEM && - !disableAtemUpload - ) { - const assets = (options as DeviceOptionsAtem).options?.mediaPoolAssets - if (assets && assets.length > 0) { - try { - this.uploadFilesToAtem( - device, - assets.filter((asset) => 
_.isNumber(asset.position) && asset.path) - ) - } catch (e) { - // don't worry about it. - } - } - } - } - const onSlowSentCommand = (info: SlowSentCommandInfo) => { - // If the internalDelay is too large, it should be logged as an error, - // since something took too long internally. - - if (info.internalDelay > 100) { - this.logger.error('slowSentCommand', { - deviceName: device.deviceName, - ...info, - }) - } else { - this.logger.warn('slowSentCommand', { - deviceName: device.deviceName, - ...info, - }) - } - } - const onSlowFulfilledCommand = (info: SlowFulfilledCommandInfo) => { - // Note: we don't emit slow fulfilled commands as error, since - // the fulfillment of them lies on the device being controlled, not on us. - - this.logger.warn('slowFulfilledCommand', { - deviceName: device.deviceName, - ...info, - }) - } - const onCommandReport = (commandReport: CommandReport) => { - if (this._reportAllCommands) { - // Todo: send these to Core - this.logger.info('commandReport', { - commandReport: commandReport, - }) - } - } - const onCommandError = (error: any, context: any) => { - // todo: handle this better - this.logger.error(fixError(error), { context }) - } - const onUpdateMediaObject = (collectionId: string, docId: string, doc: MediaObject | null) => { - coreTsrHandler.onUpdateMediaObject(collectionId, docId, doc) - } - const onClearMediaObjectCollection = (collectionId: string) => { - coreTsrHandler.onClearMediaObjectCollection(collectionId) - } - const fixLog = (e: string): string => `Device "${device.deviceName || deviceId}" (${device.instanceId})` + e - const fixError = (e: Error): any => { - const name = `Device "${device.deviceName || deviceId}" (${device.instanceId})` - - return { - message: e.message && name + ': ' + e.message, - name: e.name && name + ': ' + e.name, - stack: e.stack && e.stack + '\nAt device' + name, - } - } - const fixContext = (...context: any[]): any => { - return { - context, - } - } - await coreTsrHandler.init() - - 
device.onChildClose = () => { - // Called if a child is closed / crashed - this.logger.warn(`Child of device ${deviceId} closed/crashed`) - debug(`Trigger update devices because "${deviceId}" process closed`) - - onDeviceStatusChanged({ - statusCode: StatusCode.BAD, - messages: ['Child process closed'], - }) - - this._removeDevice(deviceId).then( - () => { - this._triggerUpdateDevices() - }, - () => { - this._triggerUpdateDevices() - } - ) - } - - await addListenerToDevice(device, 'connectionChanged', onDeviceStatusChanged) - // await addListenerToDevice(device, 'slowCommand', onSlowCommand) - await addListenerToDevice(device, 'slowSentCommand', onSlowSentCommand) - await addListenerToDevice(device, 'slowFulfilledCommand', onSlowFulfilledCommand) - await addListenerToDevice(device, 'commandError', onCommandError) - await addListenerToDevice(device, 'commandReport', onCommandReport) - await addListenerToDevice(device, 'updateMediaObject', onUpdateMediaObject) - await addListenerToDevice(device, 'clearMediaObjects', onClearMediaObjectCollection) - - await addListenerToDevice(device, 'info', (info) => { - this.logger.info(fixLog(info)) - }) - await addListenerToDevice(device, 'warning', (warning: string) => { - this.logger.warn(fixLog(warning)) - }) - await addListenerToDevice(device, 'error', (context, error) => { - this.logger.error(fixError(error), fixContext(context)) - }) - - await addListenerToDevice(device, 'debug', (...args) => { - if (!device.debugLogging && !this._coreHandler.logDebug) { - return - } - if (args.length === 0) { - this.logger.debug('>empty message<') - return - } - const data = args.map((arg) => (typeof arg === 'object' ? 
JSON.stringify(arg) : arg)) - this.logger.debug(`Device "${device.deviceName || deviceId}" (${device.instanceId})`, { data }) - }) - - await addListenerToDevice(device, 'debugState', (...args) => { - if (device.debugState && this._coreHandler.logDebug) { - // Fetch the Id that core knows this device by - const coreId = this._coreTsrHandlers[device.deviceId].core.deviceId - this._debugStates.set(unprotectString(coreId), args[0]) - } - }) - - await addListenerToDevice(device, 'timeTrace', (trace) => sendTrace(trace)) - /* eslint-enable @typescript-eslint/await-thenable */ - - // now initialize it - await this.tsr.initDevice(deviceId, options) - - // also ask for the status now, and update: - onDeviceStatusChanged(await device.device.getStatus()) - return { - action: DeviceAction.ADD, - deviceId, - success: true, - } - } catch (error) { - // Initialization failed, clean up any artifacts and see if we can try again later: - this.logger.error(`Error when adding device "${deviceId}"`, { error }) - debug(`Error when adding device "${deviceId}"`) - try { - await this._removeDevice(deviceId) - } catch (error) { - this.logger.error(`Error when cleaning up after adding device "${deviceId}" error...`, error) - } - - if (!this._triggerUpdateDevicesTimeout) { - this._triggerUpdateDevicesTimeout = setTimeout(() => { - debug(`Trigger updateDevices from failure "${deviceId}"`) - // try again later: - this._triggerUpdateDevices() - }, 10 * 1000) - } - - return { - action: DeviceAction.ADD, - deviceId, - success: false, - } - } - } /** * This function is a quick and dirty solution to load a still to the atem mixers. * This does not serve as a proper implementation! 
And need to be refactor @@ -1174,25 +716,6 @@ export class TSRHandler { process.stderr.on('data', (data) => this.logger.info(data.toString())) process.on('close', () => process.removeAllListeners()) } - private async _removeDevice(deviceId: string): Promise { - let success = false - if (this._coreTsrHandlers[deviceId]) { - try { - await this._coreTsrHandlers[deviceId].dispose('removeSubDevice') - this.logger.debug('Disposed device ' + deviceId) - success = true - } catch (error) { - this.logger.error(`Error when removing device "${deviceId}"`, error) - } - } - delete this._coreTsrHandlers[deviceId] - - return { - deviceId, - action: DeviceAction.REMOVE, - success, - } - } private _triggerupdateExpectedPlayoutItems() { if (!this._initialized) return if (this._triggerupdateExpectedPlayoutItemsTimeout) { @@ -1223,7 +746,7 @@ export class TSRHandler { } await Promise.all( - _.map(this.tsr.getDevices(), async (container) => { + _.map(this.tsr.connectionManager.getConnections(), async (container) => { if (!container.details.supportsExpectedPlayoutItems) { return } @@ -1415,19 +938,3 @@ export function getHash(str: string): string { export function stringifyIds(ids: string[]): string { return ids.map((id) => `"${id}"`).join(', ') } - -async function addListenerToDevice( - device: BaseRemoteDeviceIntegration, - eventName: T, - fcn: EventEmitter.EventListener -): Promise { - // Note for the future: - // It is important that the callbacks returns void, - // otherwise there might be problems with threadedclass! - // Also, it is critical that all of these `.on` calls be `await`ed. - // They aren't typed as promises due to limitations of TypeScript, - // but due to threadedclass they _are_ promises. 
- - const emitterHack = device.device as unknown as EventEmitter - await Promise.resolve(emitterHack.on(eventName, fcn)) -} diff --git a/packages/shared-lib/package.json b/packages/shared-lib/package.json index f0640fd2642..fc35b2014a1 100644 --- a/packages/shared-lib/package.json +++ b/packages/shared-lib/package.json @@ -39,7 +39,7 @@ ], "dependencies": { "@mos-connection/model": "v4.2.0-alpha.1", - "timeline-state-resolver-types": "9.2.0-nightly-release52-20240909-111856-517f0ee37.0", + "timeline-state-resolver-types": "9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0", "tslib": "^2.6.2", "type-fest": "^3.13.1" }, diff --git a/packages/yarn.lock b/packages/yarn.lock index 219c4260db1..c19d693ede7 100644 --- a/packages/yarn.lock +++ b/packages/yarn.lock @@ -5301,7 +5301,7 @@ __metadata: resolution: "@sofie-automation/shared-lib@workspace:shared-lib" dependencies: "@mos-connection/model": v4.2.0-alpha.1 - timeline-state-resolver-types: 9.2.0-nightly-release52-20240909-111856-517f0ee37.0 + timeline-state-resolver-types: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 tslib: ^2.6.2 type-fest: ^3.13.1 languageName: unknown @@ -26055,15 +26055,6 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240909-111856-517f0ee37.0": - version: 9.2.0-nightly-release52-20240909-111856-517f0ee37.0 - resolution: "timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240909-111856-517f0ee37.0" - dependencies: - tslib: ^2.6.3 - checksum: 6f9526e3e60021b722fd152272a7697a2b78517fb800a9e0879170388dcfeaaa2d386f80b9868b20bffe058033c9158077f93294cff7907c5d8a6d7b27e186f6 - languageName: node - linkType: hard - "timeline-state-resolver@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0": version: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 resolution: 
"timeline-state-resolver@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0" From ce6854336f4df8fc4f4b512a3de1f039fc0f1309 Mon Sep 17 00:00:00 2001 From: olzzon Date: Tue, 24 Sep 2024 13:37:14 +0200 Subject: [PATCH 10/81] fix: make openMediaHotStandby option pr subdevice, and rename from hotStandby to openMediaHotStandby --- .../mos-gateway/src/$schemas/devices.json | 40 +++++++++++++------ .../mos-gateway/src/CoreMosDeviceHandler.ts | 8 ++-- packages/mos-gateway/src/coreHandler.ts | 4 +- packages/mos-gateway/src/generated/devices.ts | 2 +- packages/mos-gateway/src/mosHandler.ts | 13 ++++-- 5 files changed, 44 insertions(+), 23 deletions(-) diff --git a/packages/mos-gateway/src/$schemas/devices.json b/packages/mos-gateway/src/$schemas/devices.json index 1bf9b35c5d1..ae44a971995 100644 --- a/packages/mos-gateway/src/$schemas/devices.json +++ b/packages/mos-gateway/src/$schemas/devices.json @@ -61,13 +61,20 @@ "ui:title": "(Optional) MOS Query Port", "ui:description": "Connect to an alternate port for 'query' port MOS messages", "default": 10542 - } + } }, - "required": ["lower", "upper", "query"], + "required": [ + "lower", + "upper", + "query" + ], "additionalProperties": false } }, - "required": ["id", "host"], + "required": [ + "id", + "host" + ], "additionalProperties": false }, "secondary": { @@ -105,10 +112,10 @@ "ui:description": "How often to ping NRCS to determine connection status", "default": 30000 }, - "hotStandby": { + "openMediaHotStandby": { "type": "boolean", - "ui:title": "Secondary: Hot Standby", - "ui:description": "Is the secondary connection a hot standby for the primary", + "ui:title": "Secondary: OpenMedia Hot Standby", + "ui:description": "Is the secondary connection a OpenMedia hot standby for the primary", "default": false }, "ports": { @@ -132,16 +139,25 @@ "ui:title": "(Optional) MOS Query Port", "ui:description": "Connect to an alternate port for 'query' port MOS messages", "default": 10542 - } + } }, - 
"required": ["lower", "upper", "query"], + "required": [ + "lower", + "upper", + "query" + ], "additionalProperties": false - } + } }, - "required": ["id", "host"], + "required": [ + "id", + "host" + ], "additionalProperties": false } }, - "required": ["primary"], + "required": [ + "primary" + ], "additionalProperties": false -} +} \ No newline at end of file diff --git a/packages/mos-gateway/src/CoreMosDeviceHandler.ts b/packages/mos-gateway/src/CoreMosDeviceHandler.ts index cd0d29d6a37..d14598ffaad 100644 --- a/packages/mos-gateway/src/CoreMosDeviceHandler.ts +++ b/packages/mos-gateway/src/CoreMosDeviceHandler.ts @@ -75,15 +75,15 @@ export class CoreMosDeviceHandler { private _pendingStoryItemChanges: Array = [] private _pendingChangeTimeout: number = 60 * 1000 private mosTypes: MosTypes - private _hotStandby: boolean + private _openMediaHotStandby: boolean private _messageQueue: Queue - constructor(parent: CoreHandler, mosDevice: IMOSDevice, mosHandler: MosHandler, hotStandby: boolean) { + constructor(parent: CoreHandler, mosDevice: IMOSDevice, mosHandler: MosHandler, openMediaHotStandby: boolean) { this._coreParentHandler = parent this._mosDevice = mosDevice this._mosHandler = mosHandler - this._hotStandby = hotStandby + this._openMediaHotStandby = openMediaHotStandby this._messageQueue = new Queue() @@ -140,7 +140,7 @@ export class CoreMosDeviceHandler { let statusCode: StatusCode const messages: Array = [] - if (this._hotStandby) { + if (this._openMediaHotStandby) { // OpenMedia treats secondary server as hot-standby // And thus is not considered as a warning if it's not connected if (connectionStatus.PrimaryConnected) { diff --git a/packages/mos-gateway/src/coreHandler.ts b/packages/mos-gateway/src/coreHandler.ts index 502aa756c4d..c693aee6af0 100644 --- a/packages/mos-gateway/src/coreHandler.ts +++ b/packages/mos-gateway/src/coreHandler.ts @@ -145,10 +145,10 @@ export class CoreHandler { async registerMosDevice( mosDevice: IMOSDevice, mosHandler: 
MosHandler, - hotStandby: boolean + openMediaHotStandby: boolean ): Promise { this.logger.info('registerMosDevice -------------') - const coreMos = new CoreMosDeviceHandler(this, mosDevice, mosHandler, hotStandby) + const coreMos = new CoreMosDeviceHandler(this, mosDevice, mosHandler, openMediaHotStandby) this._coreMosHandlers.push(coreMos) return coreMos.init().then(() => { diff --git a/packages/mos-gateway/src/generated/devices.ts b/packages/mos-gateway/src/generated/devices.ts index 59bf8a31b20..f192cf76146 100644 --- a/packages/mos-gateway/src/generated/devices.ts +++ b/packages/mos-gateway/src/generated/devices.ts @@ -24,7 +24,7 @@ export interface MosDeviceConfig { dontUseQueryPort?: boolean timeout?: number heartbeatInterval?: number - hotStandby?: boolean + openMediaHotStandby?: boolean ports?: { lower: number upper: number diff --git a/packages/mos-gateway/src/mosHandler.ts b/packages/mos-gateway/src/mosHandler.ts index 365a674c672..975dada3e37 100644 --- a/packages/mos-gateway/src/mosHandler.ts +++ b/packages/mos-gateway/src/mosHandler.ts @@ -59,7 +59,7 @@ export class MosHandler { private _logger: Winston.Logger private _disposed = false private _settings?: MosGatewayConfig - private _hotStandby: boolean + private _openMediaHotStandby: Record private _coreHandler: CoreHandler | undefined private _observers: Array> = [] private _triggerupdateDevicesTimeout: any = null @@ -67,7 +67,7 @@ export class MosHandler { constructor(logger: Winston.Logger) { this._logger = logger - this._hotStandby = false + this._openMediaHotStandby = {} this.mosTypes = getMosTypes(this.strict) // temporary, another will be set upon init() } async init(config: MosConfig, coreHandler: CoreHandler): Promise { @@ -243,7 +243,11 @@ export class MosHandler { if (!this._coreHandler) throw Error('_coreHandler is undefined!') - const coreMosHandler = await this._coreHandler.registerMosDevice(mosDevice, this, this._hotStandby) + const coreMosHandler = await 
this._coreHandler.registerMosDevice( + mosDevice, + this, + mosDevice.idSecondary ? this._openMediaHotStandby[mosDevice.idSecondary] : false + ) // this._logger.info('mosDevice registered -------------') // Setup message flow between the devices: @@ -420,7 +424,8 @@ export class MosHandler { for (const [deviceId, device] of Object.entries<{ options: MosDeviceConfig }>(devices)) { if (device) { if (device.options.secondary) { - this._hotStandby = device.options.secondary?.hotStandby || false + this._openMediaHotStandby[device.options.secondary.id] = + device.options.secondary?.openMediaHotStandby || false // If the host isn't set, don't use secondary: if (!device.options.secondary.host || !device.options.secondary.id) delete device.options.secondary From 9ff2be134f084f2b9d13c9c6c2907363e4362dd4 Mon Sep 17 00:00:00 2001 From: Mint de Wit Date: Fri, 27 Sep 2024 12:03:05 +0000 Subject: [PATCH 11/81] chore: update tsr --- meteor/yarn.lock | 10 +++++----- packages/playout-gateway/package.json | 2 +- packages/shared-lib/package.json | 2 +- packages/yarn.lock | 26 +++++++++++++------------- 4 files changed, 20 insertions(+), 20 deletions(-) diff --git a/meteor/yarn.lock b/meteor/yarn.lock index 3f7844e680b..ec96c7c0278 100644 --- a/meteor/yarn.lock +++ b/meteor/yarn.lock @@ -1242,7 +1242,7 @@ __metadata: resolution: "@sofie-automation/shared-lib@portal:../packages/shared-lib::locator=automation-core%40workspace%3A." 
dependencies: "@mos-connection/model": v4.2.0-alpha.1 - timeline-state-resolver-types: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 + timeline-state-resolver-types: 9.2.0-nightly-release52-20240923-122840-58cfbb259.0 tslib: ^2.6.2 type-fest: ^3.13.1 languageName: node @@ -10196,12 +10196,12 @@ __metadata: languageName: node linkType: hard -"timeline-state-resolver-types@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0": - version: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 - resolution: "timeline-state-resolver-types@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0" +"timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240923-122840-58cfbb259.0": + version: 9.2.0-nightly-release52-20240923-122840-58cfbb259.0 + resolution: "timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240923-122840-58cfbb259.0" dependencies: tslib: ^2.6.3 - checksum: 08fb51f32cae7ad790ad6a8ec56518e54b280268c83b9614f75c43656b4d0f4f1d00c5637cf5d3a03e8c3e17c93d37ece9a50b07eb4928816f22e1fed25bc2dc + checksum: c041363201bcfc0daac2ebca021b09fddc1f5b12fdeb932d9c19bfadc3ee308aa81f36c74c005edad2e756ed1c6465de779bfca5ed63ffd940878bf015497231 languageName: node linkType: hard diff --git a/packages/playout-gateway/package.json b/packages/playout-gateway/package.json index 54382f65f82..cb6bfdc773f 100644 --- a/packages/playout-gateway/package.json +++ b/packages/playout-gateway/package.json @@ -60,7 +60,7 @@ "@sofie-automation/shared-lib": "1.52.0-in-development", "debug": "^4.3.4", "influx": "^5.9.3", - "timeline-state-resolver": "9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0", + "timeline-state-resolver": "9.2.0-nightly-release52-20240923-122840-58cfbb259.0", "tslib": "^2.6.2", "underscore": "^1.13.6", "winston": "^3.11.0" diff --git a/packages/shared-lib/package.json b/packages/shared-lib/package.json index fc35b2014a1..40c506ca0c8 100644 --- 
a/packages/shared-lib/package.json +++ b/packages/shared-lib/package.json @@ -39,7 +39,7 @@ ], "dependencies": { "@mos-connection/model": "v4.2.0-alpha.1", - "timeline-state-resolver-types": "9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0", + "timeline-state-resolver-types": "9.2.0-nightly-release52-20240923-122840-58cfbb259.0", "tslib": "^2.6.2", "type-fest": "^3.13.1" }, diff --git a/packages/yarn.lock b/packages/yarn.lock index c19d693ede7..d3c39cb5a32 100644 --- a/packages/yarn.lock +++ b/packages/yarn.lock @@ -5301,7 +5301,7 @@ __metadata: resolution: "@sofie-automation/shared-lib@workspace:shared-lib" dependencies: "@mos-connection/model": v4.2.0-alpha.1 - timeline-state-resolver-types: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 + timeline-state-resolver-types: 9.2.0-nightly-release52-20240923-122840-58cfbb259.0 tslib: ^2.6.2 type-fest: ^3.13.1 languageName: unknown @@ -21739,7 +21739,7 @@ asn1@evs-broadcast/node-asn1: "@sofie-automation/shared-lib": 1.52.0-in-development debug: ^4.3.4 influx: ^5.9.3 - timeline-state-resolver: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 + timeline-state-resolver: 9.2.0-nightly-release52-20240923-122840-58cfbb259.0 tslib: ^2.6.2 underscore: ^1.13.6 winston: ^3.11.0 @@ -25655,7 +25655,7 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"superfly-timeline@npm:9.0.1, superfly-timeline@npm:^9.0.0": +"superfly-timeline@npm:9.0.1, superfly-timeline@npm:^9.0.1": version: 9.0.1 resolution: "superfly-timeline@npm:9.0.1" dependencies: @@ -26046,18 +26046,18 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"timeline-state-resolver-types@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0": - version: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 - resolution: 
"timeline-state-resolver-types@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0" +"timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240923-122840-58cfbb259.0": + version: 9.2.0-nightly-release52-20240923-122840-58cfbb259.0 + resolution: "timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240923-122840-58cfbb259.0" dependencies: tslib: ^2.6.3 - checksum: 08fb51f32cae7ad790ad6a8ec56518e54b280268c83b9614f75c43656b4d0f4f1d00c5637cf5d3a03e8c3e17c93d37ece9a50b07eb4928816f22e1fed25bc2dc + checksum: c041363201bcfc0daac2ebca021b09fddc1f5b12fdeb932d9c19bfadc3ee308aa81f36c74c005edad2e756ed1c6465de779bfca5ed63ffd940878bf015497231 languageName: node linkType: hard -"timeline-state-resolver@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0": - version: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 - resolution: "timeline-state-resolver@npm:9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0" +"timeline-state-resolver@npm:9.2.0-nightly-release52-20240923-122840-58cfbb259.0": + version: 9.2.0-nightly-release52-20240923-122840-58cfbb259.0 + resolution: "timeline-state-resolver@npm:9.2.0-nightly-release52-20240923-122840-58cfbb259.0" dependencies: "@tv2media/v-connection": ^7.3.4 atem-connection: 3.5.0 @@ -26080,9 +26080,9 @@ asn1@evs-broadcast/node-asn1: p-timeout: ^3.2.0 simple-oauth2: ^5.1.0 sprintf-js: ^1.1.3 - superfly-timeline: ^9.0.0 + superfly-timeline: ^9.0.1 threadedclass: ^1.2.2 - timeline-state-resolver-types: 9.2.0-nightly-feat-connection-mgmt-SOFIE-1152-20240909-113333-004243e02.0 + timeline-state-resolver-types: 9.2.0-nightly-release52-20240923-122840-58cfbb259.0 tslib: ^2.6.3 tv-automation-quantel-gateway-client: ^3.1.7 type-fest: ^3.13.1 @@ -26090,7 +26090,7 @@ asn1@evs-broadcast/node-asn1: utf-8-validate: ^6.0.4 ws: ^8.18.0 xml-js: ^1.6.11 - checksum: 
9f1bfb8e43ec26b610ec21f25d1f28d39da32458b2209a96ee48863f27fa5cd218fea2a2802cbf7e810762dec185a1b9e25057edda9dedd53ccbdbd59b776e95 + checksum: a127cd66d96f06bae3ff16291bc1be4cd1c6589c8843632c489e2432df2b34789adc62db0826d7069bb6ff1b4a8c56e0f37ffe5a17d9a92b8f3533963e0bdb71 languageName: node linkType: hard From bcb9edf54739d51cebf120b44702beb258ed2003 Mon Sep 17 00:00:00 2001 From: Kasper Olsson Hans Date: Mon, 23 Sep 2024 17:09:30 +0200 Subject: [PATCH 12/81] feat: config package manager from blueprints --- meteor/__mocks__/defaultCollectionObjects.ts | 2 +- meteor/server/api/rest/v1/typeConversion.ts | 2 +- meteor/server/api/studio/api.ts | 2 +- meteor/server/migration/0_1_0.ts | 2 +- meteor/server/migration/X_X_X.ts | 43 +- .../migration/__tests__/migrations.test.ts | 6 +- .../expectedPackages/generate.ts | 29 +- .../expectedPackages/publication.ts | 4 +- .../packageManager/packageContainers.ts | 8 +- .../__tests__/checkPieceContentStatus.test.ts | 10 +- .../checkPieceContentStatus.ts | 14 +- .../pieceContentStatusUI/common.ts | 6 +- .../blueprints-integration/src/api/studio.ts | 3 + packages/corelib/src/dataModel/Studio.ts | 12 +- .../src/__mocks__/defaultCollectionObjects.ts | 2 +- packages/job-worker/src/playout/upgrade.ts | 12 + .../src/core/model/PackageContainer.ts | 7 + .../src/__mocks__/defaultCollectionObjects.ts | 2 +- .../lib/Components/LabelAndOverrides.tsx | 41 + .../ui/Settings/Studio/PackageManager.tsx | 846 ------------------ .../Studio/PackageManager/AccessorTable.tsx | 77 ++ .../PackageManager/AccessorTableRow.tsx | 565 ++++++++++++ .../PackageContainerPickers.tsx | 75 ++ .../PackageManager/PackageContainers.tsx | 331 +++++++ .../Settings/Studio/PackageManager/index.tsx | 37 + 25 files changed, 1244 insertions(+), 894 deletions(-) create mode 100644 packages/shared-lib/src/core/model/PackageContainer.ts delete mode 100644 packages/webui/src/client/ui/Settings/Studio/PackageManager.tsx create mode 100644 
packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTable.tsx create mode 100644 packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTableRow.tsx create mode 100644 packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainerPickers.tsx create mode 100644 packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainers.tsx create mode 100644 packages/webui/src/client/ui/Settings/Studio/PackageManager/index.tsx diff --git a/meteor/__mocks__/defaultCollectionObjects.ts b/meteor/__mocks__/defaultCollectionObjects.ts index bab46f41aac..e254257340a 100644 --- a/meteor/__mocks__/defaultCollectionObjects.ts +++ b/meteor/__mocks__/defaultCollectionObjects.ts @@ -114,7 +114,7 @@ export function defaultStudio(_id: StudioId): DBStudio { _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), - packageContainers: {}, + packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { diff --git a/meteor/server/api/rest/v1/typeConversion.ts b/meteor/server/api/rest/v1/typeConversion.ts index b8788de6e36..a9c43b9bf04 100644 --- a/meteor/server/api/rest/v1/typeConversion.ts +++ b/meteor/server/api/rest/v1/typeConversion.ts @@ -279,7 +279,7 @@ export async function studioFrom(apiStudio: APIStudio, existingId?: StudioId): P routeSetsWithOverrides: wrapDefaultObject({}), _rundownVersionHash: '', routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), - packageContainers: {}, + packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { diff --git a/meteor/server/api/studio/api.ts b/meteor/server/api/studio/api.ts index 4a646967d28..70e7699f80a 100644 --- a/meteor/server/api/studio/api.ts +++ b/meteor/server/api/studio/api.ts @@ -52,7 +52,7 @@ export async function insertStudioInner(organizationId: 
OrganizationId | null, n _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), - packageContainers: {}, + packageContainersWithOverrides: wrapDefaultObject({}), thumbnailContainerIds: [], previewContainerIds: [], peripheralDeviceSettings: { diff --git a/meteor/server/migration/0_1_0.ts b/meteor/server/migration/0_1_0.ts index 15aafc34517..80247e212bf 100644 --- a/meteor/server/migration/0_1_0.ts +++ b/meteor/server/migration/0_1_0.ts @@ -447,7 +447,7 @@ export const addSteps = addMigrationSteps('0.1.0', [ _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), - packageContainers: {}, + packageContainersWithOverrides: wrapDefaultObject({}), thumbnailContainerIds: [], previewContainerIds: [], peripheralDeviceSettings: { diff --git a/meteor/server/migration/X_X_X.ts b/meteor/server/migration/X_X_X.ts index 06d2547dbb0..b21dab93495 100644 --- a/meteor/server/migration/X_X_X.ts +++ b/meteor/server/migration/X_X_X.ts @@ -2,7 +2,11 @@ import { addMigrationSteps } from './databaseMigration' import { CURRENT_SYSTEM_VERSION } from './currentSystemVersion' import { Studios } from '../collections' import { convertObjectIntoOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { StudioRouteSet, StudioRouteSetExclusivityGroup } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { + StudioRouteSet, + StudioRouteSetExclusivityGroup, + StudioPackageContainer, +} from '@sofie-automation/corelib/dist/dataModel/Studio' /* * ************************************************************************************** @@ -92,4 +96,41 @@ export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [ } }, }, + { + id: `convert packageContainers to ObjectWithOverrides`, + canBeRunAutomatically: true, + validate: async () => { + const studios = await Studios.findFetchAsync({ 
packageContainers: { $exists: true } }) + + for (const studio of studios) { + //@ts-expect-error packageContainers is not typed as ObjectWithOverrides + if (studio.packageContainers) { + return 'packageContainers must be converted to an ObjectWithOverrides' + } + } + + return false + }, + migrate: async () => { + const studios = await Studios.findFetchAsync({ packageContainers: { $exists: true } }) + + for (const studio of studios) { + //@ts-expect-error packageContainers is not typed as ObjectWithOverrides + if (!studio.packageContainers) continue + //@ts-expect-error packageContainers is not typed as ObjectWithOverrides + const oldPackageContainers = studio.packageContainers as any as Record + + const newPackageContainers = convertObjectIntoOverrides(oldPackageContainers) + + await Studios.updateAsync(studio._id, { + $set: { + packageContainersWithOverrides: newPackageContainers, + }, + $unset: { + packageContainers: 1, + }, + }) + } + }, + }, ]) diff --git a/meteor/server/migration/__tests__/migrations.test.ts b/meteor/server/migration/__tests__/migrations.test.ts index 3278b7a9eeb..f4ec9ac82c7 100644 --- a/meteor/server/migration/__tests__/migrations.test.ts +++ b/meteor/server/migration/__tests__/migrations.test.ts @@ -132,7 +132,7 @@ describe('Migrations', () => { _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), - packageContainers: {}, + packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { @@ -170,7 +170,7 @@ describe('Migrations', () => { _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), - packageContainers: {}, + packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { @@ -208,7 +208,7 @@ describe('Migrations', () => { 
_rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), - packageContainers: {}, + packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { diff --git a/meteor/server/publications/packageManager/expectedPackages/generate.ts b/meteor/server/publications/packageManager/expectedPackages/generate.ts index 4850ceb301e..b84c6482a1f 100644 --- a/meteor/server/publications/packageManager/expectedPackages/generate.ts +++ b/meteor/server/publications/packageManager/expectedPackages/generate.ts @@ -17,6 +17,7 @@ import { CustomPublishCollection } from '../../../lib/customPublication' import { logger } from '../../../logging' import { ExpectedPackagesContentCache } from './contentCache' import type { StudioFields } from './publication' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' /** * Regenerate the output for the provided ExpectedPackage `regenerateIds`, updating the data in `collection` as needed @@ -37,6 +38,7 @@ export async function updateCollectionForExpectedPackageIds( ): Promise { const updatedDocIds = new Set() const missingExpectedPackageIds = new Set() + const packageContainers = applyAndValidateOverrides(studio.packageContainersWithOverrides).obj for (const packageId of regenerateIds) { const packageDoc = contentCache.ExpectedPackages.findOne(packageId) @@ -66,7 +68,8 @@ export async function updateCollectionForExpectedPackageIds( }, deviceId, null, - Priorities.OTHER // low priority + Priorities.OTHER, // low priority + packageContainers ) updatedDocIds.add(routedPackage._id) @@ -105,6 +108,7 @@ export async function updateCollectionForPieceInstanceIds( ): Promise { const updatedDocIds = new Set() const missingPieceInstanceIds = new Set() + const packageContainers = applyAndValidateOverrides(studio.packageContainersWithOverrides).obj for (const 
pieceInstanceId of regenerateIds) { const pieceInstanceDoc = contentCache.PieceInstances.findOne(pieceInstanceId) @@ -140,7 +144,8 @@ export async function updateCollectionForPieceInstanceIds( }, deviceId, pieceInstanceId, - Priorities.OTHER // low priority + Priorities.OTHER, // low priority + packageContainers ) updatedDocIds.add(routedPackage._id) @@ -172,17 +177,21 @@ enum Priorities { } function generateExpectedPackageForDevice( - studio: Pick, + studio: Pick< + StudioLight, + '_id' | 'packageContainersWithOverrides' | 'previewContainerIds' | 'thumbnailContainerIds' + >, expectedPackage: PackageManagerExpectedPackageBase, deviceId: PeripheralDeviceId, pieceInstanceId: PieceInstanceId | null, - priority: Priorities + priority: Priorities, + packageContainers: Record ): PackageManagerExpectedPackage { // Lookup Package sources: const combinedSources: PackageContainerOnPackage[] = [] for (const packageSource of expectedPackage.sources) { - const lookedUpSource = studio.packageContainers[packageSource.containerId] + const lookedUpSource = packageContainers[packageSource.containerId] if (lookedUpSource) { combinedSources.push(calculateCombinedSource(packageSource, lookedUpSource)) } else { @@ -199,7 +208,7 @@ function generateExpectedPackageForDevice( } // Lookup Package targets: - const combinedTargets = calculateCombinedTargets(studio, expectedPackage, deviceId) + const combinedTargets = calculateCombinedTargets(expectedPackage, deviceId, packageContainers) if (!combinedSources.length && expectedPackage.sources.length !== 0) { logger.warn(`Pub.expectedPackagesForDevice: No sources found for "${expectedPackage._id}"`) @@ -253,14 +262,14 @@ function calculateCombinedSource( return combinedSource } function calculateCombinedTargets( - studio: Pick, expectedPackage: PackageManagerExpectedPackageBase, - deviceId: PeripheralDeviceId + deviceId: PeripheralDeviceId, + packageContainers: Record ): PackageContainerOnPackage[] { const mappingDeviceId = 
unprotectString(deviceId) let packageContainerId: string | undefined - for (const [containerId, packageContainer] of Object.entries(studio.packageContainers)) { + for (const [containerId, packageContainer] of Object.entries(packageContainers)) { if (packageContainer.deviceIds.includes(mappingDeviceId)) { // TODO: how to handle if a device has multiple containers? packageContainerId = containerId @@ -270,7 +279,7 @@ function calculateCombinedTargets( const combinedTargets: PackageContainerOnPackage[] = [] if (packageContainerId) { - const lookedUpTarget = studio.packageContainers[packageContainerId] + const lookedUpTarget = packageContainers[packageContainerId] if (lookedUpTarget) { // Todo: should the be any combination of properties here? combinedTargets.push({ diff --git a/meteor/server/publications/packageManager/expectedPackages/publication.ts b/meteor/server/publications/packageManager/expectedPackages/publication.ts index 969786b2b44..02455803a9d 100644 --- a/meteor/server/publications/packageManager/expectedPackages/publication.ts +++ b/meteor/server/publications/packageManager/expectedPackages/publication.ts @@ -56,14 +56,14 @@ export type StudioFields = | '_id' | 'routeSetsWithOverrides' | 'mappingsWithOverrides' - | 'packageContainers' + | 'packageContainersWithOverrides' | 'previewContainerIds' | 'thumbnailContainerIds' const studioFieldSpecifier = literal>>({ _id: 1, routeSetsWithOverrides: 1, mappingsWithOverrides: 1, - packageContainers: 1, + packageContainersWithOverrides: 1, previewContainerIds: 1, thumbnailContainerIds: 1, }) diff --git a/meteor/server/publications/packageManager/packageContainers.ts b/meteor/server/publications/packageManager/packageContainers.ts index a479f8d66ad..8c43c0a6111 100644 --- a/meteor/server/publications/packageManager/packageContainers.ts +++ b/meteor/server/publications/packageManager/packageContainers.ts @@ -15,11 +15,12 @@ import { PeripheralDevicePubSub, PeripheralDevicePubSubCollectionsNames, } from 
'@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -type StudioFields = '_id' | 'packageContainers' +type StudioFields = '_id' | 'packageContainersWithOverrides' const studioFieldSpecifier = literal>>({ _id: 1, - packageContainers: 1, + packageContainersWithOverrides: 1, }) interface PackageManagerPackageContainersArgs { @@ -68,8 +69,9 @@ async function manipulateExpectedPackagesPublicationData( const packageContainers: { [containerId: string]: PackageContainer } = {} if (studio) { + const studioPackageContainers = applyAndValidateOverrides(studio.packageContainersWithOverrides).obj for (const [containerId, studioPackageContainer] of Object.entries( - studio.packageContainers + studioPackageContainers )) { packageContainers[containerId] = studioPackageContainer.container } diff --git a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts index 9d4138eadb7..f0d34355f54 100644 --- a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts +++ b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts @@ -5,6 +5,7 @@ import { getMediaObjectMediaId, PieceContentStreamInfo, checkPieceContentStatusAndDependencies, + PieceContentStatusStudio, } from '../checkPieceContentStatus' import { PackageInfo, @@ -31,12 +32,10 @@ import { MediaStream, MediaStreamType, } from '@sofie-automation/shared-lib/dist/core/model/MediaObjects' -import { UIStudio } from '@sofie-automation/meteor-lib/dist/api/studios' import { defaultStudio } from '../../../../__mocks__/defaultCollectionObjects' import { testInFiber } from '../../../../__mocks__/helpers/jest' import { MediaObjects } from '../../../collections' import { PieceDependencies } from '../common' -import { DBStudio } from 
'@sofie-automation/corelib/dist/dataModel/Studio' import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/core/constants' const mockMediaObjectsCollection = MongoMock.getInnerMockCollection(MediaObjects) @@ -174,17 +173,14 @@ describe('lib/mediaObjects', () => { } const mockDefaultStudio = defaultStudio(protectString('studio0')) - const mockStudio: Complete< - Pick & - Pick - > = { + const mockStudio: Complete = { _id: mockDefaultStudio._id, settings: mockStudioSettings, - packageContainers: mockDefaultStudio.packageContainers, previewContainerIds: ['previews0'], thumbnailContainerIds: ['thumbnails0'], routeSets: applyAndValidateOverrides(mockDefaultStudio.routeSetsWithOverrides).obj, mappings: applyAndValidateOverrides(mockDefaultStudio.mappingsWithOverrides).obj, + packageContainers: applyAndValidateOverrides(mockDefaultStudio.packageContainersWithOverrides).obj, } mockMediaObjectsCollection.insert( diff --git a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts index e90b34a2d8a..987d8650315 100644 --- a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts +++ b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts @@ -173,11 +173,15 @@ export type PieceContentStatusPiece = Pick { + extends Pick { /** Mappings between the physical devices / outputs and logical ones */ mappings: MappingsExt /** Route sets with overrides */ routeSets: Record + /** Contains settings for which Package Containers are present in the studio. 
+ * (These are used by the Package Manager and the Expected Packages) + */ + packageContainers: Record } export async function checkPieceContentStatusAndDependencies( @@ -557,7 +561,7 @@ async function checkPieceContentExpectedPackageStatus( const sideEffect = getSideEffect(expectedPackage, studio) thumbnailUrl = await getAssetUrlFromPackageContainerStatus( - studio, + studio.packageContainers, getPackageContainerPackageStatus, expectedPackageId, sideEffect.thumbnailContainerId, @@ -569,7 +573,7 @@ async function checkPieceContentExpectedPackageStatus( const sideEffect = getSideEffect(expectedPackage, studio) previewUrl = await getAssetUrlFromPackageContainerStatus( - studio, + studio.packageContainers, getPackageContainerPackageStatus, expectedPackageId, sideEffect.previewContainerId, @@ -716,7 +720,7 @@ async function checkPieceContentExpectedPackageStatus( } async function getAssetUrlFromPackageContainerStatus( - studio: PieceContentStatusStudio, + packageContainers: Record, getPackageContainerPackageStatus: ( packageContainerId: string, expectedPackageId: ExpectedPackageId @@ -727,7 +731,7 @@ async function getAssetUrlFromPackageContainerStatus( ): Promise { if (!assetContainerId || !packageAssetPath) return - const assetPackageContainer = studio.packageContainers[assetContainerId] + const assetPackageContainer = packageContainers[assetContainerId] if (!assetPackageContainer) return const previewPackageOnPackageContainer = await getPackageContainerPackageStatus(assetContainerId, expectedPackageId) diff --git a/meteor/server/publications/pieceContentStatusUI/common.ts b/meteor/server/publications/pieceContentStatusUI/common.ts index f271150973a..591f1eb16ec 100644 --- a/meteor/server/publications/pieceContentStatusUI/common.ts +++ b/meteor/server/publications/pieceContentStatusUI/common.ts @@ -14,7 +14,7 @@ import { PieceContentStatusStudio } from './checkPieceContentStatus' export type StudioFields = | '_id' | 'settings' - | 'packageContainers' + | 
'packageContainersWithOverrides' | 'previewContainerIds' | 'thumbnailContainerIds' | 'mappingsWithOverrides' @@ -22,7 +22,7 @@ export type StudioFields = export const studioFieldSpecifier = literal>>({ _id: 1, settings: 1, - packageContainers: 1, + packageContainersWithOverrides: 1, previewContainerIds: 1, thumbnailContainerIds: 1, mappingsWithOverrides: 1, @@ -113,10 +113,10 @@ export async function fetchStudio(studioId: StudioId): Promise extends BlueprintManifestBase { @@ -116,6 +117,8 @@ export interface BlueprintResultApplyStudioConfig { routeSets?: Record /** Route Set Exclusivity Groups */ routeSetExclusivityGroups?: Record + /** Package Containers */ + packageContainers?: Record } export interface IStudioConfigPreset { diff --git a/packages/corelib/src/dataModel/Studio.ts b/packages/corelib/src/dataModel/Studio.ts index 13158954666..c3f8fd2c533 100644 --- a/packages/corelib/src/dataModel/Studio.ts +++ b/packages/corelib/src/dataModel/Studio.ts @@ -1,4 +1,4 @@ -import { IBlueprintConfig, PackageContainer, TSR } from '@sofie-automation/blueprints-integration' +import { IBlueprintConfig, TSR } from '@sofie-automation/blueprints-integration' import { ObjectWithOverrides } from '../settings/objectWithOverrides' import { StudioId, OrganizationId, BlueprintId, ShowStyleBaseId, MappingsHash, PeripheralDeviceId } from './Ids' import { BlueprintHash, LastBlueprintConfig } from './Blueprint' @@ -13,6 +13,7 @@ import { StudioRouteSetExclusivityGroup, StudioRouteType, } from '@sofie-automation/shared-lib/dist/core/model/StudioRouteSet' +import { StudioPackageContainer } from '@sofie-automation/shared-lib/dist/core/model/PackageContainer' export { MappingsExt, MappingExt, MappingsHash } @@ -26,6 +27,7 @@ export { ResultingMappingRoutes, StudioRouteSet, StudioRouteType, + StudioPackageContainer, } export interface IStudioSettings { @@ -128,7 +130,7 @@ export interface DBStudio { /** Contains settings for which Package Containers are present in the studio. 
* (These are used by the Package Manager and the Expected Packages) */ - packageContainers: Record + packageContainersWithOverrides: ObjectWithOverrides> /** Which package containers is used for media previews in GUI */ previewContainerIds: string[] @@ -182,9 +184,3 @@ export interface StudioPlayoutDevice { options: TSR.DeviceOptionsAny } - -export interface StudioPackageContainer { - /** List of which peripheraldevices uses this packageContainer */ - deviceIds: string[] - container: PackageContainer -} diff --git a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts index 8f1ce6389f0..171a929dba7 100644 --- a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts @@ -110,7 +110,7 @@ export function defaultStudio(_id: StudioId): DBStudio { }, routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), - packageContainers: {}, + packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { diff --git a/packages/job-worker/src/playout/upgrade.ts b/packages/job-worker/src/playout/upgrade.ts index 25a38d48132..94acc2a1a57 100644 --- a/packages/job-worker/src/playout/upgrade.ts +++ b/packages/job-worker/src/playout/upgrade.ts @@ -3,6 +3,7 @@ import { MappingsExt, StudioIngestDevice, StudioInputDevice, + StudioPackageContainer, StudioPlayoutDevice, StudioRouteSet, StudioRouteSetExclusivityGroup, @@ -91,6 +92,16 @@ export async function handleBlueprintUpgradeForStudio(context: JobContext, _data ]) ) + const packageContainers = Object.fromEntries( + Object.entries(result.packageContainers ?? 
{}).map((dev) => [ + dev[0], + literal>({ + deviceIds: (dev[1] as StudioPackageContainer).deviceIds, + container: (dev[1] as StudioPackageContainer).container, + }), + ]) + ) + await context.directCollections.Studios.update(context.studioId, { $set: { 'mappingsWithOverrides.defaults': translateMappings(result.mappings), @@ -99,6 +110,7 @@ export async function handleBlueprintUpgradeForStudio(context: JobContext, _data 'peripheralDeviceSettings.inputDevices.defaults': inputDevices, 'routeSetsWithOverrides.defaults': routeSets, 'routeSetExclusivityGroupsWithOverrides.defaults': routeSetExclusivityGroups, + 'packageContainersWithOverrides.defaults': packageContainers, lastBlueprintConfig: { blueprintHash: blueprint.blueprintDoc.blueprintHash, blueprintId: blueprint.blueprintId, diff --git a/packages/shared-lib/src/core/model/PackageContainer.ts b/packages/shared-lib/src/core/model/PackageContainer.ts new file mode 100644 index 00000000000..f2eeeab8945 --- /dev/null +++ b/packages/shared-lib/src/core/model/PackageContainer.ts @@ -0,0 +1,7 @@ +import { PackageContainer } from '../../package-manager/package' + +export interface StudioPackageContainer { + /** List of which peripheraldevices uses this packageContainer */ + deviceIds: string[] + container: PackageContainer +} diff --git a/packages/webui/src/__mocks__/defaultCollectionObjects.ts b/packages/webui/src/__mocks__/defaultCollectionObjects.ts index 4a699eb99ed..99dfea69b74 100644 --- a/packages/webui/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/webui/src/__mocks__/defaultCollectionObjects.ts @@ -109,7 +109,7 @@ export function defaultStudio(_id: StudioId): DBStudio { _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), - packageContainers: {}, + packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { diff --git 
a/packages/webui/src/client/lib/Components/LabelAndOverrides.tsx b/packages/webui/src/client/lib/Components/LabelAndOverrides.tsx index ef291cafc8a..be6c4d39f22 100644 --- a/packages/webui/src/client/lib/Components/LabelAndOverrides.tsx +++ b/packages/webui/src/client/lib/Components/LabelAndOverrides.tsx @@ -199,3 +199,44 @@ export function LabelAndOverridesForBase64Image( return {...props} formatDefaultValue={formatter} /> } + +export function LabelAndOverridesForMultiSelect( + props: Omit, 'formatDefaultValue' | 'children'> & { + options: DropdownInputOption[] + children: ( + value: TValue[], + setValue: (value: TValue[]) => void, + options: DropdownInputOption[] + ) => React.ReactNode + } +): JSX.Element { + const formatMultiLine = useCallback( + (value: any) => { + const matchedOption = findOptionByValue(props.options, value) + if (matchedOption) { + return `"${matchedOption.name}"` + } else { + return `Value: "${value}"` + } + }, + [props.options] + ) + const formatter = useCallback( + (defaultValue: any) => { + if (defaultValue === undefined || defaultValue.length === 0) return '""' + + if (Array.isArray(defaultValue)) { + return defaultValue.map(formatMultiLine).join('/n') + } else { + return formatMultiLine(defaultValue) + } + }, + [formatMultiLine] + ) + + return ( + {...props} formatDefaultValue={formatter}> + {(value, setValue) => props.children(value, setValue, props.options)} + + ) +} diff --git a/packages/webui/src/client/ui/Settings/Studio/PackageManager.tsx b/packages/webui/src/client/ui/Settings/Studio/PackageManager.tsx deleted file mode 100644 index ee2b3fd286a..00000000000 --- a/packages/webui/src/client/ui/Settings/Studio/PackageManager.tsx +++ /dev/null @@ -1,846 +0,0 @@ -import ClassNames from 'classnames' -import * as React from 'react' -import { Meteor } from 'meteor/meteor' -import * as _ from 'underscore' -import { DBStudio, StudioPackageContainer } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { EditAttribute, 
EditAttributeBase } from '../../../lib/EditAttribute' -import { doModalDialog } from '../../../lib/ModalDialog' -import { Translated } from '../../../lib/ReactMeteorData/react-meteor-data' -import { FontAwesomeIcon } from '@fortawesome/react-fontawesome' -import { faTrash, faPencilAlt, faCheck, faPlus } from '@fortawesome/free-solid-svg-icons' -import { withTranslation } from 'react-i18next' -import { Accessor } from '@sofie-automation/blueprints-integration' -import { Studios } from '../../../collections' -import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { LabelActual } from '../../../lib/Components/LabelAndOverrides' - -interface IStudioPackageManagerSettingsProps { - studio: DBStudio -} -interface IStudioPackageManagerSettingsState { - editedPackageContainer: Array - editedAccessors: Array -} - -export const StudioPackageManagerSettings = withTranslation()( - class StudioPackageManagerSettings extends React.Component< - Translated, - IStudioPackageManagerSettingsState - > { - constructor(props: Translated) { - super(props) - - this.state = { - editedPackageContainer: [], - editedAccessors: [], - } - } - isPackageContainerEdited = (containerId: string) => { - return this.state.editedPackageContainer.indexOf(containerId) >= 0 - } - finishEditPackageContainer = (containerId: string) => { - const index = this.state.editedPackageContainer.indexOf(containerId) - if (index >= 0) { - this.state.editedPackageContainer.splice(index, 1) - this.setState({ - editedPackageContainer: this.state.editedPackageContainer, - }) - } - } - editPackageContainer = (containerId: string) => { - if (this.state.editedPackageContainer.indexOf(containerId) < 0) { - this.state.editedPackageContainer.push(containerId) - this.setState({ - editedPackageContainer: this.state.editedPackageContainer, - }) - } else { - this.finishEditPackageContainer(containerId) - } - } - confirmRemovePackageContainer = (containerId: string) => { - 
const { t } = this.props - doModalDialog({ - title: t('Remove this Package Container?'), - yes: t('Remove'), - no: t('Cancel'), - onAccept: () => { - this.removePackageContainer(containerId) - }, - message: ( - -

- {t('Are you sure you want to remove the Package Container "{{containerId}}"?', { - containerId: containerId, - })} -

-

{t('Please note: This action is irreversible!')}

-
- ), - }) - } - removePackageContainer = (containerId: string) => { - const unsetObject: Record = {} - unsetObject['packageContainers.' + containerId] = 1 - Studios.update(this.props.studio._id, { - $unset: unsetObject, - }) - } - addNewPackageContainer = () => { - // find free key name - const newKeyName = 'newContainer' - let iter = 0 - while ((this.props.studio.packageContainers || {})[newKeyName + iter]) { - iter++ - } - - const newPackageContainer: StudioPackageContainer = { - deviceIds: [], - container: { - label: 'New Package Container', - accessors: {}, - }, - } - const setObject: Record = {} - setObject['packageContainers.' + newKeyName + iter] = newPackageContainer - - Studios.update(this.props.studio._id, { - $set: setObject, - }) - } - containerId = (edit: EditAttributeBase, newValue: string) => { - const oldContainerId = edit.props.overrideDisplayValue - const newContainerId = newValue + '' - const packageContainer = this.props.studio.packageContainers[oldContainerId] - - if (this.props.studio.packageContainers[newContainerId]) { - throw new Meteor.Error(400, 'PackageContainer "' + newContainerId + '" already exists') - } - - const mSet: Record = {} - const mUnset: Record = {} - mSet['packageContainers.' + newContainerId] = packageContainer - mUnset['packageContainers.' 
+ oldContainerId] = 1 - - if (edit.props.collection) { - edit.props.collection.update(this.props.studio._id, { - $set: mSet, - $unset: mUnset, - }) - } - - this.finishEditPackageContainer(oldContainerId) - this.editPackageContainer(newContainerId) - } - getPlayoutDeviceIds() { - const deviceIds: { - name: string - value: string - }[] = [] - - const playoutDevices = applyAndValidateOverrides(this.props.studio.peripheralDeviceSettings.playoutDevices).obj - - for (const deviceId of Object.keys(playoutDevices)) { - deviceIds.push({ - name: deviceId, - value: deviceId, - }) - } - - return deviceIds - } - renderPackageContainers() { - const { t } = this.props - - if (Object.keys(this.props.studio.packageContainers).length === 0) { - return ( - - {t('There are no Package Containers set up.')} - - ) - } - - return _.map( - this.props.studio.packageContainers, - (packageContainer: StudioPackageContainer, containerId: string) => { - return ( - - - {containerId} - {packageContainer.container.label} - - - - - - - {this.isPackageContainerEdited(containerId) && ( - - -
- - - - - - -
-
-
-
-

{t('Accessors')}

- - {this.renderAccessors(containerId, packageContainer)} -
-
- -
-
-
- - - )} -
- ) - } - ) - } - isAccessorEdited = (containerId: string, accessorId: string) => { - return this.state.editedAccessors.indexOf(containerId + accessorId) >= 0 - } - finishEditAccessor = (containerId: string, accessorId: string) => { - const index = this.state.editedAccessors.indexOf(containerId + accessorId) - if (index >= 0) { - this.state.editedAccessors.splice(index, 1) - this.setState({ - editedAccessors: this.state.editedAccessors, - }) - } - } - editAccessor = (containerId: string, accessorId: string) => { - if (this.state.editedAccessors.indexOf(containerId + accessorId) < 0) { - this.state.editedAccessors.push(containerId + accessorId) - this.setState({ - editedAccessors: this.state.editedAccessors, - }) - } else { - this.finishEditAccessor(containerId, accessorId) - } - } - confirmRemoveAccessor = (containerId: string, accessorId: string) => { - const { t } = this.props - doModalDialog({ - title: t('Remove this Package Container Accessor?'), - yes: t('Remove'), - no: t('Cancel'), - onAccept: () => { - this.removeAccessor(containerId, accessorId) - }, - message: ( - -

- {t('Are you sure you want to remove the Package Container Accessor "{{accessorId}}"?', { - accessorId: accessorId, - })} -

-

{t('Please note: This action is irreversible!')}

-
- ), - }) - } - removeAccessor = (containerId: string, accessorId: string) => { - const unsetObject: Record = {} - unsetObject[`packageContainers.${containerId}.container.accessors.${accessorId}`] = 1 - Studios.update(this.props.studio._id, { - $unset: unsetObject, - }) - } - addNewAccessor = (containerId: string) => { - // find free key name - const newKeyName = 'local' - let iter = 0 - const packageContainer = this.props.studio.packageContainers[containerId] - if (!packageContainer) throw new Error(`Can't add an accessor to nonexistant Package Container "${containerId}"`) - - while (packageContainer.container.accessors[newKeyName + iter]) { - iter++ - } - const accessorId = newKeyName + iter - - const newAccessor: Accessor.LocalFolder = { - type: Accessor.AccessType.LOCAL_FOLDER, - label: 'Local folder', - allowRead: true, - allowWrite: false, - folderPath: '', - } - const setObject: Record = {} - setObject[`packageContainers.${containerId}.container.accessors.${accessorId}`] = newAccessor - - Studios.update(this.props.studio._id, { - $set: setObject, - }) - } - updateAccessorId = (edit: EditAttributeBase, newValue: string) => { - const oldAccessorId = edit.props.overrideDisplayValue - const newAccessorId = newValue + '' - const containerId = edit.props.attribute - if (!containerId) throw new Error(`containerId not set`) - const packageContainer = this.props.studio.packageContainers[containerId] - if (!packageContainer) throw new Error(`Can't edit an accessor to nonexistant Package Container "${containerId}"`) - - const accessor = this.props.studio.packageContainers[containerId].container.accessors[oldAccessorId] - - if (this.props.studio.packageContainers[containerId].container.accessors[newAccessorId]) { - throw new Meteor.Error(400, 'Accessor "' + newAccessorId + '" already exists') - } - - const mSet: Record = {} - const mUnset: Record = {} - mSet[`packageContainers.${containerId}.container.accessors.${newAccessorId}`] = accessor - 
mUnset[`packageContainers.${containerId}.container.accessors.${oldAccessorId}`] = 1 - - if (edit.props.collection) { - edit.props.collection.update(this.props.studio._id, { - $set: mSet, - $unset: mUnset, - }) - } - - this.finishEditAccessor(containerId, oldAccessorId) - this.editAccessor(containerId, newAccessorId) - } - - renderAccessors(containerId: string, packageContainer: StudioPackageContainer) { - const { t } = this.props - - if (Object.keys(this.props.studio.packageContainers).length === 0) { - return ( - - {t('There are no Accessors set up.')} - - ) - } - - return _.map(packageContainer.container.accessors, (accessor: Accessor.Any, accessorId: string) => { - const accessorContent: string[] = [] - _.each(accessor as any, (value, key: string) => { - if (key !== 'type' && value !== '') { - let str = JSON.stringify(value) - if (str.length > 20) str = str.slice(0, 17) + '...' - accessorContent.push(`${key}: ${str}`) - } - }) - return ( - - - {accessorId} - {/* {accessor.name} */} - {accessor.type} - {accessorContent.join(', ')} - - - - - - - {this.isAccessorEdited(containerId, accessorId) && ( - - -
- - - - {accessor.type === Accessor.AccessType.LOCAL_FOLDER ? ( - <> - - - - - ) : accessor.type === Accessor.AccessType.HTTP ? ( - <> - - - - - - - ) : accessor.type === Accessor.AccessType.HTTP_PROXY ? ( - <> - - - - - ) : accessor.type === Accessor.AccessType.FILE_SHARE ? ( - <> - - - - - - ) : accessor.type === Accessor.AccessType.QUANTEL ? ( - <> - - - - - - - - - - - - ) : null} - - - - -
-
- -
- - - )} -
- ) - }) - } - getAvailablePackageContainers() { - const arr: { - name: string - value: string - }[] = [] - - for (const [containerId, packageContainer] of Object.entries( - this.props.studio.packageContainers - )) { - let hasHttpAccessor = false - for (const accessor of Object.values(packageContainer.container.accessors)) { - if (accessor.type === Accessor.AccessType.HTTP_PROXY) { - hasHttpAccessor = true - break - } - } - if (hasHttpAccessor) { - arr.push({ - name: packageContainer.container.label, - value: containerId, - }) - } - } - return arr - } - - render(): JSX.Element { - const { t } = this.props - return ( -
-

{t('Package Manager')}

- -
-

{t('Studio Settings')}

- -
-
- -
- -
-
-
- -
- -
-
-
- -

{t('Package Containers')}

- - {this.renderPackageContainers()} -
-
- -
-
-
- ) - } - } -) diff --git a/packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTable.tsx b/packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTable.tsx new file mode 100644 index 00000000000..f34a59683c2 --- /dev/null +++ b/packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTable.tsx @@ -0,0 +1,77 @@ +import * as React from 'react' +import * as _ from 'underscore' +import { StudioPackageContainer } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome' +import { faPlus } from '@fortawesome/free-solid-svg-icons' +import { useTranslation } from 'react-i18next' +import { Accessor } from '@sofie-automation/blueprints-integration' +import { useToggleExpandHelper } from '../../../util/useToggleExpandHelper' +import { OverrideOpHelper, WrappedOverridableItemNormal } from '../../util/OverrideOpHelper' +import { AccessorTableRow } from './AccessorTableRow' + +interface AccessorsTableProps { + packageContainer: WrappedOverridableItemNormal + overrideHelper: OverrideOpHelper +} + +export function AccessorsTable({ packageContainer, overrideHelper }: AccessorsTableProps): React.JSX.Element { + const { t } = useTranslation() + const { toggleExpanded, isExpanded } = useToggleExpandHelper() + + const addNewAccessor = React.useCallback(() => { + const newKeyName = 'local' + let iter = 0 + if (!packageContainer.id) + throw new Error(`Can't add an accessor to nonexistant Package Container "${packageContainer.id}"`) + + while (packageContainer.computed?.container.accessors[newKeyName + iter]) { + iter++ + } + const accessorId = newKeyName + iter + + const newAccessor: Accessor.LocalFolder = { + type: Accessor.AccessType.LOCAL_FOLDER, + label: 'Local folder', + allowRead: true, + allowWrite: false, + folderPath: '', + } + + overrideHelper().setItemValue(packageContainer.id, `container.accessors.${accessorId}`, newAccessor).commit() + + setTimeout(() => { + 
toggleExpanded(accessorId, true) + }, 1) + }, [toggleExpanded, overrideHelper]) + + const container = packageContainer.computed.container + + return ( + <> + + {Object.keys(container.accessors || {}).length === 0 ? ( + + + + ) : ( + _.map(container.accessors || {}, (accessor: Accessor.Any, accessorId: string) => ( + + )) + )} +
{t('There are no Accessors set up.')}
+
+ +
+ + ) +} diff --git a/packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTableRow.tsx b/packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTableRow.tsx new file mode 100644 index 00000000000..eee92146cdd --- /dev/null +++ b/packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTableRow.tsx @@ -0,0 +1,565 @@ +import ClassNames from 'classnames' +import * as React from 'react' +import { Meteor } from 'meteor/meteor' +import { StudioPackageContainer } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { doModalDialog } from '../../../../lib/ModalDialog' +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome' +import { faTrash, faPencilAlt, faCheck } from '@fortawesome/free-solid-svg-icons' +import { useTranslation } from 'react-i18next' +import { Accessor } from '@sofie-automation/blueprints-integration' +import { + LabelActual, + LabelAndOverrides, + LabelAndOverridesForCheckbox, + LabelAndOverridesForDropdown, +} from '../../../../lib/Components/LabelAndOverrides' +import { TextInputControl } from '../../../../lib/Components/TextInput' +import { DropdownInputControl, getDropdownInputOptions } from '../../../../lib/Components/DropdownInput' +import { OverrideOpHelper, WrappedOverridableItemNormal } from '../../util/OverrideOpHelper' +import { CheckboxControl } from '../../../../lib/Components/Checkbox' + +interface AccessorTableRowProps { + packageContainer: WrappedOverridableItemNormal + accessorId: string + accessor: Accessor.Any + overrideHelper: OverrideOpHelper + toggleExpanded: (exclusivityGroupId: string, force?: boolean) => void + isExpanded: boolean +} + +export function AccessorTableRow({ + accessor, + accessorId, + packageContainer, + overrideHelper, + toggleExpanded, + isExpanded, +}: AccessorTableRowProps): React.JSX.Element { + const { t } = useTranslation() + + const confirmRemoveAccessor = (accessorId: string) => { + doModalDialog({ + title: t('Remove this Package Container 
Accessor?'), + yes: t('Remove'), + no: t('Cancel'), + onAccept: () => { + overrideHelper().setItemValue(packageContainer.id, `container.accessors.${accessorId}`, undefined).commit() + }, + message: ( + +

+ {t('Are you sure you want to remove the Package Container Accessor "{{accessorId}}"?', { + accessorId: accessorId, + })} +

+

{t('Please note: This action is irreversible!')}

+
+ ), + }) + } + + const updateAccessorId = React.useCallback( + (newAccessorId: string) => { + const oldAccessorId = accessorId + if (!packageContainer.id) throw new Error(`containerId not set`) + if (!packageContainer) throw new Error(`Can't edit an accessor to nonexistant Package Container"`) + + const accessor = packageContainer.computed?.container.accessors[oldAccessorId] + + if (packageContainer.computed?.container.accessors[newAccessorId]) { + throw new Meteor.Error(400, 'Accessor "' + newAccessorId + '" already exists') + } + + // Add a copy of accessor with the new ID, and remove the old + overrideHelper() + .setItemValue(packageContainer.id, `container.accessors.${oldAccessorId}`, undefined) + .setItemValue(packageContainer.id, `container.accessors.${newAccessorId}`, accessor) + .commit() + + setTimeout(() => { + toggleExpanded(oldAccessorId, false) + toggleExpanded(newAccessorId, true) + }, 100) + }, + [overrideHelper, toggleExpanded, packageContainer, accessorId] + ) + + if (Object.keys(packageContainer.computed?.container || {}).length === 0) { + return ( + + {t('There are no Accessors set up.')} + + ) + } + + return ( + + + {accessorId} + {/* {accessor.name} */} + {accessor.label} + {/*{accessorContent.join(', ')}*/} + + + + + + + {isExpanded && ( + + +
+ + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate, options) => { + return ( + + ) + }} + + {accessor.type === Accessor.AccessType.LOCAL_FOLDER ? ( + <> + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + ) : accessor.type === Accessor.AccessType.HTTP ? ( + <> + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + + {(value, handleUpdate) => ( + + )} + + + ) : accessor.type === Accessor.AccessType.HTTP_PROXY ? ( + <> + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + ) : accessor.type === Accessor.AccessType.FILE_SHARE ? ( + <> + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + ) : accessor.type === Accessor.AccessType.QUANTEL ? ( + <> + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + + + ) : null} + + + {(value, handleUpdate) => } + + + {(value, handleUpdate) => } + +
+
+ +
+ + + )} +
+ ) +} diff --git a/packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainerPickers.tsx b/packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainerPickers.tsx new file mode 100644 index 00000000000..576425b2992 --- /dev/null +++ b/packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainerPickers.tsx @@ -0,0 +1,75 @@ +import * as React from 'react' +import { DBStudio, StudioPackageContainer } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { EditAttribute } from '../../../../lib/EditAttribute' +import { useTranslation } from 'react-i18next' +import { Accessor } from '@sofie-automation/blueprints-integration' +import { Studios } from '../../../../collections' +import { DropdownInputOption } from '../../../../lib/Components/DropdownInput' +import { WrappedOverridableItem } from '../../util/OverrideOpHelper' + +interface PackageContainersPickersProps { + studio: DBStudio + packageContainersFromOverrides: WrappedOverridableItem[] +} + +export function PackageContainersPickers({ + studio, + packageContainersFromOverrides, +}: PackageContainersPickersProps): JSX.Element { + const { t } = useTranslation() + + const availablePackageContainerOptions = React.useMemo(() => { + const arr: DropdownInputOption[] = [] + + packageContainersFromOverrides.forEach((packageContainer) => { + let hasHttpAccessor = false + if (packageContainer.computed) { + for (const accessor of Object.values(packageContainer.computed.container.accessors)) { + if (accessor.type === Accessor.AccessType.HTTP_PROXY) { + hasHttpAccessor = true + break + } + } + if (hasHttpAccessor) { + arr.push({ + name: packageContainer.computed.container.label, + value: packageContainer.id, + i: arr.length, + }) + } + } + }) + return arr + }, [packageContainersFromOverrides]) + + return ( +
+
+ +
+ +
+
+
+ +
+ +
+
+
+ ) +} diff --git a/packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainers.tsx b/packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainers.tsx new file mode 100644 index 00000000000..d92e3e31c82 --- /dev/null +++ b/packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainers.tsx @@ -0,0 +1,331 @@ +import ClassNames from 'classnames' +import * as React from 'react' +import { DBStudio, StudioPackageContainer } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { doModalDialog } from '../../../../lib/ModalDialog' +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome' +import { faTrash, faPencilAlt, faPlus, faSync } from '@fortawesome/free-solid-svg-icons' +import { useTranslation } from 'react-i18next' +import { Studios } from '../../../../collections' +import { + ObjectOverrideSetOp, + SomeObjectOverrideOp, + applyAndValidateOverrides, +} from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { + LabelActual, + LabelAndOverrides, + LabelAndOverridesForMultiSelect, +} from '../../../../lib/Components/LabelAndOverrides' +import { useToggleExpandHelper } from '../../../util/useToggleExpandHelper' +import { literal } from '@sofie-automation/corelib/dist/lib' +import { TextInputControl } from '../../../../lib/Components/TextInput' +import { DropdownInputOption } from '../../../../lib/Components/DropdownInput' +import { MultiSelectInputControl } from '../../../../lib/Components/MultiSelectInput' +import { + OverrideOpHelper, + WrappedOverridableItem, + WrappedOverridableItemNormal, + useOverrideOpHelper, +} from '../../util/OverrideOpHelper' +import { AccessorsTable } from './AccessorTable' + +interface PackageContainersTableProps { + studio: DBStudio + packageContainersFromOverrides: WrappedOverridableItem[] +} + +export function PackageContainersTable({ + studio, + packageContainersFromOverrides, +}: PackageContainersTableProps): React.JSX.Element { + const { t } = 
useTranslation() + const { toggleExpanded, isExpanded } = useToggleExpandHelper() + + const saveOverrides = React.useCallback( + (newOps: SomeObjectOverrideOp[]) => { + Studios.update(studio._id, { + $set: { + 'packageContainersWithOverrides.overrides': newOps, + }, + }) + }, + [studio._id] + ) + + const overrideHelper = useOverrideOpHelper(saveOverrides, studio.packageContainersWithOverrides) + + const addNewPackageContainer = React.useCallback(() => { + const resolvedPackageContainers = applyAndValidateOverrides(studio.packageContainersWithOverrides).obj + + // find free key name + const newKeyName = 'newContainer' + let iter = 0 + while (resolvedPackageContainers[newKeyName + iter.toString()]) { + iter++ + } + + const newId = newKeyName + iter.toString() + const newPackageContainer: StudioPackageContainer = { + deviceIds: [], + container: { + label: 'New Package Container ' + iter.toString(), + accessors: {}, + }, + } + + const addOp = literal({ + op: 'set', + path: newId, + value: newPackageContainer, + }) + + Studios.update(studio._id, { + $push: { + 'packageContainersWithOverrides.overrides': addOp, + }, + }) + + setTimeout(() => { + toggleExpanded(newId, true) + }, 1) + }, [studio._id, studio.packageContainersWithOverrides]) + + const confirmRemovePackageContainer = (containerId: string) => { + doModalDialog({ + title: t('Remove this Package Container?'), + yes: t('Remove'), + no: t('Cancel'), + onAccept: () => { + overrideHelper().deleteItem(containerId).commit() + }, + message: ( + +

+ {t('Are you sure you want to remove the Package Container "{{containerId}}"?', { + containerId: containerId, + })} +

+

{t('Please note: This action is irreversible!')}

+
+ ), + }) + } + + const confirmReset = React.useCallback( + (packgageContainerId: string) => { + doModalDialog({ + title: t('Reset this Package Container?'), + yes: t('Reset'), + no: t('Cancel'), + onAccept: () => { + overrideHelper().resetItem(packgageContainerId).commit() + }, + message: ( + +

+ {t('Are you sure you want to reset all overrides for Packing Container "{{id}}"?', { + id: packgageContainerId, + })} +

+

{t('Please note: This action is irreversible!')}

+
+ ), + }) + }, + [t, packageContainersFromOverrides, overrideHelper] + ) + + return ( + <> + + {packageContainersFromOverrides.map( + (packageContainer: WrappedOverridableItem): React.JSX.Element => + packageContainer.type == 'normal' ? ( + + ) : ( + + ) + )} +
+
+ +
+ + ) +} + +interface PackageContainerDeletedRowProps { + packageContainer: WrappedOverridableItem + overrideHelper: OverrideOpHelper +} + +function PackageContainerDeletedRow({ packageContainer, overrideHelper }: Readonly) { + const doUndeleteItem = React.useCallback( + () => overrideHelper().resetItem(packageContainer.id).commit(), + [overrideHelper, packageContainer.id] + ) + + return ( + + {packageContainer.id} + {packageContainer.defaults?.container.label} + {packageContainer.id} + + + + + ) +} + +interface PackageContainerRowProps { + studio: DBStudio + packageContainer: WrappedOverridableItemNormal + overrideHelper: OverrideOpHelper + toggleExpanded: (id: string, forceState?: boolean | undefined) => void + isExpanded: (id: string) => boolean + confirmRemovePackageContainer: (id: string) => void + confirmReset: (id: string) => void +} + +function PackageContainerRow({ + studio, + packageContainer, + overrideHelper, + toggleExpanded, + isExpanded, + confirmRemovePackageContainer, + confirmReset, +}: PackageContainerRowProps): React.JSX.Element { + const { t } = useTranslation() + + const availablePlayoutDevicesOptions: DropdownInputOption[] = React.useMemo(() => { + const playoutDevicesFromOverrrides = applyAndValidateOverrides(studio.peripheralDeviceSettings.playoutDevices).obj + + const devices: DropdownInputOption[] = [] + + for (const deviceId of Object.keys(playoutDevicesFromOverrrides)) { + devices.push({ + name: deviceId, + value: deviceId, + i: devices.length, + }) + } + return devices + }, [studio.peripheralDeviceSettings.playoutDevices]) + + const updatePackageContainerId = React.useCallback( + (newPackageContainerId: string) => { + overrideHelper().changeItemId(packageContainer.id, newPackageContainerId).commit() + toggleExpanded(newPackageContainerId, true) + }, + [overrideHelper, toggleExpanded, packageContainer.id] + ) + + return ( + + + {packageContainer.id} + {packageContainer.computed.container.label} + + + {packageContainer.defaults && 
packageContainer.overrideOps.length > 0 && ( + + )} + + + + + {isExpanded(packageContainer.id) && ( + + +
+ + + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate, options) => ( + + )} + +
+
+
+
+

{t('Accessors')}

+ +
+
+ + + )} +
+ ) +} diff --git a/packages/webui/src/client/ui/Settings/Studio/PackageManager/index.tsx b/packages/webui/src/client/ui/Settings/Studio/PackageManager/index.tsx new file mode 100644 index 00000000000..707b1372f90 --- /dev/null +++ b/packages/webui/src/client/ui/Settings/Studio/PackageManager/index.tsx @@ -0,0 +1,37 @@ +import * as React from 'react' +import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { useTranslation } from 'react-i18next' +import { getAllCurrentAndDeletedItemsFromOverrides } from '../../util/OverrideOpHelper' +import { PackageContainersPickers } from './PackageContainerPickers' +import { PackageContainersTable } from './PackageContainers' + +interface StudioPackageManagerSettingsProps { + studio: DBStudio +} + +export function StudioPackageManagerSettings({ studio }: StudioPackageManagerSettingsProps): React.JSX.Element { + const { t } = useTranslation() + + const packageContainersFromOverrides = React.useMemo( + () => + getAllCurrentAndDeletedItemsFromOverrides(studio.packageContainersWithOverrides, (a, b) => + a[0].localeCompare(b[0]) + ), + [studio.packageContainersWithOverrides] + ) + + return ( +
+

{t('Package Manager')}

+ +
+

{t('Studio Settings')}

+ + + +

{t('Package Containers')}

+ +
+
+ ) +} From a884216eafd5643109ef17e6d20b1cdade7e3677 Mon Sep 17 00:00:00 2001 From: Kasper Olsson Hans Date: Fri, 27 Sep 2024 14:19:47 +0200 Subject: [PATCH 13/81] feat: routeset controlled ab player assignment --- meteor/server/migration/X_X_X.ts | 41 ++++ packages/corelib/src/dataModel/Studio.ts | 2 + packages/corelib/src/overrideOpHelper.ts | 10 - .../abPlayback/__tests__/abPlayback.spec.ts | 14 +- .../__tests__/abPlaybackResolver.spec.ts | 145 ++++++++++++++ .../__tests__/routeSetDisabling.spec.ts | 182 ++++++++++++++++++ .../playout/abPlayback/abPlaybackResolver.ts | 24 ++- .../playout/abPlayback/abPlaybackSessions.ts | 9 +- .../playout/abPlayback/applyAssignments.ts | 15 +- .../src/playout/abPlayback/index.ts | 14 +- .../playout/abPlayback/routeSetDisabling.ts | 47 +++++ packages/job-worker/src/playout/upgrade.ts | 1 + .../src/studio/model/StudioBaselineHelper.ts | 25 +-- .../src/core/model/StudioRouteSet.ts | 12 ++ .../Studio/Routings/RouteSetAbPlayers.tsx | 152 +++++++++++++++ .../ui/Settings/Studio/Routings/RouteSets.tsx | 49 ++++- 16 files changed, 686 insertions(+), 56 deletions(-) create mode 100644 packages/job-worker/src/playout/abPlayback/__tests__/routeSetDisabling.spec.ts create mode 100644 packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts create mode 100644 packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx diff --git a/meteor/server/migration/X_X_X.ts b/meteor/server/migration/X_X_X.ts index 06d2547dbb0..900ffdcdba8 100644 --- a/meteor/server/migration/X_X_X.ts +++ b/meteor/server/migration/X_X_X.ts @@ -15,6 +15,8 @@ import { StudioRouteSet, StudioRouteSetExclusivityGroup } from '@sofie-automatio */ export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [ + // Add your migration here + { id: `convert routesets to ObjectWithOverrides`, canBeRunAutomatically: true, @@ -52,6 +54,45 @@ export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [ } }, }, + { + id: `add abPlayers object`, + 
canBeRunAutomatically: true, + validate: async () => { + const studios = await Studios.findFetchAsync({ routeSetsWithOverrides: { $exists: true } }) + + for (const studio of studios) { + const routeSetsDefaults = studio.routeSetsWithOverrides.defaults as any as Record< + string, + StudioRouteSet + > + for (const key of Object.keys(routeSetsDefaults)) { + if (!routeSetsDefaults[key].abPlayers) { + return 'AB players must be added to routeSetsWithOverrides' + } + } + } + + return false + }, + migrate: async () => { + const studios = await Studios.findFetchAsync({ routeSetsWithOverrides: { $exists: true } }) + + for (const studio of studios) { + const newRouteSetswithOverrides = studio.routeSetsWithOverrides + for (const key of Object.keys(newRouteSetswithOverrides.defaults)) { + if (!newRouteSetswithOverrides.defaults[key].abPlayers) { + newRouteSetswithOverrides.defaults[key].abPlayers = [] + } + } + + await Studios.updateAsync(studio._id, { + $set: { + routeSetsWithOverrides: newRouteSetswithOverrides, + }, + }) + } + }, + }, { id: `convert routeSetExclusivityGroups to ObjectWithOverrides`, canBeRunAutomatically: true, diff --git a/packages/corelib/src/dataModel/Studio.ts b/packages/corelib/src/dataModel/Studio.ts index 13158954666..a7d88f50229 100644 --- a/packages/corelib/src/dataModel/Studio.ts +++ b/packages/corelib/src/dataModel/Studio.ts @@ -12,6 +12,7 @@ import { StudioRouteSet, StudioRouteSetExclusivityGroup, StudioRouteType, + StudioAbPlayerDisabling, } from '@sofie-automation/shared-lib/dist/core/model/StudioRouteSet' export { MappingsExt, MappingExt, MappingsHash } @@ -26,6 +27,7 @@ export { ResultingMappingRoutes, StudioRouteSet, StudioRouteType, + StudioAbPlayerDisabling, } export interface IStudioSettings { diff --git a/packages/corelib/src/overrideOpHelper.ts b/packages/corelib/src/overrideOpHelper.ts index c909e4f6a77..5962a8126a1 100644 --- a/packages/corelib/src/overrideOpHelper.ts +++ b/packages/corelib/src/overrideOpHelper.ts @@ -317,13 +317,3 
@@ export class OverrideOpHelperImpl implements OverrideOpHelperBatcher { this.#saveOverrides(this.#object.overrides) } } - -/** - * A helper to work with modifying an ObjectWithOverrides - */ -export function useOverrideOpHelperBackend( - saveOverrides: (newOps: SomeObjectOverrideOp[]) => void, - objectWithOverrides: ObjectWithOverrides -): OverrideOpHelperBatcher { - return new OverrideOpHelperImpl(saveOverrides, objectWithOverrides) -} diff --git a/packages/job-worker/src/playout/abPlayback/__tests__/abPlayback.spec.ts b/packages/job-worker/src/playout/abPlayback/__tests__/abPlayback.spec.ts index 10f07391734..e7035aaa6bf 100644 --- a/packages/job-worker/src/playout/abPlayback/__tests__/abPlayback.spec.ts +++ b/packages/job-worker/src/playout/abPlayback/__tests__/abPlayback.spec.ts @@ -10,7 +10,7 @@ import { ABSessionAssignments } from '@sofie-automation/corelib/dist/dataModel/R import { OnGenerateTimelineObjExt } from '@sofie-automation/corelib/dist/dataModel/Timeline' import { literal } from '@sofie-automation/corelib/dist/lib' import { protectString } from '@sofie-automation/corelib/dist/protectedString' -import { AssignmentResult, resolveAbAssignmentsFromRequests } from '../abPlaybackResolver' +import { AssignmentResult, PlayerId, resolveAbAssignmentsFromRequests } from '../abPlaybackResolver' import { calculateSessionTimeRanges } from '../abPlaybackSessions' import { AbSessionHelper } from '../abSessionHelper' @@ -79,7 +79,7 @@ function resolveAbSessions( timelineObjs: OnGenerateTimelineObjExt[], previousAssignmentMap: ABSessionAssignments, sessionPool: string, - playerIds: Array, + playerIds: Array, now: number ): AssignmentResult { const sessionRequests = calculateSessionTimeRanges( @@ -458,7 +458,7 @@ describe('resolveMediaPlayers', () => { }, inst_1_clip_def: { sessionId: 'inst_1_clip_def', - playerId: 3, + playerId: 1, lookahead: true, }, } @@ -482,13 +482,13 @@ describe('resolveMediaPlayers', () => { [1, 2], 0 ) - 
expect(assignments.failedRequired).toHaveLength(0) + expect(assignments.failedRequired).toEqual(['inst_2_clip_ghi']) expect(assignments.failedOptional).toHaveLength(0) expect(assignments.requests).toHaveLength(3) expect(assignments.requests).toEqual([ - { end: 7400, id: 'inst_0_clip_abc', playerId: 5, start: 2400, optional: false }, - { end: 7400, id: 'inst_1_clip_def', playerId: 3, start: 2400, optional: false }, - { end: 6800, id: 'inst_2_clip_ghi', playerId: 1, start: 2800, optional: false }, + { end: 7400, id: 'inst_0_clip_abc', playerId: 2, start: 2400, optional: false }, + { end: 7400, id: 'inst_1_clip_def', playerId: 1, start: 2400, optional: false }, + { end: 6800, id: 'inst_2_clip_ghi', playerId: undefined, start: 2800, optional: false }, ]) expect(mockGetPieceSessionId).toHaveBeenCalledTimes(3) diff --git a/packages/job-worker/src/playout/abPlayback/__tests__/abPlaybackResolver.spec.ts b/packages/job-worker/src/playout/abPlayback/__tests__/abPlaybackResolver.spec.ts index 5cc5d4c267d..cf961f0490b 100644 --- a/packages/job-worker/src/playout/abPlayback/__tests__/abPlaybackResolver.spec.ts +++ b/packages/job-worker/src/playout/abPlayback/__tests__/abPlaybackResolver.spec.ts @@ -628,4 +628,149 @@ describe('resolveAbAssignmentsFromRequests', () => { expectGotPlayer(res, 'e', 3) expectGotPlayer(res, 'f', undefined) }) + + describe('add/remove players', () => { + test('reshuffle lookahead when removing player', () => { + const requests: SessionRequest[] = [ + // current clip + { + id: 'a', + start: 1000, + end: undefined, + playerId: 2, + }, + // previous clip + { + id: 'b', + start: 0, + playerId: 1, + end: 5000, + }, + // lookaheads + { + id: 'd', + start: Number.POSITIVE_INFINITY, + end: undefined, + lookaheadRank: 1, + playerId: 1, + }, + { + id: 'e', + start: Number.POSITIVE_INFINITY, + playerId: 3, // From before + end: undefined, + lookaheadRank: 2, + }, + { + id: 'f', + start: Number.POSITIVE_INFINITY, + end: undefined, + lookaheadRank: 3, + playerId: 
2, + }, + ] + + const res = resolveAbAssignmentsFromRequests(resolverOptions, TWO_SLOTS, requests, 10000) + expect(res).toBeTruthy() + expect(res.failedOptional).toEqual([]) + expect(res.failedRequired).toEqual([]) + expectGotPlayer(res, 'a', 2) + expectGotPlayer(res, 'b', 1) + expectGotPlayer(res, 'd', 1) + expectGotPlayer(res, 'e', undefined) + expectGotPlayer(res, 'f', undefined) + }) + + test('reshuffle current when removing player', () => { + const requests: SessionRequest[] = [ + // current clip + { + id: 'a', + start: 1000, + end: undefined, + playerId: 3, + }, + // previous clip + { + id: 'b', + start: 0, + playerId: 1, + end: 5000, + }, + // lookaheads + { + id: 'd', + start: Number.POSITIVE_INFINITY, + end: undefined, + lookaheadRank: 1, + playerId: 1, + }, + { + id: 'e', + start: Number.POSITIVE_INFINITY, + playerId: 2, + end: undefined, + lookaheadRank: 2, + }, + ] + + const res = resolveAbAssignmentsFromRequests(resolverOptions, TWO_SLOTS, requests, 10000) + expect(res).toBeTruthy() + expect(res.failedOptional).toEqual([]) + expect(res.failedRequired).toEqual([]) + expectGotPlayer(res, 'a', 2) + expectGotPlayer(res, 'b', 1) + expectGotPlayer(res, 'd', 1) + expectGotPlayer(res, 'e', undefined) + }) + + test('add player allows distributing timed clips', () => { + const requests: SessionRequest[] = [ + // current clip + { + id: 'a', + start: 1000, + end: 11000, + playerId: 1, + }, + { + id: 'b', + start: 13000, // soon + end: undefined, + playerId: 1, + }, + { + id: 'c', + start: 1000, + end: undefined, + playerId: 2, + }, + // lookaheads + { + id: 'd', + start: Number.POSITIVE_INFINITY, + end: undefined, + lookaheadRank: 1, + playerId: 1, + }, + { + id: 'e', + start: Number.POSITIVE_INFINITY, + playerId: 2, + end: undefined, + lookaheadRank: 2, + }, + ] + + const res = resolveAbAssignmentsFromRequests(resolverOptions, THREE_SLOTS, requests, 10000) + expect(res).toBeTruthy() + expect(res.failedOptional).toEqual([]) + expect(res.failedRequired).toEqual([]) 
+ expectGotPlayer(res, 'a', 1) + expectGotPlayer(res, 'b', 3) + expectGotPlayer(res, 'c', 2) + expectGotPlayer(res, 'd', 1) + expectGotPlayer(res, 'e', undefined) + }) + }) }) diff --git a/packages/job-worker/src/playout/abPlayback/__tests__/routeSetDisabling.spec.ts b/packages/job-worker/src/playout/abPlayback/__tests__/routeSetDisabling.spec.ts new file mode 100644 index 00000000000..ca88fa1bff0 --- /dev/null +++ b/packages/job-worker/src/playout/abPlayback/__tests__/routeSetDisabling.spec.ts @@ -0,0 +1,182 @@ +import { StudioRouteBehavior, StudioRouteSet } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { abPoolFilterDisabled, findPlayersInRouteSets } from '../routeSetDisabling' +import { ABPlayerDefinition } from '@sofie-automation/blueprints-integration' +import { clone } from '@sofie-automation/corelib/dist/lib' + +describe('route set disabling ab players', () => { + const POOL_NAME = '_test_' + function runDisablePlayersFiltering( + routeSets: Record, + players: ABPlayerDefinition[] + ): ABPlayerDefinition[] { + const members = findPlayersInRouteSets(routeSets) + return abPoolFilterDisabled(POOL_NAME, players, members) + } + + const DEFAULT_PLAYERS: ABPlayerDefinition[] = [ + { playerId: 1 }, + { playerId: 2 }, + { playerId: 3 }, + { playerId: 4 }, + { playerId: 5 }, + ] + + test('no routesets', () => { + const result = runDisablePlayersFiltering({}, DEFAULT_PLAYERS) + expect(result).toEqual(DEFAULT_PLAYERS) + }) + + describe('single routeset per player', () => { + const ROUTESETS_SEPARATE: Record = { + pl1: { + name: '', + active: true, + behavior: StudioRouteBehavior.TOGGLE, + routes: [], + abPlayers: [ + { + poolName: POOL_NAME, + playerId: 1, + }, + ], + }, + pl2: { + name: '', + active: true, + behavior: StudioRouteBehavior.TOGGLE, + routes: [], + abPlayers: [ + { + poolName: POOL_NAME, + playerId: 2, + }, + ], + }, + pl3: { + name: '', + active: true, + behavior: StudioRouteBehavior.TOGGLE, + routes: [], + abPlayers: [ + { + poolName: 
POOL_NAME, + playerId: 3, + }, + ], + }, + } + + test('active routes', () => { + const result = runDisablePlayersFiltering(ROUTESETS_SEPARATE, DEFAULT_PLAYERS) + expect(result).toEqual(DEFAULT_PLAYERS) + }) + + test('inactive routes', () => { + const routesets = clone(ROUTESETS_SEPARATE) + routesets['pl3'].active = false + + // deactivate this, but for a different pool + routesets['pl2'].active = false + routesets['pl2'].abPlayers[0].poolName = 'ANOTHER' + + const result = runDisablePlayersFiltering(routesets, DEFAULT_PLAYERS) + + const expectedPlayers = DEFAULT_PLAYERS.filter((p) => p.playerId !== 3) + expect(result).toEqual(expectedPlayers) + }) + }) + + describe('multiple routesets per player', () => { + /** + * This is testing the scenario of these 3 routesets where only one can be active at a time + */ + const ROUTESETS_GROUPED: Record = { + all: { + name: '', + active: true, + behavior: StudioRouteBehavior.TOGGLE, + exclusivityGroup: 'ab', + routes: [], + abPlayers: [ + { + poolName: POOL_NAME, + playerId: 1, + }, + { + poolName: POOL_NAME, + playerId: 2, + }, + { + poolName: POOL_NAME, + playerId: 3, + }, + { + poolName: POOL_NAME, + playerId: 4, + }, + ], + }, + first: { + name: '', + active: false, + behavior: StudioRouteBehavior.TOGGLE, + exclusivityGroup: 'ab', + routes: [], + abPlayers: [ + { + poolName: POOL_NAME, + playerId: 1, + }, + { + poolName: POOL_NAME, + playerId: 2, + }, + ], + }, + second: { + name: '', + active: false, + behavior: StudioRouteBehavior.TOGGLE, + exclusivityGroup: 'ab', + routes: [], + abPlayers: [ + { + poolName: POOL_NAME, + playerId: 3, + }, + { + poolName: POOL_NAME, + playerId: 4, + }, + ], + }, + } + + test('all', () => { + const result = runDisablePlayersFiltering(ROUTESETS_GROUPED, DEFAULT_PLAYERS) + expect(result).toEqual(DEFAULT_PLAYERS) + }) + + test('first', () => { + const routesets = clone(ROUTESETS_GROUPED) + routesets['all'].active = false + routesets['first'].active = true + + const result = 
runDisablePlayersFiltering(routesets, DEFAULT_PLAYERS) + + const expectedPlayers = DEFAULT_PLAYERS.filter((p) => p.playerId !== 3 && p.playerId !== 4) + expect(result).toEqual(expectedPlayers) + }) + + test('second', () => { + const routesets = clone(ROUTESETS_GROUPED) + routesets['all'].active = false + routesets['second'].active = true + + const result = runDisablePlayersFiltering(routesets, DEFAULT_PLAYERS) + + const expectedPlayers = DEFAULT_PLAYERS.filter((p) => p.playerId !== 1 && p.playerId !== 2) + expect(result).toEqual(expectedPlayers) + }) + }) +}) diff --git a/packages/job-worker/src/playout/abPlayback/abPlaybackResolver.ts b/packages/job-worker/src/playout/abPlayback/abPlaybackResolver.ts index 782a8b92297..485d8f64295 100644 --- a/packages/job-worker/src/playout/abPlayback/abPlaybackResolver.ts +++ b/packages/job-worker/src/playout/abPlayback/abPlaybackResolver.ts @@ -61,6 +61,22 @@ export function resolveAbAssignmentsFromRequests( rawRequests: SessionRequest[], now: number // Current time ): AssignmentResult { + // Check that the player assigned still exists + const validPlayerIdsSet = new Set(playerIds) + for (const req of rawRequests) { + if (req.playerId !== undefined && !validPlayerIdsSet.has(req.playerId)) { + delete req.playerId + } + } + + const originalLookaheadAssignments: Record = {} + for (const req of rawRequests) { + if (req.lookaheadRank !== undefined && req.playerId !== undefined) { + originalLookaheadAssignments[req.id] = req.playerId + delete req.playerId + } + } + const res: AssignmentResult = { requests: _.sortBy(rawRequests, (r) => r.start).map((v) => clone(v)), failedRequired: [], @@ -82,14 +98,6 @@ export function resolveAbAssignmentsFromRequests( return res } - const originalLookaheadAssignments: Record = {} - for (const req of rawRequests) { - if (req.lookaheadRank !== undefined && req.playerId !== undefined) { - originalLookaheadAssignments[req.id] = req.playerId - delete req.playerId - } - } - const safeNow = now + 
resolverOptions.nowWindow // Treat now + nowWindow as now, as it is likely that anything changed within that window will be late to air // Clear assignments for anything which has no chance of being preloaded yet diff --git a/packages/job-worker/src/playout/abPlayback/abPlaybackSessions.ts b/packages/job-worker/src/playout/abPlayback/abPlaybackSessions.ts index 94fb3b72137..1a9fd75e503 100644 --- a/packages/job-worker/src/playout/abPlayback/abPlaybackSessions.ts +++ b/packages/job-worker/src/playout/abPlayback/abPlaybackSessions.ts @@ -5,6 +5,7 @@ import { OnGenerateTimelineObjExt } from '@sofie-automation/corelib/dist/dataMod import * as _ from 'underscore' import { SessionRequest } from './abPlaybackResolver' import { AbSessionHelper } from './abSessionHelper' +import { ReadonlyDeep } from 'type-fest' /** * Calculate all of the AB-playback sessions currently on the timeline @@ -19,7 +20,7 @@ export function calculateSessionTimeRanges( abSessionHelper: AbSessionHelper, resolvedPieces: ResolvedPieceInstance[], timelineObjects: OnGenerateTimelineObjExt[], - previousAssignmentMap: ABSessionAssignments, + previousAssignmentMap: ReadonlyDeep | undefined, poolName: string ): SessionRequest[] { const sessionRequests: { [sessionId: string]: SessionRequest | undefined } = {} @@ -47,7 +48,7 @@ export function calculateSessionTimeRanges( end: val.end === undefined || end === undefined ? undefined : Math.max(val.end, end), optional: val.optional && (session.optional ?? false), lookaheadRank: undefined, - playerId: previousAssignmentMap[sessionId]?.playerId, // Persist previous assignments + playerId: previousAssignmentMap?.[sessionId]?.playerId, // Persist previous assignments } } else { // New session @@ -57,7 +58,7 @@ export function calculateSessionTimeRanges( end, optional: session.optional ?? 
false, lookaheadRank: undefined, - playerId: previousAssignmentMap[sessionId]?.playerId, // Persist previous assignments + playerId: previousAssignmentMap?.[sessionId]?.playerId, // Persist previous assignments } } } @@ -102,7 +103,7 @@ export function calculateSessionTimeRanges( start: Number.MAX_SAFE_INTEGER, // Distant future end: undefined, lookaheadRank: i + 1, // This is so that we can easily work out which to use first - playerId: previousAssignmentMap[grp.id]?.playerId, + playerId: previousAssignmentMap?.[grp.id]?.playerId, }) } }) diff --git a/packages/job-worker/src/playout/abPlayback/applyAssignments.ts b/packages/job-worker/src/playout/abPlayback/applyAssignments.ts index 42125a702b5..009efc92736 100644 --- a/packages/job-worker/src/playout/abPlayback/applyAssignments.ts +++ b/packages/job-worker/src/playout/abPlayback/applyAssignments.ts @@ -9,8 +9,9 @@ import { ABSessionAssignment, ABSessionAssignments } from '@sofie-automation/cor import { OnGenerateTimelineObjExt } from '@sofie-automation/corelib/dist/dataModel/Timeline' import { logger } from '../../logging' import * as _ from 'underscore' -import { SessionRequest } from './abPlaybackResolver' +import { PlayerId, SessionRequest } from './abPlaybackResolver' import { AbSessionHelper } from './abSessionHelper' +import { ReadonlyDeep } from 'type-fest' /** * Apply the ab assignments for a pool to the timeline @@ -28,12 +29,12 @@ export function applyAbPlayerObjectAssignments( blueprintContext: ICommonContext, abConfiguration: Pick, timelineObjs: OnGenerateTimelineObjExt[], - previousAssignmentMap: ABSessionAssignments, + previousAssignmentMap: ReadonlyDeep | undefined, resolvedAssignments: Readonly, poolName: string ): ABSessionAssignments { const newAssignments: ABSessionAssignments = {} - const persistAssignment = (sessionId: string, playerId: number | string, lookahead: boolean): void => { + const persistAssignment = (sessionId: string, playerId: PlayerId, lookahead: boolean): void => { // Track 
the assignment, so that the next onTimelineGenerate can try to reuse the same session if (newAssignments[sessionId]) { // TODO - warn? @@ -86,7 +87,7 @@ export function applyAbPlayerObjectAssignments( unexpectedSessions.push(`${sessionId}(${objs.map((obj) => obj.id).join(',')})`) // If there was a previous assignment, hopefully that is better than nothing - const prev = previousAssignmentMap[sessionId] + const prev = previousAssignmentMap?.[sessionId] if (prev) { failedObjects.push( ...updateObjectsToAbPlayer(blueprintContext, abConfiguration, poolName, prev.playerId, objs) @@ -119,7 +120,7 @@ function updateObjectsToAbPlayer( context: ICommonContext, abConfiguration: Pick, poolName: string, - playerId: number | string, + playerId: PlayerId, objs: OnGenerateTimelineObj[] ): OnGenerateTimelineObj[] { const failedObjects: OnGenerateTimelineObj[] = [] @@ -142,7 +143,7 @@ function updateObjectsToAbPlayer( function applyUpdateToKeyframes( poolName: string, - playerId: number | string, + playerId: PlayerId, obj: OnGenerateTimelineObj ): boolean { if (!obj.keyframes) return false @@ -174,7 +175,7 @@ function applyUpdateToKeyframes( function applylayerMoveRule( timelineObjectLayerChangeRules: ABTimelineLayerChangeRules | undefined, poolName: string, - playerId: number | string, + playerId: PlayerId, obj: OnGenerateTimelineObj ): boolean { const ruleId = obj.isLookahead ? 
obj.lookaheadForLayer || obj.layer : obj.layer diff --git a/packages/job-worker/src/playout/abPlayback/index.ts b/packages/job-worker/src/playout/abPlayback/index.ts index 0de0ce19a9c..bd1e2facc0e 100644 --- a/packages/job-worker/src/playout/abPlayback/index.ts +++ b/packages/job-worker/src/playout/abPlayback/index.ts @@ -17,6 +17,8 @@ import { AbSessionHelper } from './abSessionHelper' import { ShowStyleContext } from '../../blueprints/context' import { logger } from '../../logging' import { ABPlayerDefinition } from '@sofie-automation/blueprints-integration' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { abPoolFilterDisabled, findPlayersInRouteSets } from './routeSetDisabling' /** * Resolve and apply AB-playback for the given timeline @@ -24,7 +26,7 @@ import { ABPlayerDefinition } from '@sofie-automation/blueprints-integration' * @param abSessionHelper Helper for generation sessionId * @param blueprint Blueprint of the currently playing ShowStyle * @param showStyle The currently playing ShowStyle - * @param playlist The currently playing Playlist + * @param playoutModel The current playout model * @param resolvedPieces All the PieceInstances on the timeline, resolved to have 'accurate' playback timings * @param timelineObjects The current timeline * @returns New AB assignments to be persisted on the playlist for the next call @@ -71,8 +73,14 @@ export function applyAbPlaybackForTimeline( const now = getCurrentTime() const abConfiguration = blueprint.blueprint.getAbResolverConfiguration(blueprintContext) + const routeSetMembers = findPlayersInRouteSets(applyAndValidateOverrides(context.studio.routeSetsWithOverrides).obj) + for (const [poolName, players] of Object.entries(abConfiguration.pools)) { - const previousAssignmentMap: ABSessionAssignments = previousAbSessionAssignments[poolName] || {} + // Filter out offline devices + const filteredPlayers = abPoolFilterDisabled(poolName, players, 
routeSetMembers) + + const previousAssignmentMap: ReadonlyDeep | undefined = + playlist.assignedAbSessions?.[poolName] const sessionRequests = calculateSessionTimeRanges( abSessionHelper, resolvedPieces, @@ -83,7 +91,7 @@ export function applyAbPlaybackForTimeline( const assignments = resolveAbAssignmentsFromRequests( abConfiguration.resolverOptions, - players.map((player) => player.playerId), + filteredPlayers.map((player) => player.playerId), sessionRequests, now ) diff --git a/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts b/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts new file mode 100644 index 00000000000..356e276e01c --- /dev/null +++ b/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts @@ -0,0 +1,47 @@ +import type { ABPlayerDefinition } from '@sofie-automation/blueprints-integration' +import type { StudioRouteSet } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { logger } from '../../logging' +import type { PlayerId } from './abPlaybackResolver' + +/** + * Map> + */ +type MembersOfRouteSets = Map> + +export function findPlayersInRouteSets(routeSets: Record): MembersOfRouteSets { + const routeSetEnabledPlayers: MembersOfRouteSets = new Map() + for (const [_key, routeSet] of Object.entries(routeSets)) { + for (const abPlayer of routeSet.abPlayers) { + let poolEntry = routeSetEnabledPlayers.get(abPlayer.poolName) + if (!poolEntry) { + poolEntry = new Map() + routeSetEnabledPlayers.set(abPlayer.poolName, poolEntry) + } + + // Make sure player is marked as enabled + const currentState = poolEntry.get(abPlayer.playerId) + poolEntry.set(abPlayer.playerId, currentState || routeSet.active) + } + } + return routeSetEnabledPlayers +} + +export function abPoolFilterDisabled( + poolName: string, + players: ABPlayerDefinition[], + membersOfRouteSets: MembersOfRouteSets +): ABPlayerDefinition[] { + const poolRouteSetEnabledPlayers = membersOfRouteSets.get(poolName) + if (!poolRouteSetEnabledPlayers || 
poolRouteSetEnabledPlayers.size == 0) return players + + // Filter out any disabled players: + return players.filter((player) => { + const playerState = poolRouteSetEnabledPlayers.get(player.playerId) + if (playerState === false) { + logger.silly(`AB Pool ${poolName} playerId : ${player.playerId} are disabled`) + return false + } + + return true + }) +} diff --git a/packages/job-worker/src/playout/upgrade.ts b/packages/job-worker/src/playout/upgrade.ts index 25a38d48132..08e0b1774d5 100644 --- a/packages/job-worker/src/playout/upgrade.ts +++ b/packages/job-worker/src/playout/upgrade.ts @@ -79,6 +79,7 @@ export async function handleBlueprintUpgradeForStudio(context: JobContext, _data behavior: (dev[1] as StudioRouteSet).behavior ?? {}, exclusivityGroup: (dev[1] as StudioRouteSet).exclusivityGroup ?? undefined, routes: (dev[1] as StudioRouteSet).routes, + abPlayers: (dev[1] as StudioRouteSet).abPlayers, }), ]) ) diff --git a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts index c7e97729388..f20ece98609 100644 --- a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts +++ b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts @@ -10,8 +10,8 @@ import { StudioRouteBehavior, StudioRouteSet } from '@sofie-automation/corelib/d import { logger } from '../../logging' import { WrappedOverridableItemNormal, - useOverrideOpHelperBackend, getAllCurrentItemsFromOverrides, + OverrideOpHelperImpl, } from '@sofie-automation/corelib/dist/overrideOpHelper' import { ObjectWithOverrides, SomeObjectOverrideOp } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' @@ -85,21 +85,13 @@ export class StudioBaselineHelper { updateRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): void { const studio = this.#context.studio - const saveOverrides = (newOps: SomeObjectOverrideOp[]) => { - // this.#overridesRouteSetBuffer = { defaults: this.#overridesRouteSetBuffer.defaults, 
overrides: newOps } - this.#overridesRouteSetBuffer.overrides = newOps - this.#routeSetChanged = true - } - const overrideHelper = useOverrideOpHelperBackend(saveOverrides, this.#overridesRouteSetBuffer) const routeSets: WrappedOverridableItemNormal[] = getAllCurrentItemsFromOverrides( this.#overridesRouteSetBuffer, null ) - const routeSet = routeSets.find((routeSet) => { - return routeSet.id === routeSetId - }) + const routeSet = routeSets.find((routeSet) => routeSet.id === routeSetId) if (routeSet === undefined) throw new Error(`RouteSet "${routeSetId}" not found!`) @@ -108,8 +100,15 @@ export class StudioBaselineHelper { if (routeSet.computed?.behavior === StudioRouteBehavior.ACTIVATE_ONLY && isActive === false) throw new Error(`RouteSet "${routeSet.id}" is ACTIVATE_ONLY`) + const saveOverrides = (newOps: SomeObjectOverrideOp[]) => { + // this.#overridesRouteSetBuffer = { defaults: this.#overridesRouteSetBuffer.defaults, overrides: newOps } + this.#overridesRouteSetBuffer.overrides = newOps + this.#routeSetChanged = true + } + const overrideHelper = new OverrideOpHelperImpl(saveOverrides, this.#overridesRouteSetBuffer) + logger.debug(`switchRouteSet "${studio._id}" "${routeSet.id}"=${isActive}`) - overrideHelper.setItemValue(routeSet.id, `active`, isActive).commit() + overrideHelper.setItemValue(routeSet.id, `active`, isActive) // Deactivate other routeSets in the same exclusivity group: if (routeSet.computed.exclusivityGroup && isActive === true) { @@ -117,9 +116,11 @@ export class StudioBaselineHelper { if (otherRouteSet.id === routeSet.id) continue if (otherRouteSet.computed?.exclusivityGroup === routeSet.computed.exclusivityGroup) { logger.debug(`switchRouteSet Other ID "${studio._id}" "${otherRouteSet.id}"=false`) - overrideHelper.setItemValue(otherRouteSet.id, `active`, false).commit() + overrideHelper.setItemValue(otherRouteSet.id, `active`, false) } } } + + overrideHelper.commit() } } diff --git a/packages/shared-lib/src/core/model/StudioRouteSet.ts 
b/packages/shared-lib/src/core/model/StudioRouteSet.ts index 3c0ae4562cb..ce6737952f2 100644 --- a/packages/shared-lib/src/core/model/StudioRouteSet.ts +++ b/packages/shared-lib/src/core/model/StudioRouteSet.ts @@ -18,7 +18,13 @@ export interface StudioRouteSet { behavior: StudioRouteBehavior routes: RouteMapping[] + /** + * AB Pool members + * Any players referenced here will only be active when this or another routeset is `active` + */ + abPlayers: StudioAbPlayerDisabling[] } + export enum StudioRouteBehavior { HIDDEN = 0, TOGGLE = 1, @@ -36,6 +42,12 @@ export interface RouteMapping extends ResultingMappingRoute { /** Which original layer to route. If false, a "new" layer will be inserted during routing */ mappedLayer: string | undefined } + +export interface StudioAbPlayerDisabling { + poolName: string + playerId: string | number +} + export interface ResultingMappingRoutes { /** Routes that route existing layers */ existing: { diff --git a/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx b/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx new file mode 100644 index 00000000000..e2c1c5025dc --- /dev/null +++ b/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx @@ -0,0 +1,152 @@ +import * as React from 'react' +import { StudioRouteSet, StudioAbPlayerDisabling } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { doModalDialog } from '../../../../lib/ModalDialog' +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome' +import { faTrash } from '@fortawesome/free-solid-svg-icons' +import { useTranslation } from 'react-i18next' +import { literal } from '@sofie-automation/corelib/dist/lib' +import { LabelAndOverrides } from '../../../../lib/Components/LabelAndOverrides' +import { + OverrideOpHelper, + OverrideOpHelperForItemContents, + WrappedOverridableItemNormal, +} from '../../util/OverrideOpHelper' +import { TextInputControl } from 
'../../../../lib/Components/TextInput' +import { OverrideOpHelperArrayTable } from '../../../../lib/forms/SchemaFormTable/ArrayTableOpHelper' + +interface RouteSetAbPlayersProps { + routeSet: WrappedOverridableItemNormal + overrideHelper: OverrideOpHelper +} + +export function RouteSetAbPlayers({ routeSet, overrideHelper }: Readonly): React.JSX.Element { + const { t } = useTranslation() + + const tableOverrideHelper = React.useCallback( + () => new OverrideOpHelperArrayTable(overrideHelper(), routeSet.id, routeSet.computed.abPlayers, 'abPlayers'), + [overrideHelper, routeSet.id, routeSet.computed.abPlayers] + ) + + const confirmRemoveAbPlayer = React.useCallback( + (route: WrappedOverridableItemNormal) => { + doModalDialog({ + title: t('Remove this AB Player from this Route Set?'), + yes: t('Remove'), + no: t('Cancel'), + onAccept: () => { + tableOverrideHelper().deleteRow(route.id).commit() + }, + message: ( + <> +

+ {t('Are you sure you want to remove the AB Player "{{playerId}}"?', { + playerId: route.computed.playerId, + })} +

+

{t('Please note: This action is irreversible!')}

+ + ), + }) + }, + [tableOverrideHelper] + ) + + return ( + <> +

{t('AB Playout devices')}

+

+ {t( + 'Any AB Playout devices here will only be active when this or another RouteSet that includes them is active' + )} +

+ {routeSet.computed.abPlayers.length === 0 ? ( +

{t('There are no AB Playout devices set up yet')}

+ ) : ( + routeSet.computed.abPlayers.map((route, index) => ( + + )) + )} + + ) +} + +interface AbPlayerRowProps { + tableOverrideHelper: OverrideOpHelperForItemContents + abPlayer: StudioAbPlayerDisabling + index: number + confirmRemoveAbPlayer: (route: WrappedOverridableItemNormal) => void +} + +function AbPlayerRow({ + tableOverrideHelper, + abPlayer, + index, + confirmRemoveAbPlayer, +}: Readonly): React.JSX.Element { + const { t } = useTranslation() + + const player = React.useMemo( + () => + literal>({ + type: 'normal', + id: index + '', + computed: abPlayer, + defaults: undefined, + overrideOps: [], + }), + [abPlayer, index] + ) + + const confirmRemoveRouteLocal = React.useCallback( + () => confirmRemoveAbPlayer(player), + [confirmRemoveAbPlayer, player] + ) + + return ( +
+ +
+ + {(value, handleUpdate) => ( + + )} + + + {(value, handleUpdate) => ( + + )} + +
+
+ ) +} diff --git a/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSets.tsx b/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSets.tsx index a41ec30f45e..cf555b46111 100644 --- a/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSets.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSets.tsx @@ -32,6 +32,7 @@ import { } from '../../../../lib/Components/LabelAndOverrides' import { OverrideOpHelper, + OverrideOpHelperForItemContents, useOverrideOpHelper, WrappedOverridableItem, WrappedOverridableItemDeleted, @@ -49,6 +50,7 @@ import { } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { Studios } from '../../../../collections' import { useToggleExpandHelper } from '../../../util/useToggleExpandHelper' +import { RouteSetAbPlayers } from './RouteSetAbPlayers' interface RouteSetsTable { studio: DBStudio @@ -85,6 +87,7 @@ export function RouteSetsTable({ name: 'New Route Set ' + iter.toString(), active: false, routes: [], + abPlayers: [], behavior: StudioRouteBehavior.TOGGLE, exclusivityGroup: undefined, }) @@ -211,6 +214,17 @@ function RouteSetRow({ overrideHelper().setItemValue(routeId, 'routes', newRoutes).commit() } + const addNewAbPlayerInSet = (routeId: string) => { + const newAbPlayers = routeSet.computed?.abPlayers || [] + + newAbPlayers.push({ + poolName: '', + playerId: '', + }) + + overrideHelper().setItemValue(routeId, 'abPlayers', newAbPlayers).commit() + } + const updateRouteSetId = React.useCallback( (newRouteSetId: string) => { overrideHelper().changeItemId(routeSet.id, newRouteSetId).commit() @@ -243,6 +257,12 @@ function RouteSetRow({ ) const routesIsOverridden = hasOpWithPath(routeSet.overrideOps, routeSet.id, 'routes') + const resyncAbPlayerTable = React.useCallback( + () => overrideHelper().clearItemOverrides(routeSet.id, 'abPlayers').commit(), + [overrideHelper, routeSet.id] + ) + const abPlayerIsOverridden = hasOpWithPath(routeSet.overrideOps, routeSet.id, 'abPlayers') + 
return (
- @@ -391,6 +408,28 @@ function RouteSetRow({ )}
+ +
+ +   + {routeSet.defaults && ( + + )} +
+ )} @@ -497,7 +536,7 @@ function RenderRoutes({ interface RenderRoutesRowProps { manifest: MappingsSettingsManifests translationNamespaces: string[] - tableOverrideHelper: () => OverrideOpHelperArrayTable + tableOverrideHelper: OverrideOpHelperForItemContents studioMappings: ReadonlyDeep rawRoute: RouteMapping routeIndex: number @@ -703,7 +742,7 @@ interface IDeviceMappingSettingsProps { translationNamespaces: string[] manifest: MappingsSettingsManifest | undefined mappedLayer: ReadonlyDeep | undefined - overrideHelper: () => OverrideOpHelperArrayTable + overrideHelper: OverrideOpHelperForItemContents route: WrappedOverridableItemNormal } From af8daaa44f47e4fc7a46cde92796585a3f9c5b55 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Mon, 30 Sep 2024 17:14:16 +0100 Subject: [PATCH 14/81] feat: processIngestData & userEditOperations SUPERFLY-2 (#5) Co-authored-by: olzzon --- meteor/__mocks__/defaultCollectionObjects.ts | 1 - meteor/__mocks__/helpers/database.ts | 17 +- .../server/__tests__/_testEnvironment.test.ts | 4 +- meteor/server/__tests__/cronjobs.test.ts | 84 +- meteor/server/api/__tests__/cleanup.test.ts | 12 +- .../api/__tests__/peripheralDevice.test.ts | 3 - meteor/server/api/cleanup.ts | 6 +- meteor/server/api/ingest/ingestCache.ts | 68 +- meteor/server/api/ingest/rundownInput.ts | 8 +- meteor/server/api/userActions.ts | 37 +- meteor/server/collections/rundown.ts | 16 +- meteor/server/lib/rest/v1/buckets.ts | 2 +- meteor/server/publications/rundown.ts | 10 +- .../src/api/showStyle.ts | 5 +- .../blueprints-integration/src/api/studio.ts | 27 +- .../src/context/index.ts | 1 + .../src/context/processIngestDataContext.ts | 62 ++ .../src/documents/part.ts | 6 + .../src/documents/piece.ts | 6 + .../src/documents/rundown.ts | 6 + .../src/documents/segment.ts | 7 + packages/blueprints-integration/src/index.ts | 2 + .../src/ingest-types.ts | 35 + packages/blueprints-integration/src/ingest.ts | 404 +++++++- .../blueprints-integration/src/userEditing.ts | 45 + 
packages/corelib/src/dataModel/Collections.ts | 3 +- packages/corelib/src/dataModel/Ids.ts | 7 +- .../corelib/src/dataModel/IngestDataCache.ts | 43 - .../src/dataModel/NrcsIngestDataCache.ts | 52 + packages/corelib/src/dataModel/Part.ts | 8 +- packages/corelib/src/dataModel/Piece.ts | 9 +- packages/corelib/src/dataModel/Rundown.ts | 5 + packages/corelib/src/dataModel/Segment.ts | 8 +- .../src/dataModel/SofieIngestDataCache.ts | 57 ++ .../src/dataModel/UserEditingDefinitions.ts | 30 + packages/corelib/src/pubsub.ts | 8 +- packages/corelib/src/snapshots.ts | 6 +- packages/corelib/src/worker/ingest.ts | 21 +- .../job-worker/src/__mocks__/collection.ts | 9 +- packages/job-worker/src/__mocks__/context.ts | 7 +- .../src/__mocks__/defaultCollectionObjects.ts | 1 - .../src/__mocks__/helpers/snapshot.ts | 1 - .../src/__mocks__/presetCollections.ts | 3 - .../context/ProcessIngestDataContext.ts | 55 + .../SyncIngestUpdateToPartInstanceContext.ts | 8 +- .../src/blueprints/context/index.ts | 1 + .../job-worker/src/blueprints/context/lib.ts | 111 +- .../PartAndPieceInstanceActionService.ts | 6 +- .../ingest/MutableIngestPartImpl.ts | 79 ++ .../ingest/MutableIngestRundownImpl.ts | 406 ++++++++ .../ingest/MutableIngestSegmentImpl.ts | 262 +++++ .../__tests__/MutableIngestPartImpl.spec.ts | 162 +++ .../MutableIngestRundownImpl.spec.ts | 901 ++++++++++++++++ .../MutableIngestSegmentImpl.spec.ts | 526 ++++++++++ .../defaultApplyIngestChanges.spec.ts | 960 ++++++++++++++++++ .../groupPartsInMosRundownAndChanges.spec.ts | 698 +++++++++++++ .../src/blueprints/ingest/__tests__/util.ts | 29 + .../ingest/defaultApplyIngestChanges.ts | 440 ++++++++ .../ingest/groupPartsInRundownAndChanges.ts | 287 ++++++ .../job-worker/src/blueprints/postProcess.ts | 2 + packages/job-worker/src/db/collections.ts | 15 +- .../src/ingest/__tests__/ingest.test.ts | 262 +++-- .../ingest/__tests__/ingestPartJobs.spec.ts | 314 ++++++ .../__tests__/ingestRundownJobs.spec.ts | 431 ++++++++ 
.../__tests__/ingestSegmentJobs.spec.ts | 409 ++++++++ .../__tests__/selectShowStyleVariant.test.ts | 2 + .../src/ingest/__tests__/updateNext.test.ts | 4 - .../src/ingest/createAdlibTestingRundown.ts | 17 +- .../src/ingest/generationRundown.ts | 181 ++-- .../src/ingest/generationSegment.ts | 35 +- packages/job-worker/src/ingest/ingestCache.ts | 219 ---- .../job-worker/src/ingest/ingestPartJobs.ts | 129 ++- .../src/ingest/ingestRundownJobs.ts | 184 ++-- .../src/ingest/ingestSegmentJobs.ts | 359 +++---- packages/job-worker/src/ingest/jobWrappers.ts | 83 ++ packages/job-worker/src/ingest/lib.ts | 4 +- packages/job-worker/src/ingest/lock.ts | 128 +-- .../src/ingest/model/IngestModel.ts | 6 +- .../model/implementation/IngestModelImpl.ts | 7 +- .../__snapshots__/mosIngest.test.ts.snap | 760 +++++++------- .../__tests__/diffSegmentEntries.test.ts | 555 ---------- .../mosDevice/__tests__/mock-mos-data.ts | 12 +- .../mosDevice/__tests__/mosIngest.test.ts | 245 +++-- .../mosDevice/__tests__/mosStoryJobs.spec.ts | 622 ++++++++++++ .../job-worker/src/ingest/mosDevice/diff.ts | 305 ------ .../job-worker/src/ingest/mosDevice/lib.ts | 47 +- .../src/ingest/mosDevice/mosRundownJobs.ts | 188 ++-- .../src/ingest/mosDevice/mosStoryJobs.ts | 522 +++++----- .../src/ingest/mosDevice/mosToIngest.ts | 135 --- .../job-worker/src/ingest/nrcsIngestCache.ts | 242 +++++ packages/job-worker/src/ingest/packageInfo.ts | 119 +-- .../job-worker/src/ingest/runOperation.ts | 577 +++++++++++ .../job-worker/src/ingest/sofieIngestCache.ts | 263 +++++ .../job-worker/src/ingest/userOperation.ts | 23 + .../src/playout/__tests__/helpers/rundowns.ts | 1 - .../playout/lookahead/__tests__/util.test.ts | 3 - .../playout/model/PlayoutPartInstanceModel.ts | 7 +- .../PlayoutPartInstanceModelImpl.ts | 26 +- .../implementation/PlayoutRundownModelImpl.ts | 2 - .../__tests__/PlayoutRundownModelImpl.spec.ts | 3 - .../__tests__/PlayoutSegmentModelImpl.spec.ts | 1 - packages/job-worker/src/playout/snapshot.ts | 27 +- 
packages/job-worker/src/rundownPlaylists.ts | 3 +- .../job-worker/src/workers/ingest/jobs.ts | 46 +- packages/lerna.json | 2 +- .../topics/__tests__/segmentsTopic.spec.ts | 1 - packages/meteor-lib/src/api/userActions.ts | 10 +- packages/meteor-lib/src/userAction.ts | 1 + .../shared-lib/src/peripheralDevice/ingest.ts | 29 +- .../src/__mocks__/defaultCollectionObjects.ts | 1 - .../webui/src/__mocks__/helpers/database.ts | 3 - .../__snapshots__/rundown.test.ts.snap | 1 - .../lib/__tests__/rundownTiming.test.ts | 1 - .../webui/src/client/lib/clientUserAction.ts | 2 + .../src/client/ui/MediaStatus/MediaStatus.tsx | 1 - .../ui/SegmentTimeline/SegmentContextMenu.tsx | 40 +- .../ui/SegmentTimeline/SegmentTimeline.scss | 16 + .../ui/SegmentTimeline/SegmentTimeline.tsx | 28 +- .../RenderUserEditOperations.tsx | 90 ++ 119 files changed, 10707 insertions(+), 3127 deletions(-) create mode 100644 packages/blueprints-integration/src/context/processIngestDataContext.ts create mode 100644 packages/blueprints-integration/src/ingest-types.ts create mode 100644 packages/blueprints-integration/src/userEditing.ts delete mode 100644 packages/corelib/src/dataModel/IngestDataCache.ts create mode 100644 packages/corelib/src/dataModel/NrcsIngestDataCache.ts create mode 100644 packages/corelib/src/dataModel/SofieIngestDataCache.ts create mode 100644 packages/corelib/src/dataModel/UserEditingDefinitions.ts create mode 100644 packages/job-worker/src/blueprints/context/ProcessIngestDataContext.ts create mode 100644 packages/job-worker/src/blueprints/ingest/MutableIngestPartImpl.ts create mode 100644 packages/job-worker/src/blueprints/ingest/MutableIngestRundownImpl.ts create mode 100644 packages/job-worker/src/blueprints/ingest/MutableIngestSegmentImpl.ts create mode 100644 packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestPartImpl.spec.ts create mode 100644 packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestRundownImpl.spec.ts create mode 100644 
packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestSegmentImpl.spec.ts create mode 100644 packages/job-worker/src/blueprints/ingest/__tests__/defaultApplyIngestChanges.spec.ts create mode 100644 packages/job-worker/src/blueprints/ingest/__tests__/groupPartsInMosRundownAndChanges.spec.ts create mode 100644 packages/job-worker/src/blueprints/ingest/__tests__/util.ts create mode 100644 packages/job-worker/src/blueprints/ingest/defaultApplyIngestChanges.ts create mode 100644 packages/job-worker/src/blueprints/ingest/groupPartsInRundownAndChanges.ts create mode 100644 packages/job-worker/src/ingest/__tests__/ingestPartJobs.spec.ts create mode 100644 packages/job-worker/src/ingest/__tests__/ingestRundownJobs.spec.ts create mode 100644 packages/job-worker/src/ingest/__tests__/ingestSegmentJobs.spec.ts delete mode 100644 packages/job-worker/src/ingest/ingestCache.ts create mode 100644 packages/job-worker/src/ingest/jobWrappers.ts delete mode 100644 packages/job-worker/src/ingest/mosDevice/__tests__/diffSegmentEntries.test.ts create mode 100644 packages/job-worker/src/ingest/mosDevice/__tests__/mosStoryJobs.spec.ts delete mode 100644 packages/job-worker/src/ingest/mosDevice/diff.ts delete mode 100644 packages/job-worker/src/ingest/mosDevice/mosToIngest.ts create mode 100644 packages/job-worker/src/ingest/nrcsIngestCache.ts create mode 100644 packages/job-worker/src/ingest/runOperation.ts create mode 100644 packages/job-worker/src/ingest/sofieIngestCache.ts create mode 100644 packages/job-worker/src/ingest/userOperation.ts create mode 100644 packages/webui/src/client/ui/UserEditOperations/RenderUserEditOperations.tsx diff --git a/meteor/__mocks__/defaultCollectionObjects.ts b/meteor/__mocks__/defaultCollectionObjects.ts index e254257340a..d2349c80ec6 100644 --- a/meteor/__mocks__/defaultCollectionObjects.ts +++ b/meteor/__mocks__/defaultCollectionObjects.ts @@ -134,7 +134,6 @@ export function defaultSegment(_id: SegmentId, rundownId: RundownId): DBSegment 
externalId: unprotectString(_id), rundownId: rundownId, name: 'Default Segment', - externalModified: 1, } } diff --git a/meteor/__mocks__/helpers/database.ts b/meteor/__mocks__/helpers/database.ts index e881d8a1f59..6abd5a60bff 100644 --- a/meteor/__mocks__/helpers/database.ts +++ b/meteor/__mocks__/helpers/database.ts @@ -14,13 +14,11 @@ import { SourceLayerType, StudioBlueprintManifest, BlueprintManifestType, - IngestRundown, BlueprintManifestBase, ShowStyleBlueprintManifest, IShowStyleContext, BlueprintResultRundown, BlueprintResultSegment, - IngestSegment, IBlueprintAdLibPiece, IBlueprintRundown, IBlueprintSegment, @@ -32,6 +30,8 @@ import { StatusCode, IBlueprintPieceType, IBlueprintActionManifest, + SofieIngestSegment, + SofieIngestRundown, } from '@sofie-automation/blueprints-integration' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' @@ -429,7 +429,10 @@ export async function setupMockShowStyleBlueprint( getShowStyleVariantId: (): string | null => { return SHOW_STYLE_VARIANT_ID }, - getRundown: (_context: IShowStyleContext, ingestRundown: IngestRundown): BlueprintResultRundown => { + getRundown: ( + _context: IShowStyleContext, + ingestRundown: SofieIngestRundown + ): BlueprintResultRundown => { const rundown: IBlueprintRundown = { externalId: ingestRundown.externalId, name: ingestRundown.name, @@ -452,7 +455,10 @@ export async function setupMockShowStyleBlueprint( baseline: { timelineObjects: [] }, } }, - getSegment: (_context: unknown, ingestSegment: IngestSegment): BlueprintResultSegment => { + getSegment: ( + _context: unknown, + ingestSegment: SofieIngestSegment + ): BlueprintResultSegment => { const segment: IBlueprintSegment = { name: ingestSegment.name ? 
ingestSegment.name : ingestSegment.externalId, privateData: ingestSegment.payload, @@ -654,7 +660,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_0', rundownId: rundown._id, name: 'Segment 0', - externalModified: 1, } await Segments.mutableCollection.insertAsync(segment0) /* tslint:disable:ter-indent*/ @@ -764,7 +769,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_2', rundownId: rundown._id, name: 'Segment 1', - externalModified: 1, } await Segments.mutableCollection.insertAsync(segment1) @@ -807,7 +811,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_2', rundownId: rundown._id, name: 'Segment 2', - externalModified: 1, } await Segments.mutableCollection.insertAsync(segment2) diff --git a/meteor/server/__tests__/_testEnvironment.test.ts b/meteor/server/__tests__/_testEnvironment.test.ts index 7ff4542025a..9fde690091b 100644 --- a/meteor/server/__tests__/_testEnvironment.test.ts +++ b/meteor/server/__tests__/_testEnvironment.test.ts @@ -11,7 +11,7 @@ import { Evaluations, ExpectedMediaItems, ExternalMessageQueue, - IngestDataCache, + NrcsIngestDataCache, MediaObjects, MediaWorkFlows, MediaWorkFlowSteps, @@ -74,7 +74,7 @@ describe('Basic test of test environment', () => { // @ts-ignore expect(ExternalMessageQueue._isMock).toBeTruthy() // @ts-ignore - expect(IngestDataCache._isMock).toBeTruthy() + expect(NrcsIngestDataCache._isMock).toBeTruthy() // @ts-ignore expect(MediaObjects._isMock).toBeTruthy() // @ts-ignore diff --git a/meteor/server/__tests__/cronjobs.test.ts b/meteor/server/__tests__/cronjobs.test.ts index a0831841791..2c189d38afd 100644 --- a/meteor/server/__tests__/cronjobs.test.ts +++ b/meteor/server/__tests__/cronjobs.test.ts @@ -19,7 +19,8 @@ import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceIns import { Meteor } from 'meteor/meteor' import { EmptyPieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { - 
IngestDataCacheObjId, + NrcsIngestDataCacheObjId, + SofieIngestDataCacheObjId, PartId, PeripheralDeviceId, RundownId, @@ -42,7 +43,7 @@ import '../cronjobs' import '../api/peripheralDevice' import { CoreSystem, - IngestDataCache, + NrcsIngestDataCache, PartInstances, Parts, PeripheralDeviceCommands, @@ -51,8 +52,9 @@ import { Snapshots, UserActionsLog, Segments, + SofieIngestDataCache, } from '../collections' -import { IngestCacheType } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' import { DefaultEnvironment, @@ -61,6 +63,7 @@ import { } from '../../__mocks__/helpers/database' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { Settings } from '../Settings' +import { SofieIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' describe('cronjobs', () => { let env: DefaultEnvironment @@ -170,45 +173,95 @@ describe('cronjobs', () => { await PeripheralDevices.removeAsync({}) }) - testInFiber('Remove IngestDataCache objects that are not connected to any Rundown', async () => { - // Set up a mock rundown, a detached IngestDataCache object and an object attached to the mock rundown - // Detached IngestDataCache object 0 - const dataCache0Id = protectString(getRandomString()) - await IngestDataCache.mutableCollection.insertAsync({ + testInFiber('Remove NrcsIngestDataCache objects that are not connected to any Rundown', async () => { + // Set up a mock rundown, a detached NrcsIngestDataCache object and an object attached to the mock rundown + // Detached NrcsIngestDataCache object 0 + const dataCache0Id = protectString(getRandomString()) + await NrcsIngestDataCache.mutableCollection.insertAsync({ _id: dataCache0Id, data: { externalId: '', name: '', segments: [], type: '', + rundownSource: {} as any, + payload: 
undefined, }, modified: new Date(2000, 0, 1, 0, 0, 0).getTime(), // this one is attached to rundown0 rundownId: getRandomId(), - type: IngestCacheType.RUNDOWN, + type: NrcsIngestCacheType.RUNDOWN, }) - // Attached IngestDataCache object 1 - const dataCache1Id = protectString(getRandomString()) - await IngestDataCache.mutableCollection.insertAsync({ + // Attached NrcsIngestDataCache object 1 + const dataCache1Id = protectString(getRandomString()) + await NrcsIngestDataCache.mutableCollection.insertAsync({ _id: dataCache1Id, data: { externalId: '', name: '', segments: [], type: '', + rundownSource: {} as any, + payload: undefined, }, modified: new Date(2000, 0, 1, 0, 0, 0).getTime(), // just some random ID rundownId: rundownId, - type: IngestCacheType.RUNDOWN, + type: NrcsIngestCacheType.RUNDOWN, }) await runCronjobs() - expect(await IngestDataCache.findOneAsync(dataCache1Id)).toMatchObject({ + expect(await NrcsIngestDataCache.findOneAsync(dataCache1Id)).toMatchObject({ _id: dataCache1Id, }) - expect(await IngestDataCache.findOneAsync(dataCache0Id)).toBeUndefined() + expect(await NrcsIngestDataCache.findOneAsync(dataCache0Id)).toBeUndefined() + }) + testInFiber('Remove SofieIngestDataCache objects that are not connected to any Rundown', async () => { + // Set up a mock rundown, a detached SofieIngestDataCache object and an object attached to the mock rundown + // Detached SofieIngestDataCache object 0 + const dataCache0Id = protectString(getRandomString()) + await SofieIngestDataCache.mutableCollection.insertAsync({ + _id: dataCache0Id, + data: { + externalId: '', + name: '', + segments: [], + type: '', + rundownSource: {} as any, + userEditStates: {}, + payload: undefined, + }, + modified: new Date(2000, 0, 1, 0, 0, 0).getTime(), + // this one is attached to rundown0 + rundownId: getRandomId(), + type: SofieIngestCacheType.RUNDOWN, + }) + // Attached SofieIngestDataCache object 1 + const dataCache1Id = protectString(getRandomString()) + await 
SofieIngestDataCache.mutableCollection.insertAsync({ + _id: dataCache1Id, + data: { + externalId: '', + name: '', + segments: [], + type: '', + rundownSource: {} as any, + userEditStates: {}, + payload: undefined, + }, + modified: new Date(2000, 0, 1, 0, 0, 0).getTime(), + // just some random ID + rundownId: rundownId, + type: SofieIngestCacheType.RUNDOWN, + }) + + await runCronjobs() + + expect(await SofieIngestDataCache.findOneAsync(dataCache1Id)).toMatchObject({ + _id: dataCache1Id, + }) + expect(await SofieIngestDataCache.findOneAsync(dataCache0Id)).toBeUndefined() }) testInFiber('Removes old PartInstances and PieceInstances', async () => { // nightlyCronjobInner() @@ -217,7 +270,6 @@ describe('cronjobs', () => { _id: getRandomId(), _rank: 0, externalId: '', - externalModified: 0, rundownId, name: 'mock segment', } diff --git a/meteor/server/api/__tests__/cleanup.test.ts b/meteor/server/api/__tests__/cleanup.test.ts index 75cc3e5f35b..6aecf74097c 100644 --- a/meteor/server/api/__tests__/cleanup.test.ts +++ b/meteor/server/api/__tests__/cleanup.test.ts @@ -32,7 +32,7 @@ import { ExpectedPackageWorkStatuses, ExpectedPlayoutItems, ExternalMessageQueue, - IngestDataCache, + NrcsIngestDataCache, PackageContainerPackageStatuses, PackageInfos, PeripheralDeviceCommands, @@ -45,6 +45,7 @@ import { TranslationsBundles, PackageContainerStatuses, TimelineDatastore, + SofieIngestDataCache, } from '../../collections' import { Collections } from '../../collections/lib' import { generateTranslationBundleOriginId } from '../translationsBundles' @@ -300,7 +301,14 @@ async function setDefaultDatatoDB(env: DefaultEnvironment, now: number) { tryCount: 0, type: '' as any, }) - await IngestDataCache.mutableCollection.insertAsync({ + await NrcsIngestDataCache.mutableCollection.insertAsync({ + _id: getRandomId(), + data: {} as any, + modified: 0, + rundownId, + type: '' as any, + }) + await SofieIngestDataCache.mutableCollection.insertAsync({ _id: getRandomId(), data: {} as any, 
modified: 0, diff --git a/meteor/server/api/__tests__/peripheralDevice.test.ts b/meteor/server/api/__tests__/peripheralDevice.test.ts index 3048dd3d170..7299084e523 100644 --- a/meteor/server/api/__tests__/peripheralDevice.test.ts +++ b/meteor/server/api/__tests__/peripheralDevice.test.ts @@ -120,7 +120,6 @@ describe('test peripheralDevice general API methods', () => { _rank: 0, rundownId: rundownID, name: 'Fire', - externalModified: 1, }) await Parts.mutableCollection.insertAsync({ _id: protectString('part000'), @@ -164,7 +163,6 @@ describe('test peripheralDevice general API methods', () => { externalId: 'segment01', rundownId: rundownID, name: 'Water', - externalModified: 1, }) await Segments.mutableCollection.insertAsync({ _id: protectString('segment2'), @@ -172,7 +170,6 @@ describe('test peripheralDevice general API methods', () => { externalId: 'segment02', rundownId: rundownID, name: 'Earth', - externalModified: 1, }) }) beforeEach(async () => { diff --git a/meteor/server/api/cleanup.ts b/meteor/server/api/cleanup.ts index f7633884070..a15f3b49f16 100644 --- a/meteor/server/api/cleanup.ts +++ b/meteor/server/api/cleanup.ts @@ -37,7 +37,7 @@ import { ExpectedPackageWorkStatuses, ExpectedPlayoutItems, ExternalMessageQueue, - IngestDataCache, + NrcsIngestDataCache, MediaObjects, MediaWorkFlows, MediaWorkFlowSteps, @@ -69,6 +69,7 @@ import { UserActionsLog, Workers, WorkerThreadStatuses, + SofieIngestDataCache, } from '../collections' import { AsyncOnlyMongoCollection, AsyncOnlyReadOnlyMongoCollection } from '../collections/collection' import { getCollectionKey } from '../collections/lib' @@ -276,7 +277,8 @@ export async function cleanupOldDataInner(actuallyCleanup = false): Promise removedParts.add(id)) await ownedByRundownId(RundownBaselineAdLibActions) await ownedByRundownId(RundownBaselineAdLibPieces) diff --git a/meteor/server/api/ingest/ingestCache.ts b/meteor/server/api/ingest/ingestCache.ts index e8f2cc74c9a..971e984e2f0 100644 --- 
a/meteor/server/api/ingest/ingestCache.ts +++ b/meteor/server/api/ingest/ingestCache.ts @@ -1,58 +1,51 @@ import * as _ from 'underscore' import { Meteor } from 'meteor/meteor' -import { IngestRundown, IngestSegment, IngestPart } from '@sofie-automation/blueprints-integration' +import { IngestRundown, IngestSegment } from '@sofie-automation/blueprints-integration' import { logger } from '../../logging' import { profiler } from '../profiler' import { RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { IngestDataCache } from '../../collections' -import { IngestCacheType, IngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCache } from '../../collections' +import { + NrcsIngestCacheType, + NrcsIngestDataCacheObj, + NrcsIngestDataCacheObjRundown, + NrcsIngestDataCacheObjSegment, +} from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { groupByToMap } from '@sofie-automation/corelib/dist/lib' -interface LocalIngestBase { - modified: number -} -export interface LocalIngestRundown extends IngestRundown, LocalIngestBase { - segments: LocalIngestSegment[] -} -export interface LocalIngestSegment extends IngestSegment, LocalIngestBase { - parts: LocalIngestPart[] -} -export interface LocalIngestPart extends IngestPart, LocalIngestBase {} - export class RundownIngestDataCache { - private constructor(private readonly rundownId: RundownId, private readonly documents: IngestDataCacheObj[]) {} + private constructor(private readonly rundownId: RundownId, private readonly documents: NrcsIngestDataCacheObj[]) {} static async create(rundownId: RundownId): Promise { - const docs = await IngestDataCache.findFetchAsync({ rundownId }) + const docs = await NrcsIngestDataCache.findFetchAsync({ rundownId }) return new RundownIngestDataCache(rundownId, docs) } - fetchRundown(): LocalIngestRundown | undefined { + fetchRundown(): IngestRundown | undefined { const span = 
profiler.startSpan('ingest.ingestCache.loadCachedRundownData') - const cachedRundown = this.documents.find((e) => e.type === IngestCacheType.RUNDOWN) + const cachedRundown = this.documents.find( + (e): e is NrcsIngestDataCacheObjRundown => e.type === NrcsIngestCacheType.RUNDOWN + ) if (!cachedRundown) { span?.end() return undefined } - const ingestRundown = cachedRundown.data as LocalIngestRundown - ingestRundown.modified = cachedRundown.modified + const ingestRundown = cachedRundown.data const segmentMap = groupByToMap(this.documents, 'segmentId') for (const objs of segmentMap.values()) { - const segmentEntry = objs.find((e) => e.type === IngestCacheType.SEGMENT) + const segmentEntry = objs.find( + (e): e is NrcsIngestDataCacheObjSegment => e.type === NrcsIngestCacheType.SEGMENT + ) if (segmentEntry) { - const ingestSegment = segmentEntry.data as LocalIngestSegment - ingestSegment.modified = segmentEntry.modified + const ingestSegment = segmentEntry.data for (const entry of objs) { - if (entry.type === IngestCacheType.PART) { - const ingestPart = entry.data as LocalIngestPart - ingestPart.modified = entry.modified - - ingestSegment.parts.push(ingestPart) + if (entry.type === NrcsIngestCacheType.PART) { + ingestSegment.parts.push(entry.data) } } @@ -67,10 +60,12 @@ export class RundownIngestDataCache { return ingestRundown } - fetchSegment(segmentId: SegmentId): LocalIngestSegment | undefined { + fetchSegment(segmentId: SegmentId): IngestSegment | undefined { const cacheEntries = this.documents.filter((d) => d.segmentId && d.segmentId === segmentId) - const segmentEntries = cacheEntries.filter((e) => e.type === IngestCacheType.SEGMENT) + const segmentEntries = cacheEntries.filter( + (e): e is NrcsIngestDataCacheObjSegment => e.type === NrcsIngestCacheType.SEGMENT + ) if (segmentEntries.length > 1) logger.warn( `There are multiple segments (${cacheEntries.length}) in IngestDataCache for rundownId: "${this.rundownId}", segmentId: "${segmentId}"` @@ -78,17 +73,14 @@ 
export class RundownIngestDataCache { const segmentEntry = segmentEntries[0] if (!segmentEntry) return undefined - if (segmentEntry.type !== IngestCacheType.SEGMENT) throw new Meteor.Error(500, 'Wrong type on cached segment') + if (segmentEntry.type !== NrcsIngestCacheType.SEGMENT) + throw new Meteor.Error(500, 'Wrong type on cached segment') - const ingestSegment = segmentEntry.data as LocalIngestSegment - ingestSegment.modified = segmentEntry.modified + const ingestSegment = segmentEntry.data for (const entry of cacheEntries) { - if (entry.type === IngestCacheType.PART) { - const ingestPart = entry.data as LocalIngestPart - ingestPart.modified = entry.modified - - ingestSegment.parts.push(ingestPart) + if (entry.type === NrcsIngestCacheType.PART) { + ingestSegment.parts.push(entry.data) } } diff --git a/meteor/server/api/ingest/rundownInput.ts b/meteor/server/api/ingest/rundownInput.ts index bacdd0fd557..5b1cf69940b 100644 --- a/meteor/server/api/ingest/rundownInput.ts +++ b/meteor/server/api/ingest/rundownInput.ts @@ -1,7 +1,7 @@ import { Meteor } from 'meteor/meteor' import { check } from '../../lib/check' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { IngestDataCache, MediaObjects, Parts, Rundowns, Segments } from '../../collections' +import { NrcsIngestDataCache, MediaObjects, Parts, Rundowns, Segments } from '../../collections' import { literal } from '../../lib/tempLib' import { lazyIgnore } from '../../lib/lib' import { IngestRundown, IngestSegment, IngestPart, IngestPlaylist } from '@sofie-automation/blueprints-integration' @@ -17,7 +17,7 @@ import { MethodContext } from '../methodContext' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' import { MediaObject } from '@sofie-automation/shared-lib/dist/core/model/MediaObjects' import { PeripheralDeviceId, RundownId, SegmentId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { IngestCacheType } from 
'@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' export namespace RundownInput { @@ -415,9 +415,9 @@ async function onMediaObjectChanged(newDocument: MediaObject, oldDocument?: Medi const validSegmentIds = new Set( ( - await IngestDataCache.findFetchAsync( + await NrcsIngestDataCache.findFetchAsync( { - type: IngestCacheType.SEGMENT, + type: NrcsIngestCacheType.SEGMENT, rundownId: { $in: updateIds.map((obj) => obj.rundownId) }, }, { diff --git a/meteor/server/api/userActions.ts b/meteor/server/api/userActions.ts index 8ccc6dda652..3bbae9f5f33 100644 --- a/meteor/server/api/userActions.ts +++ b/meteor/server/api/userActions.ts @@ -5,7 +5,7 @@ import { Time } from '../lib/tempLib' import { ServerPlayoutAPI } from './playout/playout' import { NewUserActionAPI, UserActionAPIMethods } from '@sofie-automation/meteor-lib/dist/api/userActions' import { EvaluationBase } from '@sofie-automation/meteor-lib/dist/collections/Evaluations' -import { IngestPart, IngestAdlib, ActionUserData } from '@sofie-automation/blueprints-integration' +import { IngestPart, IngestAdlib, ActionUserData, UserOperationTarget } from '@sofie-automation/blueprints-integration' import { storeRundownPlaylistSnapshot } from './snapshot' import { registerClassToMeteorMethods, ReplaceOptionalWithNullInMethodArguments } from '../methods' import { ServerRundownAPI } from './rundown' @@ -46,11 +46,12 @@ import { ShowStyleVariantId, StudioId, } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { IngestDataCache, Parts, Pieces, Rundowns } from '../collections' -import { IngestCacheType } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCache, Parts, Pieces, Rundowns } from '../collections' +import { NrcsIngestCacheType } from 
'@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { verifyHashedToken } from './singleUseTokens' import { QuickLoopMarker } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { runIngestOperation } from './ingest/lib' +import { RundownPlaylistContentWriteAccess } from '../security/rundownPlaylist' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' async function pieceSetInOutPoints( @@ -69,10 +70,10 @@ async function pieceSetInOutPoints( }) if (!rundown) throw new Meteor.Error(501, `Rundown "${part.rundownId}" not found!`) - const partCache = await IngestDataCache.findOneAsync({ + const partCache = await NrcsIngestDataCache.findOneAsync({ rundownId: rundown._id, partId: part._id, - type: IngestCacheType.PART, + type: NrcsIngestCacheType.PART, }) if (!partCache) throw new Meteor.Error(404, `Part Cache for "${partId}" not found!`) const piece = await Pieces.findOneAsync(pieceId) @@ -1269,6 +1270,32 @@ class ServerUserActionAPI ) } + async executeUserChangeOperation( + userEvent: string, + eventTime: Time, + rundownId: RundownId, + operationTarget: UserOperationTarget, + operation: { id: string; [key: string]: any } + ): Promise> { + return ServerClientAPI.runUserActionInLog( + this, + userEvent, + eventTime, + 'executeUserChangeOperation', + { operationTarget, operation }, + async () => { + const access = await RundownPlaylistContentWriteAccess.rundown(this, rundownId) + if (!access.rundown) throw new Error(`Rundown "${rundownId}" not found`) + + await runIngestOperation(access.rundown.studioId, IngestJobs.UserExecuteChangeOperation, { + rundownExternalId: access.rundown.externalId, + operationTarget, + operation, + }) + } + ) + } + async createAdlibTestingRundownForShowStyleVariant( userEvent: string, eventTime: number, diff --git a/meteor/server/collections/rundown.ts b/meteor/server/collections/rundown.ts index ff35b315911..3ef2d82eca9 100644 --- a/meteor/server/collections/rundown.ts +++ 
b/meteor/server/collections/rundown.ts @@ -1,7 +1,8 @@ import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { CollectionName } from '@sofie-automation/corelib/dist/dataModel/Collections' -import { IngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { SofieIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' @@ -28,10 +29,17 @@ registerIndex(AdLibPieces, { _rank: 1, }) -export const IngestDataCache = createAsyncOnlyReadOnlyMongoCollection( - CollectionName.IngestDataCache +export const NrcsIngestDataCache = createAsyncOnlyReadOnlyMongoCollection( + CollectionName.NrcsIngestDataCache ) -registerIndex(IngestDataCache, { +registerIndex(NrcsIngestDataCache, { + rundownId: 1, +}) + +export const SofieIngestDataCache = createAsyncOnlyReadOnlyMongoCollection( + CollectionName.SofieIngestDataCache +) +registerIndex(SofieIngestDataCache, { rundownId: 1, }) diff --git a/meteor/server/lib/rest/v1/buckets.ts b/meteor/server/lib/rest/v1/buckets.ts index 83f45113f39..bafe7c1136b 100644 --- a/meteor/server/lib/rest/v1/buckets.ts +++ b/meteor/server/lib/rest/v1/buckets.ts @@ -114,7 +114,7 @@ export interface APIImportAdlib { externalId: string name: string payloadType: string - payload?: unknown + payload: unknown | undefined showStyleBaseId: string } diff --git a/meteor/server/publications/rundown.ts b/meteor/server/publications/rundown.ts index 9c56b681f9b..a34c85824c8 100644 --- a/meteor/server/publications/rundown.ts +++ 
b/meteor/server/publications/rundown.ts @@ -17,7 +17,7 @@ import { AdLibActions, AdLibPieces, ExpectedPlayoutItems, - IngestDataCache, + NrcsIngestDataCache, PartInstances, Parts, PeripheralDevices, @@ -29,7 +29,7 @@ import { Segments, } from '../collections' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' -import { IngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { literal } from '@sofie-automation/corelib/dist/lib' import { PartId, @@ -532,16 +532,16 @@ meteorPublish( // Note: this publication is for dev purposes only: meteorPublish( CorelibPubSub.ingestDataCache, - async function (selector: MongoQuery, token: string | undefined) { + async function (selector: MongoQuery, token: string | undefined) { if (!selector) throw new Meteor.Error(400, 'selector argument missing') - const modifier: FindOptions = { + const modifier: FindOptions = { fields: {}, } if ( NoSecurityReadAccess.any() || (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) ) { - return IngestDataCache.findWithCursor(selector, modifier) + return NrcsIngestDataCache.findWithCursor(selector, modifier) } return null } diff --git a/packages/blueprints-integration/src/api/showStyle.ts b/packages/blueprints-integration/src/api/showStyle.ts index 52ec4559fd1..8b40534e63c 100644 --- a/packages/blueprints-integration/src/api/showStyle.ts +++ b/packages/blueprints-integration/src/api/showStyle.ts @@ -19,7 +19,7 @@ import type { IOnTakeContext, IOnSetAsNextContext, } from '../context' -import type { IngestAdlib, ExtendedIngestRundown, IngestSegment, IngestRundown } from '../ingest' +import type { IngestAdlib, ExtendedIngestRundown, IngestRundown } from '../ingest' import type { IBlueprintExternalMessageQueueObj } from '../message' import type { MigrationStepShowStyle } from '../migrations' import 
type { @@ -46,6 +46,7 @@ import type { BlueprintConfigCoreConfig, BlueprintManifestBase, BlueprintManifes import type { IBlueprintTriggeredActions } from '../triggers' import type { ExpectedPackage } from '../package' import type { ABResolverConfiguration } from '../abPlayback' +import type { SofieIngestSegment } from '../ingest-types' export type TimelinePersistentState = unknown @@ -85,7 +86,7 @@ export interface ShowStyleBlueprintManifest BlueprintResultSegment | Promise /** diff --git a/packages/blueprints-integration/src/api/studio.ts b/packages/blueprints-integration/src/api/studio.ts index f10ccd3b93d..f4a3b6525bc 100644 --- a/packages/blueprints-integration/src/api/studio.ts +++ b/packages/blueprints-integration/src/api/studio.ts @@ -4,9 +4,21 @@ import type { BlueprintConfigCoreConfig, BlueprintManifestBase, BlueprintManifes import type { JSONSchema } from '@sofie-automation/shared-lib/dist/lib/JSONSchemaTypes' import type { JSONBlob } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' import type { MigrationStepStudio } from '../migrations' -import type { ICommonContext, IFixUpConfigContext, IStudioBaselineContext, IStudioUserContext } from '../context' +import type { + ICommonContext, + IFixUpConfigContext, + IStudioBaselineContext, + IStudioUserContext, + IProcessIngestDataContext, +} from '../context' import type { IBlueprintShowStyleBase } from '../showStyle' -import type { ExtendedIngestRundown } from '../ingest' +import type { + ExtendedIngestRundown, + NrcsIngestChangeDetails, + IngestRundown, + MutableIngestRundown, + UserOperationChange, +} from '../ingest' import type { ExpectedPlayoutItemGeneric, IBlueprintResultRundownPlaylist, IBlueprintRundownDB } from '../documents' import type { BlueprintMappings } from '../studio' import type { TimelineObjectCoreExt, TSR } from '../timeline' @@ -80,6 +92,17 @@ export interface StudioBlueprintManifest TProcessedConfig + + /** + * Process an ingest operation, to apply changes to the sofie interpretation 
of the ingest data + */ + processIngestData?: ( + context: IProcessIngestDataContext, + mutableIngestRundown: MutableIngestRundown, + nrcsIngestRundown: IngestRundown, + previousNrcsIngestRundown: IngestRundown | undefined, + changes: NrcsIngestChangeDetails | UserOperationChange + ) => Promise } export interface BlueprintResultStudioBaseline { diff --git a/packages/blueprints-integration/src/context/index.ts b/packages/blueprints-integration/src/context/index.ts index 594ecff3de0..843436ddd8a 100644 --- a/packages/blueprints-integration/src/context/index.ts +++ b/packages/blueprints-integration/src/context/index.ts @@ -5,6 +5,7 @@ export * from './fixUpConfigContext' export * from './onSetAsNextContext' export * from './onTakeContext' export * from './packageInfoContext' +export * from './processIngestDataContext' export * from './rundownContext' export * from './showStyleContext' export * from './studioContext' diff --git a/packages/blueprints-integration/src/context/processIngestDataContext.ts b/packages/blueprints-integration/src/context/processIngestDataContext.ts new file mode 100644 index 00000000000..3a0a6f632c4 --- /dev/null +++ b/packages/blueprints-integration/src/context/processIngestDataContext.ts @@ -0,0 +1,62 @@ +import type { IngestRundown, IngestSegment } from '@sofie-automation/shared-lib/dist/peripheralDevice/ingest' +import type { IStudioContext } from './studioContext' +import type { IngestDefaultChangesOptions, MutableIngestRundown, NrcsIngestChangeDetails } from '../ingest' + +export interface IProcessIngestDataContext extends IStudioContext { + /** + * Perform the default syncing of changes from the ingest data to the rundown. + * This may be overly agressive at removing any changes made by user operations. + * If you are using user operations, you may need to perform some pre and post fixups to ensure changes aren't wiped unnecessarily. 
+ * @param ingestRundown NRCS version of the IngestRundown to copy from + * @param ingestChanges A description of the changes that have been made to the rundown and should be propogated + * @param options Options for how to apply the changes + */ + defaultApplyIngestChanges( + mutableIngestRundown: MutableIngestRundown, + ingestRundown: IngestRundown, + ingestChanges: NrcsIngestChangeDetails, + options?: IngestDefaultChangesOptions + ): void + + /** + * Group Parts in a MOS Rundown and return a new changes object + * This will group the Parts based on the segment name, using the separator provided to extract the segment name from the part name + * Note: This ignores a lot of the contents of the `ingestChanges` object, and relies more on the `previousIngestRundown` instead + * @param ingestRundown The rundown whose parts needs grouping + * @param previousIngestRundown The rundown prior to the changes, if known + * @param ingestChanges The changes which have been performed in `ingestRundown`, that need to translating + * @param partNameSeparator A string to split the part name on + * @returns A transformed rundown and changes object + */ + groupMosPartsInRundownAndChangesWithSeparator( + ingestRundown: IngestRundown, + previousIngestRundown: IngestRundown | undefined, + ingestChanges: NrcsIngestChangeDetails, + partNameSeparator: string + ): GroupPartsInMosRundownAndChangesResult + + /** + * Group Parts in a Rundown and return a new changes object + * Note: This ignores a lot of the contents of the `ingestChanges` object, and relies more on the `previousIngestRundown` instead + * @param ingestRundown The rundown whose parts needs grouping + * @param previousIngestRundown The rundown prior to the changes, if known + * @param ingestChanges The changes which have been performed in `ingestRundown`, that need to translating + * @param groupPartsIntoSegments A function to group parts into segments + * @returns A transformed rundown and changes object + */ + 
groupPartsInRundownAndChanges( + ingestRundown: IngestRundown, + previousIngestRundown: IngestRundown | undefined, + ingestChanges: NrcsIngestChangeDetails, + groupPartsIntoSegments: (ingestSegments: IngestSegment[]) => IngestSegment[] + ): GroupPartsInMosRundownAndChangesResult +} + +export interface GroupPartsInMosRundownAndChangesResult< + TRundownPayload = unknown, + TSegmentPayload = unknown, + TPartPayload = unknown +> { + nrcsIngestRundown: IngestRundown + ingestChanges: NrcsIngestChangeDetails +} diff --git a/packages/blueprints-integration/src/documents/part.ts b/packages/blueprints-integration/src/documents/part.ts index 6f656a88023..4d035656d58 100644 --- a/packages/blueprints-integration/src/documents/part.ts +++ b/packages/blueprints-integration/src/documents/part.ts @@ -1,3 +1,4 @@ +import { UserEditingDefinition } from '../userEditing' import type { NoteSeverity } from '../lib' import type { ITranslatableMessage } from '../translations' @@ -85,6 +86,11 @@ export interface IBlueprintMutatablePart /** Whether the piece affects the output of the Studio or is describing an invisible state within the Studio */ notInVision?: boolean + + /** + * User editing definitions for this piece + */ + userEditOperations?: UserEditingDefinition[] } export interface IBlueprintPieceDB extends IBlueprintPiece { diff --git a/packages/blueprints-integration/src/documents/rundown.ts b/packages/blueprints-integration/src/documents/rundown.ts index 4c5318b068e..4775ca39d8c 100644 --- a/packages/blueprints-integration/src/documents/rundown.ts +++ b/packages/blueprints-integration/src/documents/rundown.ts @@ -1,3 +1,4 @@ +import { UserEditingDefinition } from '../userEditing' import type { RundownPlaylistTiming } from './playlistTiming' /** The Rundown generated from Blueprint */ @@ -26,6 +27,11 @@ export interface IBlueprintRundown diff --git a/packages/blueprints-integration/src/index.ts b/packages/blueprints-integration/src/index.ts index 95246bc308a..d5196e59f74 100644 --- 
a/packages/blueprints-integration/src/index.ts +++ b/packages/blueprints-integration/src/index.ts @@ -6,6 +6,7 @@ export * from './content' export * from './context' export * from './documents' export * from './ingest' +export * from './ingest-types' export * from './lib' export * from './message' export * from './migrations' @@ -19,6 +20,7 @@ export * from './timeline' export * from './util' export * from './translations' export * from './triggers' +export * from './userEditing' export { MOS } from '@sofie-automation/shared-lib/dist/mos' diff --git a/packages/blueprints-integration/src/ingest-types.ts b/packages/blueprints-integration/src/ingest-types.ts new file mode 100644 index 00000000000..69c4ee0cf9d --- /dev/null +++ b/packages/blueprints-integration/src/ingest-types.ts @@ -0,0 +1,35 @@ +import { IngestPart, IngestPlaylist, IngestRundown, IngestSegment } from './ingest' + +export interface SofieIngestPlaylist extends IngestPlaylist { + /** Ingest cache of rundowns in this playlist. 
*/ + rundowns: SofieIngestRundown[] +} +export interface SofieIngestRundown + extends IngestRundown { + /** Array of segments in this rundown */ + segments: SofieIngestSegment[] + + /** States for UserEdits, could be lock from NRCS updates, + * lock from user changes, + * or removedByUser + * */ + userEditStates: Record +} +export interface SofieIngestSegment + extends IngestSegment { + /** Array of parts in this segment */ + parts: SofieIngestPart[] + + /** States for UserEdits, could be lock from NRCS updates, + * lock from user changes, + * or removedByUser + * */ + userEditStates: Record +} +export interface SofieIngestPart extends IngestPart { + /** States for UserEdits, could be lock from NRCS updates, + * lock from user changes, + * or removedByUser + * */ + userEditStates: Record +} diff --git a/packages/blueprints-integration/src/ingest.ts b/packages/blueprints-integration/src/ingest.ts index eb6e8b61629..057a20a9daa 100644 --- a/packages/blueprints-integration/src/ingest.ts +++ b/packages/blueprints-integration/src/ingest.ts @@ -1,5 +1,7 @@ -import { IngestRundown } from '@sofie-automation/shared-lib/dist/peripheralDevice/ingest' +import { IngestPart, IngestSegment } from '@sofie-automation/shared-lib/dist/peripheralDevice/ingest' import { IBlueprintRundownDBData } from './documents' +import { ReadonlyDeep } from 'type-fest' +import { SofieIngestRundown } from './ingest-types' export { IngestPart, @@ -9,7 +11,403 @@ export { IngestAdlib, } from '@sofie-automation/shared-lib/dist/peripheralDevice/ingest' -/** The IngesteRundown is extended with data from Core */ -export interface ExtendedIngestRundown extends IngestRundown { +/** The IngestRundown is extended with data from Core */ +export interface ExtendedIngestRundown + extends SofieIngestRundown { coreData: IBlueprintRundownDBData | undefined } + +/** + * Describes the possible ingest changes that can have been made to a part by the NRCS + */ +export enum NrcsIngestPartChangeDetails { + Inserted = 
'inserted', + Deleted = 'deleted', + Updated = 'updated', +} + +/** + * Describes some of the possible ingest changes that can have been made to a segment by the NRCS + */ +export enum NrcsIngestSegmentChangeDetailsEnum { + /** + * The segment has been inserted into the rundown, or the segment has changed sufficiently to require a full regeneration + */ + InsertedOrUpdated = 'inserted-or-updated', + /** + * The segment has been removed from the rundown + */ + Deleted = 'deleted', +} + +/** + * Describes the possible ingest changes that can have been made to the rundown properties by the NRCS + */ +export enum NrcsIngestRundownChangeDetails { + /** + * The payload or name of the rundown has changed. + */ + Payload = 'payload', + + /** + * A full regeneration of the rundown and all segments is required. + * This will typically remove all user driven changes. + */ + Regenerate = 'regenerate', +} + +/** + * Describes the possible ingest changes that can have been made to the contents of a segment by the NRCS + */ +export interface NrcsIngestSegmentChangeDetailsObject { + /** + * True when the payload or name of the segment has changed. + */ + payloadChanged?: boolean + + /** + * True when the rank of any part in the segment has changed. 
+ */ + partOrderChanged?: boolean + + /** + * Descibes the changes to the parts in the rundown + */ + partChanges?: Record +} + +export enum IngestChangeType { + /** Indicate that this change is from ingest operations */ + Ingest = 'ingest', + /** Indicate that this change is from user operations */ + User = 'user', +} + +/** + * Describes the possible ingest changes that can have been made to a segment by the NRCS + */ +export type NrcsIngestSegmentChangeDetails = NrcsIngestSegmentChangeDetailsEnum | NrcsIngestSegmentChangeDetailsObject + +export interface NrcsIngestChangeDetails { + /** Indicate that this change is from ingest operations */ + source: IngestChangeType.Ingest + + /** + * True when the rank of any segment in the rundown has changed. + * Expressing what exactly has changed non-trivial particularly how to represent that in this structure, + * so for now we just have a simple boolean. + * If this is false, no segments have been reordered, added or removed. + */ + segmentOrderChanged?: boolean + + /** + * Describes the changes to the rundown itself + */ + rundownChanges?: NrcsIngestRundownChangeDetails + + /** + * Describes the changes to the segments in the rundown + */ + segmentChanges?: Record + + /** + * Describes any changes to segment external ids + * This is used to ensure that content belonging to a segment gets moved between segments correctly + * Note: this is not currently defined by Sofie, but is defined by `groupPartsInRundownAndChanges` and `groupMosPartsInRundownAndChangesWithSeparator` + */ + changedSegmentExternalIds?: Record +} + +export interface UserOperationTarget { + segmentExternalId: string | undefined + partExternalId: string | undefined + pieceExternalId: string | undefined +} + +export type DefaultUserOperations = { + id: '__sofie-move-segment' // Future: define properly + payload: Record +} + +export interface UserOperationChange { + /** Indicate that this change is from user operations */ + source: IngestChangeType.User + + 
operationTarget: UserOperationTarget + operation: DefaultUserOperations | TCustomBlueprintOperations +} + +export interface MutableIngestRundown { + /** Id of the rundown as reported by the ingest gateway. Must be unique for each rundown owned by the gateway */ + readonly externalId: string + /** Name of the rundown */ + readonly name: string + + /** Something that identified the data source. eg "spreadsheet", "mos" */ + readonly type: string + + /** Payload of rundown metadata. For use by other blueprints methods */ + readonly payload: ReadonlyDeep | undefined + + readonly userEditStates: Record + + /** Array of segments in this rundown */ + readonly segments: ReadonlyArray> + + /** + * Search for a Part through the whole IngestRundown + * @param partExternalId externalId of the Part + */ + findPart(partExternalId: string): MutableIngestPart | undefined + + /** + * Search for a Part through the whole IngestRundown + * @param partExternalId externalId of the Part + * @returns The part and segment that the part belongs to + */ + findPartAndSegment(partExternalId: string): + | { + part: MutableIngestPart + segment: MutableIngestSegment + } + | undefined + + getSegment(segmentExternalId: string): MutableIngestSegment | undefined + + /** + * Move a segment to a new position in the rundown + * @param segmentExternalId externalId of the Segment to move + * @param beforeSegmentExternalId externalId of the Segment to position before. If null, position at the end + */ + moveSegmentBefore(segmentExternalId: string, beforeSegmentExternalId: string | null): void + + /** + * Move a segment to a new position in the rundown + * @param segmentExternalId externalId of the Segment to move + * @param afterSegmentExternalId externalId of the Segment to position after. If null, position at the beginning + */ + moveSegmentAfter(segmentExternalId: string, afterSegmentExternalId: string | null): void + + /** + * Replace a Segment in the Rundown with a new one. 
If the Segment does not already exist, it will be inserted. + * This will replace all of the Parts in the Segment as well, along with the payload and other properties of the Segment. + * @param segment the new IngestSegment to insert + * @param beforeSegmentExternalId externalId of the Segment to position before. If null, position at the end + * @returns the new MutableIngestSegment + */ + replaceSegment( + segment: Omit, 'rank'>, + beforeSegmentExternalId: string | null + ): MutableIngestSegment + + /** + * Change the externalId of a Segment + * @param oldSegmentExternalId Id of the segment to change + * @param newSegmentExternalId New id for the segment + */ + changeSegmentExternalId( + oldSegmentExternalId: string, + newSegmentExternalId: string + ): MutableIngestSegment + + /** + * Change the originalExternalId of a Segment + * This allows for tracking of segments that have been renamed, after a Segment has been added or replaced + * @param segmentExternalId Id of the segment to update + * @param originalSegmentExternalId Original id for the segment + */ + changeSegmentOriginalExternalId( + segmentExternalId: string, + originalSegmentExternalId: string + ): MutableIngestSegment + + /** + * Remove a Segment from the Rundown + * @param segmentExternalId externalId of the Segment to remove + * @returns true if the segment was removed, false if it was not found + */ + removeSegment(segmentExternalId: string): boolean + + /** + * Remove all Segments from the Rundown + */ + removeAllSegments(): void + + /** + * Force the whole Rundown to be re-run through the ingest blueprints, even if there are no changes + */ + forceFullRegenerate(): void + + /** + * Set name of the Rundown + */ + setName(name: string): void + + /** + * Update the payload of the Rundown + * This will trigger the Rundown and RundownPlaylist to be updated, but not Segments + * @param payload the new payload + */ + replacePayload(payload: ReadonlyDeep | TRundownPayload): void + + /** + * Update the 
portion of the payload of the Rundown + * This will trigger the Rundown and RundownPlaylist to be updated, but not Segments + * @param key the key of the payload to update + * @param value the new value + */ + setPayloadProperty( + key: TKey, + value: ReadonlyDeep | TRundownPayload[TKey] + ): void + + setUserEditState(key: string, value: boolean): void +} + +export interface MutableIngestSegment { + /** Id of the segment as reported by the ingest gateway. Must be unique for each segment in the rundown */ + readonly externalId: string + /** Name of the segment */ + readonly name: string + + /** If the segment has had it's externalId changed, the id before the change */ + readonly originalExternalId: string | undefined + + /** Payload of segment metadata. For use by other blueprints methods */ + readonly payload: ReadonlyDeep | undefined + + readonly userEditStates: Record + + /** Array of parts in this segment */ + readonly parts: ReadonlyArray> + + /** + * Get a Part from the Segment + * @param partExternalId externalId of the Part + */ + getPart(partExternalId: string): MutableIngestPart | undefined + + /** + * Move a part to a new position in the segment + * @param partExternalId externalId of the Part to move + * @param beforePartExternalId externalId of the Part to position before. If null, position at the end + */ + movePartBefore(partExternalId: string, beforePartExternalId: string | null): void + + /** + * Move a part to a new position in the segment + * @param partExternalId externalId of the Part to move + * @param afterPartExternalId externalId of the Part to position after. If null, position at the beginning + */ + movePartAfter(partExternalId: string, afterPartExternalId: string | null): void + + /** + * Replace a Part in the Segment with a new one. If the Part does not already exist, it will be inserted. + * This will replace the payload and other properties of the Part. 
+ * @param ingestPart the new IngestPart to insert + * @param beforePartExternalId externalId of the Part to position before. If null, position at the end + * @returns the new MutableIngestPart + */ + replacePart( + ingestPart: Omit, 'rank'>, + beforePartExternalId: string | null + ): MutableIngestPart + + /** + * Remove a Part from the Segment + * @param partExternalId externalId of the Part to remove + * @returns true if the part was removed, false if it was not found + */ + removePart(partExternalId: string): boolean + + /** + * Force this segment to be regenerated, even if there are no changes + */ + forceRegenerate(): void + + /** + * Set the name of the Segment + */ + setName(name: string): void + + /** + * Update the payload of the Segment + * This will trigger the Segment to be updated + * @param payload the new payload + */ + replacePayload(payload: ReadonlyDeep | TSegmentPayload): void + + /** + * Update the portion of the payload of the Segment + * This will trigger the Segment to be updated + * @param key the key of the payload to update + * @param value the new value + */ + setPayloadProperty( + key: TKey, + value: ReadonlyDeep | TSegmentPayload[TKey] + ): void + + setUserEditState(key: string, value: boolean): void +} + +export interface MutableIngestPart { + /** Id of the part as reported by the ingest gateway. Must be unique for each part in the rundown */ + readonly externalId: string + /** Name of the part */ + readonly name: string + + /** Payload of the part. 
For use by other blueprints methods */ + readonly payload: ReadonlyDeep | undefined + + readonly userEditStates: Record + + /** + * Set the name of the Part + */ + setName(name: string): void + + /** + * Update the payload of the Part + * This will trigger the Segment to be updated + * @param payload the new payload + */ + replacePayload(payload: ReadonlyDeep | TPartPayload): void + + /** + * Update the portion of the payload of the Part + * This will trigger the Segment to be updated + * @param key the key of the payload to update + * @param value the new value + */ + setPayloadProperty( + key: TKey, + value: ReadonlyDeep | TPartPayload[TKey] + ): void + + setUserEditState(key: string, value: boolean): void +} + +export type TransformPayloadFunction = (payload: any, oldPayload: ReadonlyDeep | undefined) => T | ReadonlyDeep + +export interface IngestDefaultChangesOptions< + TRundownPayload = unknown, + TSegmentPayload = unknown, + TPartPayload = unknown +> { + /** + * A custom transform for the payload of a Rundown. + * Typically this will translate from a NRCS native structure to a javascript friendly structure. + */ + transformRundownPayload: TransformPayloadFunction + /** + * A custom transform for the payload of a Segment. + * Typically this will translate from a NRCS native structure to a javascript friendly structure. + */ + transformSegmentPayload: TransformPayloadFunction + /** + * A custom transform for the payload of a Part. + * Typically this will translate from a NRCS native structure to a javascript friendly structure. 
+ */ + transformPartPayload: TransformPayloadFunction +} diff --git a/packages/blueprints-integration/src/userEditing.ts b/packages/blueprints-integration/src/userEditing.ts new file mode 100644 index 00000000000..b199341768b --- /dev/null +++ b/packages/blueprints-integration/src/userEditing.ts @@ -0,0 +1,45 @@ +import { JSONBlob } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' +import type { ITranslatableMessage } from './translations' +import { JSONSchema } from '@sofie-automation/shared-lib/dist/lib/JSONSchemaTypes' + +/** + * Description of a user performed editing operation allowed on an document + */ +export type UserEditingDefinition = UserEditingDefinitionAction | UserEditingDefinitionForm + +/** + * A simple 'action' that can be performed + */ +export interface UserEditingDefinitionAction { + type: UserEditingType.ACTION + /** Id of this operation */ + id: string + /** Label to show to the user for this operation */ + label: ITranslatableMessage + /** Icon to show to when this action is 'active' */ + svgIcon?: string + /** Whether this action should be indicated as being active */ + isActive?: boolean +} + +/** + * A simple form based operation + */ +export interface UserEditingDefinitionForm { + type: UserEditingType.FORM + /** Id of this operation */ + id: string + /** Label to show to the user for this operation */ + label: ITranslatableMessage + /** The json schema describing the form to display */ + schema: JSONBlob + /** Current values to populate the form with */ + currentValues: Record +} + +export enum UserEditingType { + /** Action */ + ACTION = 'action', + /** Form of selections */ + FORM = 'form', +} diff --git a/packages/corelib/src/dataModel/Collections.ts b/packages/corelib/src/dataModel/Collections.ts index 7105b14aa99..670bdfcd442 100644 --- a/packages/corelib/src/dataModel/Collections.ts +++ b/packages/corelib/src/dataModel/Collections.ts @@ -13,7 +13,8 @@ export enum CollectionName { ExpectedPackageWorkStatuses = 
'expectedPackageWorkStatuses', ExpectedPlayoutItems = 'expectedPlayoutItems', ExternalMessageQueue = 'externalMessageQueue', - IngestDataCache = 'ingestDataCache', + NrcsIngestDataCache = 'ingestDataCache', // Future: this could be renamed to nrcsIngestDataCache + SofieIngestDataCache = 'sofieIngestDataCache', MediaObjects = 'mediaObjects', MediaWorkFlows = 'mediaWorkFlows', MediaWorkFlowSteps = 'mediaWorkFlowSteps', diff --git a/packages/corelib/src/dataModel/Ids.ts b/packages/corelib/src/dataModel/Ids.ts index 840efbe33e4..1e4e544cca8 100644 --- a/packages/corelib/src/dataModel/Ids.ts +++ b/packages/corelib/src/dataModel/Ids.ts @@ -35,8 +35,11 @@ export type ExpectedPlayoutItemId = ProtectedString<'ExpectedPlayoutItemId'> /** A string, identifying a ExternalMessageQueueObj */ export type ExternalMessageQueueObjId = ProtectedString<'ExternalMessageQueueObjId'> -/** A string, identifying a IngestDataCacheObj */ -export type IngestDataCacheObjId = ProtectedString<'IngestDataCacheObjId'> +/** A string, identifying a NrcsIngestDataCacheObj */ +export type NrcsIngestDataCacheObjId = ProtectedString<'NrcsIngestDataCacheObjId'> + +/** A string, identifying a SofieIngestDataCacheObj */ +export type SofieIngestDataCacheObjId = ProtectedString<'SofieIngestDataCacheObjId'> /** A string, identifying a Organization */ export type OrganizationId = ProtectedString<'OrganizationId'> diff --git a/packages/corelib/src/dataModel/IngestDataCache.ts b/packages/corelib/src/dataModel/IngestDataCache.ts deleted file mode 100644 index e19bc00d1e0..00000000000 --- a/packages/corelib/src/dataModel/IngestDataCache.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { IngestRundown, IngestSegment, IngestPart } from '@sofie-automation/blueprints-integration' -import { IngestDataCacheObjId, RundownId, SegmentId, PartId } from './Ids' - -export enum IngestCacheType { - RUNDOWN = 'rundown', - SEGMENT = 'segment', - PART = 'part', -} -export type IngestCacheData = IngestRundown | IngestSegment | IngestPart 
- -export interface IngestDataCacheObjBase { - _id: IngestDataCacheObjId - modified: number - type: IngestCacheType - - /** Id of the Rundown */ - rundownId: RundownId - segmentId?: SegmentId - partId?: PartId - - data: IngestCacheData -} - -export interface IngestDataCacheObjRundown extends IngestDataCacheObjBase { - type: IngestCacheType.RUNDOWN - rundownId: RundownId - data: IngestRundown -} -export interface IngestDataCacheObjSegment extends IngestDataCacheObjBase { - type: IngestCacheType.SEGMENT - rundownId: RundownId - segmentId: SegmentId - - data: IngestSegment -} -export interface IngestDataCacheObjPart extends IngestDataCacheObjBase { - type: IngestCacheType.PART - rundownId: RundownId - segmentId: SegmentId - partId: PartId - data: IngestPart -} -export type IngestDataCacheObj = IngestDataCacheObjRundown | IngestDataCacheObjSegment | IngestDataCacheObjPart diff --git a/packages/corelib/src/dataModel/NrcsIngestDataCache.ts b/packages/corelib/src/dataModel/NrcsIngestDataCache.ts new file mode 100644 index 00000000000..1fe42d95464 --- /dev/null +++ b/packages/corelib/src/dataModel/NrcsIngestDataCache.ts @@ -0,0 +1,52 @@ +import { IngestRundown, IngestSegment, IngestPart } from '@sofie-automation/blueprints-integration' +import { NrcsIngestDataCacheObjId, RundownId, SegmentId, PartId } from './Ids' +import { RundownSource } from './Rundown' + +export enum NrcsIngestCacheType { + RUNDOWN = 'rundown', + SEGMENT = 'segment', + PART = 'part', +} +export type IngestCacheData = IngestRundown | IngestSegment | IngestPart + +export interface IngestRundownWithSource + extends IngestRundown { + rundownSource: RundownSource +} + +interface IngestDataCacheObjBase { + _id: NrcsIngestDataCacheObjId + modified: number + type: NrcsIngestCacheType + + /** Id of the Rundown */ + rundownId: RundownId + segmentId?: SegmentId + partId?: PartId + + data: IngestCacheData +} + +export interface NrcsIngestDataCacheObjRundown extends IngestDataCacheObjBase { + type: 
NrcsIngestCacheType.RUNDOWN + rundownId: RundownId + data: IngestRundownWithSource +} +export interface NrcsIngestDataCacheObjSegment extends IngestDataCacheObjBase { + type: NrcsIngestCacheType.SEGMENT + rundownId: RundownId + segmentId: SegmentId + + data: IngestSegment +} +export interface NrcsIngestDataCacheObjPart extends IngestDataCacheObjBase { + type: NrcsIngestCacheType.PART + rundownId: RundownId + segmentId: SegmentId + partId: PartId + data: IngestPart +} +export type NrcsIngestDataCacheObj = + | NrcsIngestDataCacheObjRundown + | NrcsIngestDataCacheObjSegment + | NrcsIngestDataCacheObjPart diff --git a/packages/corelib/src/dataModel/Part.ts b/packages/corelib/src/dataModel/Part.ts index 5792e023429..5194cb98b83 100644 --- a/packages/corelib/src/dataModel/Part.ts +++ b/packages/corelib/src/dataModel/Part.ts @@ -3,6 +3,7 @@ import { ITranslatableMessage } from '../TranslatableMessage' import { PartId, RundownId, SegmentId } from './Ids' import { PartNote } from './Notes' import { ReadonlyDeep } from 'type-fest' +import { CoreUserEditingDefinition } from './UserEditingDefinitions' export interface PartInvalidReason { message: ITranslatableMessage @@ -11,7 +12,7 @@ export interface PartInvalidReason { } /** A "Line" in NRK Lingo. 
*/ -export interface DBPart extends IBlueprintPart { +export interface DBPart extends Omit { _id: PartId /** * Position inside the segment @@ -35,6 +36,11 @@ export interface DBPart extends IBlueprintPart { /** A modified expectedDuration with the piece/transition derived timings factored in */ expectedDurationWithTransition: number | undefined + + /** + * User editing definitions for this part + */ + userEditOperations?: CoreUserEditingDefinition[] } export function isPartPlayable(part: Pick, 'invalid' | 'floated'>): boolean { diff --git a/packages/corelib/src/dataModel/Piece.ts b/packages/corelib/src/dataModel/Piece.ts index a340c45fff6..88d8e95865c 100644 --- a/packages/corelib/src/dataModel/Piece.ts +++ b/packages/corelib/src/dataModel/Piece.ts @@ -7,6 +7,7 @@ import { } from '@sofie-automation/blueprints-integration' import { ProtectedString, protectString, unprotectString } from '../protectedString' import { PieceId, RundownId, SegmentId, PartId } from './Ids' +import { CoreUserEditingDefinition } from './UserEditingDefinitions' /** A generic list of playback availability statuses for a Piece */ export enum PieceStatusCode { @@ -49,8 +50,7 @@ export interface PieceGeneric extends Omit { /** Stringified timelineObjects */ timelineObjectsString: PieceTimelineObjectsBlob } - -export interface Piece extends PieceGeneric, Omit { +export interface Piece extends PieceGeneric, Omit { /** * This is the id of the rundown this piece starts playing in. 
* Currently this is the only rundown the piece could be playing in @@ -72,6 +72,11 @@ export interface Piece extends PieceGeneric, Omit diff --git a/packages/corelib/src/dataModel/Rundown.ts b/packages/corelib/src/dataModel/Rundown.ts index 1907fb87255..a4fd75f3559 100644 --- a/packages/corelib/src/dataModel/Rundown.ts +++ b/packages/corelib/src/dataModel/Rundown.ts @@ -10,6 +10,7 @@ import { } from './Ids' import { RundownNote } from './Notes' import { ReadonlyDeep } from 'type-fest' +import { CoreUserEditingDefinition } from './UserEditingDefinitions' export enum RundownOrphanedReason { /** Rundown is deleted from the source but we still need it */ @@ -85,6 +86,10 @@ export interface Rundown { playlistId: RundownPlaylistId /** If the playlistId has ben set manually by a user in Sofie */ playlistIdIsSetInSofie?: boolean + /** + * User editing definitions for this rundown + */ + userEditOperations?: CoreUserEditingDefinition[] } /** A description of where a Rundown originated from */ diff --git a/packages/corelib/src/dataModel/Segment.ts b/packages/corelib/src/dataModel/Segment.ts index 76996af431c..89b03d102f7 100644 --- a/packages/corelib/src/dataModel/Segment.ts +++ b/packages/corelib/src/dataModel/Segment.ts @@ -1,6 +1,7 @@ import { SegmentDisplayMode, SegmentTimingInfo } from '@sofie-automation/blueprints-integration' import { SegmentId, RundownId } from './Ids' import { SegmentNote } from './Notes' +import { CoreUserEditingDefinition } from './UserEditingDefinitions' export enum SegmentOrphanedReason { /** Segment is deleted from the NRCS but we still need it */ @@ -18,8 +19,6 @@ export interface DBSegment { _rank: number /** ID of the source object in the gateway */ externalId: string - /** Timestamp when the externalData was last modified */ - externalModified: number /** The rundown this segment belongs to */ rundownId: RundownId @@ -47,4 +46,9 @@ export interface DBSegment { /** Holds notes (warnings / errors) thrown by the blueprints during creation */ 
notes?: Array + + /** + * User editing definitions for this segment + */ + userEditOperations?: CoreUserEditingDefinition[] } diff --git a/packages/corelib/src/dataModel/SofieIngestDataCache.ts b/packages/corelib/src/dataModel/SofieIngestDataCache.ts new file mode 100644 index 00000000000..76819968c8c --- /dev/null +++ b/packages/corelib/src/dataModel/SofieIngestDataCache.ts @@ -0,0 +1,57 @@ +import { SofieIngestRundown, SofieIngestSegment, SofieIngestPart } from '@sofie-automation/blueprints-integration' +import { SofieIngestDataCacheObjId, RundownId, SegmentId, PartId } from './Ids' +import { RundownSource } from './Rundown' + +export enum SofieIngestCacheType { + RUNDOWN = 'rundown', + SEGMENT = 'segment', + PART = 'part', +} +export type SofieIngestCacheData = SofieIngestRundown | SofieIngestSegment | SofieIngestPart + +export interface SofieIngestRundownWithSource< + TRundownPayload = unknown, + TSegmentPayload = unknown, + TPartPayload = unknown +> extends SofieIngestRundown { + rundownSource: RundownSource +} + +interface SofieIngestDataCacheBase { + _id: SofieIngestDataCacheObjId + modified: number + type: SofieIngestCacheType + + /** Id of the Rundown */ + rundownId: RundownId + segmentId?: SegmentId + partId?: PartId + + data: SofieIngestCacheData +} + +export interface SofieIngestDataCacheObjRundown extends SofieIngestDataCacheBase { + type: SofieIngestCacheType.RUNDOWN + rundownId: RundownId + data: SofieIngestRundownWithSource +} + +export interface SofieIngestDataCacheObjSegment extends SofieIngestDataCacheBase { + type: SofieIngestCacheType.SEGMENT + rundownId: RundownId + segmentId: SegmentId + data: SofieIngestSegment +} + +export interface SofieIngestDataCacheObjPart extends SofieIngestDataCacheBase { + type: SofieIngestCacheType.PART + rundownId: RundownId + segmentId: SegmentId + partId: PartId + data: SofieIngestPart +} + +export type SofieIngestDataCacheObj = + | SofieIngestDataCacheObjRundown + | SofieIngestDataCacheObjSegment + | 
SofieIngestDataCacheObjPart diff --git a/packages/corelib/src/dataModel/UserEditingDefinitions.ts b/packages/corelib/src/dataModel/UserEditingDefinitions.ts new file mode 100644 index 00000000000..4930fbfbdaa --- /dev/null +++ b/packages/corelib/src/dataModel/UserEditingDefinitions.ts @@ -0,0 +1,30 @@ +import type { UserEditingType, JSONBlob, JSONSchema } from '@sofie-automation/blueprints-integration' +import type { ITranslatableMessage } from '../TranslatableMessage' + +export type CoreUserEditingDefinition = CoreUserEditingDefinitionAction | CoreUserEditingDefinitionForm + +export interface CoreUserEditingDefinitionAction { + type: UserEditingType.ACTION + /** Id of this operation */ + id: string + /** Label to show to the user for this operation */ + label: ITranslatableMessage + /** Icon to show to when this action is 'active' */ + svgIcon?: string + /** Whether this action should be indicated as being active */ + isActive?: boolean +} + +export interface CoreUserEditingDefinitionForm { + type: UserEditingType.FORM + /** Id of this operation */ + id: string + /** Label to show to the user for this operation */ + label: ITranslatableMessage + /** The json schema describing the form to display */ + schema: JSONBlob + /** Current values to populate the form with */ + currentValues: Record + /** Translation namespaces to use when rendering this form */ + translationNamespaces: string[] +} diff --git a/packages/corelib/src/pubsub.ts b/packages/corelib/src/pubsub.ts index 9f4cbebc39e..a8436a1403c 100644 --- a/packages/corelib/src/pubsub.ts +++ b/packages/corelib/src/pubsub.ts @@ -12,7 +12,7 @@ import { DBSegment } from './dataModel/Segment' import { DBShowStyleBase } from './dataModel/ShowStyleBase' import { DBShowStyleVariant } from './dataModel/ShowStyleVariant' import { DBStudio } from './dataModel/Studio' -import { IngestDataCacheObj } from './dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from './dataModel/NrcsIngestDataCache' import { 
DBTimelineDatastoreEntry } from '@sofie-automation/shared-lib/dist/core/model/TimelineDatastore' import { Blueprint } from './dataModel/Blueprint' import { BucketAdLibAction } from './dataModel/BucketAdLibAction' @@ -211,9 +211,9 @@ export interface CorelibPubSubTypes { token?: string ) => CollectionName.RundownBaselineAdLibActions [CorelibPubSub.ingestDataCache]: ( - selector: MongoQuery, + selector: MongoQuery, token?: string - ) => CollectionName.IngestDataCache + ) => CollectionName.NrcsIngestDataCache [CorelibPubSub.rundownPlaylists]: ( /** RundownPlaylistIds to fetch for, or null to fetch all */ rundownPlaylistIds: RundownPlaylistId[] | null, @@ -329,7 +329,7 @@ export type CorelibPubSubCollections = { [CollectionName.ExpectedPackages]: ExpectedPackageDBBase [CollectionName.ExpectedPackageWorkStatuses]: ExpectedPackageWorkStatus [CollectionName.ExternalMessageQueue]: ExternalMessageQueueObj - [CollectionName.IngestDataCache]: IngestDataCacheObj + [CollectionName.NrcsIngestDataCache]: NrcsIngestDataCacheObj [CollectionName.PartInstances]: DBPartInstance [CollectionName.PackageContainerStatuses]: PackageContainerStatusDB [CollectionName.PackageInfos]: PackageInfoDB diff --git a/packages/corelib/src/snapshots.ts b/packages/corelib/src/snapshots.ts index c75df55d03f..b56f89420c5 100644 --- a/packages/corelib/src/snapshots.ts +++ b/packages/corelib/src/snapshots.ts @@ -4,7 +4,7 @@ import { ExpectedMediaItem } from './dataModel/ExpectedMediaItem' import { ExpectedPackageDB } from './dataModel/ExpectedPackages' import { ExpectedPlayoutItem } from './dataModel/ExpectedPlayoutItem' import { RundownPlaylistId } from './dataModel/Ids' -import { IngestDataCacheObj } from './dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from './dataModel/NrcsIngestDataCache' import { DBPart } from './dataModel/Part' import { DBPartInstance } from './dataModel/PartInstance' import { Piece } from './dataModel/Piece' @@ -15,13 +15,15 @@ import { RundownBaselineAdLibItem } 
from './dataModel/RundownBaselineAdLibPiece' import { RundownBaselineObj } from './dataModel/RundownBaselineObj' import { DBRundownPlaylist } from './dataModel/RundownPlaylist' import { DBSegment } from './dataModel/Segment' +import { SofieIngestDataCacheObj } from './dataModel/SofieIngestDataCache' export interface CoreRundownPlaylistSnapshot { version: string playlistId: RundownPlaylistId playlist: DBRundownPlaylist rundowns: Array - ingestData: Array + ingestData: Array + sofieIngestData: Array | undefined // Added in 1.52 baselineObjs: Array baselineAdlibs: Array segments: Array diff --git a/packages/corelib/src/worker/ingest.ts b/packages/corelib/src/worker/ingest.ts index 4e5ba19e0d8..ffa7c3013b8 100644 --- a/packages/corelib/src/worker/ingest.ts +++ b/packages/corelib/src/worker/ingest.ts @@ -11,7 +11,13 @@ import { StudioId, } from '../dataModel/Ids' import type { MOS } from '@sofie-automation/shared-lib/dist/mos' -import { IngestAdlib, IngestPart, IngestRundown, IngestSegment } from '@sofie-automation/blueprints-integration' +import { + IngestAdlib, + IngestPart, + IngestRundown, + IngestSegment, + UserOperationTarget, +} from '@sofie-automation/blueprints-integration' import { BucketAdLibAction } from '../dataModel/BucketAdLibAction' import { RundownSource } from '../dataModel/Rundown' @@ -117,6 +123,11 @@ export enum IngestJobs { */ UserUnsyncRundown = 'userUnsyncRundown', + /** + * User executed a change operation + */ + UserExecuteChangeOperation = 'userExecuteChangeOperation', + // For now these are in this queue, but if this gets split up to be per rundown, then a single bucket queue will be needed BucketItemImport = 'bucketItemImport', BucketItemRegenerate = 'bucketItemRegenerate', @@ -234,6 +245,11 @@ export interface UserRemoveRundownProps extends UserRundownPropsBase { } export type UserUnsyncRundownProps = UserRundownPropsBase +export interface UserExecuteChangeOperationProps extends IngestPropsBase { + operationTarget: UserOperationTarget + 
operation: { id: string; [key: string]: any } +} + export interface BucketItemImportProps { bucketId: BucketId showStyleBaseId: ShowStyleBaseId @@ -275,7 +291,7 @@ export interface CreateAdlibTestingRundownForShowStyleVariantProps { */ export type IngestJobFunc = { [IngestJobs.RemoveRundown]: (data: IngestRemoveRundownProps) => void - [IngestJobs.UpdateRundown]: (data: IngestUpdateRundownProps) => RundownId + [IngestJobs.UpdateRundown]: (data: IngestUpdateRundownProps) => void [IngestJobs.UpdateRundownMetaData]: (data: IngestUpdateRundownMetaDataProps) => void [IngestJobs.RemoveSegment]: (data: IngestRemoveSegmentProps) => void [IngestJobs.UpdateSegment]: (data: IngestUpdateSegmentProps) => void @@ -302,6 +318,7 @@ export type IngestJobFunc = { [IngestJobs.UserRemoveRundown]: (data: UserRemoveRundownProps) => void [IngestJobs.UserUnsyncRundown]: (data: UserUnsyncRundownProps) => void + [IngestJobs.UserExecuteChangeOperation]: (data: UserExecuteChangeOperationProps) => void [IngestJobs.BucketItemImport]: (data: BucketItemImportProps) => void [IngestJobs.BucketItemRegenerate]: (data: BucketItemRegenerateProps) => void diff --git a/packages/job-worker/src/__mocks__/collection.ts b/packages/job-worker/src/__mocks__/collection.ts index 932e06386ec..4b2a71b25cd 100644 --- a/packages/job-worker/src/__mocks__/collection.ts +++ b/packages/job-worker/src/__mocks__/collection.ts @@ -6,7 +6,8 @@ import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLi import { CollectionName } from '@sofie-automation/corelib/dist/dataModel/Collections' import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' -import { IngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { SofieIngestDataCacheObj 
} from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' @@ -283,7 +284,8 @@ export function getMockCollections(): { BucketAdLibPieces: new MockMongoCollection(CollectionName.BucketAdLibPieces), ExpectedMediaItems: new MockMongoCollection(CollectionName.ExpectedMediaItems), ExpectedPlayoutItems: new MockMongoCollection(CollectionName.ExpectedPlayoutItems), - IngestDataCache: new MockMongoCollection(CollectionName.IngestDataCache), + SofieIngestDataCache: new MockMongoCollection(CollectionName.SofieIngestDataCache), + NrcsIngestDataCache: new MockMongoCollection(CollectionName.NrcsIngestDataCache), Parts: new MockMongoCollection(CollectionName.Parts), PartInstances: new MockMongoCollection(CollectionName.PartInstances), PeripheralDevices: new MockMongoCollection(CollectionName.PeripheralDevices), @@ -339,7 +341,8 @@ export interface IMockCollections { BucketAdLibPieces: MockMongoCollection ExpectedMediaItems: MockMongoCollection ExpectedPlayoutItems: MockMongoCollection - IngestDataCache: MockMongoCollection + SofieIngestDataCache: MockMongoCollection + NrcsIngestDataCache: MockMongoCollection Parts: MockMongoCollection PartInstances: MockMongoCollection PeripheralDevices: MockMongoCollection diff --git a/packages/job-worker/src/__mocks__/context.ts b/packages/job-worker/src/__mocks__/context.ts index 9e2751de4e5..9316d943aa5 100644 --- a/packages/job-worker/src/__mocks__/context.ts +++ b/packages/job-worker/src/__mocks__/context.ts @@ -312,7 +312,10 @@ const MockShowStyleBlueprint: () => ShowStyleBlueprintManifest = () => ({ getShowStyleVariantId: (_context, variants): string | null => { return variants[0]._id }, - getRundown: (_context: IShowStyleContext, ingestRundown: ExtendedIngestRundown): 
BlueprintResultRundown => { + getRundown: ( + _context: IShowStyleContext, + ingestRundown: ExtendedIngestRundown + ): BlueprintResultRundown => { const rundown: IBlueprintRundown = { externalId: ingestRundown.externalId, name: ingestRundown.name, @@ -338,7 +341,7 @@ const MockShowStyleBlueprint: () => ShowStyleBlueprintManifest = () => ({ baseline: { timelineObjects: [] }, } }, - getSegment: (_context: ISegmentUserContext, ingestSegment: IngestSegment): BlueprintResultSegment => { + getSegment: (_context: ISegmentUserContext, ingestSegment: IngestSegment): BlueprintResultSegment => { const segment: IBlueprintSegment = { name: ingestSegment.name ? ingestSegment.name : ingestSegment.externalId, privateData: ingestSegment.payload, diff --git a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts index 171a929dba7..5f13ba6285b 100644 --- a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts @@ -131,7 +131,6 @@ export function defaultSegment(_id: SegmentId, rundownId: RundownId): DBSegment externalId: unprotectString(_id), rundownId: rundownId, name: 'Default Segment', - externalModified: 1, } } diff --git a/packages/job-worker/src/__mocks__/helpers/snapshot.ts b/packages/job-worker/src/__mocks__/helpers/snapshot.ts index 09b0f0e27df..4b1400f5062 100644 --- a/packages/job-worker/src/__mocks__/helpers/snapshot.ts +++ b/packages/job-worker/src/__mocks__/helpers/snapshot.ts @@ -76,7 +76,6 @@ export function fixSnapshot(data: Data | Array, sortData?: boolean): Data // } else if (isPiece(o)) { // } else if (isPart(o)) { } else if (isSegment(o)) { - if (o.externalModified) o.externalModified = 0 // } else if (isPieceInstance(o)) { } return o diff --git a/packages/job-worker/src/__mocks__/presetCollections.ts b/packages/job-worker/src/__mocks__/presetCollections.ts index 68feee6b7ae..7c1cf9e9918 100644 --- 
a/packages/job-worker/src/__mocks__/presetCollections.ts +++ b/packages/job-worker/src/__mocks__/presetCollections.ts @@ -224,7 +224,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_0', rundownId: rundownId, name: 'Segment 0', - externalModified: 1, }) const part00: DBPart = { @@ -332,7 +331,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_2', rundownId: rundownId, name: 'Segment 1', - externalModified: 1, }) const part10: DBPart = { @@ -374,7 +372,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_2', rundownId: rundownId, name: 'Segment 2', - externalModified: 1, }) const globalAdLib0: RundownBaselineAdLibItem = { diff --git a/packages/job-worker/src/blueprints/context/ProcessIngestDataContext.ts b/packages/job-worker/src/blueprints/context/ProcessIngestDataContext.ts new file mode 100644 index 00000000000..dd30f957cb0 --- /dev/null +++ b/packages/job-worker/src/blueprints/context/ProcessIngestDataContext.ts @@ -0,0 +1,55 @@ +import type { + GroupPartsInMosRundownAndChangesResult, + IProcessIngestDataContext, + IngestDefaultChangesOptions, + IngestRundown, + IngestSegment, + MutableIngestRundown, + NrcsIngestChangeDetails, +} from '@sofie-automation/blueprints-integration' +import { StudioContext } from './StudioContext' +import { defaultApplyIngestChanges } from '../ingest/defaultApplyIngestChanges' +import { groupMosPartsIntoIngestSegments, groupPartsInRundownAndChanges } from '../ingest/groupPartsInRundownAndChanges' + +export class ProcessIngestDataContext extends StudioContext implements IProcessIngestDataContext { + defaultApplyIngestChanges( + mutableIngestRundown: MutableIngestRundown, + nrcsIngestRundown: IngestRundown, + ingestChanges: NrcsIngestChangeDetails, + options?: IngestDefaultChangesOptions + ): void { + defaultApplyIngestChanges(mutableIngestRundown, nrcsIngestRundown, ingestChanges, { + transformRundownPayload: (payload) => payload as TRundownPayload, + 
transformSegmentPayload: (payload) => payload as TSegmentPayload, + transformPartPayload: (payload) => payload as TPartPayload, + ...options, + }) + } + + groupMosPartsInRundownAndChangesWithSeparator( + ingestRundown: IngestRundown, + previousIngestRundown: IngestRundown | undefined, + ingestChanges: NrcsIngestChangeDetails, + partNameSeparator: string + ): GroupPartsInMosRundownAndChangesResult { + if (ingestRundown.type !== 'mos') throw new Error('Only supported for mos rundowns') + + return groupPartsInRundownAndChanges(ingestRundown, previousIngestRundown, ingestChanges, (segments) => + groupMosPartsIntoIngestSegments(ingestRundown.externalId, segments, partNameSeparator) + ) + } + + groupPartsInRundownAndChanges( + ingestRundown: IngestRundown, + previousIngestRundown: IngestRundown | undefined, + ingestChanges: NrcsIngestChangeDetails, + groupPartsIntoSegments: (ingestSegments: IngestSegment[]) => IngestSegment[] + ): GroupPartsInMosRundownAndChangesResult { + return groupPartsInRundownAndChanges( + ingestRundown, + previousIngestRundown, + ingestChanges, + groupPartsIntoSegments + ) + } +} diff --git a/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts b/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts index 6b4d4a137a0..1515ae71ecc 100644 --- a/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts +++ b/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts @@ -22,6 +22,7 @@ import { IBlueprintPieceObjectsSampleKeys, convertPieceInstanceToBlueprints, convertPartInstanceToBlueprints, + convertPartialBlueprintMutablePartToCore, } from './lib' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' @@ -175,7 +176,12 @@ export class SyncIngestUpdateToPartInstanceContext } } - if (!this.partInstance.updatePartProps(updatePart)) { + const 
playoutUpdatePart = convertPartialBlueprintMutablePartToCore( + updatePart, + this.showStyleCompound.blueprintId + ) + + if (!this.partInstance.updatePartProps(playoutUpdatePart)) { throw new Error(`Cannot update PartInstance. Some valid properties must be defined`) } diff --git a/packages/job-worker/src/blueprints/context/index.ts b/packages/job-worker/src/blueprints/context/index.ts index 647dc57b5c3..c13d880bc60 100644 --- a/packages/job-worker/src/blueprints/context/index.ts +++ b/packages/job-worker/src/blueprints/context/index.ts @@ -5,6 +5,7 @@ export * from './OnSetAsNextContext' export * from './OnTakeContext' export * from './OnTimelineGenerateContext' export * from './PartEventContext' +export * from './ProcessIngestDataContext' export * from './RundownContext' export * from './RundownDataChangedEventContext' export * from './RundownEventContext' diff --git a/packages/job-worker/src/blueprints/context/lib.ts b/packages/job-worker/src/blueprints/context/lib.ts index a3463616d55..a48630b6a88 100644 --- a/packages/job-worker/src/blueprints/context/lib.ts +++ b/packages/job-worker/src/blueprints/context/lib.ts @@ -9,9 +9,14 @@ import { ResolvedPieceInstance, } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { DBRundown, Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { + CoreUserEditingDefinition, + CoreUserEditingDefinitionAction, + CoreUserEditingDefinitionForm, +} from '@sofie-automation/corelib/dist/dataModel/UserEditingDefinitions' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' -import { clone, Complete, literal } from '@sofie-automation/corelib/dist/lib' -import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' +import { assertNever, clone, Complete, literal, omit } from '@sofie-automation/corelib/dist/lib' +import { unprotectString, unprotectStringArray } from '@sofie-automation/corelib/dist/protectedString' import { ReadonlyDeep } from 'type-fest' 
import { ExpectedPackage, @@ -44,6 +49,16 @@ import { } from '@sofie-automation/blueprints-integration' import { JobContext, ProcessedShowStyleBase, ProcessedShowStyleVariant } from '../../jobs' import { DBRundownPlaylist, QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import _ = require('underscore') +import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage' +import { + UserEditingDefinition, + UserEditingDefinitionAction, + UserEditingDefinitionForm, + UserEditingType, +} from '@sofie-automation/blueprints-integration/dist/userEditing' +import type { PlayoutMutatablePart } from '../../playout/model/PlayoutPartInstanceModel' /** * Convert an object to have all the values of all keys (including optionals) be 'true' @@ -80,6 +95,7 @@ export const IBlueprintPieceObjectsSampleKeys = allKeysOfObject allowDirectPlay: true, notInVision: true, abSessions: true, + userEditOperations: true, }) // Compile a list of the keys which are allowed to be set @@ -103,6 +119,7 @@ export const IBlueprintMutatablePartSampleKeys = allKeysOfObject pieceType: piece.pieceType, extendOnHold: piece.extendOnHold, notInVision: piece.notInVision, + userEditOperations: translateUserEditsToBlueprint(piece.userEditOperations), } return obj @@ -262,6 +280,7 @@ export function convertPartToBlueprints(part: ReadonlyDeep): IBlueprintP hackListenToMediaObjectUpdates: clone( part.hackListenToMediaObjectUpdates ), + userEditOperations: translateUserEditsToBlueprint(part.userEditOperations), } return obj @@ -329,6 +348,7 @@ export function convertSegmentToBlueprints(segment: ReadonlyDeep): IB displayAs: segment.displayAs, showShelf: segment.showShelf, segmentTiming: segment.segmentTiming, + userEditOperations: translateUserEditsToBlueprint(segment.userEditOperations), } return obj @@ -353,6 +373,7 @@ export function 
convertRundownToBlueprints(rundown: ReadonlyDeep): IB showStyleVariantId: unprotectString(rundown.showStyleVariantId), playlistId: unprotectString(rundown.playlistId), airStatus: rundown.airStatus, + userEditOperations: translateUserEditsToBlueprint(rundown.userEditOperations), } return obj @@ -476,3 +497,89 @@ export async function getMediaObjectDuration(context: JobContext, mediaId: strin return durations.length > 0 ? durations[0] : undefined } + +function translateUserEditsToBlueprint( + userEdits: ReadonlyDeep | undefined +): UserEditingDefinition[] | undefined { + if (!userEdits) return undefined + + return _.compact( + userEdits.map((userEdit) => { + switch (userEdit.type) { + case UserEditingType.ACTION: + return { + type: UserEditingType.ACTION, + id: userEdit.id, + label: omit(userEdit.label, 'namespaces'), + svgIcon: userEdit.svgIcon, + isActive: userEdit.isActive, + } satisfies Complete + case UserEditingType.FORM: + return { + type: UserEditingType.FORM, + id: userEdit.id, + label: omit(userEdit.label, 'namespaces'), + schema: clone(userEdit.schema), + currentValues: clone(userEdit.currentValues), + } satisfies Complete + default: + assertNever(userEdit) + return undefined + } + }) + ) +} + +export function translateUserEditsFromBlueprint( + userEdits: UserEditingDefinition[] | undefined, + blueprintIds: BlueprintId[] +): CoreUserEditingDefinition[] | undefined { + if (!userEdits) return undefined + + return _.compact( + userEdits.map((userEdit) => { + switch (userEdit.type) { + case UserEditingType.ACTION: + return { + type: UserEditingType.ACTION, + id: userEdit.id, + label: wrapTranslatableMessageFromBlueprints(userEdit.label, blueprintIds), + svgIcon: userEdit.svgIcon, + isActive: userEdit.isActive, + } satisfies Complete + case UserEditingType.FORM: + return { + type: UserEditingType.FORM, + id: userEdit.id, + label: wrapTranslatableMessageFromBlueprints(userEdit.label, blueprintIds), + schema: clone(userEdit.schema), + currentValues: 
clone(userEdit.currentValues), + translationNamespaces: unprotectStringArray(blueprintIds), + } satisfies Complete + default: + assertNever(userEdit) + return undefined + } + }) + ) +} + +export function convertPartialBlueprintMutablePartToCore( + updatePart: Partial, + blueprintId: BlueprintId +): Partial { + const playoutUpdatePart: Partial = { + ...updatePart, + userEditOperations: undefined, + } + + if ('userEditOperations' in updatePart) { + playoutUpdatePart.userEditOperations = translateUserEditsFromBlueprint(updatePart.userEditOperations, [ + blueprintId, + ]) + } else { + delete playoutUpdatePart.userEditOperations + } + + return playoutUpdatePart +} diff --git a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts index 095fdaa114a..45d96f383c4 100644 --- a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts +++ b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts @@ -18,6 +18,7 @@ import { IBlueprintPieceObjectsSampleKeys, convertPartInstanceToBlueprints, convertPartToBlueprints, + convertPartialBlueprintMutablePartToCore, convertPieceInstanceToBlueprints, convertPieceToBlueprints, convertResolvedPieceInstanceToBlueprints, @@ -338,7 +339,9 @@ export class PartAndPieceInstanceActionService { throw new Error('PartInstance could not be found') } - if (!partInstance.updatePartProps(props)) { + const playoutUpdatePart = convertPartialBlueprintMutablePartToCore(props, this.showStyleCompound.blueprintId) + + if (!partInstance.updatePartProps(playoutUpdatePart)) { throw new Error('Some valid properties must be defined') } @@ -384,6 +387,7 @@ export class PartAndPieceInstanceActionService { invalidReason: undefined, floated: false, expectedDurationWithTransition: undefined, // Filled in later + userEditOperations: [], // Adlibbed parts can't be edited by ingest } 
const pieces = postProcessPieces( diff --git a/packages/job-worker/src/blueprints/ingest/MutableIngestPartImpl.ts b/packages/job-worker/src/blueprints/ingest/MutableIngestPartImpl.ts new file mode 100644 index 00000000000..e35c397f94a --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/MutableIngestPartImpl.ts @@ -0,0 +1,79 @@ +import type { SofieIngestPart, MutableIngestPart } from '@sofie-automation/blueprints-integration' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { ReadonlyDeep } from 'type-fest' +import _ = require('underscore') + +export class MutableIngestPartImpl implements MutableIngestPart { + readonly #ingestPart: Omit, 'rank'> + #hasChanges = false + + constructor(ingestPart: Omit, 'rank'>, hasChanges = false) { + this.#ingestPart = ingestPart + this.#hasChanges = hasChanges + } + + get externalId(): string { + return this.#ingestPart.externalId + } + + get name(): string { + return this.#ingestPart.name + } + + get payload(): ReadonlyDeep | undefined { + return this.#ingestPart.payload as ReadonlyDeep + } + + get userEditStates(): Record { + return this.#ingestPart.userEditStates ?? 
{} + } + + setName(name: string): void { + if (this.#ingestPart.name !== name) { + this.#ingestPart.name = name + this.#hasChanges = true + } + } + + replacePayload(payload: ReadonlyDeep | TPartPayload): void { + if (this.#hasChanges || !_.isEqual(this.#ingestPart.payload, payload)) { + this.#ingestPart.payload = clone(payload) + this.#hasChanges = true + } + } + + setPayloadProperty( + key: TKey, + value: ReadonlyDeep | TPartPayload[TKey] + ): void { + if (!this.#ingestPart.payload) { + throw new Error('Part payload is not set') + } + + if (this.#hasChanges || !_.isEqual(this.#ingestPart.payload[key], value)) { + // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion + ;(this.#ingestPart.payload as any)[key] = clone(value) + this.#hasChanges = true + } + } + + setUserEditState(key: string, value: boolean): void { + if (!this.#ingestPart.userEditStates) this.#ingestPart.userEditStates = {} + if (this.#hasChanges || this.#ingestPart.userEditStates[key] !== value) { + this.#ingestPart.userEditStates[key] = value + this.#hasChanges = true + } + } + + /** + * Check if the part has changes and clear any changes flags + * Note: this is not visible to blueprints + */ + checkAndClearChangesFlags(): boolean { + const hasChanges = this.#hasChanges + + this.#hasChanges = false + + return hasChanges + } +} diff --git a/packages/job-worker/src/blueprints/ingest/MutableIngestRundownImpl.ts b/packages/job-worker/src/blueprints/ingest/MutableIngestRundownImpl.ts new file mode 100644 index 00000000000..993294c270a --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/MutableIngestRundownImpl.ts @@ -0,0 +1,406 @@ +import type { + MutableIngestRundown, + MutableIngestSegment, + MutableIngestPart, + IngestSegment, + SofieIngestSegment, +} from '@sofie-automation/blueprints-integration' +import { Complete, clone, omit } from '@sofie-automation/corelib/dist/lib' +import { ReadonlyDeep } from 'type-fest' +import _ = require('underscore') +import { 
MutableIngestSegmentImpl } from './MutableIngestSegmentImpl' +import { SofieIngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { SofieIngestRundownDataCacheGenerator } from '../../ingest/sofieIngestCache' +import { + SofieIngestDataCacheObj, + SofieIngestRundownWithSource, +} from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' +import type { ComputedIngestChangeObject } from '../../ingest/runOperation' +import { RundownSource } from '@sofie-automation/corelib/dist/dataModel/Rundown' + +export interface MutableIngestRundownChanges { + // define what needs regenerating + computedChanges: ComputedIngestChangeObject + + // define what portions of the ingestRundown need saving + changedCacheObjects: SofieIngestDataCacheObj[] + allCacheObjectIds: SofieIngestDataCacheObjId[] +} + +export class MutableIngestRundownImpl + implements MutableIngestRundown +{ + readonly ingestRundown: Omit< + SofieIngestRundownWithSource, + 'segments' + > + #hasChangesToRundown = false + // #segmentOrderChanged = false + + readonly #segments: MutableIngestSegmentImpl[] + + readonly #originalSegmentRanks = new Map() + + constructor( + ingestRundown: SofieIngestRundownWithSource, + isExistingRundown: boolean + ) { + this.ingestRundown = omit(ingestRundown, 'segments') + this.#segments = ingestRundown.segments + .slice() // shallow copy + .sort((a, b) => a.rank - b.rank) + .map((segment) => new MutableIngestSegmentImpl(segment, !isExistingRundown)) + this.#hasChangesToRundown = !isExistingRundown + + for (const segment of ingestRundown.segments) { + this.#originalSegmentRanks.set(segment.externalId, segment.rank) + } + } + + get segments(): MutableIngestSegmentImpl[] { + return this.#segments.slice() // shallow copy + } + + get externalId(): string { + return this.ingestRundown.externalId + } + + get type(): string { + return this.ingestRundown.type + } + + get name(): string { + return this.ingestRundown.name + } + + get payload(): ReadonlyDeep | 
undefined { + return this.ingestRundown.payload as ReadonlyDeep + } + + get userEditStates(): Record { + return this.ingestRundown.userEditStates ?? {} + } + + /** + * Internal method to propogate the rundown source + */ + updateRundownSource(source: RundownSource): void { + if (!_.isEqual(source, this.ingestRundown.rundownSource)) { + this.ingestRundown.rundownSource = source + this.#hasChangesToRundown = true + } + } + + setName(name: string): void { + if (this.ingestRundown.name !== name) { + this.ingestRundown.name = name + this.#hasChangesToRundown = true + } + } + + forceFullRegenerate(): void { + this.#hasChangesToRundown = true + } + + replacePayload(payload: ReadonlyDeep | TRundownPayload): void { + if (this.#hasChangesToRundown || !_.isEqual(this.ingestRundown.payload, payload)) { + this.ingestRundown.payload = clone(payload) + this.#hasChangesToRundown = true + } + } + + setPayloadProperty( + key: TKey, + value: ReadonlyDeep | TRundownPayload[TKey] + ): void { + if (!this.ingestRundown.payload) { + throw new Error('Rundown payload is not set') + } + + if (this.#hasChangesToRundown || !_.isEqual(this.ingestRundown.payload[key], value)) { + // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion + ;(this.ingestRundown.payload as any)[key] = clone(value) + this.#hasChangesToRundown = true + } + } + + findPart(partExternalId: string): MutableIngestPart | undefined { + for (const segment of this.#segments) { + const part = segment.getPart(partExternalId) + if (part) return part + } + + return undefined + } + + findPartAndSegment(partExternalId: string): + | { + part: MutableIngestPart + segment: MutableIngestSegment + } + | undefined { + for (const segment of this.#segments) { + const part = segment.getPart(partExternalId) + if (part) return { part, segment } + } + return undefined + } + + getSegment(segmentExternalId: string): MutableIngestSegment | undefined { + return this.#segments.find((s) => s.externalId === segmentExternalId) + } + 
+ moveSegmentBefore(segmentExternalId: string, beforeSegmentExternalId: string | null): void { + if (segmentExternalId === beforeSegmentExternalId) throw new Error('Cannot move Segment before itself') + + const segment = this.#segments.find((s) => s.externalId === segmentExternalId) + if (!segment) throw new Error(`Segment "${segmentExternalId}" not found`) + + this.#removeSegment(segmentExternalId) + + if (beforeSegmentExternalId) { + const beforeIndex = this.#segments.findIndex((s) => s.externalId === beforeSegmentExternalId) + if (beforeIndex === -1) throw new Error(`Segment "${beforeSegmentExternalId}" not found`) + + this.#segments.splice(beforeIndex, 0, segment) + } else { + this.#segments.push(segment) + } + + // this.#segmentOrderChanged = true + } + + moveSegmentAfter(segmentExternalId: string, afterSegmentExternalId: string | null): void { + if (segmentExternalId === afterSegmentExternalId) throw new Error('Cannot move Segment after itself') + + const segment = this.#segments.find((s) => s.externalId === segmentExternalId) + if (!segment) throw new Error(`Segment "${segmentExternalId}" not found`) + + this.#removeSegment(segmentExternalId) + + if (afterSegmentExternalId) { + const beforeIndex = this.#segments.findIndex((s) => s.externalId === afterSegmentExternalId) + if (beforeIndex === -1) throw new Error(`Segment "${afterSegmentExternalId}" not found`) + + this.#segments.splice(beforeIndex + 1, 0, segment) + } else { + this.#segments.unshift(segment) + } + + // this.#segmentOrderChanged = true + } + + replaceSegment( + segment: Omit, 'rank'>, + beforeSegmentExternalId: string | null + ): MutableIngestSegment { + if (segment.externalId === beforeSegmentExternalId) throw new Error('Cannot insert Segment before itself') + + const newSegment = new MutableIngestSegmentImpl( + { ...segment, userEditStates: {}, parts: segment.parts.map((p) => ({ ...p, userEditStates: {} })) }, + true + ) + + const oldSegment = this.#segments.find((s) => s.externalId === 
segment.externalId) + if (oldSegment?.originalExternalId) { + newSegment.setOriginalExternalId(oldSegment.originalExternalId) + } + + this.#removeSegment(segment.externalId) + + if (beforeSegmentExternalId) { + const beforeIndex = this.#segments.findIndex((s) => s.externalId === beforeSegmentExternalId) + if (beforeIndex === -1) throw new Error(`Segment "${beforeSegmentExternalId}" not found`) + + this.#segments.splice(beforeIndex, 0, newSegment) + } else { + this.#segments.push(newSegment) + } + + // this.#segmentOrderChanged = true + + return newSegment + } + + changeSegmentExternalId( + oldSegmentExternalId: string, + newSegmentExternalId: string + ): MutableIngestSegment { + const segment = this.#segments.find((s) => s.externalId === oldSegmentExternalId) + if (!segment) throw new Error(`Segment "${oldSegmentExternalId}" not found`) + + const targetSegment = this.#segments.find((s) => s.externalId === newSegmentExternalId) + if (targetSegment) throw new Error(`Segment "${newSegmentExternalId}" already exists`) + + segment.setExternalId(newSegmentExternalId) + + return segment + } + + changeSegmentOriginalExternalId( + segmentExternalId: string, + originalSegmentExternalId: string + ): MutableIngestSegment { + const segment = this.#segments.find((s) => s.externalId === segmentExternalId) + if (!segment) throw new Error(`Segment "${segmentExternalId}" not found`) + + const targetSegment = this.#segments.find((s) => s.externalId === originalSegmentExternalId) + if (targetSegment) throw new Error(`Segment "${originalSegmentExternalId}" exists`) + + segment.setOriginalExternalId(originalSegmentExternalId) + + return segment + } + + /** + * Remove a segment + * Note: this is separate from the removeSegment method to allow for internal use when methods are overridden in tests + */ + #removeSegment(segmentExternalId: string): boolean { + const existingIndex = this.#segments.findIndex((s) => s.externalId === segmentExternalId) + if (existingIndex !== -1) { + 
this.#segments.splice(existingIndex, 1) + + // this.#segmentOrderChanged = true + + return true + } else { + return false + } + } + + removeSegment(segmentExternalId: string): boolean { + return this.#removeSegment(segmentExternalId) + } + + removeAllSegments(): void { + this.#segments.length = 0 + + // this.#segmentOrderChanged = true + } + + setUserEditState(key: string, value: boolean): void { + if (!this.ingestRundown.userEditStates) this.ingestRundown.userEditStates = {} + if (this.#hasChangesToRundown || this.ingestRundown.userEditStates[key] !== value) { + this.ingestRundown.userEditStates[key] = value + this.#hasChangesToRundown = true + } + } + + /** Note: This is NOT exposed to blueprints */ + intoIngestRundown(ingestObjectGenerator: SofieIngestRundownDataCacheGenerator): MutableIngestRundownChanges { + const ingestSegments: SofieIngestSegment[] = [] + const changedCacheObjects: SofieIngestDataCacheObj[] = [] + const allCacheObjectIds: SofieIngestDataCacheObjId[] = [] + + const segmentsToRegenerate: SofieIngestSegment[] = [] + const segmentExternalIdChanges: Record = {} + const segmentsUpdatedRanks: Record = {} + + const usedSegmentIds = new Set() + const usedPartIds = new Set() + + this.#segments.forEach((segment, rank) => { + if (usedSegmentIds.has(segment.externalId)) { + throw new Error(`Segment "${segment.externalId}" is used more than once`) + } + usedSegmentIds.add(segment.externalId) + + const segmentInfo = segment.intoChangesInfo(ingestObjectGenerator) + + for (const part of segmentInfo.ingestParts) { + if (usedPartIds.has(part.externalId)) { + throw new Error(`Part "${part.externalId}" is used more than once`) + } + usedPartIds.add(part.externalId) + } + + const ingestSegment: Complete = { + externalId: segment.externalId, + rank, + name: segment.name, + payload: segment.payload, + parts: segmentInfo.ingestParts, + userEditStates: { ...segment.userEditStates }, + } + + ingestSegments.push(ingestSegment) + 
allCacheObjectIds.push(ingestObjectGenerator.getSegmentObjectId(ingestSegment.externalId)) + + changedCacheObjects.push(...segmentInfo.changedCacheObjects) + allCacheObjectIds.push(...segmentInfo.allCacheObjectIds) + + // Check for any changes to the rank + const oldRank = + (segment.originalExternalId ? this.#originalSegmentRanks.get(segment.originalExternalId) : null) ?? + this.#originalSegmentRanks.get(segment.externalId) + const rankChanged = ingestSegment.rank !== oldRank + if (rankChanged) { + segmentsUpdatedRanks[segment.externalId] = ingestSegment.rank + } + + // Check for any changes to the externalId + const externalIdChanged = segmentInfo.originalExternalId !== segment.externalId + if (externalIdChanged) { + segmentExternalIdChanges[segmentInfo.originalExternalId] = segment.externalId + } + + // Update ingest cache if there are changes + if (segmentInfo.segmentHasChanges || rankChanged || externalIdChanged) { + changedCacheObjects.push(ingestObjectGenerator.generateSegmentObject(ingestSegment)) + } + + // Regenerate the segment if there are substantial changes + if ( + segmentInfo.segmentHasChanges || + segmentInfo.partOrderHasChanged || + segmentInfo.partIdsWithChanges.length > 0 + ) { + segmentsToRegenerate.push(ingestSegment) + } + }) + + // Find any removed segments + const newSegmentIds = new Set(ingestSegments.map((s) => s.externalId)) + const removedSegmentIds = Array.from(this.#originalSegmentRanks.keys()).filter( + (id) => !newSegmentIds.has(id) && !segmentExternalIdChanges[id] + ) + + // Check if this rundown object has changed + if (this.#hasChangesToRundown) { + changedCacheObjects.push(ingestObjectGenerator.generateRundownObject(this.ingestRundown)) + } + allCacheObjectIds.push(ingestObjectGenerator.getRundownObjectId()) + + const regenerateRundown = this.#hasChangesToRundown + + this.#hasChangesToRundown = false + // this.#segmentOrderChanged = false + + // Reset this.#originalSegmentRanks + this.#originalSegmentRanks.clear() + 
this.#segments.forEach((segment, rank) => { + this.#originalSegmentRanks.set(segment.externalId, rank) + }) + + const result: MutableIngestRundownChanges = { + computedChanges: { + ingestRundown: { + ...this.ingestRundown, + segments: ingestSegments, + }, + + segmentsToRemove: removedSegmentIds, + segmentsUpdatedRanks, + segmentsToRegenerate, + regenerateRundown, + segmentExternalIdChanges: segmentExternalIdChanges, + }, + + changedCacheObjects, + allCacheObjectIds, + } + + return result + } +} diff --git a/packages/job-worker/src/blueprints/ingest/MutableIngestSegmentImpl.ts b/packages/job-worker/src/blueprints/ingest/MutableIngestSegmentImpl.ts new file mode 100644 index 00000000000..a11561848ad --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/MutableIngestSegmentImpl.ts @@ -0,0 +1,262 @@ +import type { + IngestPart, + MutableIngestPart, + MutableIngestSegment, + SofieIngestPart, + SofieIngestSegment, +} from '@sofie-automation/blueprints-integration' +import { Complete, clone, omit } from '@sofie-automation/corelib/dist/lib' +import { ReadonlyDeep } from 'type-fest' +import _ = require('underscore') +import { MutableIngestPartImpl } from './MutableIngestPartImpl' +import { SofieIngestRundownDataCacheGenerator } from '../../ingest/sofieIngestCache' +import { getSegmentId } from '../../ingest/lib' +import { SofieIngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { SofieIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' + +export interface MutableIngestSegmentChanges { + ingestParts: SofieIngestPart[] + changedCacheObjects: SofieIngestDataCacheObj[] + allCacheObjectIds: SofieIngestDataCacheObjId[] + segmentHasChanges: boolean + partIdsWithChanges: string[] + partOrderHasChanged: boolean + originalExternalId: string +} + +export class MutableIngestSegmentImpl + implements MutableIngestSegment +{ + readonly #ingestSegment: Omit, 'rank' | 'parts'> + #originalExternalId: string + 
#segmentHasChanges = false + #partOrderHasChanged = false + + readonly #parts: MutableIngestPartImpl[] + + get originalExternalId(): string | undefined { + if (this.#originalExternalId !== this.externalId) { + return this.#originalExternalId + } else { + return undefined + } + } + + constructor(ingestSegment: Omit, 'rank'>, hasChanges = false) { + this.#originalExternalId = ingestSegment.externalId + this.#ingestSegment = omit(ingestSegment, 'parts') + this.#parts = ingestSegment.parts + .slice() // shallow copy + .sort((a, b) => a.rank - b.rank) + .map((part) => new MutableIngestPartImpl(part, hasChanges)) + this.#segmentHasChanges = hasChanges + } + + get parts(): MutableIngestPart[] { + return this.#parts.slice() // shallow copy + } + + get externalId(): string { + return this.#ingestSegment.externalId + } + + get name(): string { + return this.#ingestSegment.name + } + + get payload(): ReadonlyDeep | undefined { + return this.#ingestSegment.payload as ReadonlyDeep + } + + get userEditStates(): Record { + return this.#ingestSegment.userEditStates ?? 
{} + } + + getPart(partExternalId: string): MutableIngestPart | undefined { + return this.#parts.find((part) => part.externalId === partExternalId) + } + + movePartBefore(partExternalId: string, beforePartExternalId: string | null): void { + if (partExternalId === beforePartExternalId) throw new Error('Cannot move Part before itself') + + const part = this.#parts.find((p) => p.externalId === partExternalId) + if (!part) throw new Error(`Part "${partExternalId}" not found`) + + this.#removePart(partExternalId) + + if (beforePartExternalId) { + const beforeIndex = this.#parts.findIndex((p) => p.externalId === beforePartExternalId) + if (beforeIndex === -1) throw new Error(`Part "${beforePartExternalId}" not found`) + + this.#parts.splice(beforeIndex, 0, part) + } else { + this.#parts.push(part) + } + + this.#partOrderHasChanged = true + } + + movePartAfter(partExternalId: string, afterPartExternalId: string | null): void { + if (partExternalId === afterPartExternalId) throw new Error('Cannot move Part after itself') + + const part = this.#parts.find((p) => p.externalId === partExternalId) + if (!part) throw new Error(`Part "${partExternalId}" not found`) + + this.#removePart(partExternalId) + + if (afterPartExternalId) { + const beforeIndex = this.#parts.findIndex((p) => p.externalId === afterPartExternalId) + if (beforeIndex === -1) throw new Error(`Part "${afterPartExternalId}" not found`) + + this.#parts.splice(beforeIndex + 1, 0, part) + } else { + this.#parts.unshift(part) + } + + this.#partOrderHasChanged = true + } + + replacePart( + ingestPart: Omit, 'rank'>, + beforePartExternalId: string | null + ): MutableIngestPart { + if (ingestPart.externalId === beforePartExternalId) throw new Error('Cannot insert Part before itself') + + this.#removePart(ingestPart.externalId) + + const newPart = new MutableIngestPartImpl({ ...ingestPart, userEditStates: {} }, true) + + if (beforePartExternalId) { + const beforeIndex = this.#parts.findIndex((s) => s.externalId === 
beforePartExternalId) + if (beforeIndex === -1) throw new Error(`Part "${beforePartExternalId}" not found`) + + this.#parts.splice(beforeIndex, 0, newPart) + } else { + this.#parts.push(newPart) + } + + this.#partOrderHasChanged = true + + return newPart + } + + /** + * Remove a part + * Note: this is separate from the removePart method to allow for internal use when methods are overridden in tests + */ + #removePart(partExternalId: string): boolean { + const index = this.#parts.findIndex((part) => part.externalId === partExternalId) + if (index === -1) { + return false + } + + this.#parts.splice(index, 1) + this.#partOrderHasChanged = true + + return true + } + + removePart(partExternalId: string): boolean { + return this.#removePart(partExternalId) + } + + forceRegenerate(): void { + this.#segmentHasChanges = true + } + + /** + * Note: This is not exposed to blueprints + */ + setExternalId(newSegmentExternalId: string): void { + this.#ingestSegment.externalId = newSegmentExternalId + } + /** + * Note: This is not exposed to blueprints + */ + setOriginalExternalId(oldSegmentExternalId: string): void { + this.#originalExternalId = oldSegmentExternalId + } + + setName(name: string): void { + if (this.#ingestSegment.name !== name) { + this.#ingestSegment.name = name + this.#segmentHasChanges = true + } + } + + replacePayload(payload: ReadonlyDeep | TSegmentPayload): void { + if (this.#segmentHasChanges || !_.isEqual(this.#ingestSegment.payload, payload)) { + this.#ingestSegment.payload = clone(payload) + this.#segmentHasChanges = true + } + } + + setPayloadProperty( + key: TKey, + value: ReadonlyDeep | TSegmentPayload[TKey] + ): void { + if (!this.#ingestSegment.payload) { + throw new Error('Segment payload is not set') + } + + if (this.#segmentHasChanges || !_.isEqual(this.#ingestSegment.payload[key], value)) { + // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion + ;(this.#ingestSegment.payload as any)[key] = clone(value) + 
this.#segmentHasChanges = true + } + } + + setUserEditState(key: string, value: boolean): void { + if (!this.#ingestSegment.userEditStates) this.#ingestSegment.userEditStates = {} + if (this.#segmentHasChanges || this.#ingestSegment.userEditStates[key] !== value) { + this.#ingestSegment.userEditStates[key] = value + this.#segmentHasChanges = true + } + } + + intoChangesInfo(generator: SofieIngestRundownDataCacheGenerator): MutableIngestSegmentChanges { + const ingestParts: SofieIngestPart[] = [] + const changedCacheObjects: SofieIngestDataCacheObj[] = [] + const allCacheObjectIds: SofieIngestDataCacheObjId[] = [] + const partIdsWithChanges: string[] = [] + + const segmentId = getSegmentId(generator.rundownId, this.#ingestSegment.externalId) + + this.#parts.forEach((part, rank) => { + const ingestPart: Complete = { + externalId: part.externalId, + rank, + name: part.name, + payload: part.payload, + userEditStates: part.userEditStates, + } + + allCacheObjectIds.push(generator.getPartObjectId(ingestPart.externalId)) + ingestParts.push(ingestPart) + + if (part.checkAndClearChangesFlags()) { + changedCacheObjects.push(generator.generatePartObject(segmentId, ingestPart)) + partIdsWithChanges.push(ingestPart.externalId) + } + }) + + const segmentHasChanges = this.#segmentHasChanges + const partOrderHasChanged = this.#partOrderHasChanged + const originalExternalId = this.#originalExternalId + + // clear flags + this.#segmentHasChanges = false + this.#partOrderHasChanged = false + this.#originalExternalId = this.#ingestSegment.externalId + + return { + ingestParts, + changedCacheObjects, + allCacheObjectIds, + segmentHasChanges, + partIdsWithChanges, + partOrderHasChanged, + originalExternalId, + } + } +} diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestPartImpl.spec.ts b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestPartImpl.spec.ts new file mode 100644 index 00000000000..e36cac887b4 --- /dev/null +++ 
b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestPartImpl.spec.ts @@ -0,0 +1,162 @@ +import { SofieIngestPart } from '@sofie-automation/blueprints-integration' +import { MutableIngestPartImpl } from '../MutableIngestPartImpl' +import { clone } from '@sofie-automation/corelib/dist/lib' + +describe('MutableIngestPartImpl', () => { + function getBasicIngestPart(): SofieIngestPart { + return { + externalId: 'externalId', + name: 'name', + rank: 0, + payload: { + val: 'some-val', + second: 5, + }, + userEditStates: { + one: true, + two: false, + }, + } + } + + test('create basic', () => { + const ingestPart = getBasicIngestPart() + const mutablePart = new MutableIngestPartImpl(clone(ingestPart)) + + // compare properties + expect(mutablePart.externalId).toBe(ingestPart.externalId) + expect(mutablePart.name).toBe(ingestPart.name) + expect(mutablePart.payload).toEqual(ingestPart.payload) + + // check it has no changes + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + }) + + test('create basic with changes', () => { + const ingestPart = getBasicIngestPart() + const mutablePart = new MutableIngestPartImpl(clone(ingestPart), true) + + // compare properties + expect(mutablePart.externalId).toBe(ingestPart.externalId) + expect(mutablePart.name).toBe(ingestPart.name) + expect(mutablePart.payload).toEqual(ingestPart.payload) + + // check it has changes + expect(mutablePart.checkAndClearChangesFlags()).toBe(true) + + // check flag has been cleared + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + }) + + test('set name', () => { + const ingestPart = getBasicIngestPart() + const mutablePart = new MutableIngestPartImpl(clone(ingestPart)) + + // compare properties + expect(mutablePart.name).toBe(ingestPart.name) + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + + mutablePart.setName('new-name') + expect(mutablePart.name).toBe('new-name') + + // check it has changes + expect(mutablePart.checkAndClearChangesFlags()).toBe(true) 
+ }) + + test('replace payload with change', () => { + const ingestPart = getBasicIngestPart() + const mutablePart = new MutableIngestPartImpl(clone(ingestPart)) + + // compare properties + expect(mutablePart.payload).toEqual(ingestPart.payload) + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + + const newPayload = { val: 'new-val' } + mutablePart.replacePayload(newPayload) + expect(mutablePart.payload).toEqual(newPayload) + + // check it has changes + expect(mutablePart.checkAndClearChangesFlags()).toBe(true) + }) + + test('replace payload with no change', () => { + const ingestPart = getBasicIngestPart() + const mutablePart = new MutableIngestPartImpl(clone(ingestPart)) + + // compare properties + expect(mutablePart.payload).toEqual(ingestPart.payload) + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + + mutablePart.replacePayload(ingestPart.payload) + expect(mutablePart.payload).toEqual(ingestPart.payload) + + // check it has no changes + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + }) + + test('set payload property change', () => { + const ingestPart = getBasicIngestPart() + const mutablePart = new MutableIngestPartImpl(clone(ingestPart)) + + // compare properties + expect(mutablePart.payload).toEqual(ingestPart.payload) + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + + const newPayload = { ...ingestPart.payload, test: 123, second: undefined } + mutablePart.setPayloadProperty('test', 123) + mutablePart.setPayloadProperty('second', undefined) + expect(mutablePart.payload).toEqual(newPayload) + + // check it has changes + expect(mutablePart.checkAndClearChangesFlags()).toBe(true) + }) + + test('set payload property unchanged', () => { + const ingestPart = getBasicIngestPart() + const mutablePart = new MutableIngestPartImpl(clone(ingestPart)) + + // compare properties + expect(mutablePart.payload).toEqual(ingestPart.payload) + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + + 
mutablePart.setPayloadProperty('val', ingestPart.payload.val) + mutablePart.setPayloadProperty('another', undefined) + expect(mutablePart.payload).toEqual(ingestPart.payload) + + // check it has changes + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + }) + + test('set user edit state change', () => { + const ingestPart = getBasicIngestPart() + const mutablePart = new MutableIngestPartImpl(clone(ingestPart)) + + // compare properties + expect(mutablePart.userEditStates).toEqual(ingestPart.userEditStates) + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + + const newUserEditStates = { ...ingestPart.userEditStates, two: true, another: false } + mutablePart.setUserEditState('two', true) + mutablePart.setUserEditState('another', false) + expect(mutablePart.userEditStates).toEqual(newUserEditStates) + + // check it has changes + expect(mutablePart.checkAndClearChangesFlags()).toBe(true) + }) + + test('set user edit state unchanged', () => { + const ingestPart = getBasicIngestPart() + const mutablePart = new MutableIngestPartImpl(clone(ingestPart)) + + // compare properties + expect(mutablePart.userEditStates).toEqual(ingestPart.userEditStates) + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + + mutablePart.setUserEditState('one', true) + mutablePart.setUserEditState('two', false) + expect(mutablePart.userEditStates).toEqual(ingestPart.userEditStates) + + // check it has changes + expect(mutablePart.checkAndClearChangesFlags()).toBe(false) + }) +}) diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestRundownImpl.spec.ts b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestRundownImpl.spec.ts new file mode 100644 index 00000000000..a8030891b1f --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestRundownImpl.spec.ts @@ -0,0 +1,901 @@ +import { clone } from '@sofie-automation/corelib/dist/lib' +import { MutableIngestRundownChanges, MutableIngestRundownImpl } from 
'../MutableIngestRundownImpl' +import { SofieIngestRundownDataCacheGenerator } from '../../../ingest/sofieIngestCache' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { getSegmentId } from '../../../ingest/lib' +import { SofieIngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { MutableIngestSegmentImpl } from '../MutableIngestSegmentImpl' +import { IngestRundown, IngestSegment, SofieIngestSegment } from '@sofie-automation/blueprints-integration' +import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' + +describe('MutableIngestRundownImpl', () => { + function getBasicIngestRundown(): SofieIngestRundownWithSource { + return { + externalId: 'rundown0', + type: 'mock', + name: 'rundown-name', + payload: { + val: 'some-val', + second: 5, + }, + userEditStates: { + one: true, + two: false, + }, + rundownSource: { type: 'http' }, + segments: [ + { + externalId: 'seg0', + name: 'name', + rank: 0, + payload: { + val: 'first-val', + second: 5, + }, + userEditStates: {}, + parts: [ + { + externalId: 'part0', + name: 'my first part', + rank: 0, + payload: { + val: 'some-val', + }, + userEditStates: {}, + }, + ], + }, + { + externalId: 'seg1', + name: 'name 2', + rank: 1, + payload: { + val: 'next-val', + }, + userEditStates: {}, + parts: [ + { + externalId: 'part1', + name: 'my second part', + rank: 0, + payload: { + val: 'some-val', + }, + userEditStates: {}, + }, + ], + }, + { + externalId: 'seg2', + name: 'name 3', + rank: 2, + payload: { + val: 'last-val', + }, + userEditStates: {}, + parts: [ + { + externalId: 'part2', + name: 'my third part', + rank: 0, + payload: { + val: 'some-val', + }, + userEditStates: {}, + }, + ], + }, + ], + } + } + + const ingestObjectGenerator = new SofieIngestRundownDataCacheGenerator(protectString('rundownId')) + + function createNoChangesObject(ingestRundown: SofieIngestRundownWithSource): MutableIngestRundownChanges 
{ + const allCacheObjectIds: SofieIngestDataCacheObjId[] = [] + for (const segment of ingestRundown.segments) { + allCacheObjectIds.push(ingestObjectGenerator.getSegmentObjectId(segment.externalId)) + for (const part of segment.parts) { + allCacheObjectIds.push(ingestObjectGenerator.getPartObjectId(part.externalId)) + } + } + allCacheObjectIds.push(ingestObjectGenerator.getRundownObjectId()) + + return { + computedChanges: { + ingestRundown, + + segmentsToRemove: [], + segmentsUpdatedRanks: {}, + segmentsToRegenerate: [], + regenerateRundown: false, + + segmentExternalIdChanges: {}, + }, + changedCacheObjects: [], + allCacheObjectIds: allCacheObjectIds, + } + } + + function addChangedSegments( + changes: MutableIngestRundownChanges, + _ingestRundown: IngestRundown, + ...ingestSegments: SofieIngestSegment[] + ): void { + for (const ingestSegment of ingestSegments) { + const segmentId = getSegmentId(ingestObjectGenerator.rundownId, ingestSegment.externalId) + + changes.computedChanges.segmentsToRegenerate.push(ingestSegment) + + for (const part of ingestSegment.parts) { + changes.changedCacheObjects.push(ingestObjectGenerator.generatePartObject(segmentId, part)) + } + + changes.changedCacheObjects.push(ingestObjectGenerator.generateSegmentObject(ingestSegment)) + } + } + function addChangedRankSegments( + changes: MutableIngestRundownChanges, + _ingestRundown: IngestRundown, + ...ingestSegments: SofieIngestSegment[] + ): void { + for (const ingestSegment of ingestSegments) { + changes.changedCacheObjects.push(ingestObjectGenerator.generateSegmentObject(ingestSegment)) + } + } + function addChangedRundown(changes: MutableIngestRundownChanges): void { + changes.computedChanges.regenerateRundown = true + changes.changedCacheObjects.push( + ingestObjectGenerator.generateRundownObject(changes.computedChanges.ingestRundown) + ) + } + function removeSegmentFromIngestRundown(ingestRundown: IngestRundown, segmentId: string): void { + const ingestSegment = 
ingestRundown.segments.find((p) => p.externalId === segmentId) + ingestRundown.segments = ingestRundown.segments.filter((p) => p.externalId !== segmentId) + if (ingestSegment) { + for (const part of ingestRundown.segments) { + if (part.rank > ingestSegment.rank) part.rank-- + } + } + } + function getSegmentIdOrder(mutableRundown: MutableIngestRundownImpl): string[] { + return mutableRundown.segments.map((p) => p.externalId) + } + function getSegmentOriginalIdOrder(mutableRundown: MutableIngestRundownImpl): Array { + return mutableRundown.segments.map((p) => p.originalExternalId) + } + + test('create basic', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + // compare properties + expect(mutableRundown.externalId).toBe(ingestRundown.externalId) + expect(mutableRundown.name).toBe(ingestRundown.name) + expect(mutableRundown.payload).toEqual(ingestRundown.payload) + expect(mutableRundown.segments.length).toBe(ingestRundown.segments.length) + + // check it has no changes + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + }) + + test('create basic with changes', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), false) + + // compare properties + expect(mutableRundown.externalId).toBe(ingestRundown.externalId) + expect(mutableRundown.name).toBe(ingestRundown.name) + expect(mutableRundown.payload).toEqual(ingestRundown.payload) + expect(mutableRundown.segments.length).toBe(ingestRundown.segments.length) + + // check it has changes + const expectedChanges = createNoChangesObject(ingestRundown) + addChangedSegments(expectedChanges, ingestRundown, ...ingestRundown.segments) + addChangedRundown(expectedChanges) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + + // check changes have been cleared + 
expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + }) + + test('set name', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + // compare properties + expect(mutableRundown.name).toBe(ingestRundown.name) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + + mutableRundown.setName('new-name') + expect(mutableRundown.name).toBe('new-name') + + // check it has changes + const expectedChanges = createNoChangesObject(clone(ingestRundown)) + expectedChanges.computedChanges.ingestRundown.name = 'new-name' + addChangedRundown(expectedChanges) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('replace payload with change', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + // compare properties + expect(mutableRundown.payload).toEqual(ingestRundown.payload) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + + const newPayload = { val: 'new-val' } + mutableRundown.replacePayload(newPayload) + expect(mutableRundown.payload).toEqual(newPayload) + + // check it has changes + const expectedChanges = createNoChangesObject(clone(ingestRundown)) + expectedChanges.computedChanges.ingestRundown.payload = newPayload + addChangedRundown(expectedChanges) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('replace payload with no change', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + // compare properties + expect(mutableRundown.payload).toEqual(ingestRundown.payload) + 
expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + + mutableRundown.replacePayload(ingestRundown.payload) + expect(mutableRundown.payload).toEqual(ingestRundown.payload) + + // check it has no changes + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + }) + + test('set payload property change', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + // compare properties + expect(mutableRundown.payload).toEqual(ingestRundown.payload) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + + const newPayload = { ...ingestRundown.payload, test: 123, second: undefined } + mutableRundown.setPayloadProperty('test', 123) + mutableRundown.setPayloadProperty('second', undefined) + expect(mutableRundown.payload).toEqual(newPayload) + + // check it has changes + const expectedChanges = createNoChangesObject(clone(ingestRundown)) + expectedChanges.computedChanges.ingestRundown.payload = newPayload + addChangedRundown(expectedChanges) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('set payload property unchanged', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + // compare properties + expect(mutableRundown.payload).toEqual(ingestRundown.payload) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + + mutableRundown.setPayloadProperty('val', ingestRundown.payload.val) + mutableRundown.setPayloadProperty('another', undefined) + expect(mutableRundown.payload).toEqual(ingestRundown.payload) + + // check it has no changes + 
expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + }) + + test('set user edit state change', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + // compare properties + expect(mutableRundown.userEditStates).toEqual(ingestRundown.userEditStates) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + + const newUserEditStates = { ...ingestRundown.userEditStates, two: true, another: false } + mutableRundown.setUserEditState('two', true) + mutableRundown.setUserEditState('another', false) + expect(mutableRundown.userEditStates).toEqual(newUserEditStates) + + // check it has changes + const expectedChanges = createNoChangesObject(ingestRundown) + ingestRundown.userEditStates = newUserEditStates + addChangedRundown(expectedChanges) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('set user edit state unchanged', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + // compare properties + expect(mutableRundown.userEditStates).toEqual(ingestRundown.userEditStates) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + + mutableRundown.setUserEditState('one', true) + mutableRundown.setUserEditState('two', false) + expect(mutableRundown.userEditStates).toEqual(ingestRundown.userEditStates) + + // check it has changes + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + }) + + test('get segments', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + // compare properties + 
expect(mutableRundown.segments.length).toBe(ingestRundown.segments.length) + expect(mutableRundown.getSegment('seg0')).toStrictEqual(mutableRundown.segments[0]) + expect(mutableRundown.getSegment('seg0') instanceof MutableIngestSegmentImpl).toBe(true) + expect(mutableRundown.getSegment('seg1')).toStrictEqual(mutableRundown.segments[1]) + expect(mutableRundown.getSegment('seg1') instanceof MutableIngestSegmentImpl).toBe(true) + expect(mutableRundown.getSegment('seg2')).toStrictEqual(mutableRundown.segments[2]) + expect(mutableRundown.getSegment('seg2') instanceof MutableIngestSegmentImpl).toBe(true) + expect(mutableRundown.getSegment('seg3')).toBeUndefined() + + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + }) + + test('findPart & findPartAndSegment', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + // compare properties + expect(mutableRundown.segments.length).toBe(ingestRundown.segments.length) + expect(mutableRundown.findPart('part1')).toStrictEqual(mutableRundown.segments[1].parts[0]) + expect(mutableRundown.findPart('part1')).toStrictEqual(mutableRundown.findPartAndSegment('part1')?.part) + expect(mutableRundown.getSegment('seg1')).toStrictEqual(mutableRundown.findPartAndSegment('part1')?.segment) + + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + }) + + describe('removeSegment', () => { + test('good', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(mutableRundown.removeSegment('seg1')).toBeTruthy() + + // compare properties + expect(mutableRundown.segments.length).toBe(2) + expect(mutableRundown.getSegment('seg1')).toBeUndefined() + + // check it has changes + const expectedIngestRundown = clone(ingestRundown) + 
removeSegmentFromIngestRundown(expectedIngestRundown, 'seg1') + const expectedChanges = createNoChangesObject(expectedIngestRundown) + expectedChanges.computedChanges.segmentsToRemove.push('seg1') + expectedChanges.computedChanges.segmentsUpdatedRanks = { seg2: 1 } + addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1]) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + + // try removing a second time + expect(mutableRundown.removeSegment('seg1')).toBeFalsy() + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual( + createNoChangesObject(expectedIngestRundown) + ) + }) + + test('unknown id', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(mutableRundown.removeSegment('segX')).toBeFalsy() + + // compare properties + expect(mutableRundown.segments.length).toBe(ingestRundown.segments.length) + + // ensure no changes + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual( + createNoChangesObject(ingestRundown) + ) + }) + }) + + test('removeAllSegments', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + mutableRundown.removeAllSegments() + + // compare properties + expect(mutableRundown.segments.length).toBe(0) + + // ensure no changes + const expectedIngestRundown = clone(ingestRundown) + expectedIngestRundown.segments = [] + const expectedChanges = createNoChangesObject(expectedIngestRundown) + for (const segment of ingestRundown.segments) { + expectedChanges.computedChanges.segmentsToRemove.push(segment.externalId) + } + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('forceFullRegenerate', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new 
MutableIngestRundownImpl(clone(ingestRundown), true) + + // ensure no changes + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown)) + + mutableRundown.forceFullRegenerate() + + // check it has changes + const expectedChanges = createNoChangesObject(ingestRundown) + addChangedRundown(expectedChanges) + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + describe('replaceSegment', () => { + test('replace existing with a move', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + const segmentBefore = mutableRundown.getSegment('seg1') + expect(segmentBefore).toBeDefined() + for (const part of segmentBefore?.parts || []) { + expect(mutableRundown.findPart(part.externalId)).toStrictEqual(part) + } + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + + const newSegment: Omit = { + externalId: 'seg1', + name: 'new name', + payload: { + val: 'new-val', + }, + userEditStates: {}, + parts: [ + { + externalId: 'part1', + name: 'new part name', + rank: 0, + payload: { + val: 'new-part-val', + }, + userEditStates: {}, + }, + ], + } + const replacedPart = mutableRundown.replaceSegment(newSegment, null) + expect(replacedPart).toBeDefined() + // ensure the inserted part looks correct + expect(replacedPart?.externalId).toBe(newSegment.externalId) + expect(replacedPart?.name).toBe(newSegment.name) + expect(replacedPart?.payload).toEqual(newSegment.payload) + + // check it has changes + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg2', 'seg1']) + const expectedIngestRundown = clone(ingestRundown) + removeSegmentFromIngestRundown(expectedIngestRundown, 'seg1') + expectedIngestRundown.segments.push({ ...newSegment, rank: 2 }) + + const expectedChanges = createNoChangesObject(expectedIngestRundown) + addChangedRankSegments(expectedChanges, ingestRundown, 
expectedIngestRundown.segments[1]) + addChangedSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[2]) + expectedChanges.computedChanges.segmentsUpdatedRanks = { seg2: 1, seg1: 2 } + + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + + // ensure the MutableSegment is a new object + expect(mutableRundown.getSegment('seg1')).not.toBe(segmentBefore) + for (const part of segmentBefore?.parts || []) { + expect(mutableRundown.findPart(part.externalId)).not.toBe(part) + } + }) + + test('insert new', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(mutableRundown.getSegment('partX')).toBeUndefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + + const newSegment: Omit = { + externalId: 'segX', + name: 'new name', + payload: { + val: 'new-val', + }, + userEditStates: {}, + parts: [ + { + externalId: 'partX', + name: 'new part name', + rank: 0, + payload: { + val: 'new-part-val', + }, + userEditStates: {}, + }, + ], + } + const replacedPart = mutableRundown.replaceSegment(newSegment, null) + expect(replacedPart).toBeDefined() + // ensure the inserted part looks correct + expect(replacedPart?.externalId).toBe(newSegment.externalId) + expect(replacedPart?.name).toBe(newSegment.name) + expect(replacedPart?.payload).toEqual(newSegment.payload) + + // check it has changes + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2', 'segX']) + const expectedIngestRundown = clone(ingestRundown) + expectedIngestRundown.segments.push({ ...newSegment, rank: 3 }) + + const expectedChanges = createNoChangesObject(expectedIngestRundown) + addChangedSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[3]) + expectedChanges.computedChanges.segmentsUpdatedRanks = { segX: 3 } + + 
expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('insert at position', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(mutableRundown.getSegment('partX')).toBeUndefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + + const newSegment: Omit = { + externalId: 'segX', + name: 'new name', + payload: { + val: 'new-val', + }, + parts: [ + { + externalId: 'partX', + name: 'new part name', + rank: 0, + payload: { + val: 'new-part-val', + }, + }, + ], + } + + // insert at the end + expect(mutableRundown.replaceSegment(newSegment, null)).toBeDefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2', 'segX']) + + // insert at the beginning + expect(mutableRundown.replaceSegment(newSegment, 'seg0')).toBeDefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['segX', 'seg0', 'seg1', 'seg2']) + + // insert in the middle + expect(mutableRundown.replaceSegment(newSegment, 'seg2')).toBeDefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'segX', 'seg2']) + + // Only the one should have changes + expect( + mutableRundown + .intoIngestRundown(ingestObjectGenerator) + .computedChanges.segmentsToRegenerate.map((s) => s.externalId) + ).toEqual(['segX']) + + // Try inserting before itself + expect(() => mutableRundown.replaceSegment(newSegment, newSegment.externalId)).toThrow( + /Cannot insert Segment before itself/ + ) + + // Try inserting before an unknown part + expect(() => mutableRundown.replaceSegment(newSegment, 'segY')).toThrow(/Segment(.*)not found/) + }) + }) + + describe('moveSegmentBefore', () => { + test('move unknown', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(mutableRundown.getSegment('segX')).toBeUndefined() + 
expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + + expect(() => mutableRundown.moveSegmentBefore('segX', null)).toThrow(/Segment(.*)not found/) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + }) + + test('move to position', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + + // insert at the end + mutableRundown.moveSegmentBefore('seg1', null) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg2', 'seg1']) + + // insert in the middle + mutableRundown.moveSegmentBefore('seg1', 'seg2') + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + + // insert at the beginning + mutableRundown.moveSegmentBefore('seg1', 'seg0') + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg1', 'seg0', 'seg2']) + + // Check the reported changes + const expectedIngestRundown = clone(ingestRundown) + expectedIngestRundown.segments.splice(0, 0, expectedIngestRundown.segments.splice(1, 1)[0]) + expectedIngestRundown.segments[0].rank = 0 + expectedIngestRundown.segments[1].rank = 1 + const expectedChanges = createNoChangesObject(expectedIngestRundown) + addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[0]) + addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1]) + expectedChanges.computedChanges.segmentsUpdatedRanks = { seg1: 0, seg0: 1 } + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + + // Try inserting before itself + expect(() => mutableRundown.moveSegmentBefore('seg1', 'seg1')).toThrow(/Cannot move Segment before itself/) + + // Try inserting before an unknown part + expect(() => mutableRundown.moveSegmentBefore('seg1', 'segY')).toThrow(/Segment(.*)not found/) + }) + }) + + describe('moveSegmentAfter', () 
=> { + test('move unknown', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(mutableRundown.getSegment('segX')).toBeUndefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + + expect(() => mutableRundown.moveSegmentAfter('segX', null)).toThrow(/Segment(.*)not found/) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + }) + + test('move to position', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + + // insert at the beginning + mutableRundown.moveSegmentAfter('seg1', null) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg1', 'seg0', 'seg2']) + + // insert in the middle + mutableRundown.moveSegmentAfter('seg1', 'seg0') + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + + // insert at the end + mutableRundown.moveSegmentAfter('seg1', 'seg2') + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg2', 'seg1']) + + // Check the reported changes + const expectedIngestRundown = clone(ingestRundown) + expectedIngestRundown.segments.splice(1, 0, expectedIngestRundown.segments.splice(2, 1)[0]) + expectedIngestRundown.segments[1].rank = 1 + expectedIngestRundown.segments[2].rank = 2 + const expectedChanges = createNoChangesObject(expectedIngestRundown) + addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1]) + addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[2]) + expectedChanges.computedChanges.segmentsUpdatedRanks = { seg2: 1, seg1: 2 } + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + + // Try inserting before itself + expect(() => mutableRundown.moveSegmentAfter('seg1', 
'seg1')).toThrow(/Cannot move Segment after itself/) + + // Try inserting before an unknown part + expect(() => mutableRundown.moveSegmentAfter('seg1', 'segY')).toThrow(/Segment(.*)not found/) + }) + }) + + describe('changeSegmentExternalId', () => { + test('rename unknown', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(mutableRundown.getSegment('segX')).toBeUndefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + + expect(() => mutableRundown.changeSegmentExternalId('segX', 'segY')).toThrow(/Segment(.*)not found/) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + }) + + test('rename to duplicate', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(mutableRundown.getSegment('seg1')).toBeDefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + + expect(() => mutableRundown.changeSegmentExternalId('seg1', 'seg2')).toThrow(/Segment(.*)already exists/) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + }) + + test('good', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + const beforeSegment = mutableRundown.getSegment('seg1') as MutableIngestSegmentImpl + expect(beforeSegment).toBeDefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + 
expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + + // rename and check + expect(mutableRundown.changeSegmentExternalId('seg1', 'segX')).toStrictEqual(beforeSegment) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'segX', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', undefined]) + expect(beforeSegment.originalExternalId).toBe('seg1') + expect(beforeSegment.externalId).toBe('segX') + + // Check the reported changes + const expectedIngestRundown = clone(ingestRundown) + expectedIngestRundown.segments[1].externalId = 'segX' + const expectedChanges = createNoChangesObject(expectedIngestRundown) + addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1]) + expectedChanges.computedChanges.segmentsUpdatedRanks = { segX: 1 } + expectedChanges.computedChanges.segmentExternalIdChanges = { seg1: 'segX' } + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('rename twice', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + const beforeSegment = mutableRundown.getSegment('seg1') as MutableIngestSegmentImpl + expect(beforeSegment).toBeDefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + + // rename and check + expect(mutableRundown.changeSegmentExternalId('seg1', 'segX')).toStrictEqual(beforeSegment) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'segX', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', undefined]) + expect(beforeSegment.originalExternalId).toBe('seg1') + expect(beforeSegment.externalId).toBe('segX') + + // rename again + expect(mutableRundown.changeSegmentExternalId('segX', 
'segY')).toStrictEqual(beforeSegment) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'segY', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', undefined]) + expect(beforeSegment.originalExternalId).toBe('seg1') + expect(beforeSegment.externalId).toBe('segY') + + // Check the reported changes + const expectedIngestRundown = clone(ingestRundown) + expectedIngestRundown.segments[1].externalId = 'segY' + const expectedChanges = createNoChangesObject(expectedIngestRundown) + addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1]) + expectedChanges.computedChanges.segmentsUpdatedRanks = { segY: 1 } + expectedChanges.computedChanges.segmentExternalIdChanges = { seg1: 'segY' } + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('rename circle', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + const beforeSegment1 = mutableRundown.getSegment('seg1') as MutableIngestSegmentImpl + expect(beforeSegment1).toBeDefined() + const beforeSegment2 = mutableRundown.getSegment('seg2') as MutableIngestSegmentImpl + expect(beforeSegment2).toBeDefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + + // rename seg1 to segX + expect(mutableRundown.changeSegmentExternalId('seg1', 'segX')).toStrictEqual(beforeSegment1) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'segX', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', undefined]) + expect(beforeSegment1.originalExternalId).toBe('seg1') + expect(beforeSegment1.externalId).toBe('segX') + + // rename seg2 to seg1 + expect(mutableRundown.changeSegmentExternalId('seg2', 'seg1')).toStrictEqual(beforeSegment2) + 
expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'segX', 'seg1']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', 'seg2']) + expect(beforeSegment2.originalExternalId).toBe('seg2') + expect(beforeSegment2.externalId).toBe('seg1') + + // rename segX to seg2 + expect(mutableRundown.changeSegmentExternalId('segX', 'seg2')).toStrictEqual(beforeSegment1) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg2', 'seg1']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', 'seg2']) + expect(beforeSegment1.originalExternalId).toBe('seg1') + expect(beforeSegment1.externalId).toBe('seg2') + + // Check the reported changes + const expectedIngestRundown = clone(ingestRundown) + expectedIngestRundown.segments[1].externalId = 'seg2' + expectedIngestRundown.segments[2].externalId = 'seg1' + const expectedChanges = createNoChangesObject(expectedIngestRundown) + addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1]) + addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[2]) + expectedChanges.computedChanges.segmentsUpdatedRanks = { seg2: 1, seg1: 2 } + expectedChanges.computedChanges.segmentExternalIdChanges = { seg1: 'seg2', seg2: 'seg1' } + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + }) + + describe('changeSegmentOriginalExternalId', () => { + test('rename unknown', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(mutableRundown.getSegment('segX')).toBeUndefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + + expect(() => mutableRundown.changeSegmentOriginalExternalId('segX', 'segY')).toThrow(/Segment(.*)not found/) + 
expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + }) + + test('rename to duplicate', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + expect(mutableRundown.getSegment('seg1')).toBeDefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + + expect(() => mutableRundown.changeSegmentOriginalExternalId('seg1', 'seg2')).toThrow(/Segment(.*)exists/) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + }) + + test('good', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + const beforeSegment = mutableRundown.getSegment('seg1') as MutableIngestSegmentImpl + expect(beforeSegment).toBeDefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + + // rename and check + expect(mutableRundown.changeSegmentOriginalExternalId('seg1', 'segX')).toStrictEqual(beforeSegment) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'segX', undefined]) + expect(beforeSegment.originalExternalId).toBe('segX') + expect(beforeSegment.externalId).toBe('seg1') + + // Check the reported changes + const expectedIngestRundown = clone(ingestRundown) + const expectedChanges = createNoChangesObject(expectedIngestRundown) + addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1]) + 
expectedChanges.computedChanges.segmentExternalIdChanges = { segX: 'seg1' } + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('rename twice', () => { + const ingestRundown = getBasicIngestRundown() + const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true) + + const beforeSegment = mutableRundown.getSegment('seg1') as MutableIngestSegmentImpl + expect(beforeSegment).toBeDefined() + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined]) + + // rename and check + expect(mutableRundown.changeSegmentOriginalExternalId('seg1', 'segX')).toStrictEqual(beforeSegment) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'segX', undefined]) + expect(beforeSegment.originalExternalId).toBe('segX') + expect(beforeSegment.externalId).toBe('seg1') + + // rename again + expect(mutableRundown.changeSegmentOriginalExternalId('seg1', 'segY')).toStrictEqual(beforeSegment) + expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2']) + expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'segY', undefined]) + expect(beforeSegment.originalExternalId).toBe('segY') + expect(beforeSegment.externalId).toBe('seg1') + + // Check the reported changes + const expectedIngestRundown = clone(ingestRundown) + const expectedChanges = createNoChangesObject(expectedIngestRundown) + addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1]) + expectedChanges.computedChanges.segmentExternalIdChanges = { segY: 'seg1' } + expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges) + }) + }) +}) diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestSegmentImpl.spec.ts 
b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestSegmentImpl.spec.ts new file mode 100644 index 00000000000..3d53c7849d0 --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestSegmentImpl.spec.ts @@ -0,0 +1,526 @@ +import { clone } from '@sofie-automation/corelib/dist/lib' +import { MutableIngestSegmentChanges, MutableIngestSegmentImpl } from '../MutableIngestSegmentImpl' +import { SofieIngestRundownDataCacheGenerator } from '../../../ingest/sofieIngestCache' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { getSegmentId } from '../../../ingest/lib' +import { MutableIngestPartImpl } from '../MutableIngestPartImpl' +import { IngestPart, IngestSegment, SofieIngestSegment } from '@sofie-automation/blueprints-integration' + +describe('MutableIngestSegmentImpl', () => { + function getBasicIngestSegment(): SofieIngestSegment { + return { + externalId: 'externalId', + name: 'name', + rank: 0, + payload: { + val: 'some-val', + second: 5, + }, + userEditStates: { + one: true, + two: false, + }, + parts: [ + { + externalId: 'part0', + name: 'my first part', + rank: 0, + payload: { + val: 'some-val', + }, + userEditStates: {}, + }, + { + externalId: 'part1', + name: 'another part', + rank: 1, + payload: { + val: 'second-val', + }, + userEditStates: {}, + }, + { + externalId: 'part2', + name: 'third part', + rank: 2, + payload: { + val: 'third-val', + }, + userEditStates: {}, + }, + { + externalId: 'part3', + name: 'last part', + rank: 3, + payload: { + val: 'last-val', + }, + userEditStates: {}, + }, + ], + } + } + + const ingestObjectGenerator = new SofieIngestRundownDataCacheGenerator(protectString('rundownId')) + + function createNoChangesObject(ingestSegment: SofieIngestSegment): MutableIngestSegmentChanges { + return { + ingestParts: ingestSegment.parts, + changedCacheObjects: [], + allCacheObjectIds: ingestSegment.parts.map((p) => ingestObjectGenerator.getPartObjectId(p.externalId)), 
+ segmentHasChanges: false, + partIdsWithChanges: [], + partOrderHasChanged: false, + originalExternalId: ingestSegment.externalId, + } + } + function removePartFromIngestSegment(ingestSegment: IngestSegment, partId: string): void { + const ingestPart = ingestSegment.parts.find((p) => p.externalId === partId) + ingestSegment.parts = ingestSegment.parts.filter((p) => p.externalId !== partId) + if (ingestPart) { + for (const part of ingestSegment.parts) { + if (part.rank > ingestPart.rank) part.rank-- + } + } + } + function getPartIdOrder(mutableSegment: MutableIngestSegmentImpl): string[] { + return mutableSegment.parts.map((p) => p.externalId) + } + + test('create basic', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.externalId).toBe(ingestSegment.externalId) + expect(mutableSegment.name).toBe(ingestSegment.name) + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + expect(mutableSegment.parts.length).toBe(ingestSegment.parts.length) + + // check it has no changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + + test('create basic with changes', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment), true) + + // compare properties + expect(mutableSegment.externalId).toBe(ingestSegment.externalId) + expect(mutableSegment.name).toBe(ingestSegment.name) + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + expect(mutableSegment.parts.length).toBe(ingestSegment.parts.length) + + // check it has no changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true + const segmentId = getSegmentId(ingestObjectGenerator.rundownId, ingestSegment.externalId) + for (const ingestPart of ingestSegment.parts) { + 
expectedChanges.partIdsWithChanges.push(ingestPart.externalId) + expectedChanges.changedCacheObjects.push(ingestObjectGenerator.generatePartObject(segmentId, ingestPart)) + } + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + + // check changes have been cleared + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + + test('set name', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.name).toBe(ingestSegment.name) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + mutableSegment.setName('new-name') + expect(mutableSegment.name).toBe('new-name') + + // check it has changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('replace payload with change', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + const newPayload = { val: 'new-val' } + mutableSegment.replacePayload(newPayload) + expect(mutableSegment.payload).toEqual(newPayload) + + // check it has changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('replace payload with no change', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // 
compare properties + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + mutableSegment.replacePayload(ingestSegment.payload) + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + + // check it has no changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + + test('set payload property change', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + const newPayload = { ...ingestSegment.payload, test: 123, second: undefined } + mutableSegment.setPayloadProperty('test', 123) + mutableSegment.setPayloadProperty('second', undefined) + expect(mutableSegment.payload).toEqual(newPayload) + + // check it has changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('set payload property unchanged', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + mutableSegment.setPayloadProperty('val', ingestSegment.payload.val) + mutableSegment.setPayloadProperty('another', undefined) + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + + // check it has no changes + 
expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + + test('set user edit state change', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.userEditStates).toEqual(ingestSegment.userEditStates) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + const newUserEditStates = { ...ingestSegment.userEditStates, two: true, another: false } + mutableSegment.setUserEditState('two', true) + mutableSegment.setUserEditState('another', false) + expect(mutableSegment.userEditStates).toEqual(newUserEditStates) + + // check it has changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('set user edit state unchanged', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.userEditStates).toEqual(ingestSegment.userEditStates) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + mutableSegment.setUserEditState('one', true) + mutableSegment.setUserEditState('two', false) + expect(mutableSegment.userEditStates).toEqual(ingestSegment.userEditStates) + + // check it has changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + + test('get parts', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.parts.length).toBe(ingestSegment.parts.length) + 
expect(mutableSegment.getPart('part0')).toStrictEqual(mutableSegment.parts[0]) + expect(mutableSegment.getPart('part0') instanceof MutableIngestPartImpl).toBe(true) + expect(mutableSegment.getPart('part1')).toStrictEqual(mutableSegment.parts[1]) + expect(mutableSegment.getPart('part1') instanceof MutableIngestPartImpl).toBe(true) + expect(mutableSegment.getPart('part2')).toStrictEqual(mutableSegment.parts[2]) + expect(mutableSegment.getPart('part2') instanceof MutableIngestPartImpl).toBe(true) + expect(mutableSegment.getPart('part3')).toStrictEqual(mutableSegment.parts[3]) + expect(mutableSegment.getPart('part3') instanceof MutableIngestPartImpl).toBe(true) + expect(mutableSegment.getPart('part4')).toBeUndefined() + + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + + describe('removePart', () => { + test('good', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.removePart('part1')).toBeTruthy() + + // compare properties + expect(mutableSegment.parts.length).toBe(3) + expect(mutableSegment.getPart('part1')).toBeUndefined() + + // check it has changes + const expectedIngestSegment = clone(ingestSegment) + removePartFromIngestSegment(expectedIngestSegment, 'part1') + const expectedChanges = createNoChangesObject(expectedIngestSegment) + expectedChanges.partOrderHasChanged = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + + // try removing a second time + expect(mutableSegment.removePart('part1')).toBeFalsy() + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual( + createNoChangesObject(expectedIngestSegment) + ) + }) + + test('unknown id', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.removePart('partX')).toBeFalsy() + + 
// compare properties + expect(mutableSegment.parts.length).toBe(ingestSegment.parts.length) + + // ensure no changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + }) + + test('forceRegenerate', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // ensure no changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + mutableSegment.forceRegenerate() + + // check it has changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + describe('replacePart', () => { + test('replace existing with a move', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.getPart('part1')).toBeDefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + const newPart: Omit = { + externalId: 'part1', + name: 'new name', + payload: { + val: 'new-val', + }, + } + const replacedPart = mutableSegment.replacePart(newPart, null) + expect(replacedPart).toBeDefined() + // ensure the inserted part looks correct + expect(replacedPart?.externalId).toBe(newPart.externalId) + expect(replacedPart?.name).toBe(newPart.name) + expect(replacedPart?.payload).toEqual(newPart.payload) + + // check it has changes + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part2', 'part3', 'part1']) + const expectedIngestSegment = clone(ingestSegment) + removePartFromIngestSegment(expectedIngestSegment, 'part1') + expectedIngestSegment.parts.push({ ...newPart, rank: 3, userEditStates: {} }) + + const expectedChanges = createNoChangesObject(expectedIngestSegment) + expectedChanges.partOrderHasChanged = true 
+ expectedChanges.partIdsWithChanges.push('part1') + expectedChanges.changedCacheObjects.push( + ingestObjectGenerator.generatePartObject( + getSegmentId(ingestObjectGenerator.rundownId, ingestSegment.externalId), + { ...newPart, rank: 3, userEditStates: {} } + ) + ) + + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('insert new', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.getPart('partX')).toBeUndefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + const newPart: Omit = { + externalId: 'partX', + name: 'new name', + payload: { + val: 'new-val', + }, + } + const replacedPart = mutableSegment.replacePart(newPart, null) + expect(replacedPart).toBeDefined() + // ensure the inserted part looks correct + expect(replacedPart?.externalId).toBe(newPart.externalId) + expect(replacedPart?.name).toBe(newPart.name) + expect(replacedPart?.payload).toEqual(newPart.payload) + + // check it has changes + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3', 'partX']) + const expectedIngestSegment = clone(ingestSegment) + expectedIngestSegment.parts.push({ ...newPart, rank: 4, userEditStates: {} }) + + const expectedChanges = createNoChangesObject(expectedIngestSegment) + expectedChanges.partOrderHasChanged = true + expectedChanges.partIdsWithChanges.push('partX') + expectedChanges.changedCacheObjects.push( + ingestObjectGenerator.generatePartObject( + getSegmentId(ingestObjectGenerator.rundownId, ingestSegment.externalId), + { ...newPart, rank: 4, userEditStates: {} } + ) + ) + + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('insert at position', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + 
expect(mutableSegment.getPart('partX')).toBeUndefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + const newPart: Omit = { + externalId: 'partX', + name: 'new name', + payload: { + val: 'new-val', + }, + } + + // insert at the end + expect(mutableSegment.replacePart(newPart, null)).toBeDefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3', 'partX']) + + // insert at the beginning + expect(mutableSegment.replacePart(newPart, 'part0')).toBeDefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['partX', 'part0', 'part1', 'part2', 'part3']) + + // insert in the middle + expect(mutableSegment.replacePart(newPart, 'part2')).toBeDefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'partX', 'part2', 'part3']) + + // Only the one should have changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator).partIdsWithChanges).toEqual(['partX']) + + // Try inserting before itself + expect(() => mutableSegment.replacePart(newPart, newPart.externalId)).toThrow( + /Cannot insert Part before itself/ + ) + + // Try inserting before an unknown part + expect(() => mutableSegment.replacePart(newPart, 'partY')).toThrow(/Part(.*)not found/) + }) + }) + + describe('movePartBefore', () => { + test('move unknown', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.getPart('partX')).toBeUndefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + expect(() => mutableSegment.movePartBefore('partX', null)).toThrow(/Part(.*)not found/) + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + }) + + test('move to position', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + 
expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + // insert at the end + mutableSegment.movePartBefore('part1', null) + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part2', 'part3', 'part1']) + + // insert at the beginning + mutableSegment.movePartBefore('part1', 'part0') + expect(getPartIdOrder(mutableSegment)).toEqual(['part1', 'part0', 'part2', 'part3']) + + // insert in the middle + mutableSegment.movePartBefore('part1', 'part2') + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + // Only the one should have changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.partOrderHasChanged = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + + // Try inserting before itself + expect(() => mutableSegment.movePartBefore('part1', 'part1')).toThrow(/Cannot move Part before itself/) + + // Try inserting before an unknown part + expect(() => mutableSegment.movePartBefore('part1', 'partY')).toThrow(/Part(.*)not found/) + }) + }) + + describe('movePartAfter', () => { + test('move unknown', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.getPart('partX')).toBeUndefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + expect(() => mutableSegment.movePartAfter('partX', null)).toThrow(/Part(.*)not found/) + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + }) + + test('move to position', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + // insert at the beginning + mutableSegment.movePartAfter('part1', null) + 
expect(getPartIdOrder(mutableSegment)).toEqual(['part1', 'part0', 'part2', 'part3']) + + // insert at the end + mutableSegment.movePartAfter('part1', 'part3') + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part2', 'part3', 'part1']) + + // insert in the middle + mutableSegment.movePartAfter('part1', 'part0') + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + // Only the one should have changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.partOrderHasChanged = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + + // Try inserting before itself + expect(() => mutableSegment.movePartAfter('part1', 'part1')).toThrow(/Cannot move Part after itself/) + + // Try inserting before an unknown part + expect(() => mutableSegment.movePartAfter('part1', 'partY')).toThrow(/Part(.*)not found/) + }) + }) +}) diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/defaultApplyIngestChanges.spec.ts b/packages/job-worker/src/blueprints/ingest/__tests__/defaultApplyIngestChanges.spec.ts new file mode 100644 index 00000000000..4d5d91440cd --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/__tests__/defaultApplyIngestChanges.spec.ts @@ -0,0 +1,960 @@ +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { MutableIngestRundownImpl } from '../MutableIngestRundownImpl' +import { defaultApplyIngestChanges } from '../defaultApplyIngestChanges' +import { + NrcsIngestChangeDetails, + NrcsIngestPartChangeDetails, + NrcsIngestRundownChangeDetails, + NrcsIngestSegmentChangeDetailsEnum, + IngestDefaultChangesOptions, + IngestRundown, + MutableIngestPart, + MutableIngestRundown, + MutableIngestSegment, + IngestChangeType, +} from '@sofie-automation/blueprints-integration' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { toSofieIngestRundown } from './util' + 
+describe('defaultApplyIngestChanges', () => { + function createBasicIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rd0', + name: 'my rundown', + type: 'mock', + rundownSource: { type: 'http' }, + payload: { + myData: 'data', + }, + segments: [ + { + externalId: 'seg0', + rank: 0, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [ + { + externalId: 'part0', + rank: 0, + name: 'my part', + payload: { + partData: 'data', + }, + }, + ], + }, + ], + } + } + function createMediumIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rd0', + name: 'my rundown', + type: 'mock', + rundownSource: { type: 'http' }, + payload: { + myData: 'data', + }, + segments: [ + { + externalId: 'seg0', + rank: 0, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [ + { + externalId: 'part0', + rank: 0, + name: 'my part', + payload: { + partData: 'data', + }, + }, + { + externalId: 'part1', + rank: 1, + name: 'my part', + payload: { + partData: 'data', + }, + }, + ], + }, + { + externalId: 'seg1', + rank: 1, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [ + { + externalId: 'part2', + rank: 0, + name: 'my part', + payload: { + partData: 'data', + }, + }, + ], + }, + { + externalId: 'seg2', + rank: 2, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [ + { + externalId: 'part3', + rank: 0, + name: 'my part', + payload: { + partData: 'data', + }, + }, + { + externalId: 'part4', + rank: 1, + name: 'my part', + payload: { + partData: 'data', + }, + }, + { + externalId: 'part5', + rank: 1, + name: 'my part', + payload: { + partData: 'data', + }, + }, + ], + }, + ], + } + } + function createIngestRundownWithManySegments(): IngestRundownWithSource { + return { + externalId: 'rd0', + name: 'my rundown', + type: 'mock', + rundownSource: { type: 'http' }, + payload: { + myData: 'data', + }, + segments: [ + { + externalId: 'seg0', + rank: 0, + name: 'my segment', + payload: 
{ + segmentData: 'data', + }, + parts: [], + }, + { + externalId: 'seg1', + rank: 1, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [], + }, + { + externalId: 'seg2', + rank: 2, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [], + }, + { + externalId: 'seg3', + rank: 3, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [], + }, + { + externalId: 'seg4', + rank: 4, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [], + }, + ], + } + } + + /** + * This creates a MutableIngestRundownImpl from an IngestRundown, and wraps all methods to record the mutation calls made to the rundown and its contents + */ + function createMutableIngestRundown(nrcsRundown: IngestRundownWithSource) { + const mutableIngestRundown = new MutableIngestRundownImpl(toSofieIngestRundown(nrcsRundown), true) + + const mockCalls: Array<{ target: string; name: string; args: any[] }> = [] + + const defaultOptions: IngestDefaultChangesOptions = { + transformRundownPayload: jest.fn((payload, oldPayload) => { + mockCalls.push({ target: 'options', name: 'transformRundownPayload', args: [!!oldPayload] }) + return payload + }), + transformSegmentPayload: jest.fn((payload, oldPayload) => { + mockCalls.push({ target: 'options', name: 'transformSegmentPayload', args: [!!oldPayload] }) + return payload + }), + transformPartPayload: jest.fn((payload, oldPayload) => { + mockCalls.push({ target: 'options', name: 'transformPartPayload', args: [!!oldPayload] }) + return payload + }), + } + + function wrapMethod( + target: string, + name: TName, + obj: TObj, + interceptReturn?: (val: ReturnType) => ReturnType + ) { + const rawMethod = obj[name] + if (typeof rawMethod !== 'function') throw new Error(`Cant wrap non-method ${name}`) + const origMethod = rawMethod.bind(obj) + + const mockMethod = jest.fn((...args) => { + mockCalls.push({ target, name, args }) + const returnVal = origMethod(...args) + if (interceptReturn) { + 
return interceptReturn(returnVal) + } else { + return returnVal + } + }) + obj[name] = mockMethod as any + + return mockMethod + } + + function wrapPart(part: MutableIngestPart) { + const target = `part ${part.externalId}` + wrapMethod(target, 'setName', part) + wrapMethod(target, 'replacePayload', part) + wrapMethod(target, 'setPayloadProperty', part) + } + + function wrapSegment(segment: MutableIngestSegment) { + const target = `segment ${segment.externalId}` + wrapMethod(target, 'movePartBefore', segment) + wrapMethod(target, 'movePartAfter', segment) + wrapMethod(target, 'replacePart', segment, (part: MutableIngestPart) => { + wrapPart(part) + return part + }) + wrapMethod(target, 'removePart', segment) + wrapMethod(target, 'setName', segment) + wrapMethod(target, 'replacePayload', segment) + wrapMethod(target, 'setPayloadProperty', segment) + + segment.parts.forEach(wrapPart) + } + + wrapMethod('rundown', 'moveSegmentAfter', mutableIngestRundown) + wrapMethod('rundown', 'moveSegmentBefore', mutableIngestRundown) + wrapMethod('rundown', 'removeAllSegments', mutableIngestRundown) + wrapMethod('rundown', 'replaceSegment', mutableIngestRundown, (segment: MutableIngestSegment) => { + wrapSegment(segment) + return segment + }) + wrapMethod('rundown', 'changeSegmentExternalId', mutableIngestRundown) + wrapMethod('rundown', 'changeSegmentOriginalExternalId', mutableIngestRundown) + wrapMethod('rundown', 'removeSegment', mutableIngestRundown) + wrapMethod('rundown', 'forceFullRegenerate', mutableIngestRundown) + wrapMethod('rundown', 'setName', mutableIngestRundown) + wrapMethod('rundown', 'replacePayload', mutableIngestRundown) + wrapMethod('rundown', 'setPayloadProperty', mutableIngestRundown) + + mutableIngestRundown.segments.forEach(wrapSegment) + + return { + mutableIngestRundown: mutableIngestRundown as MutableIngestRundown, + defaultOptions, + mockCalls, + } + } + + describe('rundown changes', () => { + it('no changes', async () => { + const nrcsRundown = 
createBasicIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest } + + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(0) + }) + it('rundown name and payload change', async () => { + const nrcsRundown = createBasicIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Payload, + } + + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ target: 'options', name: 'transformRundownPayload', args: [true] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'replacePayload', args: [nrcsRundown.payload] }) + expect(mockCalls[2]).toEqual({ target: 'rundown', name: 'setName', args: [nrcsRundown.name] }) + }) + it('rundown regenerate', async () => { + const nrcsRundown = createBasicIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + segmentOrderChanged: true, // will be ignored + segmentChanges: {}, // will be ignored + } + + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + // Ensure the segments were regenerated + expect(mockCalls).toHaveLength(8) + expect(mockCalls[0]).toEqual({ target: 'options', name: 'transformRundownPayload', args: [true] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'replacePayload', args: [nrcsRundown.payload] }) + expect(mockCalls[2]).toEqual({ target: 
'rundown', name: 'setName', args: [nrcsRundown.name] }) + expect(mockCalls[3]).toEqual({ target: 'rundown', name: 'removeAllSegments', args: [] }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'forceFullRegenerate', args: [] }) + expect(mockCalls[5]).toEqual({ target: 'options', name: 'transformSegmentPayload', args: [true] }) + expect(mockCalls[6]).toEqual({ target: 'options', name: 'transformPartPayload', args: [true] }) + expect(mockCalls[7]).toMatchObject({ target: 'rundown', name: 'replaceSegment' }) + expect(mutableIngestRundown.segments).toHaveLength(1) + }) + }) + + describe('segment order changes', () => { + function createIngestRundownWithManySegmentsAlternateOrder(): IngestRundown { + const ingestRundown = createIngestRundownWithManySegments() + + // reorder segments + ingestRundown.segments = [ + ingestRundown.segments[3], + ingestRundown.segments[1], + ingestRundown.segments[4], + ingestRundown.segments[0], + ingestRundown.segments[2], + ] + + return ingestRundown + } + + it('no changes', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(nrcsRundown) + + const changes: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest, segmentOrderChanged: true } + + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + // always ensures the order is sane + expect(mockCalls).toHaveLength(5) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg4', null] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg3', 'seg4'] }) + expect(mockCalls[2]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg2', 'seg3'] }) + expect(mockCalls[3]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg1', 'seg2'] }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg0', 'seg1'] 
}) + }) + + it('good changes', async () => { + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + createIngestRundownWithManySegments() + ) + + // include some changes, which should be ignored + const modifiedRundown = createIngestRundownWithManySegmentsAlternateOrder() + + const changes: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest, segmentOrderChanged: true } + + defaultApplyIngestChanges(mutableIngestRundown, modifiedRundown, changes, defaultOptions) + + // performs reorder + expect(mockCalls).toHaveLength(5) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg2', null] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg0', 'seg2'] }) + expect(mockCalls[2]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg4', 'seg0'] }) + expect(mockCalls[3]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg1', 'seg4'] }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg3', 'seg1'] }) + }) + + it('missing segment in new order', async () => { + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + createIngestRundownWithManySegments() + ) + + // include some changes + const modifiedRundown = createIngestRundownWithManySegmentsAlternateOrder() + modifiedRundown.segments.splice(2, 1) // remove seg4 + + const changes: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest, segmentOrderChanged: true } + + defaultApplyIngestChanges(mutableIngestRundown, modifiedRundown, changes, defaultOptions) + + // performs reorder + expect(mockCalls).toHaveLength(5) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg2', null] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg0', 'seg2'] }) + expect(mockCalls[2]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', 
args: ['seg1', 'seg0'] }) + expect(mockCalls[3]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg3', 'seg1'] }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'moveSegmentAfter', args: ['seg4', 'seg3'] }) // follows original order + }) + + it('extra segment in new order', async () => { + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + createIngestRundownWithManySegments() + ) + + // include some changes + const modifiedRundown = createIngestRundownWithManySegmentsAlternateOrder() + modifiedRundown.segments.splice(2, 0, { + externalId: 'segX', + rank: 2, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [], + }) + + const changes: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest, segmentOrderChanged: true } + + defaultApplyIngestChanges(mutableIngestRundown, modifiedRundown, changes, defaultOptions) + + // performs reorder, ignoring segX + expect(mockCalls).toHaveLength(5) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg2', null] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg0', 'seg2'] }) + expect(mockCalls[2]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg4', 'seg0'] }) + expect(mockCalls[3]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg1', 'seg4'] }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg3', 'seg1'] }) + }) + }) + + describe('segment changes', () => { + it('mix of operations', async () => { + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + createIngestRundownWithManySegments() + ) + + // include some changes, which should be ignored + const modifiedRundown = createIngestRundownWithManySegments() + modifiedRundown.segments[1].externalId = 'segX' // replace seg1 + modifiedRundown.segments[2].externalId = 'segY' // repalce seg2 + 
modifiedRundown.segments.splice(4, 1) // remove seg4 + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg1: NrcsIngestSegmentChangeDetailsEnum.Deleted, + segX: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + seg3: { + payloadChanged: true, + }, + seg4: NrcsIngestSegmentChangeDetailsEnum.Deleted, + segY: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + seg2: NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, + } + + defaultApplyIngestChanges(mutableIngestRundown, modifiedRundown, changes, defaultOptions) + + // performs deletes and inserts + expect(mockCalls).toHaveLength(10) + + // Note: this happens in the order of the changes object, but that is not guaranteed in the future + + // remove and update first + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'removeSegment', args: ['seg1'] }) + expect(mockCalls[1]).toEqual({ target: 'options', name: 'transformSegmentPayload', args: [true] }) + expect(mockCalls[2]).toMatchObject({ target: 'segment seg3', name: 'replacePayload' }) + expect(mockCalls[3]).toMatchObject({ target: 'segment seg3', name: 'setName' }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'removeSegment', args: ['seg4'] }) + expect(mockCalls[5]).toEqual({ target: 'rundown', name: 'removeSegment', args: ['seg2'] }) + + // insert new ones in order starting at the end + expect(mockCalls[6]).toEqual({ target: 'options', name: 'transformSegmentPayload', args: [false] }) + expect(mockCalls[7]).toMatchObject({ + target: 'rundown', + name: 'replaceSegment', + args: [{ externalId: 'segY' }, 'seg3'], + }) + expect(mockCalls[8]).toEqual({ target: 'options', name: 'transformSegmentPayload', args: [false] }) + expect(mockCalls[9]).toMatchObject({ + target: 'rundown', + name: 'replaceSegment', + args: [{ externalId: 'segX' }, 'segY'], + }) + }) + + it('insert missing', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, 
defaultOptions } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + segX: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + } + + expect(() => defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions)).toThrow( + /Segment(.*)not found/ + ) + }) + + it('delete missing', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + segX: NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(1) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'removeSegment', args: ['segX'] }) + }) + + it('update missing', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + segX: { + payloadChanged: true, + }, + }, + } + + // should run without error + expect(() => defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions)).toThrow( + /Segment(.*)not found/ + ) + }) + + it('update without changes', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg1: {}, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + 
expect(mockCalls).toHaveLength(0) + }) + + it('change segment id', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + segX: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + changedSegmentExternalIds: { + seg1: 'segX', + }, + } + + nrcsRundown.segments[1].externalId = 'segX' + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'changeSegmentExternalId', args: ['seg1', 'segX'] }) + expect(mockCalls[1]).toEqual({ target: 'options', name: 'transformSegmentPayload', args: [true] }) + expect(mockCalls[2]).toMatchObject({ + target: 'rundown', + name: 'replaceSegment', + args: [{ externalId: 'segX' }, 'seg2'], + }) + }) + + it('change unknown segment id', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + segX: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + changedSegmentExternalIds: { + segY: 'segX', + }, + } + + nrcsRundown.segments[1].externalId = 'segX' + + // should run without error + expect(() => defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions)).toThrow( + /Segment(.*)not found/ + ) + }) + + describe('partOrderChanged', () => { + it('with single part', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + const changes: NrcsIngestChangeDetails = { + source: 
IngestChangeType.Ingest, + segmentChanges: { + seg1: { + partOrderChanged: true, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(1) + expect(mockCalls[0]).toEqual({ target: 'segment seg1', name: 'movePartBefore', args: ['part2', null] }) + }) + it('with multiple parts', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + // reorder parts + const origParts = nrcsRundown.segments[2].parts + nrcsRundown.segments[2].parts = [origParts[1], origParts[0], origParts[2]] + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg2: { + partOrderChanged: true, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ target: 'segment seg2', name: 'movePartBefore', args: ['part5', null] }) + expect(mockCalls[1]).toEqual({ + target: 'segment seg2', + name: 'movePartBefore', + args: ['part3', 'part5'], + }) + expect(mockCalls[2]).toEqual({ + target: 'segment seg2', + name: 'movePartBefore', + args: ['part4', 'part3'], + }) + }) + + it('missing part in new order', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + // remove a part + nrcsRundown.segments[2].parts.splice(1, 1) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg2: { + partOrderChanged: true, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ 
target: 'segment seg2', name: 'movePartBefore', args: ['part5', null] }) + expect(mockCalls[1]).toEqual({ + target: 'segment seg2', + name: 'movePartBefore', + args: ['part3', 'part5'], + }) + expect(mockCalls[2]).toEqual({ + target: 'segment seg2', + name: 'movePartAfter', + args: ['part4', 'part3'], + }) + }) + + it('extra segment in new order', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + // add an extra nrcs part + nrcsRundown.segments[2].parts.splice(1, 0, { + externalId: 'partX', + rank: 0, + name: 'my part', + payload: { + partData: 'data', + }, + }) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg2: { + partOrderChanged: true, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + // performs reorder, ignoring segX + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ target: 'segment seg2', name: 'movePartBefore', args: ['part5', null] }) + expect(mockCalls[1]).toEqual({ + target: 'segment seg2', + name: 'movePartBefore', + args: ['part4', 'part5'], + }) + expect(mockCalls[2]).toEqual({ + target: 'segment seg2', + name: 'movePartBefore', + args: ['part3', 'part4'], + }) + }) + }) + + describe('partsChanges', () => { + it('mix of operations', async () => { + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + createMediumIngestRundown() + ) + + // include some changes, which should be ignored + const modifiedRundown = createMediumIngestRundown() + const segment0 = modifiedRundown.segments[0] + segment0.parts[0].externalId = 'partX' // replace part0 + const segment2 = modifiedRundown.segments[2] + segment2.parts[0].externalId = 'partY' // replace part3 + segment2.parts.splice(1, 1) // remove part4 + + const changes: 
NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: { + part0: NrcsIngestPartChangeDetails.Deleted, + partX: NrcsIngestPartChangeDetails.Inserted, + part1: NrcsIngestPartChangeDetails.Updated, + }, + }, + seg2: { + partChanges: { + part3: NrcsIngestPartChangeDetails.Deleted, + partY: NrcsIngestPartChangeDetails.Inserted, + part4: NrcsIngestPartChangeDetails.Deleted, + }, + }, + }, + } + + defaultApplyIngestChanges(mutableIngestRundown, modifiedRundown, changes, defaultOptions) + + // performs deletes and inserts + expect(mockCalls).toHaveLength(10) + + // Note: this happens in the order of the changes object, but that is not guaranteed in the future + + // first segment + expect(mockCalls[0]).toEqual({ target: 'segment seg0', name: 'removePart', args: ['part0'] }) + expect(mockCalls[1]).toEqual({ target: 'options', name: 'transformPartPayload', args: [true] }) + expect(mockCalls[2]).toMatchObject({ target: 'part part1', name: 'replacePayload' }) + expect(mockCalls[3]).toMatchObject({ target: 'part part1', name: 'setName' }) + expect(mockCalls[4]).toEqual({ target: 'options', name: 'transformPartPayload', args: [false] }) + expect(mockCalls[5]).toMatchObject({ + target: 'segment seg0', + name: 'replacePart', + args: [{ externalId: 'partX' }, 'part1'], + }) + + // second segment + expect(mockCalls[6]).toEqual({ target: 'segment seg2', name: 'removePart', args: ['part3'] }) + expect(mockCalls[7]).toEqual({ target: 'segment seg2', name: 'removePart', args: ['part4'] }) + expect(mockCalls[8]).toEqual({ target: 'options', name: 'transformPartPayload', args: [false] }) + expect(mockCalls[9]).toMatchObject({ + target: 'segment seg2', + name: 'replacePart', + args: [{ externalId: 'partY' }, 'part5'], + }) + }) + + it('insert missing', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: 
NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: { + partX: NrcsIngestPartChangeDetails.Inserted, + }, + }, + }, + } + + expect(() => + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + ).toThrow(/Part(.*)not found/) + }) + + it('delete missing', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: { + partX: NrcsIngestPartChangeDetails.Deleted, + }, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(1) + expect(mockCalls[0]).toEqual({ target: 'segment seg0', name: 'removePart', args: ['partX'] }) + }) + + it('update missing', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: { + partX: NrcsIngestPartChangeDetails.Updated, + }, + }, + }, + } + + // should run without error + expect(() => + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + ).toThrow(/Part(.*)not found/) + }) + + it('update without changes', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: {}, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, 
defaultOptions) + + expect(mockCalls).toHaveLength(0) + }) + + it('move part across segments', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + // Move the part + const removed = nrcsRundown.segments[0].parts.splice(0, 1) + nrcsRundown.segments[1].parts.unshift(...removed) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: { + part0: NrcsIngestPartChangeDetails.Deleted, + }, + }, + seg1: { + partChanges: { + part0: NrcsIngestPartChangeDetails.Inserted, + }, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ target: 'segment seg0', name: 'removePart', args: ['part0'] }) + expect(mockCalls[1]).toEqual({ target: 'options', name: 'transformPartPayload', args: [true] }) + expect(mockCalls[2]).toMatchObject({ + target: 'segment seg1', + name: 'replacePart', + args: [{ externalId: 'part0' }, 'part2'], + }) + }) + }) + }) +}) diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/groupPartsInMosRundownAndChanges.spec.ts b/packages/job-worker/src/blueprints/ingest/__tests__/groupPartsInMosRundownAndChanges.spec.ts new file mode 100644 index 00000000000..18dfc9c39ae --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/__tests__/groupPartsInMosRundownAndChanges.spec.ts @@ -0,0 +1,698 @@ +import { + NrcsIngestChangeDetails, + NrcsIngestPartChangeDetails, + NrcsIngestRundownChangeDetails, + NrcsIngestSegmentChangeDetailsEnum, + IngestRundown, + GroupPartsInMosRundownAndChangesResult, + IngestChangeType, +} from '@sofie-automation/blueprints-integration' +import { Complete, clone } from '@sofie-automation/corelib/dist/lib' +import { groupMosPartsIntoIngestSegments, groupPartsInRundownAndChanges } from 
'../groupPartsInRundownAndChanges' +import { updateRanksBasedOnOrder } from '../../../ingest/mosDevice/lib' + +describe('groupPartsInMosRundownAndChanges', () => { + function groupMosPartsInRundownAndChanges( + nrcsIngestRundown: IngestRundown, + previousNrcsIngestRundown: IngestRundown | undefined, + ingestChanges: Omit + ) { + return groupPartsInRundownAndChanges(nrcsIngestRundown, previousNrcsIngestRundown, ingestChanges, (segments) => + groupMosPartsIntoIngestSegments(nrcsIngestRundown.externalId, segments, ';') + ) + } + + function createBasicMosIngestRundown(): { nrcsIngestRundown: IngestRundown; combinedIngestRundown: IngestRundown } { + const rawRundown: IngestRundown = { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + payload: undefined, + segments: [ + { + externalId: 'segment-s1p1', + name: 'SEGMENT1;PART1', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 's1p1', + name: 'SEGMENT1;PART1', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-s1p2', + name: 'SEGMENT1;PART2', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 's1p2', + name: 'SEGMENT1;PART2', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-s2p1', + name: 'SEGMENT2;PART1', + rank: 2, + payload: undefined, + parts: [ + { + externalId: 's2p1', + name: 'SEGMENT2;PART1', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-s2p2', + name: 'SEGMENT2;PART2', + rank: 3, + payload: undefined, + parts: [ + { + externalId: 's2p2', + name: 'SEGMENT2;PART2', + rank: 0, + payload: undefined, + }, + ], + }, + ], + } + const groupedRundown: IngestRundown = { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + payload: undefined, + segments: [ + { + externalId: 'rundown0_s1p1', + name: 'SEGMENT1', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 's1p1', + name: 'SEGMENT1;PART1', + rank: 0, + payload: undefined, + }, + { + externalId: 's1p2', + name: 'SEGMENT1;PART2', + rank: 1, 
+ payload: undefined, + }, + ], + }, + { + externalId: 'rundown0_s2p1', + name: 'SEGMENT2', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 's2p1', + name: 'SEGMENT2;PART1', + rank: 0, + payload: undefined, + }, + { + externalId: 's2p2', + name: 'SEGMENT2;PART2', + rank: 1, + payload: undefined, + }, + ], + }, + ], + } + + return { + nrcsIngestRundown: rawRundown, + combinedIngestRundown: groupedRundown, + } + } + + it('no previous rundown, always performs full regeneration', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-s1p1': NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + } + + const result = groupMosPartsInRundownAndChanges(clone(nrcsIngestRundown), undefined, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies Complete) + }) + + it('no change in rundown', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest } + + const result = groupMosPartsInRundownAndChanges(clone(nrcsIngestRundown), nrcsIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: {}, + segmentOrderChanged: false, + }, + } satisfies Complete) + }) + + it('propogate full regeneration', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + 
segmentOrderChanged: true, + } + + const result = groupMosPartsInRundownAndChanges(clone(nrcsIngestRundown), nrcsIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: {}, + segmentOrderChanged: false, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies Complete) + }) + + describe('segment changes', () => { + it('part added to end of segment', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments.splice(1, 1) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + s1p2: NrcsIngestPartChangeDetails.Inserted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + }, + segmentOrderChanged: false, + }, + } satisfies Complete) + }) + + it('part added to beginning of segment', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments.splice(2, 1) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: 
IngestChangeType.Ingest, + changedSegmentExternalIds: { + rundown0_s2p2: 'rundown0_s2p1', + }, + segmentChanges: { + rundown0_s2p1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + + it('part removed from end of segment', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments.splice(2, 0, { + externalId: 'segment-s1p3', + name: 'SEGMENT1;PART3', + rank: 3, + payload: undefined, + parts: [ + { + externalId: 's1p3', + name: 'SEGMENT1;PART3', + rank: 0, + payload: undefined, + }, + ], + }) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + s1p3: NrcsIngestPartChangeDetails.Deleted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + }, + segmentOrderChanged: false, + }, + } satisfies Complete) + }) + + it('part removed from beginning of segment', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments.splice(2, 0, { + externalId: 'segment-s2p0', + name: 'SEGMENT2;PART0', + rank: 3, + payload: undefined, + parts: [ + { + externalId: 's2p0', + name: 'SEGMENT2;PART0', + rank: 0, + payload: undefined, + }, + ], + }) + + const result = 
groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: { + rundown0_s2p0: 'rundown0_s2p1', + }, + segmentChanges: { + rundown0_s2p1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + + it('part has changes', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + // Note: this is ignored for inserts/deletes + 'segment-s1p2': { anything: 'here' } as any, // Note: contents is ignored + 'segment-s2p2': NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, // Explicitly force regeneration + }, + } + + const previousIngestRundown = clone(nrcsIngestRundown) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + s1p2: NrcsIngestPartChangeDetails.Updated, + }, + partOrderChanged: false, + payloadChanged: false, + }, + rundown0_s2p1: { + partChanges: { + s2p2: NrcsIngestPartChangeDetails.Updated, + }, + partOrderChanged: false, + payloadChanged: false, + }, + }, + segmentOrderChanged: false, + }, + } satisfies Complete) + }) + + it('segment renamed', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments[0].name = 
'SEGMENT0;PART1' + previousIngestRundown.segments[0].parts[0].name = 'SEGMENT0;PART1' + previousIngestRundown.segments[1].name = 'SEGMENT0;PART2' + previousIngestRundown.segments[1].parts[0].name = 'SEGMENT0;PART2' + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + // name change counts as a payload change + s1p1: NrcsIngestPartChangeDetails.Updated, + s1p2: NrcsIngestPartChangeDetails.Updated, + }, + partOrderChanged: false, + payloadChanged: true, + }, + }, + segmentOrderChanged: false, + }, + } satisfies Complete) + }) + + it('segment id changed', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments[0].externalId = 'segment-s1p1-old' + previousIngestRundown.segments[0].parts[0].externalId = 's1p1-old' + previousIngestRundown.segments[0].parts[0].name = 'SEGMENT0;PART1' + previousIngestRundown.segments[1].externalId = 'segment-s1p2-old' + previousIngestRundown.segments[1].parts[0].externalId = 's1p2-old' + previousIngestRundown.segments[1].parts[0].name = 'SEGMENT0;PART2' + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: { + 'rundown0_s1p1-old': 'rundown0_s1p1', + }, + segmentChanges: { + rundown0_s1p1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + 
} satisfies Complete) + }) + + it('segment id changed and moved', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments[0].externalId = 'segment-s1p1-old' + previousIngestRundown.segments[0].parts[0].externalId = 's1p1-old' + previousIngestRundown.segments[0].parts[0].name = 'SEGMENT0;PART1' + previousIngestRundown.segments[1].externalId = 'segment-s1p2-old' + previousIngestRundown.segments[1].parts[0].externalId = 's1p2-old' + previousIngestRundown.segments[1].parts[0].name = 'SEGMENT0;PART2' + previousIngestRundown.segments = [ + previousIngestRundown.segments[2], + previousIngestRundown.segments[3], + previousIngestRundown.segments[0], + previousIngestRundown.segments[1], + ] + updateRanksBasedOnOrder(previousIngestRundown) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: { + 'rundown0_s1p1-old': 'rundown0_s1p1', + }, + segmentChanges: { + rundown0_s1p1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + + it('swap segment parts', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments = [ + previousIngestRundown.segments[1], + previousIngestRundown.segments[0], + previousIngestRundown.segments[2], + 
previousIngestRundown.segments[3], + ] + updateRanksBasedOnOrder(previousIngestRundown) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: { + rundown0_s1p2: 'rundown0_s1p1', + }, + segmentChanges: { + rundown0_s1p1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + + it('merge segments', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments = [ + previousIngestRundown.segments[0], + previousIngestRundown.segments[2], + previousIngestRundown.segments[1], + previousIngestRundown.segments[3], + ] + updateRanksBasedOnOrder(previousIngestRundown) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + s1p2: NrcsIngestPartChangeDetails.Inserted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + rundown0_s1p2: NrcsIngestSegmentChangeDetailsEnum.Deleted, + rundown0_s2p1: { + partChanges: { + s2p2: NrcsIngestPartChangeDetails.Inserted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + rundown0_s2p2: NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + + it('split segments', () => { + const { nrcsIngestRundown } = createBasicMosIngestRundown() + + const 
ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + nrcsIngestRundown.segments = [ + nrcsIngestRundown.segments[0], + nrcsIngestRundown.segments[2], + nrcsIngestRundown.segments[1], + nrcsIngestRundown.segments[3], + ] + updateRanksBasedOnOrder(nrcsIngestRundown) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + payload: undefined, + segments: [ + { + externalId: 'rundown0_s1p1', + name: 'SEGMENT1', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 's1p1', + name: 'SEGMENT1;PART1', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'rundown0_s2p1', + name: 'SEGMENT2', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 's2p1', + name: 'SEGMENT2;PART1', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'rundown0_s1p2', + name: 'SEGMENT1', + rank: 2, + payload: undefined, + parts: [ + { + externalId: 's1p2', + name: 'SEGMENT1;PART2', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'rundown0_s2p2', + name: 'SEGMENT2', + rank: 3, + payload: undefined, + parts: [ + { + externalId: 's2p2', + name: 'SEGMENT2;PART2', + rank: 0, + payload: undefined, + }, + ], + }, + ], + }, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + s1p2: NrcsIngestPartChangeDetails.Deleted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + rundown0_s1p2: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + rundown0_s2p1: { + partChanges: { + s2p2: NrcsIngestPartChangeDetails.Deleted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + rundown0_s2p2: 
NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + }) +}) diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/util.ts b/packages/job-worker/src/blueprints/ingest/__tests__/util.ts new file mode 100644 index 00000000000..3ea6a573daf --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/__tests__/util.ts @@ -0,0 +1,29 @@ +import type { + IngestPart, + SofieIngestPart, + IngestSegment, + SofieIngestSegment, +} from '@sofie-automation/blueprints-integration' +import type { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import type { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' + +function toSofieIngestPart(ingestPart: IngestPart): SofieIngestPart { + return { + ...ingestPart, + userEditStates: {}, + } +} +function toSofieIngestSegment(ingestSegment: IngestSegment): SofieIngestSegment { + return { + ...ingestSegment, + userEditStates: {}, + parts: ingestSegment.parts.map(toSofieIngestPart), + } +} +export function toSofieIngestRundown(ingestRundown: IngestRundownWithSource): SofieIngestRundownWithSource { + return { + ...ingestRundown, + userEditStates: {}, + segments: ingestRundown.segments.map(toSofieIngestSegment), + } +} diff --git a/packages/job-worker/src/blueprints/ingest/defaultApplyIngestChanges.ts b/packages/job-worker/src/blueprints/ingest/defaultApplyIngestChanges.ts new file mode 100644 index 00000000000..4a69ea438a5 --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/defaultApplyIngestChanges.ts @@ -0,0 +1,440 @@ +import { + IngestRundown, + NrcsIngestChangeDetails, + IngestDefaultChangesOptions, + NrcsIngestRundownChangeDetails, + MutableIngestRundown, + NrcsIngestSegmentChangeDetails, + IngestSegment, + NrcsIngestSegmentChangeDetailsEnum, + MutableIngestSegment, + NrcsIngestSegmentChangeDetailsObject, + NrcsIngestPartChangeDetails, + IngestPart, + 
MutableIngestPart, +} from '@sofie-automation/blueprints-integration' +import { assertNever, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import { ReadonlyDeep } from 'type-fest' + +export function defaultApplyIngestChanges( + mutableIngestRundown: MutableIngestRundown, + nrcsRundown: IngestRundown, + ingestChanges: NrcsIngestChangeDetails, + options: IngestDefaultChangesOptions +): void { + if (ingestChanges.source !== 'ingest') + throw new Error(`Changes passed to defaultApplyIngestChanges must be from ingest source`) + + const payloadTransformers = new PayloadTransformers(options, mutableIngestRundown) + + let regenerateAllContents = false + + switch (ingestChanges.rundownChanges) { + case NrcsIngestRundownChangeDetails.Regenerate: { + mutableIngestRundown.replacePayload( + payloadTransformers.transformRundownPayload(nrcsRundown, mutableIngestRundown) + ) + + mutableIngestRundown.setName(nrcsRundown.name) + regenerateAllContents = true + + break + } + case NrcsIngestRundownChangeDetails.Payload: { + mutableIngestRundown.replacePayload( + payloadTransformers.transformRundownPayload(nrcsRundown, mutableIngestRundown) + ) + + mutableIngestRundown.setName(nrcsRundown.name) + break + } + case undefined: + case null: + // No changes + break + default: + assertNever(ingestChanges.rundownChanges) + } + + // Perform any renames before any other changes + if (ingestChanges.changedSegmentExternalIds) { + applySegmentRenames(mutableIngestRundown, ingestChanges.changedSegmentExternalIds) + } + + if (regenerateAllContents) { + // Track any existing segment externalId changes + const existingSegmentExternalIdChanges = new Map() + for (const segment of mutableIngestRundown.segments) { + const originalExternalId = segment.originalExternalId + if (originalExternalId) { + existingSegmentExternalIdChanges.set(segment.externalId, originalExternalId) + } + } + + mutableIngestRundown.removeAllSegments() + mutableIngestRundown.forceFullRegenerate() + + // Regenerate 
all the segments + for (const nrcsSegment of nrcsRundown.segments) { + mutableIngestRundown.replaceSegment( + payloadTransformers.transformPayloadsOnSegmentAndParts( + nrcsSegment, + mutableIngestRundown.getSegment(nrcsSegment.externalId) + ), + null + ) + } + + // Preserve any segment externalIds changes that were performed before this + // This allows blueprints to do renames, and then trigger a full regeneration and remember the relationship + // this is important to avoid leaking adlibbed parts into segments that will get stuck until a reset + for (const nrcsSegment of nrcsRundown.segments) { + const originalExternalId = existingSegmentExternalIdChanges.get(nrcsSegment.externalId) + if (originalExternalId) { + mutableIngestRundown.changeSegmentOriginalExternalId(nrcsSegment.externalId, originalExternalId) + } + } + } else { + // Propogate segment changes + if (ingestChanges.segmentChanges) { + applyAllSegmentChanges(mutableIngestRundown, nrcsRundown, ingestChanges.segmentChanges, payloadTransformers) + } + + if (ingestChanges.segmentOrderChanged) { + applySegmentOrder(mutableIngestRundown, nrcsRundown) + } + } +} + +function applySegmentOrder( + mutableIngestRundown: MutableIngestRundown, + nrcsRundown: IngestRundown +) { + // Figure out which segments don't have a new rank, and will need interpolating + const missingNewRank: Array<{ segmentId: string; afterId: string | null }> = [] + const segmentIdRanksInRundown = normalizeArrayToMap(nrcsRundown.segments, 'externalId') + mutableIngestRundown.segments.forEach((segment, i) => { + if (!segmentIdRanksInRundown.has(segment.externalId)) { + missingNewRank.push({ + segmentId: segment.externalId, + afterId: i > 0 ? 
mutableIngestRundown.segments[i - 1].externalId : null, + }) + } + }) + + // Run through the segments in reverse order, so that we can insert them in the correct order + for (let i = nrcsRundown.segments.length - 1; i >= 0; i--) { + const nrcsSegment = nrcsRundown.segments[i] + + // If the Segment doesn't exist, ignore it + if (!mutableIngestRundown.getSegment(nrcsSegment.externalId)) continue + + // Find the first valid segment after this one + let beforeNrcsSegmentId: string | null = null + for (let o = i + 1; o < nrcsRundown.segments.length; o++) { + const otherSegment = nrcsRundown.segments[o] + if (mutableIngestRundown.getSegment(otherSegment.externalId)) { + beforeNrcsSegmentId = otherSegment.externalId + break + } + } + + mutableIngestRundown.moveSegmentBefore(nrcsSegment.externalId, beforeNrcsSegmentId) + } + + // Run through the segments without a defined rank, and ensure they are positioned after the same segment as before + for (const segmentInfo of missingNewRank) { + mutableIngestRundown.moveSegmentAfter(segmentInfo.segmentId, segmentInfo.afterId) + } +} + +function applyAllSegmentChanges( + mutableIngestRundown: MutableIngestRundown, + nrcsRundown: IngestRundown, + changes: Record, + payloadTransformers: PayloadTransformers +) { + const nrcsSegmentMap = normalizeArrayToMap(nrcsRundown.segments, 'externalId') + const nrcsSegmentIds = nrcsRundown.segments.map((s) => s.externalId) + + // Perform the inserts last, so that we can ensure they happen in a sensible order + const segmentsToInsert: IngestSegment[] = [] + + // Apply changes and delete segments + for (const [segmentId, change] of Object.entries(changes)) { + if (!change) continue + + const nrcsSegment = nrcsSegmentMap.get(segmentId) + applyChangesForSingleSegment( + mutableIngestRundown, + nrcsSegment, + segmentsToInsert, + segmentId, + change, + payloadTransformers + ) + } + + // Now we can insert the new ones in descending order + segmentsToInsert.sort((a, b) => 
nrcsSegmentIds.indexOf(b.externalId) - nrcsSegmentIds.indexOf(a.externalId)) + for (const nrcsSegment of segmentsToInsert) { + const segmentIndex = nrcsSegmentIds.indexOf(nrcsSegment.externalId) + const beforeSegmentId = segmentIndex !== -1 ? nrcsSegmentIds[segmentIndex + 1] ?? null : null + + mutableIngestRundown.replaceSegment( + payloadTransformers.transformPayloadsOnSegmentAndParts( + nrcsSegment, + mutableIngestRundown.getSegment(nrcsSegment.externalId) + ), + beforeSegmentId + ) + } +} + +function applySegmentRenames( + mutableIngestRundown: MutableIngestRundown, + changedSegmentExternalIds: Record +) { + for (const [oldExternalId, newExternalId] of Object.entries(changedSegmentExternalIds)) { + if (!oldExternalId || !newExternalId) continue + + mutableIngestRundown.changeSegmentExternalId(oldExternalId, newExternalId) + } + // for (const [segmentId, change] of Object.entries(changes)) { + // if (!change) continue + + // if (change && typeof change === 'object' && change.oldExternalId) { + // const mutableSegment = mutableIngestRundown.getSegment(change.oldExternalId) + // if (!mutableSegment) throw new Error(`Segment ${change.oldExternalId} not found in rundown`) + + // mutableIngestRundown.renameSegment(change.oldExternalId, segmentId) + // } + // } +} + +function applyChangesForSingleSegment( + mutableIngestRundown: MutableIngestRundown, + nrcsSegment: IngestSegment | undefined, + segmentsToInsert: IngestSegment[], + segmentId: string, + change: NrcsIngestSegmentChangeDetails, + payloadTransformers: PayloadTransformers +) { + const mutableSegment = mutableIngestRundown.getSegment(segmentId) + + switch (change) { + case NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated: { + if (!nrcsSegment) throw new Error(`Segment ${segmentId} not found in nrcs rundown`) + + segmentsToInsert.push(nrcsSegment) + + break + } + case NrcsIngestSegmentChangeDetailsEnum.Deleted: { + mutableIngestRundown.removeSegment(segmentId) + + break + } + default: { + if 
(!mutableSegment) throw new Error(`Segment ${segmentId} not found in rundown`) + if (!nrcsSegment) throw new Error(`Segment ${segmentId} not found in nrcs rundown`) + + applyChangesObjectForSingleSegment(mutableSegment, nrcsSegment, change, payloadTransformers) + + break + } + } +} + +function applyChangesObjectForSingleSegment( + mutableSegment: MutableIngestSegment, + nrcsSegment: IngestSegment, + segmentChange: NrcsIngestSegmentChangeDetailsObject, + payloadTransformers: PayloadTransformers +) { + if (segmentChange.payloadChanged) { + mutableSegment.replacePayload(payloadTransformers.transformSegmentPayload(nrcsSegment, mutableSegment)) + mutableSegment.setName(nrcsSegment.name) + } + + if (segmentChange.partChanges) { + const nrcsPartMap = normalizeArrayToMap(nrcsSegment.parts, 'externalId') + const nrcsPartIds = nrcsSegment.parts.map((s) => s.externalId) + + // Perform the inserts last, so that we can ensure they happen in a sensible order + const partsToInsert: IngestPart[] = [] + + for (const [partId, change] of Object.entries( + segmentChange.partChanges + )) { + if (!change) continue + + const nrcsPart = nrcsPartMap.get(partId) + applyChangesForPart(mutableSegment, nrcsPart, partsToInsert, partId, change, payloadTransformers) + } + + // Now we can insert them in descending order + partsToInsert.sort((a, b) => nrcsPartIds.indexOf(b.externalId) - nrcsPartIds.indexOf(a.externalId)) + for (const nrcsPart of partsToInsert) { + const partIndex = nrcsPartIds.indexOf(nrcsPart.externalId) + const beforePartId = partIndex !== -1 ? nrcsPartIds[partIndex + 1] ?? 
null : null + + mutableSegment.replacePart( + payloadTransformers.transformPayloadOnPart(nrcsPart, mutableSegment.getPart(nrcsPart.externalId)), + beforePartId + ) + } + } + + if (segmentChange.partOrderChanged) { + applyPartOrder(mutableSegment, nrcsSegment) + } +} + +function applyChangesForPart( + mutableSegment: MutableIngestSegment, + nrcsPart: IngestPart | undefined, + partsToInsert: IngestPart[], + partId: string, + change: NrcsIngestPartChangeDetails, + payloadTransformers: PayloadTransformers +) { + const mutablePart = mutableSegment.getPart(partId) + + switch (change) { + case NrcsIngestPartChangeDetails.Inserted: { + if (!nrcsPart) throw new Error(`Part ${partId} not found in nrcs rundown`) + + // Batch the inserts to be performed last + partsToInsert.push(nrcsPart) + break + } + case NrcsIngestPartChangeDetails.Deleted: { + mutableSegment.removePart(partId) + + break + } + case NrcsIngestPartChangeDetails.Updated: { + if (!mutablePart) throw new Error(`Part ${partId} not found in segment`) + if (!nrcsPart) throw new Error(`Part ${partId} not found in nrcs segment`) + + mutablePart.replacePayload(payloadTransformers.transformPartPayload(nrcsPart, mutablePart)) + mutablePart.setName(nrcsPart.name) + + break + } + default: { + assertNever(change) + } + } +} + +function applyPartOrder(mutableSegment: MutableIngestSegment, nrcsSegment: IngestSegment) { + // Figure out which segments don't have a new rank, and will need interpolating + const missingNewRank: Array<{ partId: string; afterId: string | null }> = [] + const partIdRanksInSegment = normalizeArrayToMap(nrcsSegment.parts, 'externalId') + mutableSegment.parts.forEach((part, i) => { + if (!partIdRanksInSegment.has(part.externalId)) { + missingNewRank.push({ + partId: part.externalId, + afterId: i > 0 ? 
mutableSegment.parts[i - 1].externalId : null, + }) + } + }) + + // Run through the segments in reverse order, so that we can insert them in the correct order + for (let i = nrcsSegment.parts.length - 1; i >= 0; i--) { + const nrcsPart = nrcsSegment.parts[i] + + // If the Part doesn't exist, ignore it + if (!mutableSegment.getPart(nrcsPart.externalId)) continue + + // Find the first valid segment after this one + let beforeNrcsPartId: string | null = null + for (let o = i + 1; o < nrcsSegment.parts.length; o++) { + const otherPart = nrcsSegment.parts[o] + if (mutableSegment.getPart(otherPart.externalId)) { + beforeNrcsPartId = otherPart.externalId + break + } + } + + mutableSegment.movePartBefore(nrcsPart.externalId, beforeNrcsPartId) + } + + // Run through the segments without a defined rank, and ensure they are positioned after the same segment as before + for (const segmentInfo of missingNewRank) { + mutableSegment.movePartAfter(segmentInfo.partId, segmentInfo.afterId) + } +} + +class PayloadTransformers { + readonly #options: IngestDefaultChangesOptions + readonly #initialMutableParts = new Map>() + readonly #initialMutableSegments = new Map>() + + constructor( + options: IngestDefaultChangesOptions, + mutableIngestRundown: MutableIngestRundown + ) { + this.#options = options + + // Collect all of the Part payloads before any operation was run + for (const segment of mutableIngestRundown.segments) { + this.#initialMutableSegments.set(segment.externalId, segment) + + for (const part of segment.parts) { + this.#initialMutableParts.set(part.externalId, part) + } + } + } + + transformRundownPayload( + nrcsRundown: IngestRundown, + mutableIngestRundown: MutableIngestRundown + ): ReadonlyDeep | TRundownPayload { + return this.#options.transformRundownPayload(nrcsRundown.payload, mutableIngestRundown.payload) + } + + transformSegmentPayload( + nrcsSegment: IngestSegment, + mutableSegment: MutableIngestSegment + ): ReadonlyDeep | TSegmentPayload { + return 
this.#options.transformSegmentPayload(nrcsSegment.payload, mutableSegment?.payload) + } + + transformPartPayload( + nrcsPart: IngestPart, + mutablePart: MutableIngestPart + ): ReadonlyDeep | TPartPayload { + return this.#options.transformPartPayload(nrcsPart.payload, mutablePart?.payload) + } + + transformPayloadsOnSegmentAndParts( + segment: IngestSegment, + mutableSegment: MutableIngestSegment | undefined + ): IngestSegment { + return { + ...segment, + payload: this.#options.transformSegmentPayload( + segment.payload, + mutableSegment ? mutableSegment.payload : this.#initialMutableSegments.get(segment.externalId)?.payload + ) as TSegmentPayload, + parts: segment.parts.map((part) => + this.transformPayloadOnPart(part, mutableSegment?.getPart(part.externalId)) + ), + } + } + transformPayloadOnPart( + part: IngestPart, + mutablePart: MutableIngestPart | undefined + ): IngestPart { + return { + ...part, + payload: this.#options.transformPartPayload( + part.payload, + mutablePart ? mutablePart.payload : this.#initialMutableParts.get(part.externalId)?.payload + ) as TPartPayload, + } + } +} diff --git a/packages/job-worker/src/blueprints/ingest/groupPartsInRundownAndChanges.ts b/packages/job-worker/src/blueprints/ingest/groupPartsInRundownAndChanges.ts new file mode 100644 index 00000000000..77a2d149312 --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/groupPartsInRundownAndChanges.ts @@ -0,0 +1,287 @@ +import { + GroupPartsInMosRundownAndChangesResult, + IngestChangeType, + IngestPart, + IngestRundown, + IngestSegment, + NrcsIngestChangeDetails, + NrcsIngestPartChangeDetails, + NrcsIngestRundownChangeDetails, + NrcsIngestSegmentChangeDetails, + NrcsIngestSegmentChangeDetailsEnum, +} from '@sofie-automation/blueprints-integration' +import { Complete, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import _ = require('underscore') + +export function groupMosPartsIntoIngestSegments( + rundownExternalId: string, + ingestSegments: 
IngestSegment[], + separator: string +): IngestSegment[] { + const groupedParts: { name: string; parts: IngestPart[] }[] = [] + + for (const ingestSegment of ingestSegments) { + const segmentName = ingestSegment.name.split(separator)[0] || ingestSegment.name + + const lastSegment = _.last(groupedParts) + if (lastSegment && lastSegment.name === segmentName) { + lastSegment.parts.push(...ingestSegment.parts) + } else { + groupedParts.push({ name: segmentName, parts: [...ingestSegment.parts] }) + } + } + + return groupedParts.map( + (partGroup, i) => + ({ + externalId: `${rundownExternalId}_${partGroup.parts[0].externalId}`, + name: partGroup.name, + rank: i, + parts: partGroup.parts.map((part, i) => ({ ...part, rank: i })), + payload: undefined, + } satisfies IngestSegment) + ) +} + +/** + * Group Parts in a Rundown and return a new changes object + * Note: This ignores a lot of the contents of the `ingestChanges` object, and relies more on the `previousNrcsIngestRundown` instead + * @param nrcsIngestRundown The rundown whose parts needs grouping + * @param previousNrcsIngestRundown The rundown prior to the changes, if known + * @param ingestChanges The changes which have been performed in `nrcsIngestRundown`, that need to translating + * @param groupPartsIntoSegmentsOrSeparator A string to split the segment name on, or a function to group parts into segments + * @returns A transformed rundown and changes object + */ +export function groupPartsInRundownAndChanges( + nrcsIngestRundown: IngestRundown, + previousNrcsIngestRundown: IngestRundown | undefined, + ingestChanges: Omit, + groupPartsIntoSegments: (ingestSegments: IngestSegment[]) => IngestSegment[] +): GroupPartsInMosRundownAndChangesResult { + // Combine parts into segments + const combinedIngestRundown = groupPartsIntoNewIngestRundown( + nrcsIngestRundown, + groupPartsIntoSegments + ) + + // If there is no previous rundown, we need to regenerate everything + if (!previousNrcsIngestRundown) { + return { + 
nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } + } + + // Combine parts into segments, in both the new and old ingest rundowns + const oldCombinedIngestRundown = groupPartsIntoNewIngestRundown(previousNrcsIngestRundown, groupPartsIntoSegments) + + // Calculate the changes to each segment + const allPartWithChanges = findAllPartsWithChanges(nrcsIngestRundown, ingestChanges) + const segmentChanges = calculateSegmentChanges(oldCombinedIngestRundown, combinedIngestRundown, allPartWithChanges) + + // Calculate other changes + const changedSegmentExternalIds = calculateSegmentExternalIdChanges(oldCombinedIngestRundown, combinedIngestRundown) + const segmentOrderChanged = hasSegmentOrderChanged( + combinedIngestRundown.segments, + oldCombinedIngestRundown.segments + ) + + // Ensure id changes aren't flagged as deletions + for (const [oldSegmentExternalId, newSegmentExternalId] of Object.entries(changedSegmentExternalIds)) { + if (!oldSegmentExternalId || !newSegmentExternalId) continue + + if (segmentChanges[oldSegmentExternalId] === NrcsIngestSegmentChangeDetailsEnum.Deleted) { + delete segmentChanges[oldSegmentExternalId] + } + } + + return { + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + rundownChanges: ingestChanges.rundownChanges, + segmentOrderChanged, + segmentChanges, + changedSegmentExternalIds, + } satisfies Complete, + } +} + +function findAllPartsWithChanges( + nrcsIngestRundown: IngestRundown, + sourceChanges: NrcsIngestChangeDetails +): Set { + if (!sourceChanges.segmentChanges) return new Set() + + const partChanges = new Set() + + for (const segment of nrcsIngestRundown.segments) { + const segmentChanges = sourceChanges.segmentChanges[segment.externalId] + if (!segmentChanges) continue + + for (const part of segment.parts) { + switch (segmentChanges) { + case 
NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated: + // This could have been an update, ensure that gets propogated + partChanges.add(part.externalId) + break + case NrcsIngestSegmentChangeDetailsEnum.Deleted: + // Deletions will be tracked elsewhere + break + default: + if (typeof segmentChanges !== 'object') + throw new Error(`Unexpected segment change for "${segment.externalId}": ${segmentChanges}`) + + // Something changed, this will cause the necessary propogation + partChanges.add(part.externalId) + + break + } + } + } + + return partChanges +} + +function calculateSegmentChanges( + oldCombinedIngestRundown: IngestRundown, + combinedIngestRundown: IngestRundown, + allPartWithChanges: Set +): Record { + const oldIngestSegments = normalizeArrayToMap(oldCombinedIngestRundown.segments, 'externalId') + + const segmentChanges: Record = {} + + // Track any segment changes + for (const segment of combinedIngestRundown.segments) { + const oldIngestSegment = oldIngestSegments.get(segment.externalId) + + if (!oldIngestSegment) { + segmentChanges[segment.externalId] = NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated + } else { + const segmentPartChanges: Record = {} + + const newPartIds = new Set(segment.parts.map((p) => p.externalId)) + const oldPartMap = normalizeArrayToMap(oldIngestSegment.parts, 'externalId') + + for (const part of segment.parts) { + const oldPart = oldPartMap.get(part.externalId) + if (!oldPart) { + segmentPartChanges[part.externalId] = NrcsIngestPartChangeDetails.Inserted + } else if ( + allPartWithChanges.has(part.externalId) || + oldPart.name !== part.name || + !_.isEqual(oldPart.payload, part.payload) + ) { + segmentPartChanges[part.externalId] = NrcsIngestPartChangeDetails.Updated + } + } + for (const oldPart of oldIngestSegment.parts) { + if (!newPartIds.has(oldPart.externalId)) { + segmentPartChanges[oldPart.externalId] = NrcsIngestPartChangeDetails.Deleted + } + } + + const payloadChanged = + oldIngestSegment.name !== segment.name || 
!_.isEqual(oldIngestSegment.payload, segment.payload) + + const partOrderChanged = hasPartOrderChanged(segment.parts, oldIngestSegment.parts) + if (partOrderChanged || payloadChanged || Object.keys(segmentPartChanges).length > 0) { + segmentChanges[segment.externalId] = { + partChanges: segmentPartChanges, + partOrderChanged, + payloadChanged, + } + } + } + } + + // Track any segment deletions + if (oldCombinedIngestRundown) { + const newSegmentIds = new Set(combinedIngestRundown.segments.map((s) => s.externalId)) + for (const oldSegment of oldCombinedIngestRundown.segments) { + if (!newSegmentIds.has(oldSegment.externalId)) { + segmentChanges[oldSegment.externalId] = NrcsIngestSegmentChangeDetailsEnum.Deleted + } + } + } + + return segmentChanges +} + +function hasSegmentOrderChanged(ingestSegments: IngestSegment[], oldIngestSegments: IngestSegment[]): boolean { + if (ingestSegments.length !== oldIngestSegments.length) return true + + for (let i = 0; i < ingestSegments.length; i++) { + if (ingestSegments[i].externalId !== oldIngestSegments[i].externalId) return true + } + + return false +} + +function hasPartOrderChanged(ingestParts: IngestPart[], oldIngestParts: IngestPart[]): boolean { + if (ingestParts.length !== oldIngestParts.length) return true + + for (let i = 0; i < ingestParts.length; i++) { + if (ingestParts[i].externalId !== oldIngestParts[i].externalId) return true + } + + return false +} + +function groupPartsIntoNewIngestRundown( + ingestRundown: IngestRundown, + groupPartsIntoIngestSements: (ingestSegments: IngestSegment[]) => IngestSegment[] +): IngestRundown { + return { + ...(ingestRundown as IngestRundown), + segments: groupPartsIntoIngestSements(ingestRundown.segments), + } +} + +function calculateSegmentExternalIdChanges( + oldIngestRundown: IngestRundown, + newIngestRundown: IngestRundown +): Record { + const segmentExternalIdChanges: Record = {} + + const oldIngestSegmentMap = normalizeArrayToMap(oldIngestRundown.segments, 'externalId') + 
const newIngestSegmentMap = normalizeArrayToMap(newIngestRundown.segments, 'externalId') + + const removedSegments = oldIngestRundown.segments.filter((s) => !newIngestSegmentMap.has(s.externalId)) + let addedSegments = newIngestRundown.segments.filter((s) => !oldIngestSegmentMap.has(s.externalId)) + + if (removedSegments.length === 0 || addedSegments.length === 0) return {} + + for (const removedSegment of removedSegments) { + let newSegmentExternalId: string | undefined + + // try finding "it" in the added, using name + // Future: this may not be particularly accurate, as multiple could have been formed + newSegmentExternalId = addedSegments.find((se) => se.name === removedSegment.name)?.externalId + + if (!newSegmentExternalId) { + // second try, match with any parts: + newSegmentExternalId = addedSegments.find((se) => { + for (const part of removedSegment.parts) { + if (se.parts.find((p) => p.externalId === part.externalId)) { + return true + } + } + + return false + })?.externalId + } + if (newSegmentExternalId) { + segmentExternalIdChanges[removedSegment.externalId] = newSegmentExternalId + + // Ensure the same id doesn't get used multiple times + addedSegments = addedSegments.filter((s) => s.externalId !== newSegmentExternalId) + } + } + + return segmentExternalIdChanges +} diff --git a/packages/job-worker/src/blueprints/postProcess.ts b/packages/job-worker/src/blueprints/postProcess.ts index 6e9d2f8fcaa..acc2a898408 100644 --- a/packages/job-worker/src/blueprints/postProcess.ts +++ b/packages/job-worker/src/blueprints/postProcess.ts @@ -44,6 +44,7 @@ import { setDefaultIdOnExpectedPackages } from '../ingest/expectedPackages' import { logger } from '../logging' import { validateTimeline } from 'superfly-timeline' import { ReadonlyDeep } from 'type-fest' +import { translateUserEditsFromBlueprint } from './context/lib' function getIdHash(docType: string, usedIds: Map, uniqueId: string): string { const count = usedIds.get(uniqueId) @@ -108,6 +109,7 @@ export 
function postProcessPieces( startPartId: partId, invalid: setInvalid ?? false, timelineObjectsString: EmptyPieceTimelineObjectsBlob, + userEditOperations: translateUserEditsFromBlueprint(orgPiece.userEditOperations, [blueprintId]), } if (piece.pieceType !== IBlueprintPieceType.Normal) { diff --git a/packages/job-worker/src/db/collections.ts b/packages/job-worker/src/db/collections.ts index faa2a64fc27..1b9b7142227 100644 --- a/packages/job-worker/src/db/collections.ts +++ b/packages/job-worker/src/db/collections.ts @@ -17,7 +17,8 @@ import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/Buck import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { ExpectedMediaItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedMediaItem' import { ExpectedPlayoutItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' -import { IngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { SofieIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' @@ -97,7 +98,8 @@ export interface IDirectCollections { BucketAdLibPieces: ICollection ExpectedMediaItems: ICollection ExpectedPlayoutItems: ICollection - IngestDataCache: ICollection + SofieIngestDataCache: ICollection + NrcsIngestDataCache: ICollection Parts: ICollection PartInstances: ICollection PeripheralDevices: IReadOnlyCollection @@ -158,7 +160,14 @@ export function getMongoCollections( database.collection(CollectionName.ExpectedPlayoutItems), allowWatchers ), - IngestDataCache: 
wrapMongoCollection(database.collection(CollectionName.IngestDataCache), allowWatchers), + SofieIngestDataCache: wrapMongoCollection( + database.collection(CollectionName.SofieIngestDataCache), + allowWatchers + ), + NrcsIngestDataCache: wrapMongoCollection( + database.collection(CollectionName.NrcsIngestDataCache), + allowWatchers + ), Parts: wrapMongoCollection(database.collection(CollectionName.Parts), allowWatchers), PartInstances: wrapMongoCollection(database.collection(CollectionName.PartInstances), allowWatchers), PeripheralDevices: wrapMongoCollection( diff --git a/packages/job-worker/src/ingest/__tests__/ingest.test.ts b/packages/job-worker/src/ingest/__tests__/ingest.test.ts index 76aacf22b5b..e46a0f827bf 100644 --- a/packages/job-worker/src/ingest/__tests__/ingest.test.ts +++ b/packages/job-worker/src/ingest/__tests__/ingest.test.ts @@ -45,27 +45,42 @@ import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { insertQueuedPartWithPieces } from '../../playout/adlibUtils' import { UserErrorMessage } from '@sofie-automation/corelib/dist/error' import { PlayoutPartInstanceModel } from '../../playout/model/PlayoutPartInstanceModel' +import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { wrapGenericIngestJob, wrapGenericIngestJobWithPrecheck } from '../jobWrappers' + +const handleRemovedRundownWrapped = wrapGenericIngestJob(handleRemovedRundown) +const handleUpdatedRundownWrapped = wrapGenericIngestJob(handleUpdatedRundown) +const handleUpdatedRundownMetaDataWrapped = wrapGenericIngestJob(handleUpdatedRundownMetaData) +const handleRemovedSegmentWrapped = wrapGenericIngestJob(handleRemovedSegment) +const handleUpdatedSegmentWrapped = wrapGenericIngestJobWithPrecheck(handleUpdatedSegment) +const handleUpdatedSegmentRanksWrapped = wrapGenericIngestJob(handleUpdatedSegmentRanks) +const handleRemovedPartWrapped = wrapGenericIngestJob(handleRemovedPart) +const handleUpdatedPartWrapped 
= wrapGenericIngestJob(handleUpdatedPart) const externalId = 'abcde' const rundownData1: IngestRundown = { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part0', name: 'Part 0', rank: 0, + payload: undefined, }, { externalId: 'part1', name: 'Part 1', rank: 0, + payload: undefined, }, ], }, @@ -73,11 +88,13 @@ const rundownData1: IngestRundown = { externalId: 'segment1', name: 'Segment 1', rank: 0, + payload: undefined, parts: [ { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -161,7 +178,7 @@ describe('Test ingest actions for rundowns and segments', () => { async function recreateRundown(data: IngestRundown): Promise { await context.clearAllRundownsAndPlaylists() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: data.externalId, ingestRundown: data, isCreateAction: true, @@ -177,6 +194,7 @@ describe('Test ingest actions for rundowns and segments', () => { await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeTruthy() await context.mockCollections.Rundowns.update({}, { $set: { orphaned: RundownOrphanedReason.DELETED } }) + await context.mockCollections.NrcsIngestDataCache.remove({}) } test('dataRundownCreate', async () => { @@ -184,7 +202,7 @@ describe('Test ingest actions for rundowns and segments', () => { await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, ingestRundown: rundownData1, isCreateAction: true, @@ -219,7 +237,7 @@ describe('Test ingest actions for rundowns and segments', () => { name: 'MyMockRundownRenamed', } - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, 
ingestRundown: rundownData, isCreateAction: false, @@ -259,16 +277,18 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 0, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }) - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: false, @@ -303,11 +323,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 0, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }) @@ -315,9 +337,10 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'partZ', name: 'Part Z', rank: 0, + payload: undefined, }) - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: false, @@ -355,17 +378,19 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 0, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }) await recreateRundown(initialRundownData) - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, ingestRundown: rundownData1, isCreateAction: false, @@ -399,7 +424,7 @@ describe('Test ingest actions for rundowns and segments', () => { const rundownData = clone(rundownData1) expect(rundownData.segments[0].parts.shift()).toBeTruthy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: false, @@ -434,9 +459,10 @@ describe('Test ingest actions for rundowns and segments', () => { 
externalId: externalId, name: 'MyMockRundownRenamed', type: 'mock', + payload: undefined, } - await handleUpdatedRundownMetaData(context, { + await handleUpdatedRundownMetaDataWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, rundownSource: createRundownSource(device), @@ -469,7 +495,7 @@ describe('Test ingest actions for rundowns and segments', () => { test('dataRundownDelete', async () => { await recreateRundown(rundownData1) - await handleRemovedRundown(context, { + await handleRemovedRundownWrapped(context, { rundownExternalId: externalId, }) @@ -482,7 +508,7 @@ describe('Test ingest actions for rundowns and segments', () => { await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() await expect( - handleRemovedRundown(context, { + handleRemovedRundownWrapped(context, { rundownExternalId: externalId, }) ).rejects.toThrow(/Rundown.*not found/i) @@ -518,16 +544,19 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part1', name: 'Part 1', rank: 0, + payload: undefined, }, ], }, @@ -535,11 +564,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 0, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }, @@ -547,7 +578,7 @@ describe('Test ingest actions for rundowns and segments', () => { } await expect( - handleUpdatedRundown(context, { + handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: false, @@ -571,16 +602,19 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', 
name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part1', name: 'Part 1', rank: 0, + payload: undefined, }, ], }, @@ -588,12 +622,14 @@ describe('Test ingest actions for rundowns and segments', () => { } // Submit an update trying to remove a segment - await handleUpdatedRundown(context, { - rundownExternalId: rundownData.externalId, - ingestRundown: rundownData, - isCreateAction: false, - rundownSource: createRundownSource(device), - }) + await expect( + handleUpdatedRundownWrapped(context, { + rundownExternalId: rundownData.externalId, + ingestRundown: rundownData, + isCreateAction: false, + rundownSource: createRundownSource(device2), + }) + ).rejects.toThrow(/Rundown(.+)not found/) // Segment count should not have changed const rundown1 = (await context.mockCollections.Rundowns.findOne({ externalId: externalId })) as DBRundown @@ -606,7 +642,7 @@ describe('Test ingest actions for rundowns and segments', () => { await recreateRundown(rundownData1) await setRundownsOrphaned() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, ingestRundown: rundownData1, isCreateAction: true, @@ -631,14 +667,17 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [], } - await handleUpdatedSegment(context, { - rundownExternalId: externalId, - ingestSegment: ingestSegment, - isCreateAction: true, - }) + await expect( + handleUpdatedSegmentWrapped(context, { + rundownExternalId: externalId, + ingestSegment: ingestSegment, + isCreateAction: true, + }) + ).rejects.toThrow(/Rundown(.+)not found/) await expect(context.mockCollections.Segments.findOne({ externalId: segExternalId })).resolves.toBeFalsy() @@ -656,10 +695,11 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [], } 
- await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: true, @@ -694,10 +734,11 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment2', rank: 0, + payload: undefined, parts: [], } - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: true, @@ -719,16 +760,18 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [ { externalId: 'part42', name: 'Part 42', rank: 0, + payload: undefined, }, ], } - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: false, @@ -754,6 +797,10 @@ describe('Test ingest actions for rundowns and segments', () => { { rundownId: rundown._id }, { $set: { orphaned: SegmentOrphanedReason.DELETED } } ) + await context.mockCollections.NrcsIngestDataCache.remove({ + type: NrcsIngestCacheType.SEGMENT, + rundownId: rundown._id, + }) const segExternalId = rundownData1.segments[0].externalId @@ -766,20 +813,24 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment2', rank: 0, + payload: undefined, parts: [ { externalId: 'part423', name: 'Part 423', rank: 0, + payload: undefined, }, ], } - await handleUpdatedSegment(context, { - rundownExternalId: externalId, - ingestSegment: ingestSegment, - isCreateAction: false, - }) + await expect( + handleUpdatedSegmentWrapped(context, { + rundownExternalId: externalId, + ingestSegment: ingestSegment, + isCreateAction: false, + }) + ).rejects.toThrow(/Segment.*not found/) await expect(context.mockCollections.Segments.findFetch({ rundownId: rundown._id 
})).resolves.toHaveLength(2) @@ -805,20 +856,24 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment2', rank: 0, + payload: undefined, parts: [ { externalId: 'part423', name: 'Part 423', rank: 0, + payload: undefined, }, ], } - await handleUpdatedSegment(context, { - rundownExternalId: externalId, - ingestSegment: ingestSegment, - isCreateAction: false, - }) + await expect( + handleUpdatedSegmentWrapped(context, { + rundownExternalId: externalId, + ingestSegment: ingestSegment, + isCreateAction: false, + }) + ).rejects.toThrow(/Rundown.*not found/) await expect(context.mockCollections.Segments.findFetch({ rundownId: rundown._id })).resolves.toHaveLength(2) @@ -835,13 +890,14 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId2, name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [], } await expect(context.mockCollections.Segments.findOne({ externalId: segExternalId2 })).resolves.toBeFalsy() await expect( - handleUpdatedSegment(context, { + handleUpdatedSegmentWrapped(context, { rundownExternalId: 'wibble', ingestSegment: ingestSegment, isCreateAction: false, @@ -864,7 +920,7 @@ describe('Test ingest actions for rundowns and segments', () => { const ingestSegment = rundownData1.segments[0] - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: false, @@ -895,7 +951,7 @@ describe('Test ingest actions for rundowns and segments', () => { }) expect(partsBefore).toHaveLength(2) - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: false, @@ -915,20 +971,25 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: '', name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [], } await expect( - 
handleUpdatedSegment(context, { + handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: false, }) - ).rejects.toThrow(`getSegmentId: segmentExternalId must be set!`) + ).rejects.toThrow(`Segment externalId must be set!`) }) test('dataSegmentDelete already orphaned segment', async () => { const rundown = await recreateRundown(rundownData1) + await context.mockCollections.NrcsIngestDataCache.remove({ + type: NrcsIngestCacheType.SEGMENT, + rundownId: rundown._id, + }) const segExternalId = rundownData1.segments[0].externalId @@ -937,10 +998,12 @@ describe('Test ingest actions for rundowns and segments', () => { { $set: { orphaned: SegmentOrphanedReason.DELETED } } ) - await handleRemovedSegment(context, { - rundownExternalId: externalId, - segmentExternalId: segExternalId, - }) + await expect( + handleRemovedSegmentWrapped(context, { + rundownExternalId: externalId, + segmentExternalId: segExternalId, + }) + ).rejects.toThrow(/Rundown(.*) does not have a Segment(.*) to remove/) await expect(context.mockCollections.Segments.findFetch({ rundownId: rundown._id })).resolves.toHaveLength(2) await expect(context.mockCollections.Segments.findOne({ externalId: segExternalId })).resolves.toBeTruthy() @@ -952,16 +1015,19 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part1', name: 'Part 1', rank: 0, + payload: undefined, }, ], }, @@ -969,11 +1035,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 0, + payload: undefined, parts: [ { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -981,18 +1049,20 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'Segment 
3', rank: 0, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }, ], } - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: externalId, ingestRundown: rundownData, isCreateAction: true, @@ -1006,14 +1076,17 @@ describe('Test ingest actions for rundowns and segments', () => { ).resolves.toHaveLength(1) await context.mockCollections.Rundowns.update({}, { $set: { orphaned: RundownOrphanedReason.DELETED } }) + await context.mockCollections.NrcsIngestDataCache.remove({}) await context.mockCollections.Segments.update({ rundownId: rundown._id }, { $unset: { orphaned: 1 } }) await expect(context.mockCollections.Segments.findFetch({ rundownId: rundown._id })).resolves.toHaveLength(3) - await handleRemovedSegment(context, { - rundownExternalId: externalId, - segmentExternalId: segExternalId, - }) + await expect( + handleRemovedSegmentWrapped(context, { + rundownExternalId: externalId, + segmentExternalId: segExternalId, + }) + ).rejects.toThrow(/Rundown(.+)not found/) await expect(context.mockCollections.Segments.findFetch({ rundownId: rundown._id })).resolves.toHaveLength(3) await expect(context.mockCollections.Segments.findOne({ externalId: segExternalId })).resolves.toBeTruthy() @@ -1024,7 +1097,7 @@ describe('Test ingest actions for rundowns and segments', () => { const segExternalId = rundownData1.segments[1].externalId - await handleRemovedSegment(context, { + await handleRemovedSegmentWrapped(context, { rundownExternalId: externalId, segmentExternalId: segExternalId, }) @@ -1040,7 +1113,7 @@ describe('Test ingest actions for rundowns and segments', () => { ).resolves.toHaveLength(0) await expect( - handleRemovedSegment(context, { + handleRemovedSegmentWrapped(context, { rundownExternalId: externalId, segmentExternalId: segExternalId, }) @@ -1054,7 +1127,7 @@ describe('Test ingest actions for rundowns and segments', () => { expect(rundown).toBeFalsy() await 
expect( - handleRemovedSegment(context, { + handleRemovedSegmentWrapped(context, { rundownExternalId: 'wibble', segmentExternalId: segExternalId, }) @@ -1069,10 +1142,11 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [], } await expect( - handleUpdatedSegment(context, { + handleUpdatedSegmentWrapped(context, { rundownExternalId: 'wibble', ingestSegment: ingestSegment, isCreateAction: true, @@ -1093,9 +1167,10 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'party', name: 'Part Y', rank: 0, + payload: undefined, } - await handleUpdatedPart(context, { + await handleUpdatedPartWrapped(context, { rundownExternalId: externalId, segmentExternalId: segment.externalId, ingestPart: ingestPart, @@ -1125,7 +1200,7 @@ describe('Test ingest actions for rundowns and segments', () => { const ingestPart = clone(rundownData1.segments[0].parts[0]) ingestPart.name = 'My special part' - await handleUpdatedPart(context, { + await handleUpdatedPartWrapped(context, { rundownExternalId: externalId, segmentExternalId: segment.externalId, ingestPart: ingestPart, @@ -1158,7 +1233,7 @@ describe('Test ingest actions for rundowns and segments', () => { }) ).resolves.toHaveLength(1) - await handleRemovedPart(context, { + await handleRemovedPartWrapped(context, { rundownExternalId: externalId, segmentExternalId: segment.externalId, partExternalId: partExternalId, @@ -1180,52 +1255,53 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 1, - // payload?: any, + payload: undefined, parts: [], }, { externalId: 'segment1', name: 'Segment 1', rank: 2, - // payload?: any, + payload: undefined, parts: [], }, { externalId: 'segment2', name: 'Segment 2', rank: 3, - // payload?: any, + payload: undefined, 
parts: [], }, { externalId: 'segment3', name: 'Segment 3', rank: 4, - // payload?: any, + payload: undefined, parts: [], }, { externalId: 'segment4', name: 'Segment 4', rank: 5, - // payload?: any, + payload: undefined, parts: [], }, { externalId: 'segment5', name: 'Segment 5', rank: 6, - // payload?: any, + payload: undefined, parts: [], }, ], } - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: externalId, ingestRundown: rundownData, isCreateAction: true, @@ -1235,7 +1311,7 @@ describe('Test ingest actions for rundowns and segments', () => { const rundown = (await context.mockCollections.Rundowns.findOne({ externalId: externalId })) as DBRundown expect(rundown).toBeTruthy() - await handleUpdatedSegmentRanks(context, { + await handleUpdatedSegmentRanksWrapped(context, { rundownExternalId: externalId, newRanks: { ['segment0']: 6, @@ -1247,19 +1323,19 @@ describe('Test ingest actions for rundowns and segments', () => { const segments = await context.mockCollections.Segments.findFetch({ rundownId: rundown._id }) expect(segments).toHaveLength(6) - expect(segments.find((s) => s.externalId === 'segment0')?._rank).toBe(6) - expect(segments.find((s) => s.externalId === 'segment1')?._rank).toBe(2) - expect(segments.find((s) => s.externalId === 'segment2')?._rank).toBe(1) - expect(segments.find((s) => s.externalId === 'segment3')?._rank).toBe(4) - expect(segments.find((s) => s.externalId === 'segment4')?._rank).toBe(5) - expect(segments.find((s) => s.externalId === 'segment5')?._rank).toBe(3) + expect(segments.find((s) => s.externalId === 'segment0')?._rank).toBe(5) + expect(segments.find((s) => s.externalId === 'segment1')?._rank).toBe(1) + expect(segments.find((s) => s.externalId === 'segment2')?._rank).toBe(0) + expect(segments.find((s) => s.externalId === 'segment3')?._rank).toBe(3) + expect(segments.find((s) => s.externalId === 'segment4')?._rank).toBe(4) + expect(segments.find((s) => s.externalId === 
'segment5')?._rank).toBe(2) }) test('unsyncing of rundown', async () => { // Preparation: set up rundown await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, ingestRundown: rundownData1, isCreateAction: true, @@ -1285,7 +1361,7 @@ describe('Test ingest actions for rundowns and segments', () => { const resyncRundown = async () => { // simulate a resync. we don't have a gateway to call out to, but this is how it will respond - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, ingestRundown: rundownData1, isCreateAction: true, @@ -1307,7 +1383,7 @@ describe('Test ingest actions for rundowns and segments', () => { await expect(getRundownOrphaned()).resolves.toBeUndefined() await expect( - handleRemovedRundown(context, { + handleRemovedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, }) ).rejects.toMatchUserError(UserErrorMessage.RundownRemoveWhileActive) @@ -1324,7 +1400,7 @@ describe('Test ingest actions for rundowns and segments', () => { }) expect(partInstance[0].segmentId).toEqual(segments[0]._id) - await handleRemovedSegment(context, { + await handleRemovedSegmentWrapped(context, { rundownExternalId: rundown.externalId, segmentExternalId: segments[0].externalId, }) @@ -1335,7 +1411,7 @@ describe('Test ingest actions for rundowns and segments', () => { await expect(getRundownOrphaned()).resolves.toBeUndefined() await expect(getSegmentOrphaned(segments[0]._id)).resolves.toBeUndefined() - await handleRemovedPart(context, { + await handleRemovedPartWrapped(context, { rundownExternalId: rundown.externalId, segmentExternalId: segments[0].externalId, partExternalId: parts[0].externalId, @@ -1353,11 +1429,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 
'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part0', @@ -1405,11 +1483,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment1', name: 'Segment 1', rank: 1, + payload: undefined, parts: [ { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -1419,7 +1499,7 @@ describe('Test ingest actions for rundowns and segments', () => { // Preparation: set up rundown await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: true, @@ -1479,7 +1559,7 @@ describe('Test ingest actions for rundowns and segments', () => { const updatedSegmentData: IngestSegment = rundownData.segments[0] updatedSegmentData.parts[1].externalId = 'new-part' - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: rundownData.externalId, ingestSegment: updatedSegmentData, isCreateAction: false, @@ -1529,11 +1609,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part0', @@ -1581,11 +1663,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment1', name: 'Segment 1', rank: 1, + payload: undefined, parts: [ { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -1593,18 +1677,20 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 1, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }, ], } - 
await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: true, @@ -1692,11 +1778,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2a', rank: 1, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], } @@ -1711,7 +1799,7 @@ describe('Test ingest actions for rundowns and segments', () => { expect(segment2.name).not.toBe(ingestSegment.name) } - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: rundownData.externalId, ingestSegment: ingestSegment, isCreateAction: false, @@ -1788,7 +1876,7 @@ describe('Test ingest actions for rundowns and segments', () => { expect(segment2.name).not.toBe(ingestSegment.name) } - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: rundownData.externalId, ingestSegment: ingestSegment, isCreateAction: false, @@ -1829,6 +1917,7 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', @@ -1884,6 +1973,7 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -1892,7 +1982,7 @@ describe('Test ingest actions for rundowns and segments', () => { // Preparation: set up rundown await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: true, @@ -1944,7 +2034,7 @@ describe('Test ingest actions for rundowns and segments', () => { // Delete segment 0, while on air const segmentExternalId = 
rundownData.segments[0].externalId - await handleRemovedSegment(context, { + await handleRemovedSegmentWrapped(context, { rundownExternalId: rundownData.externalId, segmentExternalId: segmentExternalId, }) @@ -1972,7 +2062,7 @@ describe('Test ingest actions for rundowns and segments', () => { // Trigger an 'resync' of the rundown rundownData.segments.splice(0, 1) - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: false, @@ -1995,6 +2085,7 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', @@ -2050,6 +2141,7 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -2058,7 +2150,7 @@ describe('Test ingest actions for rundowns and segments', () => { // Preparation: set up rundown await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: true, diff --git a/packages/job-worker/src/ingest/__tests__/ingestPartJobs.spec.ts b/packages/job-worker/src/ingest/__tests__/ingestPartJobs.spec.ts new file mode 100644 index 00000000000..8bebfab3f58 --- /dev/null +++ b/packages/job-worker/src/ingest/__tests__/ingestPartJobs.spec.ts @@ -0,0 +1,314 @@ +import { setupDefaultJobEnvironment } from '../../__mocks__/context' +import { handleRemovedPart, handleUpdatedPart } from '../ingestPartJobs' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { IngestChangeType, IngestPart, NrcsIngestPartChangeDetails } from '@sofie-automation/blueprints-integration' +import { UpdateIngestRundownChange } from '../runOperation' +import { 
IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' + +function getDefaultIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + payload: undefined, + segments: [ + { + externalId: 'segment0', + name: 'Segment 0', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 'part0', + name: 'Part 0', + rank: 0, + payload: undefined, + }, + { + externalId: 'part1', + name: 'Part 1', + rank: 1, + payload: undefined, + }, + ], + }, + { + externalId: 'segment1', + name: 'Segment 1', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'part2', + name: 'Part 2', + rank: 0, + payload: undefined, + }, + { + externalId: 'part3', + name: 'Part 3', + rank: 1, + payload: undefined, + }, + ], + }, + ], + rundownSource: { type: 'http' }, + } +} + +describe('handleRemovedPart', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleRemovedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment0', + partExternalId: 'part0', + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('missing segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleRemovedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segmentX', + partExternalId: 'part0', + }, + clone(ingestRundown) + ) + ).toThrow(/Rundown(.*)does not have a Segment/) + }) + + it('missing part', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleRemovedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment0', + partExternalId: 'partX', + }, + clone(ingestRundown) + ) + expect(changes).toEqual({ + ingestRundown, + changes: { + // No changes + source: IngestChangeType.Ingest, + }, + } satisfies 
UpdateIngestRundownChange) + }) + + it('part belongs to different segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleRemovedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + partExternalId: 'part0', + }, + clone(ingestRundown) + ) + expect(changes).toEqual({ + ingestRundown, + changes: { + // No changes + source: IngestChangeType.Ingest, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleRemovedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + partExternalId: 'part2', + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + ingestRundown.segments[1].parts.splice(0, 1) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: { + partChanges: { + part2: NrcsIngestPartChangeDetails.Deleted, + }, + }, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleUpdatedPart', () => { + const newIngestPart: IngestPart = { + externalId: 'partX', + name: 'New Part', + rank: 66, + payload: { + val: 'my new part', + }, + } + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleUpdatedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment0', + ingestPart: clone(newIngestPart), + isCreateAction: true, + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('missing segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleUpdatedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segmentX', + ingestPart: clone(newIngestPart), + isCreateAction: true, + }, + 
clone(ingestRundown) + ) + ).toThrow(/Rundown(.*)does not have a Segment/) + }) + + it('insert part', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + ingestPart: clone(newIngestPart), + isCreateAction: true, + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + ingestRundown.segments[1].parts.push(newIngestPart) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: { + partChanges: { + partX: NrcsIngestPartChangeDetails.Inserted, + }, + }, + }, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('update part', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const newIngestPart2 = { ...newIngestPart, externalId: 'part2' } + + const changes = handleUpdatedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + ingestPart: clone(newIngestPart2), + isCreateAction: true, + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + ingestRundown.segments[1].parts.splice(0, 1) + ingestRundown.segments[1].parts.push(newIngestPart2) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: { + partChanges: { + part2: NrcsIngestPartChangeDetails.Updated, + }, + }, + }, + }, + } satisfies UpdateIngestRundownChange) + }) + + // TODO: should this be a test case? 
+ // it('part belongs to different segment', () => { + // const context = setupDefaultJobEnvironment() + + // const ingestRundown = getDefaultIngestRundown() + + // const newIngestPart2 = { ...newIngestPart, externalId: 'part0' } + + // expect(() => + // handleUpdatedPart( + // context, + // { + // peripheralDeviceId: null, + // rundownExternalId: 'rundown0', + // segmentExternalId: 'segment1', + // ingestPart: clone(newIngestPart2), + // isCreateAction: true, + // }, + // clone(ingestRundown) + // ) + // ).toThrow('TODO fill out this error') + // }) +}) diff --git a/packages/job-worker/src/ingest/__tests__/ingestRundownJobs.spec.ts b/packages/job-worker/src/ingest/__tests__/ingestRundownJobs.spec.ts new file mode 100644 index 00000000000..0d241dafdf2 --- /dev/null +++ b/packages/job-worker/src/ingest/__tests__/ingestRundownJobs.spec.ts @@ -0,0 +1,431 @@ +import { MockJobContext, setupDefaultJobEnvironment } from '../../__mocks__/context' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { IngestChangeType, NrcsIngestRundownChangeDetails } from '@sofie-automation/blueprints-integration' +import { ComputedIngestChangeAction, UpdateIngestRundownChange } from '../runOperation' +import { + handleRegenerateRundown, + handleRemovedRundown, + handleUpdatedRundown, + handleUpdatedRundownMetaData, + handleUserUnsyncRundown, +} from '../ingestRundownJobs' +import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { DBRundown, RundownOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' + +function getDefaultIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + rundownSource: { type: 'http' }, + payload: undefined, + segments: [ + { + externalId: 'segment0', + name: 'Segment 
0', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 'part0', + name: 'Part 0', + rank: 0, + payload: undefined, + }, + { + externalId: 'part1', + name: 'Part 1', + rank: 1, + payload: undefined, + }, + ], + }, + { + externalId: 'segment1', + name: 'Segment 1', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'part2', + name: 'Part 2', + rank: 0, + payload: undefined, + }, + { + externalId: 'part3', + name: 'Part 3', + rank: 1, + payload: undefined, + }, + ], + }, + ], + } +} + +describe('handleRemovedRundown', () => { + it('no rundown, normal delete', () => { + const context = setupDefaultJobEnvironment() + + expect( + handleRemovedRundown( + context, + { + rundownExternalId: 'rundown0', + // forceDelete: false, + }, + undefined + ) + ).toBe(ComputedIngestChangeAction.DELETE) + }) + + it('no rundown, force delete', () => { + const context = setupDefaultJobEnvironment() + + expect( + handleRemovedRundown( + context, + { + rundownExternalId: 'rundown0', + forceDelete: true, + }, + undefined + ) + ).toBe(ComputedIngestChangeAction.FORCE_DELETE) + }) + + it('with rundown, normal delete', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect( + handleRemovedRundown( + context, + { + rundownExternalId: 'rundown0', + forceDelete: false, + }, + ingestRundown + ) + ).toBe(ComputedIngestChangeAction.DELETE) + }) + + it('with rundown, force delete', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect( + handleRemovedRundown( + context, + { + rundownExternalId: 'rundown0', + forceDelete: true, + }, + ingestRundown + ) + ).toBe(ComputedIngestChangeAction.FORCE_DELETE) + }) +}) + +// TODO: handleUserRemoveRundown + +describe('handleRegenerateRundown', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleRegenerateRundown( + context, + { + rundownExternalId: 'rundown0', + 
}, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleRegenerateRundown( + context, + { + rundownExternalId: 'rundown0', + }, + clone(ingestRundown) + ) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleUserUnsyncRundown', () => { + const rundownId: RundownId = protectString('rundown0') + + async function createRundown(context: MockJobContext, fragment?: Partial) { + await context.mockCollections.Rundowns.insertOne({ + _id: rundownId, + organizationId: protectString('organization0'), + studioId: context.studioId, + showStyleBaseId: protectString('showStyleBase0'), + showStyleVariantId: protectString('showStyleVariant0'), + created: 0, + modified: 0, + importVersions: {} as any, + externalId: 'rundownExternal0', + name: 'Rundown', + timing: {} as any, + playlistId: protectString('playlist0'), + source: { + type: 'testing', + showStyleVariantId: protectString('showStyleVariant0'), + }, + ...fragment, + }) + context.mockCollections.Rundowns.clearOpLog() + } + + it('no rundown', async () => { + const context = setupDefaultJobEnvironment() + + await handleUserUnsyncRundown(context, { rundownId }) + + expect(context.mockCollections.Rundowns.operations).toHaveLength(1) + expect(context.mockCollections.Rundowns.operations[0]).toEqual({ + type: 'findOne', + args: ['rundown0', undefined], + }) + }) + + it('already orphaned', async () => { + const context = setupDefaultJobEnvironment() + + await createRundown(context, { orphaned: RundownOrphanedReason.MANUAL }) + + await handleUserUnsyncRundown(context, { rundownId }) + + expect(context.mockCollections.Rundowns.operations).toHaveLength(1) + 
expect(context.mockCollections.Rundowns.operations[0]).toEqual({ + type: 'findOne', + args: ['rundown0', undefined], + }) + }) + + it('good', async () => { + const context = setupDefaultJobEnvironment() + + await createRundown(context, {}) + + await handleUserUnsyncRundown(context, { rundownId }) + + expect(context.mockCollections.Rundowns.operations).toHaveLength(2) + expect(context.mockCollections.Rundowns.operations[0]).toEqual({ + type: 'findOne', + args: ['rundown0', undefined], + }) + expect(context.mockCollections.Rundowns.operations[1]).toEqual({ + type: 'update', + args: [ + 'rundown0', + { + $set: { + orphaned: RundownOrphanedReason.MANUAL, + }, + }, + ], + }) + }) +}) + +describe('handleUpdatedRundown', () => { + const newIngestRundown: IngestRundownWithSource = { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown2', + rundownSource: { type: 'http' }, + payload: undefined, + segments: [ + { + externalId: 'segment0', + name: 'Segment 0b', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 'part0', + name: 'Part 0b', + rank: 0, + payload: undefined, + }, + { + externalId: 'part1', + name: 'Part 1b', + rank: 1, + payload: undefined, + }, + ], + }, + { + externalId: 'segment2', + name: 'Segment 2', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'part4', + name: 'Part 4', + rank: 0, + payload: undefined, + }, + { + externalId: 'part5', + name: 'Part 5', + rank: 1, + payload: undefined, + }, + ], + }, + ], + } + + it('create rundown', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedRundown( + context, + { + rundownExternalId: 'rundown0', + ingestRundown: clone(ingestRundown), + isCreateAction: true, + rundownSource: { type: 'http' }, + }, + undefined + ) + + expect(changes).toEqual({ + ingestRundown: ingestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } 
satisfies UpdateIngestRundownChange) + }) + + it('update missing rundown', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleUpdatedRundown( + context, + { + rundownExternalId: 'rundown0', + ingestRundown: clone(ingestRundown), + isCreateAction: false, + rundownSource: { type: 'http' }, + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('update existing rundown', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedRundown( + context, + { + rundownExternalId: 'rundown0', + ingestRundown: clone(newIngestRundown), + isCreateAction: false, + rundownSource: { type: 'http' }, + }, + clone(ingestRundown) + ) + + expect(changes).toEqual({ + ingestRundown: newIngestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleUpdatedRundownMetaData', () => { + const newIngestRundown: IngestRundownWithSource = { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown2', + rundownSource: { type: 'http' }, + segments: [], + payload: { + key: 'value', + }, + } + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleUpdatedRundownMetaData( + context, + { + rundownExternalId: 'rundown0', + ingestRundown: clone(newIngestRundown), + rundownSource: { type: 'http' }, + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('update existing rundown', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedRundownMetaData( + context, + { + rundownExternalId: 'rundown0', + ingestRundown: clone(newIngestRundown), + rundownSource: { type: 'http' }, + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + const 
expectedIngestRundown: IngestRundownWithSource = { + ...newIngestRundown, + segments: ingestRundown.segments, + } + + expect(changes).toEqual({ + ingestRundown: expectedIngestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Payload, + }, + } satisfies UpdateIngestRundownChange) + }) +}) diff --git a/packages/job-worker/src/ingest/__tests__/ingestSegmentJobs.spec.ts b/packages/job-worker/src/ingest/__tests__/ingestSegmentJobs.spec.ts new file mode 100644 index 00000000000..7060dfce615 --- /dev/null +++ b/packages/job-worker/src/ingest/__tests__/ingestSegmentJobs.spec.ts @@ -0,0 +1,409 @@ +import { setupDefaultJobEnvironment } from '../../__mocks__/context' +import { + handleRegenerateSegment, + handleRemovedSegment, + handleUpdatedSegment, + handleUpdatedSegmentRanks, +} from '../ingestSegmentJobs' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { + IngestChangeType, + IngestSegment, + NrcsIngestSegmentChangeDetailsEnum, +} from '@sofie-automation/blueprints-integration' +import { UpdateIngestRundownChange } from '../runOperation' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' + +function getDefaultIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + rundownSource: { type: 'http' }, + payload: undefined, + segments: [ + { + externalId: 'segment0', + name: 'Segment 0', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 'part0', + name: 'Part 0', + rank: 0, + payload: undefined, + }, + { + externalId: 'part1', + name: 'Part 1', + rank: 1, + payload: undefined, + }, + ], + }, + { + externalId: 'segment1', + name: 'Segment 1', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'part2', + name: 'Part 2', + rank: 0, + payload: undefined, + }, + { + externalId: 'part3', + name: 'Part 3', + rank: 1, + payload: undefined, + }, + ], + }, + ], + } +} + 
+describe('handleRegenerateSegment', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleRegenerateSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment0', + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('missing segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleRegenerateSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segmentX', + }, + clone(ingestRundown) + ) + ).toThrow(/Rundown(.*)does not have a Segment/) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleRegenerateSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + // ingestRundown.modified = 1 + // ingestRundown.segments.splice(1, 1) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: { + payloadChanged: true, + }, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleRemovedSegment', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleRemovedSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment0', + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('missing segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleRemovedSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segmentX', + }, + clone(ingestRundown) + ) + ).toThrow(/Rundown(.*)does not have a Segment/) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const 
ingestRundown = getDefaultIngestRundown() + + const changes = handleRemovedSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + ingestRundown.segments.splice(1, 1) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleUpdatedSegment', () => { + const newIngestSegment: IngestSegment = { + externalId: 'segmentX', + name: 'New Segment', + rank: 66, + payload: { + val: 'my new segment', + }, + parts: [ + { + externalId: 'partX', + name: 'New Part', + rank: 0, + payload: undefined, + }, + ], + } + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleUpdatedSegment(context, { + rundownExternalId: 'rundown0', + ingestSegment: clone(newIngestSegment), + isCreateAction: true, + })(undefined) + ).toThrow(/Rundown(.*)not found/) + }) + + it('missing id', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const customIngestSegment = clone(newIngestSegment) + customIngestSegment.externalId = '' + + expect(() => + handleUpdatedSegment(context, { + rundownExternalId: 'rundown0', + ingestSegment: customIngestSegment, + isCreateAction: true, + })(clone(ingestRundown)) + ).toThrow(/Segment externalId must be set!/) + }) + + it('insert segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedSegment(context, { + rundownExternalId: 'rundown0', + ingestSegment: clone(newIngestSegment), + isCreateAction: true, + })(clone(ingestRundown)) as UpdateIngestRundownChange + + // update the expected ingestRundown + ingestRundown.segments.push(newIngestSegment) + + expect(changes).toEqual({ + 
ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segmentX: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('update missing segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleUpdatedSegment(context, { + rundownExternalId: 'rundown0', + ingestSegment: clone(newIngestSegment), + isCreateAction: false, + })(clone(ingestRundown)) + ).toThrow(/Segment(.*)not found/) + }) + + it('update segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const customIngestSegment = clone(newIngestSegment) + customIngestSegment.externalId = 'segment1' + + const changes = handleUpdatedSegment(context, { + rundownExternalId: 'rundown0', + ingestSegment: clone(customIngestSegment), + isCreateAction: false, // has no impact + })(clone(ingestRundown)) as UpdateIngestRundownChange + + // update the expected ingestRundown + ingestRundown.segments.splice(1, 1, customIngestSegment) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleUpdatedSegmentRanks', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleUpdatedSegmentRanks( + context, + { + rundownExternalId: 'rundown0', + newRanks: { + segment0: 1, + segment1: 0, + }, + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('no valid changes', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedSegmentRanks( + context, + { + rundownExternalId: 'rundown0', + newRanks: { + segmentX: 2, + }, + }, + clone(ingestRundown) + ) + + 
expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: false, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('update some segments', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedSegmentRanks( + context, + { + rundownExternalId: 'rundown0', + newRanks: { + segmentX: 2, + segment0: 5, + }, + }, + clone(ingestRundown) + ) + + ingestRundown.segments[0].rank = 5 + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('invalid rank value type', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedSegmentRanks( + context, + { + rundownExternalId: 'rundown0', + newRanks: { + segmentX: 2, + segment0: 'a' as any, + }, + }, + clone(ingestRundown) + ) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: false, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +// Future: tests for handleRemoveOrphanedSegemnts diff --git a/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts b/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts index 9fd132c4167..095a4ef0975 100644 --- a/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts +++ b/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts @@ -15,6 +15,8 @@ describe('selectShowStyleVariant', () => { type: 'mock', segments: [], coreData: undefined, + userEditStates: {}, + payload: undefined, } } function createBlueprintContext(context: MockJobContext): StudioUserContext { diff --git a/packages/job-worker/src/ingest/__tests__/updateNext.test.ts b/packages/job-worker/src/ingest/__tests__/updateNext.test.ts index 
4df4a21361f..e051cfddd39 100644 --- a/packages/job-worker/src/ingest/__tests__/updateNext.test.ts +++ b/packages/job-worker/src/ingest/__tests__/updateNext.test.ts @@ -69,7 +69,6 @@ async function createMockRO(context: MockJobContext): Promise { externalId: 's1', rundownId: rundownId, name: 'Segment1', - externalModified: 1, }), literal({ _id: protectString('mock_segment2'), @@ -77,7 +76,6 @@ async function createMockRO(context: MockJobContext): Promise { externalId: 's2', rundownId: rundownId, name: 'Segment2', - externalModified: 1, }), literal({ _id: protectString('mock_segment3'), @@ -85,7 +83,6 @@ async function createMockRO(context: MockJobContext): Promise { externalId: 's3', rundownId: rundownId, name: 'Segment3', - externalModified: 1, }), literal({ _id: protectString('mock_segment4'), @@ -93,7 +90,6 @@ async function createMockRO(context: MockJobContext): Promise { externalId: 's4', rundownId: rundownId, name: 'Segment4', - externalModified: 1, }), ] ) diff --git a/packages/job-worker/src/ingest/createAdlibTestingRundown.ts b/packages/job-worker/src/ingest/createAdlibTestingRundown.ts index e24a942db46..e19c6810055 100644 --- a/packages/job-worker/src/ingest/createAdlibTestingRundown.ts +++ b/packages/job-worker/src/ingest/createAdlibTestingRundown.ts @@ -1,17 +1,21 @@ import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' -import type { CreateAdlibTestingRundownForShowStyleVariantProps } from '@sofie-automation/corelib/dist/worker/ingest' +import type { + CreateAdlibTestingRundownForShowStyleVariantProps, + IngestUpdateRundownProps, +} from '@sofie-automation/corelib/dist/worker/ingest' import type { JobContext } from '../jobs' -import type { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { convertShowStyleVariantToBlueprints } from '../blueprints/context/lib' import { ShowStyleUserContext } from '../blueprints/context' import { WatchedPackagesHelper } from '../blueprints/context/watchedPackages' 
-import { handleUpdatedRundown } from './ingestRundownJobs' import type { IShowStyleUserContext, IBlueprintShowStyleVariant, IngestRundown, } from '@sofie-automation/blueprints-integration' import { logger } from '../logging' +import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { handleUpdatedRundown } from './ingestRundownJobs' +import { runIngestUpdateOperation } from './runOperation' export async function handleCreateAdlibTestingRundownForShowStyleVariant( context: JobContext, @@ -49,7 +53,7 @@ export async function handleCreateAdlibTestingRundownForShowStyleVariant( `Creating adlib testing rundown "${ingestRundown.name}" for showStyleVariant "${showStyleVariant.name}"` ) - return handleUpdatedRundown(context, { + const updateData: IngestUpdateRundownProps = { rundownExternalId: ingestRundown.externalId, ingestRundown, isCreateAction: true, @@ -57,7 +61,10 @@ export async function handleCreateAdlibTestingRundownForShowStyleVariant( type: 'testing', showStyleVariantId: showStyleVariant._id, }, - }) + } + return runIngestUpdateOperation(context, updateData, (ingestRundown) => + handleUpdatedRundown(context, updateData, ingestRundown) + ) } function fallbackBlueprintMethod( diff --git a/packages/job-worker/src/ingest/generationRundown.ts b/packages/job-worker/src/ingest/generationRundown.ts index f41ed7db49c..42c12b41e20 100644 --- a/packages/job-worker/src/ingest/generationRundown.ts +++ b/packages/job-worker/src/ingest/generationRundown.ts @@ -1,5 +1,5 @@ import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' -import { BlueprintId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { RundownNote } from '@sofie-automation/corelib/dist/dataModel/Notes' import { serializePieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { DBRundown, RundownSource } from 
'@sofie-automation/corelib/dist/dataModel/Rundown' @@ -16,7 +16,6 @@ import { import { logger } from '../logging' import _ = require('underscore') import { IngestModel } from './model/IngestModel' -import { LocalIngestRundown } from './ingestCache' import { extendIngestRundownCore, canRundownBeUpdated } from './lib' import { JobContext } from '../jobs' import { CommitIngestData } from './lock' @@ -25,8 +24,19 @@ import { updateExpectedPackagesForRundownBaseline } from './expectedPackages' import { ReadonlyDeep } from 'type-fest' import { BlueprintResultRundown, ExtendedIngestRundown } from '@sofie-automation/blueprints-integration' import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage' -import { convertRundownToBlueprintSegmentRundown } from '../blueprints/context/lib' +import { convertRundownToBlueprintSegmentRundown, translateUserEditsFromBlueprint } from '../blueprints/context/lib' import { calculateSegmentsAndRemovalsFromIngestData } from './generationSegment' +import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' + +export enum GenerateRundownMode { + Create = 'create', + Update = 'update', + MetadataChange = 'metadata-change', +} + +export interface CommitIngestDataExt extends CommitIngestData { + didRegenerateRundown: boolean +} /** * Regenerate and save a whole Rundown @@ -40,106 +50,64 @@ import { calculateSegmentsAndRemovalsFromIngestData } from './generationSegment' export async function updateRundownFromIngestData( context: JobContext, ingestModel: IngestModel, - ingestRundown: LocalIngestRundown, - isCreateAction: boolean, - rundownSource: RundownSource -): Promise { + ingestRundown: SofieIngestRundownWithSource, + generateMode: GenerateRundownMode +): Promise { const span = context.startSpan('ingest.rundownInput.updateRundownFromIngestData') - if (!canRundownBeUpdated(ingestModel.rundown, isCreateAction)) return null - - 
logger.info(`${ingestModel.rundown ? 'Updating' : 'Adding'} rundown ${ingestModel.rundownId}`) - - // canBeUpdated is to be run by the callers - - const extendedIngestRundown = extendIngestRundownCore(ingestRundown, ingestModel.rundown) - - const selectShowStyleContext = new StudioUserContext( - { - name: 'selectShowStyleVariant', - identifier: `studioId=${context.studio._id},rundownId=${ingestModel.rundownId},ingestRundownId=${ingestModel.rundownExternalId}`, - tempSendUserNotesIntoBlackHole: true, - }, - context.studio, - context.getStudioBlueprintConfig() - ) - // TODO-CONTEXT save any user notes from selectShowStyleContext - const showStyle = await selectShowStyleVariant( - context, - selectShowStyleContext, - extendedIngestRundown, - rundownSource - ) - if (!showStyle) { - logger.debug('Blueprint rejected the rundown') - throw new Error('Blueprint rejected the rundown') - } - - const pAllRundownWatchedPackages = WatchedPackagesHelper.createForIngestRundown(context, ingestModel) - - const showStyleBlueprint = await context.getShowStyleBlueprint(showStyle.base._id) - const allRundownWatchedPackages = await pAllRundownWatchedPackages - - // Call blueprints, get rundown - const dbRundown = await regenerateRundownAndBaselineFromIngestData( + const regenerateAllContents = await updateRundownFromIngestDataInner( context, ingestModel, - extendedIngestRundown, - rundownSource, - showStyle, - showStyleBlueprint, - allRundownWatchedPackages + ingestRundown, + generateMode ) - if (!dbRundown) { - // We got no rundown, abort: - return null - } - // TODO - store notes from rundownNotesContext + if (!regenerateAllContents) return null - const { changedSegmentIds, removedSegmentIds } = await calculateSegmentsAndRemovalsFromIngestData( - context, - ingestModel, - ingestRundown, - allRundownWatchedPackages - ) + const regenerateSegmentsChanges = regenerateAllContents.regenerateAllContents + ? 
await calculateSegmentsAndRemovalsFromIngestData( + context, + ingestModel, + ingestRundown, + regenerateAllContents.allRundownWatchedPackages + ) + : undefined - logger.info(`Rundown ${dbRundown._id} update complete`) + logger.info(`Rundown ${ingestModel.rundownId} update complete`) span?.end() - return literal({ - changedSegmentIds: changedSegmentIds, - removedSegmentIds: removedSegmentIds, - renamedSegments: null, + return literal({ + changedSegmentIds: regenerateSegmentsChanges?.changedSegmentIds ?? [], + removedSegmentIds: regenerateSegmentsChanges?.removedSegmentIds ?? [], + renamedSegments: new Map(), + + didRegenerateRundown: regenerateAllContents.regenerateAllContents, removeRundown: false, }) } -/** - * Regenerate Rundown if necessary from metadata change - * Note: callers are expected to check the change is allowed by calling `canBeUpdated` prior to this - * @param context Context for the running job - * @param ingestModel The ingest model of the rundown - * @param ingestRundown The rundown to regenerate - * @param rundownSource Source of this Rundown - * @returns CommitIngestData describing the change - */ -export async function updateRundownMetadataFromIngestData( +export interface UpdateRundownInnerResult { + allRundownWatchedPackages: WatchedPackagesHelper + regenerateAllContents: boolean +} + +export async function updateRundownFromIngestDataInner( context: JobContext, ingestModel: IngestModel, - ingestRundown: LocalIngestRundown, - rundownSource: RundownSource -): Promise { - if (!canRundownBeUpdated(ingestModel.rundown, false)) return null + ingestRundown: SofieIngestRundownWithSource, + generateMode: GenerateRundownMode +): Promise { + if (!canRundownBeUpdated(ingestModel.rundown, generateMode === GenerateRundownMode.Create)) return null + const existingRundown = ingestModel.rundown - if (!existingRundown) { + if (!existingRundown && generateMode === GenerateRundownMode.MetadataChange) { throw new Error(`Rundown "${ingestRundown.externalId}" does 
not exist`) } - const span = context.startSpan('ingest.rundownInput.handleUpdatedRundownMetaDataInner') + logger.info(`${ingestModel.rundown ? 'Updating' : 'Adding'} rundown ${ingestModel.rundownId}`) - logger.info(`Updating rundown ${ingestModel.rundownId}`) + // canBeUpdated is to be run by the callers const extendedIngestRundown = extendIngestRundownCore(ingestRundown, ingestModel.rundown) @@ -158,7 +126,7 @@ export async function updateRundownMetadataFromIngestData( context, selectShowStyleContext, extendedIngestRundown, - rundownSource + ingestRundown.rundownSource ) if (!showStyle) { logger.debug('Blueprint rejected the rundown') @@ -175,7 +143,7 @@ export async function updateRundownMetadataFromIngestData( context, ingestModel, extendedIngestRundown, - rundownSource, + ingestRundown.rundownSource, showStyle, showStyleBlueprint, allRundownWatchedPackages @@ -185,35 +153,25 @@ export async function updateRundownMetadataFromIngestData( return null } - let changedSegmentIds: SegmentId[] | undefined - let removedSegmentIds: SegmentId[] | undefined - if ( - !_.isEqual( - convertRundownToBlueprintSegmentRundown(existingRundown, true), - convertRundownToBlueprintSegmentRundown(dbRundown, true) - ) - ) { - logger.info(`MetaData of rundown ${dbRundown.externalId} has been modified, regenerating segments`) - const changes = await calculateSegmentsAndRemovalsFromIngestData( - context, - ingestModel, - ingestRundown, - allRundownWatchedPackages - ) - changedSegmentIds = changes.changedSegmentIds - removedSegmentIds = changes.removedSegmentIds - } - - logger.info(`Rundown ${dbRundown._id} update complete`) + // TODO - store notes from rundownNotesContext - span?.end() - return literal({ - changedSegmentIds: changedSegmentIds ?? [], - removedSegmentIds: removedSegmentIds ?? 
[], - renamedSegments: null, + let regenerateAllContents = true + if (generateMode == GenerateRundownMode.MetadataChange) { + regenerateAllContents = + !existingRundown || + !_.isEqual( + convertRundownToBlueprintSegmentRundown(existingRundown, true), + convertRundownToBlueprintSegmentRundown(dbRundown, true) + ) + if (regenerateAllContents) { + logger.info(`MetaData of rundown ${dbRundown.externalId} has been modified, regenerating segments`) + } + } - removeRundown: false, - }) + return { + allRundownWatchedPackages, + regenerateAllContents, + } } /** @@ -313,7 +271,8 @@ export async function regenerateRundownAndBaselineFromIngestData( showStyle.variant, showStyleBlueprint, rundownSource, - rundownNotes + rundownNotes, + translateUserEditsFromBlueprint(rundownRes.rundown.userEditOperations, translationNamespaces) ) // get the rundown separetely to ensure it exists now diff --git a/packages/job-worker/src/ingest/generationSegment.ts b/packages/job-worker/src/ingest/generationSegment.ts index e90d17dcdf2..2898cef866c 100644 --- a/packages/job-worker/src/ingest/generationSegment.ts +++ b/packages/job-worker/src/ingest/generationSegment.ts @@ -8,15 +8,15 @@ import { WatchedPackagesHelper } from '../blueprints/context/watchedPackages' import { postProcessAdLibActions, postProcessAdLibPieces, postProcessPieces } from '../blueprints/postProcess' import { logger } from '../logging' import { IngestModel, IngestModelReadonly, IngestReplaceSegmentType } from './model/IngestModel' -import { LocalIngestSegment, LocalIngestRundown } from './ingestCache' import { getSegmentId, canSegmentBeUpdated } from './lib' import { JobContext, ProcessedShowStyleCompound } from '../jobs' import { CommitIngestData } from './lock' import { BlueprintResultPart, BlueprintResultSegment, - IngestSegment, NoteSeverity, + SofieIngestRundown, + SofieIngestSegment, } from '@sofie-automation/blueprints-integration' import { wrapTranslatableMessageFromBlueprints } from 
'@sofie-automation/corelib/dist/TranslatableMessage' import { updateExpectedPackagesForPartModel } from './expectedPackages' @@ -24,12 +24,13 @@ import { IngestReplacePartType, IngestSegmentModel } from './model/IngestSegment import { ReadonlyDeep } from 'type-fest' import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { WrappedShowStyleBlueprint } from '../blueprints/cache' +import { translateUserEditsFromBlueprint } from '../blueprints/context/lib' async function getWatchedPackagesHelper( context: JobContext, allRundownWatchedPackages0: WatchedPackagesHelper | null, ingestModel: IngestModelReadonly, - ingestSegments: LocalIngestSegment[] + ingestSegments: SofieIngestSegment[] ): Promise { if (allRundownWatchedPackages0) { return allRundownWatchedPackages0 @@ -50,7 +51,7 @@ async function getWatchedPackagesHelper( export async function calculateSegmentsFromIngestData( context: JobContext, ingestModel: IngestModel, - ingestSegments: LocalIngestSegment[], + ingestSegments: SofieIngestSegment[], allRundownWatchedPackages0: WatchedPackagesHelper | null ): Promise { const span = context.startSpan('ingest.rundownInput.calculateSegmentsFromIngestData') @@ -97,7 +98,7 @@ async function regenerateSegmentAndUpdateModelFull( blueprint: ReadonlyDeep, allRundownWatchedPackages: WatchedPackagesHelper, ingestModel: IngestModel, - ingestSegment: LocalIngestSegment + ingestSegment: SofieIngestSegment ): Promise { // Ensure the parts are sorted by rank ingestSegment.parts.sort((a, b) => a.rank - b.rank) @@ -151,7 +152,7 @@ async function regenerateSegmentAndUpdateModel( showStyle: ReadonlyDeep, blueprint: ReadonlyDeep, ingestModel: IngestModel, - ingestSegment: LocalIngestSegment, + ingestSegment: SofieIngestSegment, watchedPackages: WatchedPackagesHelper ): Promise { const rundown = ingestModel.getRundown() @@ -214,7 +215,7 @@ async function generateSegmentWithBlueprints( showStyle: ReadonlyDeep, blueprint: ReadonlyDeep, rundown: ReadonlyDeep, - 
ingestSegment: IngestSegment, + ingestSegment: SofieIngestSegment, watchedPackages: WatchedPackagesHelper ): Promise<{ blueprintSegment: BlueprintResultSegment @@ -247,11 +248,10 @@ async function generateSegmentWithBlueprints( function createInternalErrorSegment( blueprintId: BlueprintId, - ingestSegment: LocalIngestSegment + ingestSegment: SofieIngestSegment ): IngestReplaceSegmentType { return { externalId: ingestSegment.externalId, - externalModified: ingestSegment.modified, _rank: ingestSegment.rank, notes: [ { @@ -275,7 +275,7 @@ function updateModelWithGeneratedSegment( context: JobContext, blueprintId: BlueprintId, ingestModel: IngestModel, - ingestSegment: LocalIngestSegment, + ingestSegment: SofieIngestSegment, blueprintSegment: BlueprintResultSegment, blueprintNotes: RawPartNote[] ): IngestSegmentModel { @@ -288,9 +288,11 @@ function updateModelWithGeneratedSegment( literal({ ...blueprintSegment.segment, externalId: ingestSegment.externalId, - externalModified: ingestSegment.modified, _rank: ingestSegment.rank, notes: segmentNotes, + userEditOperations: translateUserEditsFromBlueprint(blueprintSegment.segment.userEditOperations, [ + blueprintId, + ]), }) ) @@ -373,6 +375,7 @@ function updateModelWithGeneratedPart( ]), } : undefined, + userEditOperations: translateUserEditsFromBlueprint(blueprintPart.part.userEditOperations, [blueprintId]), }) // Update pieces @@ -479,7 +482,7 @@ function preserveOrphanedSegmentPositionInRundown( export async function updateSegmentFromIngestData( context: JobContext, ingestModel: IngestModel, - ingestSegment: LocalIngestSegment, + ingestSegment: SofieIngestSegment, isNewSegment: boolean ): Promise { const span = context.startSpan('ingest.rundownInput.handleUpdatedPartInner') @@ -515,7 +518,7 @@ export async function updateSegmentFromIngestData( export async function regenerateSegmentsFromIngestData( context: JobContext, ingestModel: IngestModel, - ingestRundown: LocalIngestRundown, + ingestRundown: SofieIngestRundown, 
segmentIds: SegmentId[] ): Promise<{ result: CommitIngestData | null; skippedSegments: SegmentId[] }> { const span = context.startSpan('ingest.rundownInput.handleUpdatedPartInner') @@ -527,7 +530,7 @@ export async function regenerateSegmentsFromIngestData( const rundown = ingestModel.getRundown() const skippedSegments: SegmentId[] = [] - const ingestSegments: LocalIngestSegment[] = [] + const ingestSegments: SofieIngestSegment[] = [] for (const segmentId of segmentIds) { const segment = ingestModel.getSegment(segmentId) @@ -573,7 +576,7 @@ export async function regenerateSegmentsFromIngestData( export async function calculateSegmentsAndRemovalsFromIngestData( context: JobContext, ingestModel: IngestModel, - ingestRundown: LocalIngestRundown, + ingestRundown: SofieIngestRundown, allRundownWatchedPackages: WatchedPackagesHelper ): Promise<{ changedSegmentIds: SegmentId[]; removedSegmentIds: SegmentId[] }> { const changedSegmentIds = await calculateSegmentsFromIngestData( @@ -591,6 +594,8 @@ export async function calculateSegmentsAndRemovalsFromIngestData( removedSegmentIds.push(oldSegment.segment._id) changedSegmentIds.push(oldSegment.segment._id) oldSegment.setOrphaned(SegmentOrphanedReason.DELETED) + + oldSegment.removeAllParts() } return { changedSegmentIds, removedSegmentIds } diff --git a/packages/job-worker/src/ingest/ingestCache.ts b/packages/job-worker/src/ingest/ingestCache.ts deleted file mode 100644 index 6fcb5a5904f..00000000000 --- a/packages/job-worker/src/ingest/ingestCache.ts +++ /dev/null @@ -1,219 +0,0 @@ -import { RundownId, SegmentId, IngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { - IngestDataCacheObj, - IngestCacheType, - IngestDataCacheObjRundown, - IngestDataCacheObjSegment, - IngestDataCacheObjPart, -} from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' -import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' -import { getCurrentTime } from '../lib' 
-import _ = require('underscore') -import { IngestRundown, IngestSegment, IngestPart } from '@sofie-automation/blueprints-integration' -import { JobContext } from '../jobs' -import { getPartId, getSegmentId } from './lib' -import { SetOptional } from 'type-fest' -import { groupByToMap, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' -import { AnyBulkWriteOperation } from 'mongodb' -import { diffAndReturnLatestObjects } from './model/implementation/utils' - -interface LocalIngestBase { - modified: number -} -export interface LocalIngestRundown extends IngestRundown, LocalIngestBase { - segments: LocalIngestSegment[] -} -export interface LocalIngestSegment extends IngestSegment, LocalIngestBase { - parts: LocalIngestPart[] -} -export interface LocalIngestPart extends IngestPart, LocalIngestBase {} -export function isLocalIngestRundown(o: IngestRundown | LocalIngestRundown): o is LocalIngestRundown { - return 'modified' in o -} -export function makeNewIngestRundown(ingestRundown: SetOptional): LocalIngestRundown { - return { - ...ingestRundown, - segments: ingestRundown.segments ? 
_.map(ingestRundown.segments, makeNewIngestSegment) : [], - modified: getCurrentTime(), - } -} -export function makeNewIngestSegment(ingestSegment: IngestSegment): LocalIngestSegment { - return { - ...ingestSegment, - parts: _.map(ingestSegment.parts, makeNewIngestPart), - modified: getCurrentTime(), - } -} -export function makeNewIngestPart(ingestPart: IngestPart): LocalIngestPart { - return { ...ingestPart, modified: getCurrentTime() } -} - -export class RundownIngestDataCache { - readonly #changedDocumentIds = new Set() - - private constructor( - private readonly context: JobContext, - private readonly rundownId: RundownId, - private documents: IngestDataCacheObj[] - ) {} - - static async create(context: JobContext, rundownId: RundownId): Promise { - const docs = await context.directCollections.IngestDataCache.findFetch({ rundownId }) - - return new RundownIngestDataCache(context, rundownId, docs) - } - - fetchRundown(): LocalIngestRundown | undefined { - const span = this.context.startSpan('ingest.ingestCache.loadCachedRundownData') - - const cachedRundown = this.documents.find((e) => e.type === IngestCacheType.RUNDOWN) - if (!cachedRundown) { - span?.end() - return undefined - } - - const ingestRundown = cachedRundown.data as LocalIngestRundown - ingestRundown.modified = cachedRundown.modified - - const hasSegmentId = (obj: IngestDataCacheObj): obj is IngestDataCacheObjSegment | IngestDataCacheObjPart => { - return !!obj.segmentId - } - - const segmentMap = groupByToMap(this.documents.filter(hasSegmentId), 'segmentId') - for (const objs of segmentMap.values()) { - const segmentEntry = objs.find((e) => e.type === IngestCacheType.SEGMENT) - if (segmentEntry) { - const ingestSegment = segmentEntry.data as LocalIngestSegment - ingestSegment.modified = segmentEntry.modified - - for (const entry of objs) { - if (entry.type === IngestCacheType.PART) { - const ingestPart = entry.data as LocalIngestPart - ingestPart.modified = entry.modified - - 
ingestSegment.parts.push(ingestPart) - } - } - - ingestSegment.parts = _.sortBy(ingestSegment.parts, (s) => s.rank) - ingestRundown.segments.push(ingestSegment) - } - } - - ingestRundown.segments = _.sortBy(ingestRundown.segments, (s) => s.rank) - - span?.end() - return ingestRundown - } - - update(ingestRundown: LocalIngestRundown): void { - const cacheEntries: IngestDataCacheObj[] = generateCacheForRundown(this.rundownId, ingestRundown) - - this.documents = diffAndReturnLatestObjects(this.#changedDocumentIds, this.documents, cacheEntries) - } - - delete(): void { - // Mark each document for deletion - for (const doc of this.documents) { - this.#changedDocumentIds.add(doc._id) - } - - this.documents = [] - } - - async saveToDatabase(): Promise { - const documentsMap = normalizeArrayToMap(this.documents, '_id') - - const updates: AnyBulkWriteOperation[] = [] - const removedIds: IngestDataCacheObjId[] = [] - for (const changedId of this.#changedDocumentIds) { - const newDoc = documentsMap.get(changedId) - if (!newDoc) { - removedIds.push(changedId) - } else { - updates.push({ - replaceOne: { - filter: { - _id: changedId, - }, - replacement: newDoc, - upsert: true, - }, - }) - } - } - - if (removedIds.length) { - updates.push({ - deleteMany: { - filter: { - _id: { $in: removedIds as any }, - }, - }, - }) - } - - await this.context.directCollections.IngestDataCache.bulkWrite(updates) - } -} - -function generateCacheForRundown(rundownId: RundownId, ingestRundown: LocalIngestRundown): IngestDataCacheObj[] { - // cache the Data - const cacheEntries: IngestDataCacheObj[] = [] - const rundown: IngestDataCacheObjRundown = { - _id: protectString(unprotectString(rundownId)), - type: IngestCacheType.RUNDOWN, - rundownId: rundownId, - modified: ingestRundown.modified, - data: { - ..._.omit(ingestRundown, 'modified'), - segments: [], // omit the segments, they come as separate objects - }, - } - cacheEntries.push(rundown) - - for (const segment of ingestRundown.segments) { - 
cacheEntries.push(...generateCacheForSegment(rundownId, segment)) - } - - return cacheEntries -} -function generateCacheForSegment(rundownId: RundownId, ingestSegment: LocalIngestSegment): IngestDataCacheObj[] { - const segmentId = getSegmentId(rundownId, ingestSegment.externalId) - const cacheEntries: Array = [] - - const segment: IngestDataCacheObjSegment = { - _id: protectString(`${rundownId}_${segmentId}`), - type: IngestCacheType.SEGMENT, - rundownId: rundownId, - segmentId: segmentId, - modified: ingestSegment.modified, - data: { - ..._.omit(ingestSegment, 'modified'), - parts: [], // omit the parts, they come as separate objects - }, - } - cacheEntries.push(segment) - - for (const part of ingestSegment.parts) { - cacheEntries.push(generateCacheForPart(rundownId, segmentId, part)) - } - - return cacheEntries -} -function generateCacheForPart( - rundownId: RundownId, - segmentId: SegmentId, - part: LocalIngestPart -): IngestDataCacheObjPart { - const partId = getPartId(rundownId, part.externalId) - return { - _id: protectString(`${rundownId}_${partId}`), - type: IngestCacheType.PART, - rundownId: rundownId, - segmentId: segmentId, - partId: partId, - modified: part.modified, - data: _.omit(part, 'modified'), - } -} diff --git a/packages/job-worker/src/ingest/ingestPartJobs.ts b/packages/job-worker/src/ingest/ingestPartJobs.ts index 2886deeb241..5ed0b148bb4 100644 --- a/packages/job-worker/src/ingest/ingestPartJobs.ts +++ b/packages/job-worker/src/ingest/ingestPartJobs.ts @@ -1,71 +1,90 @@ -import { getCurrentTime } from '../lib' import { JobContext } from '../jobs' -import { updateSegmentFromIngestData } from './generationSegment' -import { makeNewIngestPart } from './ingestCache' -import { runIngestJob } from './lock' import { IngestRemovePartProps, IngestUpdatePartProps } from '@sofie-automation/corelib/dist/worker/ingest' +import { UpdateIngestRundownChange } from './runOperation' +import { IngestChangeType, NrcsIngestPartChangeDetails } from 
'@sofie-automation/blueprints-integration' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' /** * Remove a Part from a Segment */ -export async function handleRemovedPart(context: JobContext, data: IngestRemovePartProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestSegment = ingestRundown.segments.find((s) => s.externalId === data.segmentExternalId) - if (!ingestSegment) { - throw new Error( - `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` - ) - } - ingestSegment.parts = ingestSegment.parts.filter((p) => p.externalId !== data.partExternalId) - ingestSegment.modified = getCurrentTime() +export function handleRemovedPart( + _context: JobContext, + data: IngestRemovePartProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } + const ingestSegment = ingestRundown.segments.find((s) => s.externalId === data.segmentExternalId) + if (!ingestSegment) { + throw new Error( + `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` + ) + } + const partCountBefore = ingestSegment.parts.length + ingestSegment.parts = ingestSegment.parts.filter((p) => p.externalId !== data.partExternalId) + + if (partCountBefore === ingestSegment.parts.length) { + return { + // No change + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + }, + } satisfies UpdateIngestRundownChange + } + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [data.segmentExternalId]: { + partChanges: { + [data.partExternalId]: NrcsIngestPartChangeDetails.Deleted, + }, + }, 
+ }, }, - async (context, ingestModel, ingestRundown) => { - const ingestSegment = ingestRundown?.segments?.find((s) => s.externalId === data.segmentExternalId) - if (!ingestSegment) throw new Error(`IngestSegment "${data.segmentExternalId}" is missing!`) - return updateSegmentFromIngestData(context, ingestModel, ingestSegment, false) - } - ) + } satisfies UpdateIngestRundownChange } /** * Insert or update a Part in a Segment */ -export async function handleUpdatedPart(context: JobContext, data: IngestUpdatePartProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestSegment = ingestRundown.segments.find((s) => s.externalId === data.segmentExternalId) - if (!ingestSegment) { - throw new Error( - `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` - ) - } - ingestSegment.parts = ingestSegment.parts.filter((p) => p.externalId !== data.ingestPart.externalId) - ingestSegment.parts.push(makeNewIngestPart(data.ingestPart)) - ingestSegment.modified = getCurrentTime() +export function handleUpdatedPart( + _context: JobContext, + data: IngestUpdatePartProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) + + const ingestSegment = ingestRundown.segments.find((s) => s.externalId === data.segmentExternalId) + if (!ingestSegment) { + throw new Error( + `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` + ) + } + const partCountBefore = ingestSegment.parts.length + ingestSegment.parts = ingestSegment.parts.filter((p) => p.externalId !== data.ingestPart.externalId) + const isUpdate = partCountBefore !== ingestSegment.parts.length + + ingestSegment.parts.push(data.ingestPart) - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } 
+ return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [data.segmentExternalId]: { + partChanges: { + [data.ingestPart.externalId]: isUpdate + ? NrcsIngestPartChangeDetails.Updated + : NrcsIngestPartChangeDetails.Inserted, + }, + }, + }, }, - async (context, ingestModel, ingestRundown) => { - const ingestSegment = ingestRundown?.segments?.find((s) => s.externalId === data.segmentExternalId) - if (!ingestSegment) throw new Error(`IngestSegment "${data.segmentExternalId}" is missing!`) - return updateSegmentFromIngestData(context, ingestModel, ingestSegment, false) - } - ) + } satisfies UpdateIngestRundownChange } diff --git a/packages/job-worker/src/ingest/ingestRundownJobs.ts b/packages/job-worker/src/ingest/ingestRundownJobs.ts index 12c13dfb828..dc949929bb0 100644 --- a/packages/job-worker/src/ingest/ingestRundownJobs.ts +++ b/packages/job-worker/src/ingest/ingestRundownJobs.ts @@ -1,11 +1,8 @@ import { JobContext } from '../jobs' import { logger } from '../logging' -import { updateRundownFromIngestData, updateRundownMetadataFromIngestData } from './generationRundown' -import { makeNewIngestRundown } from './ingestCache' -import { canRundownBeUpdated, getRundownId } from './lib' -import { CommitIngestData, runIngestJob, runWithRundownLock, UpdateIngestRundownAction } from './lock' +import { runWithRundownLock } from './lock' +import { getRundownId } from './lib' import { removeRundownFromDb } from '../rundownPlaylists' -import { literal } from '@sofie-automation/corelib/dist/lib' import { DBRundown, RundownOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { IngestRegenerateRundownProps, @@ -15,36 +12,27 @@ import { UserRemoveRundownProps, UserUnsyncRundownProps, } from '@sofie-automation/corelib/dist/worker/ingest' -import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' -import { RundownId } from 
'@sofie-automation/corelib/dist/dataModel/Ids' +import { ComputedIngestChangeAction, UpdateIngestRundownChange, UpdateIngestRundownResult } from './runOperation' +import { + IngestChangeType, + IngestRundown, + NrcsIngestRundownChangeDetails, +} from '@sofie-automation/blueprints-integration' +import { wrapGenericIngestJob } from './jobWrappers' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' /** * Attempt to remove a rundown, or orphan it */ -export async function handleRemovedRundown(context: JobContext, data: IngestRemoveRundownProps): Promise { - await runIngestJob( - context, - data, - () => { - // Remove it - return UpdateIngestRundownAction.DELETE - }, - async (_context, ingestModel) => { - const rundown = ingestModel.getRundown() - - const canRemove = data.forceDelete || canRundownBeUpdated(rundown, false) - if (!canRemove) throw UserError.create(UserErrorMessage.RundownRemoveWhileActive, { name: rundown.name }) - - return literal({ - changedSegmentIds: [], - removedSegmentIds: [], - renamedSegments: null, - removeRundown: true, - returnRemoveFailure: true, - }) - } - ) +export function handleRemovedRundown( + _context: JobContext, + data: IngestRemoveRundownProps, + _ingestRundown: IngestRundown | undefined +): UpdateIngestRundownResult { + // Remove it + return data.forceDelete ? 
ComputedIngestChangeAction.FORCE_DELETE : ComputedIngestChangeAction.DELETE } +const handleRemovedRundownWrapped = wrapGenericIngestJob(handleRemovedRundown) /** * User requested removing a rundown @@ -81,8 +69,8 @@ export async function handleUserRemoveRundown(context: JobContext, data: UserRem } }) } else { - // The ids match, meaning the typical ingest operation flow will work - return handleRemovedRundown(context, { + // Its a real rundown, so defer to the proper route for deletion + return handleRemovedRundownWrapped(context, { rundownExternalId: tmpRundown.externalId, forceDelete: data.force, }) @@ -92,84 +80,66 @@ export async function handleUserRemoveRundown(context: JobContext, data: UserRem /** * Insert or update a rundown with a new IngestRundown */ -export async function handleUpdatedRundown(context: JobContext, data: IngestUpdateRundownProps): Promise { - return runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown || data.isCreateAction) { - // We want to regenerate unmodified - return makeNewIngestRundown(data.ingestRundown) - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error(`regenerateRundown lost the IngestRundown...`) +export function handleUpdatedRundown( + _context: JobContext, + data: IngestUpdateRundownProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown && !data.isCreateAction) throw new Error(`Rundown "${data.rundownExternalId}" not found`) - return updateRundownFromIngestData( - context, - ingestModel, - ingestRundown, - data.isCreateAction, - data.rundownSource - ) - } - ) + return { + ingestRundown: { + ...data.ingestRundown, + rundownSource: data.rundownSource, + }, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies UpdateIngestRundownChange } /** * Update a 
rundown from a new IngestRundown (ingoring IngestSegments) */ -export async function handleUpdatedRundownMetaData( - context: JobContext, - data: IngestUpdateRundownMetaDataProps -): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - return { - ...makeNewIngestRundown(data.ingestRundown), - segments: ingestRundown.segments, - } - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error(`handleUpdatedRundownMetaData lost the IngestRundown...`) +export function handleUpdatedRundownMetaData( + _context: JobContext, + data: IngestUpdateRundownMetaDataProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) - return updateRundownMetadataFromIngestData(context, ingestModel, ingestRundown, data.rundownSource) - } - ) + return { + ingestRundown: { + ...data.ingestRundown, + rundownSource: data.rundownSource, + segments: ingestRundown.segments, + }, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Payload, + }, + } satisfies UpdateIngestRundownChange } /** * Regnerate a Rundown from the cached IngestRundown */ -export async function handleRegenerateRundown(context: JobContext, data: IngestRegenerateRundownProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - // We want to regenerate unmodified - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (context, ingestModel, ingestRundown) => { - // If the rundown is orphaned, then we can't regenerate as there wont be any data to use! 
- if (!ingestRundown) return null +export function handleRegenerateRundown( + _context: JobContext, + data: IngestRegenerateRundownProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) - if (!ingestModel.rundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) - - return updateRundownFromIngestData(context, ingestModel, ingestRundown, false, ingestModel.rundown.source) - } - ) + return { + // We want to regenerate unmodified + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies UpdateIngestRundownChange } /** @@ -177,16 +147,16 @@ export async function handleRegenerateRundown(context: JobContext, data: IngestR */ export async function handleUserUnsyncRundown(context: JobContext, data: UserUnsyncRundownProps): Promise { return runWithRundownLock(context, data.rundownId, async (rundown) => { - if (rundown) { - if (!rundown.orphaned) { - await context.directCollections.Rundowns.update(rundown._id, { - $set: { - orphaned: RundownOrphanedReason.MANUAL, - }, - }) - } else { - logger.info(`Rundown "${rundown._id}" was already unsynced`) - } + if (!rundown) return // Ignore if rundown is not found + + if (!rundown.orphaned) { + await context.directCollections.Rundowns.update(rundown._id, { + $set: { + orphaned: RundownOrphanedReason.MANUAL, + }, + }) + } else { + logger.info(`Rundown "${rundown._id}" was already unsynced`) } }) } diff --git a/packages/job-worker/src/ingest/ingestSegmentJobs.ts b/packages/job-worker/src/ingest/ingestSegmentJobs.ts index bfe4fc20a14..06ac710f40d 100644 --- a/packages/job-worker/src/ingest/ingestSegmentJobs.ts +++ b/packages/job-worker/src/ingest/ingestSegmentJobs.ts @@ -1,11 +1,6 @@ -import { SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { getCurrentTime } from '../lib' import { 
JobContext } from '../jobs' -import { logger } from '../logging' -import { regenerateSegmentsFromIngestData, updateSegmentFromIngestData } from './generationSegment' -import { makeNewIngestSegment } from './ingestCache' -import { canSegmentBeUpdated, getSegmentId } from './lib' -import { CommitIngestData, runIngestJob, UpdateIngestRundownAction } from './lock' +import { regenerateSegmentsFromIngestData } from './generationSegment' +import { CommitIngestData } from './lock' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { literal } from '@sofie-automation/corelib/dist/lib' import { @@ -15,157 +10,140 @@ import { IngestUpdateSegmentRanksProps, RemoveOrphanedSegmentsProps, } from '@sofie-automation/corelib/dist/worker/ingest' +import { IngestUpdateOperationFunction, UpdateIngestRundownChange, UpdateIngestRundownResult } from './runOperation' +import { + IngestChangeType, + NrcsIngestSegmentChangeDetailsEnum, + SofieIngestRundown, +} from '@sofie-automation/blueprints-integration' +import { IngestModel } from './model/IngestModel' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' /** * Regnerate a Segment from the cached IngestSegment */ -export async function handleRegenerateSegment(context: JobContext, data: IngestRegenerateSegmentProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - // Ensure the target segment exists in the cache - const ingestSegment = ingestRundown.segments.find((s) => s.externalId === data.segmentExternalId) - if (!ingestSegment) { - throw new Error( - `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` - ) - } - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } +export function handleRegenerateSegment( + _context: JobContext, + data: IngestRegenerateSegmentProps, + 
ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) + + // Ensure the target segment exists in the cache + const ingestSegment = ingestRundown.segments.find((s) => s.externalId === data.segmentExternalId) + if (!ingestSegment) { + throw new Error( + `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` + ) + } + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [data.segmentExternalId]: { + payloadChanged: true, + }, + }, }, - async (context, ingestModel, ingestRundown) => { - const ingestSegment = ingestRundown?.segments?.find((s) => s.externalId === data.segmentExternalId) - if (!ingestSegment) throw new Error(`IngestSegment "${data.segmentExternalId}" is missing!`) - return updateSegmentFromIngestData(context, ingestModel, ingestSegment, false) - } - ) + } } /** * Attempt to remove a segment, or orphan it */ -export async function handleRemovedSegment(context: JobContext, data: IngestRemoveSegmentProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const oldSegmentsLength = ingestRundown.segments.length - ingestRundown.segments = ingestRundown.segments.filter((s) => s.externalId !== data.segmentExternalId) - ingestRundown.modified = getCurrentTime() - - if (ingestRundown.segments.length === oldSegmentsLength) { - throw new Error( - `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to remove` - ) - } - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } +export function handleRemovedSegment( + _context: JobContext, + data: IngestRemoveSegmentProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new 
Error(`Rundown "${data.rundownExternalId}" not found`) + + const oldSegmentsLength = ingestRundown.segments.length + ingestRundown.segments = ingestRundown.segments.filter((s) => s.externalId !== data.segmentExternalId) + + if (ingestRundown.segments.length === oldSegmentsLength) { + throw new Error( + `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to remove` + ) + } + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [data.segmentExternalId]: NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, }, - async (_context, ingestModel) => { - const rundown = ingestModel.getRundown() - const segmentId = getSegmentId(rundown._id, data.segmentExternalId) - const segment = ingestModel.getSegment(segmentId) - - if (!canSegmentBeUpdated(rundown, segment, false)) { - // segment has already been deleted - return null - } else { - return literal({ - changedSegmentIds: [], - removedSegmentIds: [segmentId], - renamedSegments: null, - - removeRundown: false, - }) - } - } - ) + } } /** * Insert or update a segment from a new IngestSegment */ -export async function handleUpdatedSegment(context: JobContext, data: IngestUpdateSegmentProps): Promise { +export function handleUpdatedSegment( + _context: JobContext, + data: IngestUpdateSegmentProps +): IngestUpdateOperationFunction { const segmentExternalId = data.ingestSegment.externalId - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - ingestRundown.segments = ingestRundown.segments.filter((s) => s.externalId !== segmentExternalId) - ingestRundown.segments.push(makeNewIngestSegment(data.ingestSegment)) - ingestRundown.modified = getCurrentTime() - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (context, ingestModel, ingestRundown) => { - const ingestSegment = ingestRundown?.segments?.find((s) => 
s.externalId === segmentExternalId) - if (!ingestSegment) throw new Error(`IngestSegment "${segmentExternalId}" is missing!`) - return updateSegmentFromIngestData(context, ingestModel, ingestSegment, data.isCreateAction) + if (!segmentExternalId) throw new Error('Segment externalId must be set!') + + return (ingestRundown) => { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) + + const countBefore = ingestRundown.segments.length + ingestRundown.segments = ingestRundown.segments.filter((s) => s.externalId !== segmentExternalId) + if (countBefore === ingestRundown.segments.length && !data.isCreateAction) + throw new Error(`Segment "${data.ingestSegment.externalId}" not found`) + + ingestRundown.segments.push(data.ingestSegment) + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [segmentExternalId]: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, // This forces downstream to do a full diff themselves + }, + }, } - ) + } } /** * Update the ranks of the Segments in a Rundown */ -export async function handleUpdatedSegmentRanks( - context: JobContext, - data: IngestUpdateSegmentRanksProps -): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - // Update ranks on ingest data - for (const segment of ingestRundown.segments) { - segment.rank = data.newRanks[segment.externalId] ?? 
segment.rank - } - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (_context, ingestModel) => { - const changedSegmentIds: SegmentId[] = [] - for (const [externalId, rank] of Object.entries(data.newRanks)) { - const segment = ingestModel.getSegmentByExternalId(externalId) - if (segment) { - const changed = segment.setRank(rank) - - if (!changed) { - logger.warn(`Failed to update rank of segment "${externalId}" (${data.rundownExternalId})`) - } else { - changedSegmentIds.push(segment?.segment._id) - } - } - } - - return literal({ - changedSegmentIds, - removedSegmentIds: [], - renamedSegments: null, - removeRundown: false, - }) +export function handleUpdatedSegmentRanks( + _context: JobContext, + data: IngestUpdateSegmentRanksProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownResult { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) + + let hasChange = false + + // Update ranks on ingest data + for (const segment of ingestRundown.segments) { + const newRank = Number(data.newRanks[segment.externalId]) + if (!isNaN(newRank)) { + segment.rank = newRank + hasChange = true } - ) + } + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: hasChange, + }, + } } /** @@ -173,70 +151,57 @@ export async function handleUpdatedSegmentRanks( */ export async function handleRemoveOrphanedSegemnts( context: JobContext, - data: RemoveOrphanedSegmentsProps -): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => ingestRundown ?? 
UpdateIngestRundownAction.DELETE, - async (_context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error(`handleRemoveOrphanedSegemnts lost the IngestRundown...`) - - // Find the segments that are still orphaned (in case they have resynced before this executes) - // We flag them for deletion again, and they will either be kept if they are somehow playing, or purged if they are not - const stillOrphanedSegments = ingestModel.getOrderedSegments().filter((s) => !!s.segment.orphaned) - - // Note: AdlibTesting segments are ignored here, as they will never be in the ingestModel - - const stillHiddenSegments = stillOrphanedSegments.filter( - (s) => - s.segment.orphaned === SegmentOrphanedReason.HIDDEN && - data.orphanedHiddenSegmentIds.includes(s.segment._id) - ) - - const stillDeletedSegmentIds = stillOrphanedSegments - .filter( - (s) => - s.segment.orphaned === SegmentOrphanedReason.DELETED && - data.orphanedDeletedSegmentIds.includes(s.segment._id) - ) - .map((s) => s.segment._id) - - const hiddenSegmentIds = ingestModel - .getOrderedSegments() - .filter((s) => !!stillHiddenSegments.find((a) => a.segment._id === s.segment._id)) - .map((s) => s.segment._id) - - const { result } = await regenerateSegmentsFromIngestData( - context, - ingestModel, - ingestRundown, - hiddenSegmentIds - ) - - const changedHiddenSegments = result?.changedSegmentIds ?? 
[] - - // Make sure any orphaned hidden segments arent marked as hidden - for (const segment of stillHiddenSegments) { - if (!changedHiddenSegments.includes(segment.segment._id)) { - if (segment.segment.isHidden && segment.segment.orphaned === SegmentOrphanedReason.HIDDEN) { - segment.setOrphaned(undefined) - changedHiddenSegments.push(segment.segment._id) - } - } - } + data: RemoveOrphanedSegmentsProps, + ingestModel: IngestModel, + ingestRundown: SofieIngestRundown +): Promise { + // Find the segments that are still orphaned (in case they have resynced before this executes) + // We flag them for deletion again, and they will either be kept if they are somehow playing, or purged if they are not + const stillOrphanedSegments = ingestModel.getOrderedSegments().filter((s) => !!s.segment.orphaned) + + // Note: AdlibTesting segments are ignored here, as they will never be in the ingestModel + + const stillHiddenSegments = stillOrphanedSegments.filter( + (s) => + s.segment.orphaned === SegmentOrphanedReason.HIDDEN && data.orphanedHiddenSegmentIds.includes(s.segment._id) + ) - if (changedHiddenSegments.length === 0 && stillDeletedSegmentIds.length === 0) { - // Nothing could have changed, so take a shortcut and skip any saving - return null + const stillDeletedSegmentIds = stillOrphanedSegments + .filter( + (s) => + s.segment.orphaned === SegmentOrphanedReason.DELETED && + data.orphanedDeletedSegmentIds.includes(s.segment._id) + ) + .map((s) => s.segment._id) + + const hiddenSegmentIds = ingestModel + .getOrderedSegments() + .filter((s) => !!stillHiddenSegments.find((a) => a.segment._id === s.segment._id)) + .map((s) => s.segment._id) + + const { result } = await regenerateSegmentsFromIngestData(context, ingestModel, ingestRundown, hiddenSegmentIds) + + const changedHiddenSegments = result?.changedSegmentIds ?? 
[] + + // Make sure any orphaned hidden segments arent marked as hidden + for (const segment of stillHiddenSegments) { + if (!changedHiddenSegments.includes(segment.segment._id)) { + if (segment.segment.isHidden && segment.segment.orphaned === SegmentOrphanedReason.HIDDEN) { + segment.setOrphaned(undefined) + changedHiddenSegments.push(segment.segment._id) } - - return literal({ - changedSegmentIds: changedHiddenSegments, - removedSegmentIds: stillDeletedSegmentIds, - renamedSegments: null, - removeRundown: false, - }) } - ) + } + + if (changedHiddenSegments.length === 0 && stillDeletedSegmentIds.length === 0) { + // Nothing could have changed, so take a shortcut and skip any saving + return null + } + + return literal({ + changedSegmentIds: changedHiddenSegments, + removedSegmentIds: stillDeletedSegmentIds, + renamedSegments: new Map(), + removeRundown: false, + }) } diff --git a/packages/job-worker/src/ingest/jobWrappers.ts b/packages/job-worker/src/ingest/jobWrappers.ts new file mode 100644 index 00000000000..c4fbc9024c0 --- /dev/null +++ b/packages/job-worker/src/ingest/jobWrappers.ts @@ -0,0 +1,83 @@ +import { IngestPropsBase } from '@sofie-automation/corelib/dist/worker/ingest' +import { JobContext } from '../jobs' +import { + IngestUpdateOperationFunction, + UpdateIngestRundownResult, + runCustomIngestUpdateOperation, + runIngestUpdateOperation, +} from './runOperation' +import { CommitIngestData } from './lock' +import { IngestModel } from './model/IngestModel' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' + +/** + * Wrap a mos specific ingest job to be an ingest update operation, with a provided function which runs a precheck and returns the final ingestRundown mutator + * @param fcn Function to generate the ingestRundown mutator + */ +export function wrapMosIngestJob( + fcn: (context: 
JobContext, data: TData) => IngestUpdateOperationFunction | null +): (context: JobContext, data: TData) => Promise { + return async (context, data) => { + const executeFcn = fcn(context, data) + if (!executeFcn) return + + await runIngestUpdateOperation(context, data, (ingestRundown) => { + if (ingestRundown && ingestRundown.type !== 'mos') { + throw new Error(`Rundown "${data.rundownExternalId}" is not a MOS rundown`) + } + + return executeFcn(ingestRundown) + }) + } +} + +/** + * Wrap an ingest job to be an ingest update operation, with a provided function which can mutate the ingestRundown + * @param fcn Function to mutate the ingestRundown + */ +export function wrapGenericIngestJob( + fcn: ( + context: JobContext, + data: TData, + oldIngestRundown: IngestRundownWithSource | undefined + ) => UpdateIngestRundownResult +): (context: JobContext, data: TData) => Promise { + return async (context, data) => { + await runIngestUpdateOperation(context, data, (ingestRundown) => fcn(context, data, ingestRundown)) + } +} + +/** + * Wrap an ingest job to be an ingest update operation, with a provided function which runs a precheck and returns the final ingestRundown mutator + * @param fcn Function to generate the ingestRundown mutator + */ +export function wrapGenericIngestJobWithPrecheck( + fcn: (context: JobContext, data: TData) => IngestUpdateOperationFunction | null +): (context: JobContext, data: TData) => Promise { + return async (context, data) => { + const executeFcn = fcn(context, data) + if (!executeFcn) return + + await runIngestUpdateOperation(context, data, (ingestRundown) => executeFcn(ingestRundown)) + } +} + +/** + * Wrap an ingest job to be an ingest update operation, with a provided function to run the job to modify the IngestModel + * @param fcn Function to mutate the IngestModel + */ +export function wrapCustomIngestJob( + fcn: ( + context: JobContext, + data: TData, + ingestModel: IngestModel, + ingestRundown: SofieIngestRundownWithSource + ) => Promise 
+): (context: JobContext, data: TData) => Promise { + return async (context, data) => { + await runCustomIngestUpdateOperation(context, data, async (_context, ingestModel, ingestRundown) => { + return fcn(context, data, ingestModel, ingestRundown) + }) + } +} diff --git a/packages/job-worker/src/ingest/lib.ts b/packages/job-worker/src/ingest/lib.ts index c059bf10e20..af031cce0fe 100644 --- a/packages/job-worker/src/ingest/lib.ts +++ b/packages/job-worker/src/ingest/lib.ts @@ -4,7 +4,7 @@ import { getHash } from '@sofie-automation/corelib/dist/lib' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { ReadonlyDeep } from 'type-fest' import { logger } from '../logging' -import { ExtendedIngestRundown, IngestRundown } from '@sofie-automation/blueprints-integration' +import { ExtendedIngestRundown, SofieIngestRundown } from '@sofie-automation/blueprints-integration' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { convertRundownToBlueprints } from '../blueprints/context/lib' import { IngestSegmentModel } from './model/IngestSegmentModel' @@ -61,7 +61,7 @@ export function canSegmentBeUpdated( } export function extendIngestRundownCore( - ingestRundown: IngestRundown, + ingestRundown: SofieIngestRundown, existingDbRundown: ReadonlyDeep | undefined ): ExtendedIngestRundown { const extendedIngestRundown: ExtendedIngestRundown = { diff --git a/packages/job-worker/src/ingest/lock.ts b/packages/job-worker/src/ingest/lock.ts index 67999b33be2..7645ff3abfc 100644 --- a/packages/job-worker/src/ingest/lock.ts +++ b/packages/job-worker/src/ingest/lock.ts @@ -1,15 +1,7 @@ -import { SegmentId, PartId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { IngestModel, IngestModelReadonly } from './model/IngestModel' -import { BeforeIngestOperationPartMap, CommitIngestOperation } from './commit' -import { LocalIngestRundown, RundownIngestDataCache } from './ingestCache' -import { 
getRundownId } from './lib' +import { SegmentId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { JobContext } from '../jobs' -import { IngestPropsBase } from '@sofie-automation/corelib/dist/worker/ingest' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { RundownLock } from '../jobs/lock' -import { UserError } from '@sofie-automation/corelib/dist/error' -import { loadIngestModelFromRundownExternalId } from './model/implementation/LoadIngestModel' -import { clone } from '@sofie-automation/corelib/dist/lib' /** * The result of the initial stage of an Ingest operation @@ -34,106 +26,6 @@ export interface CommitIngestData { returnRemoveFailure?: boolean } -export enum UpdateIngestRundownAction { - REJECT = 'reject', - DELETE = 'delete', -} - -/** - * Perform an ingest update operation on a rundown - * This will automatically do some post-update data changes, to ensure the playout side (partinstances etc) is updated with the changes - * @param context Context of the job being run - * @param studioId Id of the studio the rundown belongs to - * @param rundownExternalId ExternalId of the rundown to lock - * @param updateCacheFcn Function to mutate the ingestData. Throw if the requested change is not valid. Return undefined to indicate the ingestData should be deleted - * @param calcFcn Function to run to update the Rundown. Return the blob of data about the change to help the post-update perform its duties. 
Return null to indicate that nothing changed - */ -export async function runIngestJob( - context: JobContext, - data: IngestPropsBase, - updateCacheFcn: ( - oldIngestRundown: LocalIngestRundown | undefined - ) => LocalIngestRundown | UpdateIngestRundownAction, - calcFcn: ( - context: JobContext, - ingestModel: IngestModel, - newIngestRundown: LocalIngestRundown | undefined, - oldIngestRundown: LocalIngestRundown | undefined - ) => Promise -): Promise { - if (!data.rundownExternalId) { - throw new Error(`Job is missing rundownExternalId`) - } - - const rundownId = getRundownId(context.studioId, data.rundownExternalId) - return runWithRundownLockInner(context, rundownId, async (rundownLock) => { - const span = context.startSpan(`ingestLockFunction.${context.studioId}`) - - // Load the old ingest data - const pIngestModel = loadIngestModelFromRundownExternalId(context, rundownLock, data.rundownExternalId) - const ingestObjCache = await RundownIngestDataCache.create(context, rundownId) - - // Recalculate the ingest data - const oldIngestRundown = ingestObjCache.fetchRundown() - const updatedIngestRundown = updateCacheFcn(clone(oldIngestRundown)) - let newIngestRundown: LocalIngestRundown | undefined - switch (updatedIngestRundown) { - case UpdateIngestRundownAction.REJECT: - // Reject change - return rundownId - case UpdateIngestRundownAction.DELETE: - ingestObjCache.delete() - newIngestRundown = undefined - break - default: - ingestObjCache.update(updatedIngestRundown) - newIngestRundown = updatedIngestRundown - break - } - // Start saving the ingest data - const pSaveIngestChanges = ingestObjCache.saveToDatabase() - - let resultingError: UserError | void | undefined - - try { - const ingestModel = await pIngestModel - - // Load any 'before' data for the commit - const beforeRundown = ingestModel.rundown - const beforePartMap = generatePartMap(ingestModel) - - const span = context.startSpan('ingest.calcFcn') - const commitData = await calcFcn(context, ingestModel, 
newIngestRundown, oldIngestRundown) - span?.end() - - if (commitData) { - const span = context.startSpan('ingest.commit') - // The change is accepted. Perform some playout calculations and save it all - resultingError = await CommitIngestOperation( - context, - ingestModel, - beforeRundown, - beforePartMap, - commitData - ) - span?.end() - } else { - // Should be no changes - ingestModel.assertNoChanges() - } - } finally { - // Ensure we save the ingest data - await pSaveIngestChanges - - span?.end() - } - - if (resultingError) throw resultingError - - return rundownId - }) -} - /** * Run a minimal rundown job. This is an alternative to `runIngestJob`, for operations to operate on a Rundown without the full Ingest flow * This automatically aquires the RundownLock, loads the Rundown and does a basic access check @@ -151,7 +43,7 @@ export async function runWithRundownLock( throw new Error(`Job is missing rundownId`) } - return runWithRundownLockInner(context, rundownId, async (lock) => { + return runWithRundownLockWithoutFetchingRundown(context, rundownId, async (lock) => { const rundown = await context.directCollections.Rundowns.findOne(rundownId) if (rundown && rundown.studioId !== context.studioId) { throw new Error(`Job rundown "${rundownId}" not found or for another studio`) @@ -164,7 +56,7 @@ export async function runWithRundownLock( /** * Lock the rundown for a quick task without the cache */ -async function runWithRundownLockInner( +export async function runWithRundownLockWithoutFetchingRundown( context: JobContext, rundownId: RundownId, fcn: (lock: RundownLock) => Promise @@ -178,17 +70,3 @@ async function runWithRundownLockInner( await rundownLock.release() } } - -function generatePartMap(ingestModel: IngestModelReadonly): BeforeIngestOperationPartMap { - const rundown = ingestModel.rundown - if (!rundown) return new Map() - - const res = new Map>() - for (const segment of ingestModel.getAllSegments()) { - res.set( - segment.segment._id, - 
segment.parts.map((p) => ({ id: p.part._id, rank: p.part._rank })) - ) - } - return res -} diff --git a/packages/job-worker/src/ingest/model/IngestModel.ts b/packages/job-worker/src/ingest/model/IngestModel.ts index 305a4197cde..f58ad024393 100644 --- a/packages/job-worker/src/ingest/model/IngestModel.ts +++ b/packages/job-worker/src/ingest/model/IngestModel.ts @@ -15,6 +15,7 @@ import { SegmentId, } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBRundown, RundownOrphanedReason, RundownSource } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { CoreUserEditingDefinition } from '@sofie-automation/corelib/dist/dataModel/UserEditingDefinitions' import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction' import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece' import { LazyInitialiseReadonly } from '../../lib/lazy' @@ -109,7 +110,7 @@ export interface IngestModelReadonly { */ getSegment(id: SegmentId): IngestSegmentModelReadonly | undefined /** - * Get the Segments of this Rundown, in order + * Get the Segments of this Rundown, in no particular order */ getAllSegments(): IngestSegmentModelReadonly[] @@ -234,7 +235,8 @@ export interface IngestModel extends IngestModelReadonly, BaseModel { showStyleVariant: ReadonlyDeep, showStyleBlueprint: ReadonlyDeep, source: RundownSource, - rundownNotes: RundownNote[] + rundownNotes: RundownNote[], + userEdits: CoreUserEditingDefinition[] | undefined ): ReadonlyDeep /** diff --git a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts index f441921f1ab..747da75b21c 100644 --- a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts +++ b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts @@ -20,6 +20,7 @@ import { import { DBPart } from 
'@sofie-automation/corelib/dist/dataModel/Part' import { Piece, PieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { DBRundown, RundownOrphanedReason, RundownSource } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { CoreUserEditingDefinition } from '@sofie-automation/corelib/dist/dataModel/UserEditingDefinitions' import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction' import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece' import { RundownBaselineObj } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineObj' @@ -291,7 +292,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { return undefined } /** - * Get the Segments of this Rundown, in order + * Get the Segments of this Rundown, in no particular order */ getAllSegments(): IngestSegmentModel[] { const segments: IngestSegmentModel[] = [] @@ -421,7 +422,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { showStyleVariant: ReadonlyDeep, showStyleBlueprint: ReadonlyDeep, source: RundownSource, - rundownNotes: RundownNote[] + rundownNotes: RundownNote[], + userEditOperations: CoreUserEditingDefinition[] | undefined ): ReadonlyDeep { const newRundown = literal>({ ...clone(rundownData as Complete), @@ -432,6 +434,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { studioId: this.context.studio._id, showStyleVariantId: showStyleVariant._id, showStyleBaseId: showStyleBase._id, + userEditOperations: clone(userEditOperations), orphaned: undefined, importVersions: { diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap b/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap index d949eb684e2..0678fb3875d 100644 --- 
a/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap +++ b/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap @@ -174,38 +174,34 @@ exports[`Test recieved mos ingest payloads mosRoCreate 2`] = ` exports[`Test recieved mos ingest payloads mosRoCreate 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -221,7 +217,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": 
"5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -231,7 +227,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -241,7 +237,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -251,7 +247,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -261,7 +257,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -271,7 +267,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -281,7 +277,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -291,7 +287,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, 
"rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -471,29 +467,26 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 2`] = ` exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 3`] = ` [ { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -509,7 +502,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -519,7 +512,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + 
"segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART3", }, { @@ -529,7 +522,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -539,7 +532,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -549,7 +542,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -559,7 +552,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -569,7 +562,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -579,7 +572,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -762,38 +755,34 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 2`] = 
` exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -809,7 +798,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -819,7 +808,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid 
data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -829,7 +818,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -839,7 +828,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -849,7 +838,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -859,7 +848,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -877,7 +866,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` }, }, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -887,7 +876,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ 
-1072,38 +1061,34 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 2`] = ` exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -1119,7 +1104,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": 
"SEGMENT3;PART2", }, { @@ -1129,7 +1114,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -1139,7 +1124,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -1149,7 +1134,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -1159,7 +1144,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -1169,7 +1154,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -1179,7 +1164,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -1189,7 +1174,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": 
"5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -1370,38 +1355,34 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 2`] = ` exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -1417,7 +1398,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, 
"rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -1427,7 +1408,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -1437,7 +1418,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -1447,7 +1428,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -1457,7 +1438,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -1467,7 +1448,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -1477,7 +1458,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -1487,7 +1468,7 @@ 
exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -1670,20 +1651,18 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 2`] exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 3`] = ` [ { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -1699,7 +1678,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART3", }, { @@ -1709,7 +1688,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -1719,7 +1698,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": 
"5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -1729,7 +1708,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -1739,7 +1718,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -1749,7 +1728,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -1934,38 +1913,34 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 2`] = exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": 
"5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -1981,7 +1956,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -1991,7 +1966,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -2000,7 +1975,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "externalId": "ro1;s1;newPart1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;new1", }, { @@ -2010,7 +1985,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", 
"title": "SEGMENT2;PART2", }, { @@ -2020,7 +1995,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -2030,7 +2005,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -2040,7 +2015,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -2050,7 +2025,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -2060,7 +2035,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -2245,47 +2220,42 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 2`] = ` exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 4, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 3, + "externalId": 
"SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 4, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "9VE_IbHiHyW6VjY6Fi8fMJEgtS4_", + "_id": "Rjo_e_rlOh2eE8XOyVmXZCMgTNY_", "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1B_ro1;s1b;newPart1", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1b;newPart1", "name": "SEGMENT1B", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -2301,7 +2271,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -2311,7 +2281,7 @@ exports[`Test recieved mos 
ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -2320,7 +2290,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "externalId": "ro1;s1b;newPart1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "9VE_IbHiHyW6VjY6Fi8fMJEgtS4_", + "segmentId": "Rjo_e_rlOh2eE8XOyVmXZCMgTNY_", "title": "SEGMENT1B;new1", }, { @@ -2330,7 +2300,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -2340,7 +2310,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -2350,7 +2320,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -2360,7 +2330,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -2370,7 +2340,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": 
"MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -2380,7 +2350,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -2565,38 +2535,34 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to end 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 0, + 
"externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -2612,7 +2578,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -2622,7 +2588,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -2632,7 +2598,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -2642,7 +2608,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -2652,7 +2618,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -2662,7 +2628,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ 
-2672,7 +2638,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -2682,7 +2648,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -2867,38 +2833,34 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 2`] = exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, 
- "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -2914,7 +2876,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -2924,7 +2886,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -2934,7 +2896,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -2944,7 +2906,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -2954,7 +2916,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -2964,7 +2926,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": 
"5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -2974,7 +2936,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -2984,7 +2946,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -3169,38 +3131,34 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 2`] = exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": 
"SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -3216,7 +3174,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -3226,7 +3184,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -3235,7 +3193,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "externalId": "ro1;s1;newPart1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;new1", }, { @@ -3245,7 +3203,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -3255,7 +3213,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + 
"segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -3265,7 +3223,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -3275,7 +3233,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -3285,7 +3243,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -3470,29 +3428,26 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments 3`] = ` [ { - "_id": "sLfUx9cadyquE07Vw9byoX35G9I_", + "_id": "o6BHLNEWMc9FbHBRRWMOiwQ3IN0_", "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p2", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p2", "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": 
"5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -3508,7 +3463,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "sLfUx9cadyquE07Vw9byoX35G9I_", + "segmentId": "o6BHLNEWMc9FbHBRRWMOiwQ3IN0_", "title": "SEGMENT3;PART2", }, { @@ -3518,7 +3473,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART3", }, { @@ -3528,7 +3483,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -3538,7 +3493,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -3548,7 +3503,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "sLfUx9cadyquE07Vw9byoX35G9I_", + "segmentId": "o6BHLNEWMc9FbHBRRWMOiwQ3IN0_", "title": "SEGMENT3;PART1", }, { @@ -3558,7 +3513,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": 
"5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -3568,7 +3523,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -3578,7 +3533,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -3763,65 +3718,58 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 3`] = ` [ { - "_id": "oLlO42uuh1jzxrJrFmnAqDH5_Do_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p2", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "zz3BgLI_xxlvfTOTR55skUkKWHk_", + "_rank": 4, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p2", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "apDVfF5nk1_StK474hEUxLMZIag_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p3", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "yVemxI_brsRMvHAeFVtG2tahCgU_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p2", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 6, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 5, + "externalId": 
"SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p3", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 5, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 6, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "6tE1R4qyvS2U8gUoAc23Y8R50UI_", - "_rank": 4, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p2", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -3837,7 +3785,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -3847,7 +3795,7 @@ exports[`Test recieved mos ingest payloads 
mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -3857,7 +3805,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "oLlO42uuh1jzxrJrFmnAqDH5_Do_", + "segmentId": "yVemxI_brsRMvHAeFVtG2tahCgU_", "title": "SEGMENT2;PART2", }, { @@ -3867,7 +3815,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "apDVfF5nk1_StK474hEUxLMZIag_", + "segmentId": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", "title": "SEGMENT1;PART3", }, { @@ -3877,7 +3825,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -3887,7 +3835,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -3897,7 +3845,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "6tE1R4qyvS2U8gUoAc23Y8R50UI_", + "segmentId": "zz3BgLI_xxlvfTOTR55skUkKWHk_", "title": "SEGMENT1;PART2", }, { @@ -3907,7 +3855,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + 
"segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -4092,41 +4040,37 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same segment 3`] = ` [ { - "_id": "apDVfF5nk1_StK474hEUxLMZIag_", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", + "notes": [], + "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", + }, + { + "_id": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p3", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p3", "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, - { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", - "notes": [], - "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - }, ] `; @@ -4139,7 +4083,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + 
"segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -4149,7 +4093,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -4159,7 +4103,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -4169,7 +4113,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "apDVfF5nk1_StK474hEUxLMZIag_", + "segmentId": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", "title": "SEGMENT1;PART3", }, { @@ -4179,7 +4123,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -4189,7 +4133,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "apDVfF5nk1_StK474hEUxLMZIag_", + "segmentId": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", "title": "SEGMENT1;PART1", }, { @@ -4199,7 +4143,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "apDVfF5nk1_StK474hEUxLMZIag_", + "segmentId": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", "title": "SEGMENT1;PART2", }, { @@ -4209,7 +4153,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With 
first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -4394,38 +4338,34 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 2 exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -4441,7 +4381,7 @@ exports[`Test recieved mos ingest 
payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -4451,7 +4391,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -4461,7 +4401,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -4471,7 +4411,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -4481,7 +4421,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -4491,7 +4431,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -4501,7 +4441,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": 
"MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -4511,7 +4451,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/diffSegmentEntries.test.ts b/packages/job-worker/src/ingest/mosDevice/__tests__/diffSegmentEntries.test.ts deleted file mode 100644 index a53f8f43b82..00000000000 --- a/packages/job-worker/src/ingest/mosDevice/__tests__/diffSegmentEntries.test.ts +++ /dev/null @@ -1,555 +0,0 @@ -import * as _ from 'underscore' - -import { diffSegmentEntries, compileSegmentEntries } from '../diff' -import { IngestSegment } from '@sofie-automation/blueprints-integration' -import { LocalIngestSegment } from '../../ingestCache' - -function clone(o: T): T { - return JSON.parse(JSON.stringify(o)) -} -function recalculateRank(ingestSegments: IngestSegment[]) { - ingestSegments.sort((a, b) => { - if (a.rank < b.rank) return -1 - if (a.rank > b.rank) return 1 - return 0 - }) - _.each(ingestSegments, (ingestSegment, i) => { - ingestSegment.rank = i - }) -} -describe('Ingest: MOS', () => { - describe('diffSegmentEntries', () => { - const ingestSegments: LocalIngestSegment[] = [ - { - rank: 0, - externalId: 'first', - name: 'Overblik', - parts: [ - { - name: 'AA3D07094F51297F', - rank: 0, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;C355011E-B1E2-425E-AA3D07094F51297F', - modified: Date.now(), - }, - { - name: 'AC9369C6A140CEBB', - rank: 1, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;484B952B-DE0F-40A0-AC9369C6A140CEBB', - modified: Date.now(), - }, - { - name: '8DAE5BF534A0EAD8', - rank: 2, - externalId: - 
'2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;F285B8FA-BD5F-492B-8DAE5BF534A0EAD8', - modified: Date.now(), - }, - { - name: 'B7D35BBDBFD9A4D2', - rank: 3, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;AE66B8D8-4595-4CA5-B7D35BBDBFD9A4D2', - modified: Date.now(), - }, - { - name: '8A872A00510269E', - rank: 4, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;5C4EE8B8-5459-4A94-8A872A00510269E8', - modified: Date.now(), - }, - ], - modified: Date.now(), - }, - { - rank: 1, - externalId: 'second', - name: 'Møller og DBU', - parts: [ - { - name: 'BB605A012DFAF93E', - rank: 0, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;0D1D077D-9720-4560-BB605A012DFAF93E', - modified: Date.now(), - }, - { - name: 'B21E0F016576BC73', - rank: 1, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;D7CCC920-28E9-41AC-B21E0F016576BC73', - modified: Date.now(), - }, - { - name: '8E100AB374A15DEA', - rank: 2, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;D87D86D3-FD0A-42AC-8E100AB374A15DEA', - modified: Date.now(), - }, - { - name: '86360F634827C56A', - rank: 3, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;950A200E-31CA-4DEA-86360F634827C56A', - modified: Date.now(), - }, - ], - modified: Date.now(), - }, - { - rank: 2, - externalId: 'third', - name: 'Webhenvisning TV 2 Sporten', - parts: [ - { - name: 'A0C24CCA21FE9969', - rank: 0, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;56CB0445-5782-4F92-A0C24CCA21FE9969', - modified: Date.now(), - }, - ], - modified: Date.now(), - }, - { - rank: 3, - externalId: 'fourth', - name: 'Møller og DBU', - parts: [ - { - name: 'B41C095014F35C2E', - rank: 0, - externalId: - 
'2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;3487F683-5939-4A37-B41C095014F35C2E', - modified: Date.now(), - }, - { - name: 'B9D0B70BA3F30F69', - rank: 1, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;A0BF2B88-EE9E-40B7-B9D0B70BA3F30F69', - modified: Date.now(), - }, - { - name: '87B4F8206386BBDD', - rank: 2, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;D05B62C7-19F8-4CD7-87B4F8206386BBDD', - modified: Date.now(), - }, - ], - modified: Date.now(), - }, - ] - - test('No changes', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - const diff = diffSegmentEntries(segmentEntries, segmentEntries, null) - expect(_.keys(diff.added)).toHaveLength(0) - expect(_.keys(diff.changed)).toHaveLength(0) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['first', 'second', 'third', 'fourth']) - }) - test('Remove middle of segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[0].parts.splice(1, 1) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['first']) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['second', 'third', 'fourth']) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3[1].parts.splice(1, 1) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toEqual(['second']) - expect(_.keys(diff2.onlyRankChanged)).toHaveLength(0) - 
expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'third', 'fourth']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4[3].parts.splice(1, 1) - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.changed)).toEqual(['fourth']) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toHaveLength(0) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - }) - - test('Remove start of segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[0].parts.splice(0, 1) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['first']) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['second', 'third', 'fourth']) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3[1].parts.splice(0, 1) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toEqual(['second']) - expect(_.keys(diff2.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'third', 'fourth']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4[3].parts.splice(0, 1) - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.changed)).toEqual(['fourth']) - 
expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toHaveLength(0) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - }) - - test('Remove end of segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[0].parts.splice(4, 1) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['first']) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['second', 'third', 'fourth']) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3[1].parts.splice(3, 1) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toEqual(['second']) - expect(_.keys(diff2.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'third', 'fourth']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4[3].parts.splice(2, 1) - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.changed)).toEqual(['fourth']) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toHaveLength(0) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - }) - - test('Remove whole segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2.splice(0, 1) - recalculateRank(ingestSegments2) - const segmentEntries2 = 
compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toHaveLength(3) - expect(diff.onlyRankChanged).toEqual({ - second: 0, - third: 1, - fourth: 2, - }) - expect(_.keys(diff.removed)).toEqual(['first']) - expect(_.keys(diff.unchanged)).toHaveLength(0) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3.splice(1, 1) - recalculateRank(ingestSegments3) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toHaveLength(2) - expect(diff2.onlyRankChanged).toEqual({ - third: 1, - fourth: 2, - // { oldRank: 2, newRank: 1 }, - // { oldRank: 3, newRank: 2 } - }) - expect(_.keys(diff2.removed)).toEqual(['second']) - expect(_.keys(diff2.unchanged)).toEqual(['first']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4.splice(3, 1) - recalculateRank(ingestSegments4) - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - // expect(_.keys(diff3.changed)).toHaveLength(3) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toEqual(['fourth']) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - }) - test('Remove to combine segments', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[1].parts = ingestSegments2[1].parts.concat(ingestSegments2[3].parts) - - ingestSegments2.splice(2, 2) - // ingestSegments2.splice(3, 1) - recalculateRank(ingestSegments2) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['second']) - 
expect(_.keys(diff.onlyRankChanged)).toEqual([]) - expect(_.keys(diff.removed)).toEqual(['third', 'fourth']) - expect(_.keys(diff.unchanged)).toEqual(['first']) - }) - - test('Rename/replace segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[0].externalId = 'NEW' - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.added)).toEqual(['NEW']) - expect(_.keys(diff.changed)).toEqual([]) - expect(_.keys(diff.removed)).toEqual(['first']) - expect(_.keys(diff.unchanged)).toEqual(['second', 'third', 'fourth']) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - expect(diff.externalIdChanged).toEqual({ - first: 'NEW', - }) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3[1].externalId = 'NEW' - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.added)).toEqual(['NEW']) - expect(_.keys(diff2.changed)).toEqual([]) - expect(_.keys(diff2.removed)).toEqual(['second']) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'third', 'fourth']) - expect(_.keys(diff2.onlyRankChanged)).toHaveLength(0) - expect(diff2.externalIdChanged).toEqual({ - second: 'NEW', - }) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4[3].externalId = 'NEW' - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.added)).toEqual(['NEW']) - expect(_.keys(diff3.changed)).toEqual([]) - expect(_.keys(diff3.removed)).toEqual(['fourth']) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - 
expect(diff3.externalIdChanged).toEqual({ - fourth: 'NEW', - }) - }) - - test('Insert into segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[0].parts.splice(2, 0, { name: 'abc', rank: 2.5, externalId: 'abc', modified: Date.now() }) - // segmentEntries2['first'].parts.splice(2, 0, 'abc') - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['first']) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['second', 'third', 'fourth']) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3[1].parts.splice(2, 0, { name: 'abc', rank: 2.5, externalId: 'abc', modified: Date.now() }) - // segmentEntries3['second'].parts.splice(2, 0, 'abc') - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toEqual(['second']) - expect(_.keys(diff2.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'third', 'fourth']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4[3].parts.splice(2, 0, { name: 'abc', rank: 2.5, externalId: 'abc', modified: Date.now() }) - // segmentEntries4['fourth'].parts.splice(2, 0, 'abc') - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.changed)).toEqual(['fourth']) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toHaveLength(0) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - }) - - 
test('Insert new segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2.splice(0, 0, { - rank: -1, - externalId: 'new', - name: 'New Name', - parts: [{ name: 'abc', rank: 0, externalId: 'abc', modified: Date.now() }], - modified: Date.now(), - }) - recalculateRank(ingestSegments2) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['first', 'second', 'third', 'fourth']) - expect(diff.onlyRankChanged).toEqual({ - first: 1, - second: 2, - third: 3, - fourth: 4, - }) - expect(_.keys(diff.added)).toEqual(['new']) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toHaveLength(0) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3.splice(1, 0, { - rank: 0.5, - externalId: 'new', - name: 'New Name', - parts: [{ name: 'abc', rank: 0, externalId: 'abc', modified: Date.now() }], - modified: Date.now(), - }) - recalculateRank(ingestSegments3) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toEqual(['second', 'third', 'fourth']) - expect(diff2.onlyRankChanged).toEqual({ - second: 2, - third: 3, - fourth: 4, - }) - expect(_.keys(diff.added)).toEqual(['new']) - expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4.splice(-1, 0, { - rank: 99, - externalId: 'new', - name: 'New Name', - parts: [{ name: 'abc', rank: 0, externalId: 'abc', modified: Date.now() }], - modified: Date.now(), - }) - recalculateRank(ingestSegments4) - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = 
diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toHaveLength(0) - expect(_.keys(diff.added)).toEqual(['new']) - expect(_.keys(diff3.changed)).toEqual([]) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third', 'fourth']) - }) - test('Insert new segment, split existing', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2.splice( - 1, - 0, - { - rank: 0.5, - externalId: 'new', - name: 'New Name', - parts: [{ name: 'abc', rank: 0, externalId: 'abc', modified: Date.now() }], - modified: Date.now(), - }, - { - rank: 0.7, - externalId: 'new2', - name: 'New Name2', - parts: [{ name: 'abc2', rank: 0, externalId: 'abc2', modified: Date.now() }], - modified: Date.now(), - } - ) - recalculateRank(ingestSegments2) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.added)).toEqual(['new', 'new2']) - expect(_.keys(diff.changed)).toEqual(['second', 'third', 'fourth']) - expect(diff.onlyRankChanged).toEqual({ - second: 3, - third: 4, - fourth: 5, - }) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['first']) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3.splice( - 2, - 0, - { - rank: 1.5, - externalId: 'new', - name: 'New Name', - parts: [{ name: 'abc', rank: 0, externalId: 'abc', modified: Date.now() }], - modified: Date.now(), - }, - { - rank: 1.7, - externalId: 'new2', - name: 'New Name2', - parts: [{ name: 'abc2', rank: 0, externalId: 'abc2', modified: Date.now() }], - modified: Date.now(), - } - ) - recalculateRank(ingestSegments3) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, 
null) - expect(_.keys(diff2.added)).toEqual(['new', 'new2']) - expect(_.keys(diff2.changed)).toEqual(['third', 'fourth']) - expect(diff2.onlyRankChanged).toEqual({ - third: 4, - fourth: 5, - }) - expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'second']) - }) - }) -}) diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/mock-mos-data.ts b/packages/job-worker/src/ingest/mosDevice/__tests__/mock-mos-data.ts index 3dadee7153d..eb560b3a8a0 100644 --- a/packages/job-worker/src/ingest/mosDevice/__tests__/mock-mos-data.ts +++ b/packages/job-worker/src/ingest/mosDevice/__tests__/mock-mos-data.ts @@ -85,19 +85,23 @@ export const mockRO = { segmentIdMap: () => [ { parts: ['ro1;s1;p1', 'ro1;s1;p2', 'ro1;s1;p3'], - segmentId: 'MCxHIjO7_t3PRHpLiX0vbzwx4gg_', + segmentName: 'SEGMENT1', + segmentId: 'baQfD5zawLDmJTRumGpHDH2MwaM_', }, { parts: ['ro1;s2;p1', 'ro1;s2;p2'], - segmentId: 'Qz1OqWVatX_W4Sp5C0m8VhTTfME_', + segmentName: 'SEGMENT2', + segmentId: '6cEU5uY8M93lfQssMy9XaGxT23E_', }, { parts: ['ro1;s3;p1', 'ro1;s3;p2'], - segmentId: '8GUNgE7zUulco2K3yuhJ1Fyceeo_', + segmentName: 'SEGMENT3', + segmentId: 'rSEZMzZhJ55454sqsU_7TOq_DIk_', }, { parts: ['ro1;s4;p1'], - segmentId: 'XF9ZBDI5IouvkmTbounEfoJ6ijY_', + segmentName: 'SEGMENT2', + segmentId: 'YXMZjMqslZFcM3K4sGelyBYJ_rA_', }, ], newItem: (id: string, slug: string): MOS.IMOSROStory => diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts b/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts index 6e495e96b6a..c96a18890d3 100644 --- a/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts +++ b/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts @@ -3,8 +3,7 @@ import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' 
import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' -import _ = require('underscore') +import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { sortPartsInSortedSegments, sortSegmentsInRundowns } from '@sofie-automation/corelib/dist/playout/playlist' import { handleMosDeleteStory, @@ -14,7 +13,7 @@ import { handleMosSwapStories, } from '../mosStoryJobs' import { handleMosRundownData, handleMosRundownReadyToAir, handleMosRundownStatus } from '../mosRundownJobs' -import { parseMosString } from '../lib' +import { getMosIngestSegmentId, parseMosString } from '../lib' import { MockJobContext, setupDefaultJobEnvironment } from '../../../__mocks__/context' import { setupMockIngestDevice, setupMockShowStyleCompound } from '../../../__mocks__/presetCollections' import { fixSnapshot } from '../../../__mocks__/helpers/snapshot' @@ -23,18 +22,19 @@ import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/Rund import { MongoQuery } from '../../../db' import { handleRemovedRundown } from '../../ingestRundownJobs' import { MOS } from '@sofie-automation/corelib' -import { literal } from '@sofie-automation/corelib/dist/lib' -import { IngestCacheType } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { groupByToMap, literal, normalizeArrayToMap, omit } from '@sofie-automation/corelib/dist/lib' +import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { getPartId } from '../../lib' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { handleSetNextPart } from '../../../playout/setNextJobs' import { handleTakeNextPart } from '../../../playout/take' import { handleActivateRundownPlaylist, handleDeactivateRundownPlaylist } from '../../../playout/activePlaylistJobs' import { 
removeRundownPlaylistFromDb } from '../../__tests__/lib' +import { UserErrorMessage } from '@sofie-automation/corelib/dist/error' +import { wrapCustomIngestJob, wrapGenericIngestJob, wrapMosIngestJob } from '../../jobWrappers' jest.mock('../../updateNext') import { ensureNextPartIsValid } from '../../updateNext' -import { UserErrorMessage } from '@sofie-automation/corelib/dist/error' type TensureNextPartIsValid = jest.MockedFunction const ensureNextPartIsValidMock = ensureNextPartIsValid as TensureNextPartIsValid @@ -43,18 +43,33 @@ const mosTypes = MOS.getMosTypes(true) function getPartIdMap(segments: DBSegment[], parts: DBPart[]) { const sortedParts = sortPartsInSortedSegments(parts, segments) - const groupedParts = _.groupBy(sortedParts, (p) => unprotectString(p.segmentId)) - const arr: [string, DBPart[]][] = _.pairs(groupedParts) - const idMap = _.map(arr, (g) => ({ - segmentId: protectString(g[0]), - parts: _.map(g[1], (p) => p.externalId), + const segmentMap = normalizeArrayToMap(segments, '_id') + + const groupedParts = groupByToMap(sortedParts, 'segmentId') + const arr: [SegmentId, DBPart[]][] = Array.from(groupedParts.entries()) + const idMap = arr.map((group) => ({ + segmentId: group[0], + segmentName: segmentMap.get(group[0])?.name ?? null, + parts: group[1].map((p) => p.externalId), })) - return _.sortBy(idMap, (s) => { - const obj = _.find(segments, (s2) => s2._id === s.segmentId) - return obj ? obj._rank : 99999 + + return idMap.sort((a, b) => { + const aRank = segmentMap.get(a.segmentId)?._rank ?? 99999 + const bRank = segmentMap.get(b.segmentId)?._rank ?? 
99999 + + return aRank - bRank }) } +const handleMosDeleteStoryWrapped = wrapMosIngestJob(handleMosDeleteStory) +const handleMosFullStoryWrapped = wrapMosIngestJob(handleMosFullStory) +const handleMosInsertStoriesWrapped = wrapMosIngestJob(handleMosInsertStories) +const handleMosMoveStoriesWrapped = wrapMosIngestJob(handleMosMoveStories) +const handleMosSwapStoriesWrapped = wrapMosIngestJob(handleMosSwapStories) +const handleMosRundownDataWrapped = wrapMosIngestJob(handleMosRundownData) +const handleRemovedRundownWrapped = wrapGenericIngestJob(handleRemovedRundown) +const handleMosRundownReadyToAirWrapped = wrapCustomIngestJob(handleMosRundownReadyToAir) + function createRundownSource(peripheralDevice: PeripheralDevice): RundownSource { return { type: 'nrcs', @@ -87,7 +102,7 @@ describe('Test recieved mos ingest payloads', () => { // Reset RO const roData = mockRO.roCreate() - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: parseMosString(roData.ID), mosRunningOrder: roData, isUpdateOperation: false, @@ -150,7 +165,7 @@ describe('Test recieved mos ingest payloads', () => { await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() const roData = mockRO.roCreate() - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: parseMosString(roData.ID), mosRunningOrder: roData, isUpdateOperation: false, @@ -184,7 +199,7 @@ describe('Test recieved mos ingest payloads', () => { await context.mockCollections.Rundowns.findOne({ externalId: mosTypes.mosString128.stringify(roData.ID) }) ).toBeTruthy() - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: parseMosString(roData.ID), mosRunningOrder: roData, isUpdateOperation: false, @@ -221,7 +236,7 @@ describe('Test recieved mos ingest payloads', () => { await context.mockCollections.Rundowns.findOne({ externalId: mosTypes.mosString128.stringify(roData.ID) 
}) ).toBeTruthy() - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: parseMosString(roData.ID), mosRunningOrder: roData, isUpdateOperation: false, @@ -255,7 +270,7 @@ describe('Test recieved mos ingest payloads', () => { expect(await context.mockCollections.RundownPlaylists.findOne(rundown.playlistId)).toBeTruthy() await expect( - handleRemovedRundown(context, { + handleRemovedRundownWrapped(context, { rundownExternalId: parseMosString(roData.ID), }) ).rejects.toMatchUserError(UserErrorMessage.RundownRemoveWhileActive) @@ -277,7 +292,7 @@ describe('Test recieved mos ingest payloads', () => { expect(rundown.orphaned).toBeFalsy() expect(await context.mockCollections.RundownPlaylists.findOne(rundown.playlistId)).toBeTruthy() - await handleRemovedRundown(context, { + await handleRemovedRundownWrapped(context, { rundownExternalId: parseMosString(roData.ID), }) @@ -292,7 +307,7 @@ describe('Test recieved mos ingest payloads', () => { expect(await context.mockCollections.RundownPlaylists.findOne()).toBeFalsy() await expect( - handleRemovedRundown(context, { + handleRemovedRundownWrapped(context, { rundownExternalId: parseMosString(roData.ID), }) ).rejects.toThrow(/Rundown.*not found/i) @@ -363,7 +378,7 @@ describe('Test recieved mos ingest payloads', () => { expect(rundown.status).not.toEqual(newStatus.toString()) expect((rundown.privateData as any)?.airStatus).not.toEqual(newStatus.toString()) - await handleMosRundownReadyToAir(context, { + await handleMosRundownReadyToAirWrapped(context, { rundownExternalId: rundown.externalId, status: newStatus, }) @@ -386,7 +401,7 @@ describe('Test recieved mos ingest payloads', () => { expect(rundown).toBeTruthy() expect(rundown.status).not.toEqual(newStatus.toString()) - await handleMosRundownReadyToAir(context, { + await handleMosRundownReadyToAirWrapped(context, { rundownExternalId: rundown.externalId, status: newStatus, }) @@ -405,7 +420,7 @@ describe('Test recieved mos 
ingest payloads', () => { expect(await context.mockCollections.Rundowns.findOne({ externalId: externalId })).toBeFalsy() await expect( - handleMosRundownReadyToAir(context, { + handleMosRundownReadyToAirWrapped(context, { rundownExternalId: externalId, status: newStatus, }) @@ -421,7 +436,7 @@ describe('Test recieved mos ingest payloads', () => { const newPartData = mockRO.newItem('ro1;s1;newPart1', 'SEGMENT1;new1') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s1;p3'), newStories: [newPartData], @@ -441,9 +456,9 @@ describe('Test recieved mos ingest payloads', () => { // Clean up after ourselves: const partsToRemove = await context.mockCollections.Parts.findFetch({ externalId: 'ro1;s1;newPart1' }) await context.mockCollections.Parts.remove({ _id: { $in: partsToRemove.map((p) => p._id) } }) - await context.mockCollections.IngestDataCache.remove({ + await context.mockCollections.NrcsIngestDataCache.remove({ rundownId: rundown._id, - type: IngestCacheType.PART, + type: NrcsIngestCacheType.PART, partId: { $in: partsToRemove.map((p) => p._id) }, }) }) @@ -459,7 +474,7 @@ describe('Test recieved mos ingest payloads', () => { const newPartData = mockRO.newItem('ro1;s1;newPart2', 'SEGMENT1;new2') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s1;p3'), newStories: [newPartData], @@ -485,7 +500,7 @@ describe('Test recieved mos ingest payloads', () => { const newPartData = mockRO.newItem('ro1;s1b;newPart1', 'SEGMENT1B;new1') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s2;p1'), newStories: [newPartData], @@ -498,12 +513,13 @@ describe('Test recieved mos ingest 
payloads', () => { const partMap = mockRO.segmentIdMap() partMap.splice(1, 0, { - segmentId: '9VE_IbHiHyW6VjY6Fi8fMJEgtS4_', + segmentId: 'Rjo_e_rlOh2eE8XOyVmXZCMgTNY_', + segmentName: 'SEGMENT1B', parts: [mosTypes.mosString128.stringify(newPartData.ID)], }) - partMap[2].segmentId = 'Qz1OqWVatX_W4Sp5C0m8VhTTfME_' - partMap[3].segmentId = '8GUNgE7zUulco2K3yuhJ1Fyceeo_' - partMap[4].segmentId = 'XF9ZBDI5IouvkmTbounEfoJ6ijY_' + partMap[2].segmentId = '6cEU5uY8M93lfQssMy9XaGxT23E_' + partMap[3].segmentId = 'rSEZMzZhJ55454sqsU_7TOq_DIk_' + partMap[4].segmentId = 'YXMZjMqslZFcM3K4sGelyBYJ_rA_' expect(getPartIdMap(segments, parts)).toEqual(partMap) await expectRundownToMatchSnapshot(rundown._id, true, true) @@ -520,14 +536,16 @@ describe('Test recieved mos ingest payloads', () => { const beforeStoryId = mosTypes.mosString128.create('newFakePart') await expect( - handleMosInsertStories(context, { + handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: beforeStoryId, newStories: [newPartData], replace: false, }) ).rejects.toThrow( - `Part ${mosTypes.mosString128.stringify(beforeStoryId)} in rundown ${rundown.externalId} not found` + `Part ${getMosIngestSegmentId(mosTypes.mosString128.stringify(beforeStoryId))} in rundown ${ + rundown.externalId + } not found` ) expect( @@ -546,14 +564,16 @@ describe('Test recieved mos ingest payloads', () => { const newPartData = mockRO.roCreate().Stories[0] await expect( - handleMosInsertStories(context, { + handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s2;p1'), newStories: [newPartData], replace: false, }) ).rejects.toThrow( - `Parts ${mosTypes.mosString128.stringify(newPartData.ID)} already exist in rundown ${rundown.externalId}` + `Parts ${getMosIngestSegmentId(mosTypes.mosString128.stringify(newPartData.ID))} already exist in rundown ${ + rundown.externalId + }` ) }) @@ -595,7 +615,7 @@ describe('Test 
recieved mos ingest payloads', () => { const newPartData = mockRO.newItem('ro1;s1;newPart1', 'SEGMENT1;new1') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s1;p2'), newStories: [newPartData], @@ -624,7 +644,7 @@ describe('Test recieved mos ingest payloads', () => { const newPartData = mockRO.newItem('ro1;s1;newPart2', 'SEGMENT1;new2') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s1;p3'), newStories: [newPartData], @@ -650,14 +670,16 @@ describe('Test recieved mos ingest payloads', () => { const beforeStoryId = mosTypes.mosString128.create('fakeId2') await expect( - handleMosInsertStories(context, { + handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: beforeStoryId, newStories: [newPartData], replace: true, }) ).rejects.toThrow( - `Part ${mosTypes.mosString128.stringify(beforeStoryId)} in rundown ${rundown.externalId} not found` + `Part ${getMosIngestSegmentId(mosTypes.mosString128.stringify(beforeStoryId))} in rundown ${ + rundown.externalId + } not found` ) expect( @@ -678,7 +700,7 @@ describe('Test recieved mos ingest payloads', () => { const partExternalIds = ['ro1;s3;p1', 'ro1;s3;p2'] - await handleMosDeleteStory(context, { + await handleMosDeleteStoryWrapped(context, { rundownExternalId: rundown.externalId, stories: partExternalIds.map((i) => mosTypes.mosString128.create(i)), }) @@ -704,11 +726,11 @@ describe('Test recieved mos ingest payloads', () => { const partExternalIds = ['ro1;s1;p2', 'fakeId'] await expect( - handleMosDeleteStory(context, { + handleMosDeleteStoryWrapped(context, { rundownExternalId: rundown.externalId, stories: partExternalIds.map((i) => mosTypes.mosString128.create(i)), }) - ).rejects.toThrow(`Parts fakeId in 
rundown ${rundown.externalId} were not found`) + ).rejects.toThrow(`Parts segment-fakeId in rundown ${rundown.externalId} were not found`) expect(await context.mockCollections.Parts.findFetch({ externalId: { $in: partExternalIds } })).toHaveLength(1) }) @@ -725,7 +747,7 @@ describe('Test recieved mos ingest payloads', () => { Body: [], }) - await handleMosFullStory(context, { + await handleMosFullStoryWrapped(context, { rundownExternalId: rundown.externalId, story: story, }) @@ -750,7 +772,7 @@ describe('Test recieved mos ingest payloads', () => { }) await expect( - handleMosFullStory(context, { + handleMosFullStoryWrapped(context, { rundownExternalId: rundown.externalId, story: story, }) @@ -772,13 +794,11 @@ describe('Test recieved mos ingest payloads', () => { }) await expect( - handleMosFullStory(context, { + handleMosFullStoryWrapped(context, { rundownExternalId: mosTypes.mosString128.stringify(story.RunningOrderId), story: story, }) - ).rejects.toThrow( - `handleMosFullStory: Missing MOS Rundown "${mosTypes.mosString128.stringify(story.RunningOrderId)}"` - ) + ).rejects.toThrow(`Rundown "${mosTypes.mosString128.stringify(story.RunningOrderId)}" not found`) }) test('mosRoStorySwap: Within same segment', async () => { @@ -793,7 +813,7 @@ describe('Test recieved mos ingest payloads', () => { const story0 = mosTypes.mosString128.create('ro1;s1;p2') const story1 = mosTypes.mosString128.create('ro1;s1;p3') - await handleMosSwapStories(context, { + await handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0, story1, @@ -823,7 +843,7 @@ describe('Test recieved mos ingest payloads', () => { const story0 = mosTypes.mosString128.create('ro1;s1;p1') const story1 = mosTypes.mosString128.create('ro1;s1;p3') - await handleMosSwapStories(context, { + await handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0, story1, @@ -834,7 +854,7 @@ describe('Test recieved mos ingest payloads', () => { const { segments, parts 
} = await getRundownData({ _id: rundown._id }) const partMap = mockRO.segmentIdMap() - partMap[0].segmentId = 'apDVfF5nk1_StK474hEUxLMZIag_' + partMap[0].segmentId = 'o0rZ5k7WadtZ2XSmf_c3txGILuw_' partMap[0].parts[0] = 'ro1;s1;p3' partMap[0].parts[2] = 'ro1;s1;p1' expect(getPartIdMap(segments, parts)).toEqual(partMap) @@ -843,20 +863,24 @@ describe('Test recieved mos ingest payloads', () => { }) test('mosRoStorySwap: Swap with self', async () => { + await resetOrphanedRundown() + const rundown = (await context.mockCollections.Rundowns.findOne()) as DBRundown expect(rundown).toBeTruthy() const story0 = mosTypes.mosString128.create('ro1;s1;p1') - await expect( - handleMosSwapStories(context, { - rundownExternalId: rundown.externalId, - story0, - story1: story0, - }) - ).rejects.toThrow( - `Cannot swap part ${mosTypes.mosString128.stringify(story0)} with itself in rundown ${rundown.externalId}` - ) + // Swap should happen without error + await handleMosSwapStoriesWrapped(context, { + rundownExternalId: rundown.externalId, + story0, + story1: story0, + }) + + // should match the default + const { segments, parts } = await getRundownData({ _id: rundown._id }) + const partMap = mockRO.segmentIdMap() + expect(getPartIdMap(segments, parts)).toEqual(partMap) }) test('mosRoStorySwap: Story not found', async () => { @@ -867,7 +891,7 @@ describe('Test recieved mos ingest payloads', () => { const story1 = mosTypes.mosString128.create('ro1;s1;p99') await expect( - handleMosSwapStories(context, { + handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0, story1, @@ -875,7 +899,7 @@ describe('Test recieved mos ingest payloads', () => { ).rejects.toThrow(`Story ${mosTypes.mosString128.stringify(story1)} not found in rundown ${rundown.externalId}`) await expect( - handleMosSwapStories(context, { + handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0: story1, story1: story0, @@ -895,7 +919,7 @@ describe('Test recieved mos 
ingest payloads', () => { const story0 = mosTypes.mosString128.create('ro1;s3;p1') const story1 = mosTypes.mosString128.create('ro1;s4;p1') - await handleMosSwapStories(context, { + await handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0, story1, @@ -907,7 +931,7 @@ describe('Test recieved mos ingest payloads', () => { const partMap = mockRO.segmentIdMap() partMap[1].parts.push('ro1;s4;p1') - partMap[2].segmentId = 'sLfUx9cadyquE07Vw9byoX35G9I_' + partMap[2].segmentId = 'o6BHLNEWMc9FbHBRRWMOiwQ3IN0_' partMap[2].parts.reverse() partMap.splice(3, 1) expect(getPartIdMap(segments, parts)).toEqual(partMap) @@ -927,7 +951,7 @@ describe('Test recieved mos ingest payloads', () => { const story0 = mosTypes.mosString128.create('ro1;s1;p2') const story1 = mosTypes.mosString128.create('ro1;s2;p2') - await handleMosSwapStories(context, { + await handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0, story1, @@ -935,7 +959,46 @@ describe('Test recieved mos ingest payloads', () => { expect(ensureNextPartIsValid).toHaveBeenCalledTimes(1) - // Don't care about the result here, just making sure there isnt an exception while updating the db + const { segments, parts } = await getRundownData({ _id: rundown._id }) + + const partMap = [ + { + parts: ['ro1;s1;p1'], + segmentName: 'SEGMENT1', + segmentId: 'baQfD5zawLDmJTRumGpHDH2MwaM_', + }, + { + parts: ['ro1;s2;p2'], + segmentName: 'SEGMENT2', + segmentId: 'yVemxI_brsRMvHAeFVtG2tahCgU_', + }, + { + parts: ['ro1;s1;p3'], + segmentName: 'SEGMENT1', + segmentId: 'o0rZ5k7WadtZ2XSmf_c3txGILuw_', + }, + { + parts: ['ro1;s2;p1'], + segmentName: 'SEGMENT2', + segmentId: '6cEU5uY8M93lfQssMy9XaGxT23E_', + }, + { + parts: ['ro1;s1;p2'], + segmentName: 'SEGMENT1', + segmentId: 'zz3BgLI_xxlvfTOTR55skUkKWHk_', + }, + { + parts: ['ro1;s3;p1', 'ro1;s3;p2'], + segmentName: 'SEGMENT3', + segmentId: 'rSEZMzZhJ55454sqsU_7TOq_DIk_', + }, + { + parts: ['ro1;s4;p1'], + segmentName: 'SEGMENT2', 
+ segmentId: 'YXMZjMqslZFcM3K4sGelyBYJ_rA_', + }, + ] + expect(getPartIdMap(segments, parts)).toEqual(partMap) await expectRundownToMatchSnapshot(rundown._id, true, true) }) @@ -951,7 +1014,7 @@ describe('Test recieved mos ingest payloads', () => { const story0 = 'ro1;s1;p3' - await handleMosMoveStories(context, { + await handleMosMoveStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s1;p2'), stories: [mosTypes.mosString128.create(story0)], @@ -983,7 +1046,7 @@ describe('Test recieved mos ingest payloads', () => { mosTypes.mosString128.create('ro1;s1;p3'), ] - await handleMosMoveStories(context, { + await handleMosMoveStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create(''), stories, @@ -1014,13 +1077,15 @@ describe('Test recieved mos ingest payloads', () => { ] await expect( - handleMosMoveStories(context, { + handleMosMoveStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: beforeStoryId, stories, }) ).rejects.toThrow( - `Part ${mosTypes.mosString128.stringify(beforeStoryId)} was not found in rundown ${rundown.externalId}` + `Part ${getMosIngestSegmentId(mosTypes.mosString128.stringify(beforeStoryId))} in rundown ${ + rundown.externalId + } not found` ) }) @@ -1036,13 +1101,15 @@ describe('Test recieved mos ingest payloads', () => { ] await expect( - handleMosMoveStories(context, { + handleMosMoveStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: beforeStoryId, stories, }) ).rejects.toThrow( - `Part ${mosTypes.mosString128.stringify(beforeStoryId)} was not found in rundown ${rundown.externalId}` + `Part ${getMosIngestSegmentId(mosTypes.mosString128.stringify(beforeStoryId))} in rundown ${ + rundown.externalId + } not found` ) }) @@ -1060,7 +1127,7 @@ describe('Test recieved mos ingest payloads', () => { ] await expect( - handleMosMoveStories(context, { + 
handleMosMoveStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: beforeStoryId, stories, @@ -1093,7 +1160,7 @@ describe('Test recieved mos ingest payloads', () => { expect(partsInSegmentBefore).toHaveLength(3) // This should only remove the first part in the segment. The other parts will be regenerated - await handleMosDeleteStory(context, { + await handleMosDeleteStoryWrapped(context, { rundownExternalId: rundown.externalId, stories: [mosTypes.mosString128.create(partExternalId)], }) @@ -1110,9 +1177,9 @@ describe('Test recieved mos ingest payloads', () => { expect(partsInSegmentAfter).toHaveLength(2) // The other parts in the segment should not not have changed: - expect(partsInSegmentAfter[0]).toMatchObject(_.omit(partsInSegmentBefore[1], ['segmentId', '_rank'])) + expect(partsInSegmentAfter[0]).toMatchObject(omit(partsInSegmentBefore[1], 'segmentId', '_rank')) - expect(partsInSegmentAfter[1]).toMatchObject(_.omit(partsInSegmentBefore[2], ['segmentId', '_rank'])) + expect(partsInSegmentAfter[1]).toMatchObject(omit(partsInSegmentBefore[2], 'segmentId', '_rank')) }) async function mosReplaceBasicStory( @@ -1121,7 +1188,7 @@ describe('Test recieved mos ingest payloads', () => { newStoryId: string, newStoryName: string ): Promise { - return handleMosInsertStories(context, { + return handleMosInsertStoriesWrapped(context, { rundownExternalId: runningOrderId, insertBeforeStoryId: mosTypes.mosString128.create(oldStoryId), newStories: literal>([ @@ -1148,7 +1215,7 @@ describe('Test recieved mos ingest payloads', () => { const newSegment = newSegments.find((s) => s.name === newName) if (newSegment) { const oldSegmentId = oldSegment._id - expect(oldSegmentId).not.toEqual(newSegment._id) // If the id doesn't change, then the whole test is invalid + expect(oldSegmentId).toEqual(newSegment._id) // If the id doesn't change, then the whole test is invalid oldSegment.name = newSegment.name oldSegment._id = newSegment._id oldSegment.externalId = 
newSegment.externalId @@ -1229,7 +1296,7 @@ describe('Test recieved mos ingest payloads', () => { // cleanup await handleDeactivateRundownPlaylist(context, { playlistId: rundown.playlistId, - }) + }).catch(() => null) } }) @@ -1273,7 +1340,7 @@ describe('Test recieved mos ingest payloads', () => { } // regenerate the rundown - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: rundown.externalId, mosRunningOrder: mosRO, isUpdateOperation: false, @@ -1307,7 +1374,7 @@ describe('Test recieved mos ingest payloads', () => { // cleanup await handleDeactivateRundownPlaylist(context, { playlistId: rundown.playlistId, - }) + }).catch(() => null) } }) @@ -1327,7 +1394,7 @@ describe('Test recieved mos ingest payloads', () => { roData1.ID = mosTypes.mosString128.create('Rundown1') roData1.Slug = mosTypes.mosString128.create('Test Rundown 1') ;(roData1 as any).ForcePlaylistExternalId = 'playlist1' - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: mosTypes.mosString128.stringify(roData1.ID), mosRunningOrder: roData1, isUpdateOperation: false, @@ -1338,7 +1405,7 @@ describe('Test recieved mos ingest payloads', () => { roData2.ID = mosTypes.mosString128.create('Rundown2') roData2.Slug = mosTypes.mosString128.create('Test Rundown 2') ;(roData2 as any).ForcePlaylistExternalId = 'playlist1' - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: mosTypes.mosString128.stringify(roData2.ID), mosRunningOrder: roData2, isUpdateOperation: false, @@ -1364,7 +1431,7 @@ describe('Test recieved mos ingest payloads', () => { expect(playlist.name).not.toEqual(rundown2.name) // Remove the first rundown in the playlist - await handleRemovedRundown(context, { + await handleRemovedRundownWrapped(context, { rundownExternalId: mosTypes.mosString128.stringify(roData1.ID), }) expect(await 
context.mockCollections.Rundowns.findOne(rundown1._id)).toBeFalsy() @@ -1384,7 +1451,7 @@ describe('Test recieved mos ingest payloads', () => { const mosRO = mockRO.roCreate() // regenerate the rundown - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: mosTypes.mosString128.stringify(mosRO.ID), mosRunningOrder: mosRO, isUpdateOperation: false, @@ -1401,7 +1468,7 @@ describe('Test recieved mos ingest payloads', () => { // insert a part after segment1 const newPartData = mockRO.newItem('ro1;s2a;newPart1', 'SEGMENT2pre;new1') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s2;p1'), newStories: [newPartData], @@ -1435,7 +1502,7 @@ describe('Test recieved mos ingest payloads', () => { // Replace the story with itself, but different slug const replacementPartData = mockRO.newItem('ro1;s2a;newPart1', 'SEGMENT2;new1') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: replacementPartData.ID, newStories: [replacementPartData], diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/mosStoryJobs.spec.ts b/packages/job-worker/src/ingest/mosDevice/__tests__/mosStoryJobs.spec.ts new file mode 100644 index 00000000000..e6182af5fe0 --- /dev/null +++ b/packages/job-worker/src/ingest/mosDevice/__tests__/mosStoryJobs.spec.ts @@ -0,0 +1,622 @@ +import { setupDefaultJobEnvironment } from '../../../__mocks__/context' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { + IngestChangeType, + MOS, + NrcsIngestPartChangeDetails, + NrcsIngestSegmentChangeDetailsEnum, +} from '@sofie-automation/blueprints-integration' +import { + handleMosDeleteStory, + handleMosFullStory, + handleMosInsertStories, + handleMosMoveStories, + handleMosSwapStories, +} from '../mosStoryJobs' +import 
{ IngestUpdateOperationFunction, UpdateIngestRundownChange } from '../../runOperation' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' + +function getDefaultIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + rundownSource: { type: 'http' }, + payload: undefined, + segments: [ + { + externalId: 'segment-part0', + name: 'Part 0', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 'part0', + name: 'Part 0', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-part1', + name: 'Part 1', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'part1', + name: 'Part 1', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-part2', + name: 'Part 2', + rank: 2, + payload: undefined, + parts: [ + { + externalId: 'part2', + name: 'Part 2', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-part3', + name: 'Part 3', + rank: 3, + payload: undefined, + parts: [ + { + externalId: 'part3', + name: 'Part 3', + rank: 0, + payload: undefined, + }, + ], + }, + ], + } +} + +const mosTypes = MOS.getMosTypes(false) + +describe('handleMosDeleteStory', () => { + it('no stories', () => { + const context = setupDefaultJobEnvironment() + + expect( + handleMosDeleteStory(context, { + rundownExternalId: 'rundown0', + stories: [], + }) + ).toBeNull() + }) + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosDeleteStory(context, { + rundownExternalId: 'rundown0', + stories: [mosTypes.mosString128.create('story0')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(undefined)).toThrow(/Rundown(.*)not found/) + }) + + it('missing story', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosDeleteStory(context, 
{ + rundownExternalId: 'rundown0', + stories: [mosTypes.mosString128.create('story0')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/in rundown(.*)were not found/) + }) + + it('mixed found and missing', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosDeleteStory(context, { + rundownExternalId: 'rundown0', + stories: [ + mosTypes.mosString128.create('story0'), // missing + mosTypes.mosString128.create('part1'), // exists + ], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/in rundown(.*)were not found/) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosDeleteStory(context, { + rundownExternalId: 'rundown0', + stories: [ + mosTypes.mosString128.create('part1'), // exists + ], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + ingestRundown.segments.splice(1, 1) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-part1': NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleMosFullStory', () => { + const newMosStory: MOS.IMOSROFullStory = { + ID: mosTypes.mosString128.create('part1'), + RunningOrderId: mosTypes.mosString128.create('rundown0'), + Body: [ + { + itemType: 'other', + Type: 'p', + Content: 'Hello World!', + }, + ], + } + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosFullStory(context, { + rundownExternalId: 'rundown0', + story: clone(newMosStory), + }) as 
IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(undefined)).toThrow(/Rundown(.*)not found/) + }) + + it('missing story', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosFullStory(context, { + rundownExternalId: 'rundown0', + story: { + ...clone(newMosStory), + ID: mosTypes.mosString128.create('storyX'), + }, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/Missing MOS Story(.*)in Rundown/) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosFullStory(context, { + rundownExternalId: 'rundown0', + story: clone(newMosStory), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + ingestRundown.segments[1].parts[0].payload = newMosStory + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-part1': { + partChanges: { + part1: NrcsIngestPartChangeDetails.Updated, + }, + }, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleMosInsertStories', () => { + function createMockStory(id: string, slug: string): MOS.IMOSROStory { + return { + ID: mosTypes.mosString128.create(id), + Slug: mosTypes.mosString128.create(slug), + Items: [], + } + } + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + const newStory = createMockStory('partX', 'Part X') + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [newStory], + insertBeforeStoryId: null, + replace: false, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => 
executeJob(undefined)).toThrow(/Rundown(.*)not found/) + }) + + it('no stories', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [], + insertBeforeStoryId: null, + replace: false, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeNull() + }) + + it('unknown insertBeforeStoryId', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + const newStory = createMockStory('partX', 'Part X') + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [newStory], + insertBeforeStoryId: mosTypes.mosString128.create('storyX'), + replace: false, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/Part (.*)in rundown(.*)not found/) + }) + + it('insert in middle', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + const newStory = createMockStory('partX', 'Part X') + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [newStory], + insertBeforeStoryId: mosTypes.mosString128.create('part1'), + replace: false, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + ingestRundown.segments.splice(1, 0, { + externalId: 'segment-partX', + name: 'Part X', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'partX', + name: 'Part X', + rank: 0, + payload: undefined, + }, + ], + }) + ingestRundown.segments[2].rank = 2 + ingestRundown.segments[3].rank = 3 + ingestRundown.segments[4].rank = 4 + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-partX': 
NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('insert in middle, with replace', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + const newStory = createMockStory('partX', 'Part X') + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [newStory], + insertBeforeStoryId: mosTypes.mosString128.create('part1'), + replace: true, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + ingestRundown.segments.splice(1, 1, { + externalId: 'segment-partX', + name: 'Part X', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'partX', + name: 'Part X', + rank: 0, + payload: undefined, + }, + ], + }) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-part1': NrcsIngestSegmentChangeDetailsEnum.Deleted, + 'segment-partX': NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('insert at end', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + const newStory = createMockStory('partX', 'Part X') + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [newStory], + insertBeforeStoryId: null, + replace: true, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + ingestRundown.segments.push({ + externalId: 'segment-partX', + name: 'Part X', + rank: 4, + payload: undefined, + parts: [ + { + externalId: 'partX', + name: 'Part X', + rank: 0, + payload: undefined, + }, + ], + }) + + 
expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-partX': NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleMosSwapStories', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosSwapStories(context, { + rundownExternalId: 'rundown0', + story0: mosTypes.mosString128.create('part1'), + story1: mosTypes.mosString128.create('part3'), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(undefined)).toThrow(/Rundown(.*)not found/) + }) + + it('swap with itself', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosSwapStories(context, { + rundownExternalId: 'rundown0', + story0: mosTypes.mosString128.create('part1'), + story1: mosTypes.mosString128.create('part1'), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeNull() + }) + + it('missing story0', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosSwapStories(context, { + rundownExternalId: 'rundown0', + story0: mosTypes.mosString128.create('partX'), + story1: mosTypes.mosString128.create('part3'), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/Story (.*)not found in rundown(.*)/) + }) + + it('missing story1', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosSwapStories(context, { + rundownExternalId: 'rundown0', + story0: mosTypes.mosString128.create('part1'), + story1: mosTypes.mosString128.create('partX'), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => 
executeJob(clone(ingestRundown))).toThrow(/Story (.*)not found in rundown(.*)/) + }) + + it('swap', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosSwapStories(context, { + rundownExternalId: 'rundown0', + story0: mosTypes.mosString128.create('part1'), + story1: mosTypes.mosString128.create('part3'), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + const oldPart3 = ingestRundown.segments.splice(3, 1, ingestRundown.segments[1]) + ingestRundown.segments.splice(1, 1, ...oldPart3) + ingestRundown.segments[1].rank = 1 + ingestRundown.segments[3].rank = 3 + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleMosMoveStories', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosMoveStories(context, { + rundownExternalId: 'rundown0', + insertBeforeStoryId: null, + stories: [mosTypes.mosString128.create('part3')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(undefined)).toThrow(/Rundown(.*)not found/) + }) + + it('no stories', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosMoveStories(context, { + rundownExternalId: 'rundown0', + insertBeforeStoryId: mosTypes.mosString128.create('part1'), + stories: [], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeNull() + }) + + it('missing story', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosMoveStories(context, { + rundownExternalId: 'rundown0', + insertBeforeStoryId: null, + stories: [mosTypes.mosString128.create('partX'), 
mosTypes.mosString128.create('part3')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/were not found(.*)in rundown/) + }) + + it('move to end', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosMoveStories(context, { + rundownExternalId: 'rundown0', + insertBeforeStoryId: null, + stories: [mosTypes.mosString128.create('part1')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + const oldPart1 = ingestRundown.segments.splice(1, 1) + ingestRundown.segments.push(...oldPart1) + ingestRundown.segments[1].rank = 1 + ingestRundown.segments[2].rank = 2 + ingestRundown.segments[3].rank = 3 + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('move to middle', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosMoveStories(context, { + rundownExternalId: 'rundown0', + insertBeforeStoryId: mosTypes.mosString128.create('part1'), + stories: [mosTypes.mosString128.create('part2')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + const oldPart2 = ingestRundown.segments.splice(2, 1) + ingestRundown.segments.splice(1, 0, ...oldPart2) + ingestRundown.segments[1].rank = 1 + ingestRundown.segments[2].rank = 2 + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) +}) diff --git a/packages/job-worker/src/ingest/mosDevice/diff.ts 
b/packages/job-worker/src/ingest/mosDevice/diff.ts deleted file mode 100644 index 32f42a7d5df..00000000000 --- a/packages/job-worker/src/ingest/mosDevice/diff.ts +++ /dev/null @@ -1,305 +0,0 @@ -import { JobContext } from '../../jobs' -import { ReadonlyDeep } from 'type-fest' -import { IngestModel } from '../model/IngestModel' -import { LocalIngestRundown, LocalIngestSegment } from '../ingestCache' -import { canRundownBeUpdated, getSegmentId } from '../lib' -import { calculateSegmentsFromIngestData } from '../generationSegment' -import _ = require('underscore') -import { clone, deleteAllUndefinedProperties, literal, normalizeArrayFunc } from '@sofie-automation/corelib/dist/lib' -import { SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { IngestSegment } from '@sofie-automation/blueprints-integration' -import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' -import { CommitIngestData } from '../lock' -import { IngestSegmentModel } from '../model/IngestSegmentModel' - -/** - * Update the Ids of Segments based on new Ingest data - * This assumes that no segments/parts were added or removed between the two LocalIngestRundowns provided - * @param context Context of the job being run - * @param ingestModel Ingest model for Rundown being updated - * @param oldIngestRundown Last known ingest data - * @param newIngestRundown New ingest data - * @returns Map of the SegmentId changes - */ -export function diffAndUpdateSegmentIds( - context: JobContext, - ingestModel: IngestModel, - oldIngestRundown: ReadonlyDeep, - newIngestRundown: ReadonlyDeep -): CommitIngestData['renamedSegments'] { - const span = context.startSpan('mosDevice.ingest.diffAndApplyChanges') - - const oldSegments = ingestModel.getOrderedSegments() - const oldSegmentEntries = compileSegmentEntries(oldIngestRundown.segments) - const newSegmentEntries = compileSegmentEntries(newIngestRundown.segments) - const segmentDiff = 
diffSegmentEntries(oldSegmentEntries, newSegmentEntries, oldSegments) - - // Updated segments that has had their segment.externalId changed: - const renamedSegments = applyExternalIdDiff(ingestModel, segmentDiff, false) - - span?.end() - return renamedSegments -} - -/** - * Update the Rundown for new Ingest data - * Performs a diff of the ingest data, and applies the changes including re-running blueprints on any changed segments - * @param context Context of the job being run - * @param ingestModel Ingest model for Rundown being updated - * @param newIngestRundown New ingest data (if any) - * @param oldIngestRundown Last known ingest data (if any) - * @returns Map of the SegmentId changes - */ -export async function diffAndApplyChanges( - context: JobContext, - ingestModel: IngestModel, - newIngestRundown: ReadonlyDeep | undefined, - oldIngestRundown: ReadonlyDeep | undefined - // newIngestParts: AnnotatedIngestPart[] -): Promise { - if (!newIngestRundown) throw new Error(`diffAndApplyChanges lost the new IngestRundown...`) - if (!oldIngestRundown) throw new Error(`diffAndApplyChanges lost the old IngestRundown...`) - - const rundown = ingestModel.getRundown() - if (!canRundownBeUpdated(rundown, false)) return null - - const span = context.startSpan('mosDevice.ingest.diffAndApplyChanges') - - // Fetch all existing segments: - const oldSegments = ingestModel.getOrderedSegments() - - const oldSegmentEntries = compileSegmentEntries(oldIngestRundown.segments) - const newSegmentEntries = compileSegmentEntries(newIngestRundown.segments) - const segmentDiff = diffSegmentEntries(oldSegmentEntries, newSegmentEntries, oldSegments) - - // Note: We may not need to do some of these quick updates anymore, but they are cheap so can stay for now - - // Update segment ranks: - for (const [segmentExternalId, newRank] of Object.entries(segmentDiff.onlyRankChanged)) { - const segment = ingestModel.getSegmentByExternalId(segmentExternalId) - if (segment) { - segment.setRank(newRank) - 
} - } - - // Updated segments that has had their segment.externalId changed: - const renamedSegments = applyExternalIdDiff(ingestModel, segmentDiff, true) - - // Figure out which segments need to be regenerated - const segmentsToRegenerate = Object.values(segmentDiff.added) - for (const changedSegment of Object.values(segmentDiff.changed)) { - // Rank changes are handled above - if (!segmentDiff.onlyRankChanged[changedSegment.externalId]) { - segmentsToRegenerate.push(changedSegment) - } - } - - // Create/Update segments - const changedSegmentIds = await calculateSegmentsFromIngestData( - context, - ingestModel, - _.sortBy(segmentsToRegenerate, (se) => se.rank), - null - ) - - // Remove/orphan old segments - const orphanedSegmentIds: SegmentId[] = [] - for (const segmentExternalId of Object.keys(segmentDiff.removed)) { - const segment = ingestModel.getSegmentByExternalId(segmentExternalId) - if (segment) { - // We orphan it and queue for deletion. the commit phase will complete if possible - orphanedSegmentIds.push(segment.segment._id) - segment.setOrphaned(SegmentOrphanedReason.DELETED) - - segment.removeAllParts() - } - } - - span?.end() - return literal({ - changedSegmentIds: changedSegmentIds, - removedSegmentIds: orphanedSegmentIds, // Only inform about the ones that werent renamed - renamedSegments: renamedSegments, - - removeRundown: false, - }) -} - -/** - * Apply the externalId renames from a DiffSegmentEntries - * @param ingestModel Ingest model of the rundown being updated - * @param segmentDiff Calculated Diff - * @returns Map of the SegmentId changes - */ -function applyExternalIdDiff( - ingestModel: IngestModel, - segmentDiff: Pick, - canDiscardParts: boolean -): CommitIngestData['renamedSegments'] { - // Updated segments that has had their segment.externalId changed: - const renamedSegments = new Map() - for (const [oldSegmentExternalId, newSegmentExternalId] of Object.entries(segmentDiff.externalIdChanged)) { - const oldSegmentId = 
getSegmentId(ingestModel.rundownId, oldSegmentExternalId) - const newSegmentId = getSegmentId(ingestModel.rundownId, newSegmentExternalId) - - // Track the rename - renamedSegments.set(oldSegmentId, newSegmentId) - - // If the segment doesnt exist (it should), then there isn't a segment to rename - const oldSegment = ingestModel.getSegment(oldSegmentId) - if (!oldSegment) continue - - if (ingestModel.getSegment(newSegmentId)) { - // If the new SegmentId already exists, we need to discard the old one rather than trying to merge it. - // This can only be done if the caller is expecting to regenerate Segments - const canDiscardPartsForSegment = canDiscardParts && !segmentDiff.onlyRankChanged[oldSegmentExternalId] - if (!canDiscardPartsForSegment) { - throw new Error(`Cannot merge Segments with only rank changes`) - } - - // Remove the old Segment and it's contents, the new one will be generated shortly - ingestModel.removeSegment(oldSegmentId) - } else { - // Perform the rename - ingestModel.changeSegmentId(oldSegmentId, newSegmentId) - } - } - - return renamedSegments -} - -/** - * Object of IngestSegment against their external ids - */ -export type SegmentEntries = { [segmentExternalId: string]: LocalIngestSegment } -/** - * Convert an array of IngestSegment into SegmentEntries - */ -export function compileSegmentEntries(ingestSegments: ReadonlyDeep>): SegmentEntries { - const segmentEntries: SegmentEntries = {} - - for (const ingestSegment of ingestSegments) { - if (segmentEntries[ingestSegment.externalId]) { - throw new Error(`compileSegmentEntries: Non-unique segment external ID: "${ingestSegment.externalId}"`) - } - segmentEntries[ingestSegment.externalId] = clone(ingestSegment) - } - - return segmentEntries -} - -/** - * Result of diffing two SegmentEntries - */ -export interface DiffSegmentEntries { - added: { [segmentExternalId: string]: LocalIngestSegment } - changed: { [segmentExternalId: string]: LocalIngestSegment } - removed: { [segmentExternalId: 
string]: LocalIngestSegment } - unchanged: { [segmentExternalId: string]: LocalIngestSegment } - - // Note: The objects present below are also present in the collections above - - /** Reference to segments which only had their ranks updated */ - onlyRankChanged: { [segmentExternalId: string]: number } // contains the new rank - - /** Reference to segments which has been REMOVED, but it looks like there is an ADDED segment that is closely related to the removed one */ - externalIdChanged: { [removedSegmentExternalId: string]: string } // contains the added segment's externalId -} - -/** - * Perform a diff of SegmentEntries, to calculate what has changed. - * Considers that the ids of some IngestSegments could have changed - * @param oldSegmentEntries The last known SegmentEntries - * @param newSegmentEntries The new SegmentEntries - * @param oldSegments The Segments in the DB. This allows for maintaining a stable modified timestamp, and ranks - * @returns DiffSegmentEntries describing the found changes - */ -export function diffSegmentEntries( - oldSegmentEntries: SegmentEntries, - newSegmentEntries: SegmentEntries, - oldSegments: IngestSegmentModel[] | null -): DiffSegmentEntries { - const diff: DiffSegmentEntries = { - added: {}, - changed: {}, - removed: {}, - unchanged: {}, - - onlyRankChanged: {}, - externalIdChanged: {}, - } - const oldSegmentMap: { [externalId: string]: IngestSegmentModel } | null = - oldSegments === null ? 
null : normalizeArrayFunc(oldSegments, (segment) => segment.segment.externalId) - - _.each(newSegmentEntries, (newSegmentEntry, segmentExternalId) => { - const oldSegmentEntry = oldSegmentEntries[segmentExternalId] as IngestSegment | undefined - let oldSegment: IngestSegmentModel | undefined - if (oldSegmentMap) { - oldSegment = oldSegmentMap[newSegmentEntry.externalId] - if (!oldSegment) { - // Segment has been added - diff.added[segmentExternalId] = newSegmentEntry - return - } - } - if (oldSegmentEntry) { - const modifiedIsEqual = oldSegment ? newSegmentEntry.modified === oldSegment.segment.externalModified : true - - // ensure there are no 'undefined' properties - deleteAllUndefinedProperties(oldSegmentEntry) - deleteAllUndefinedProperties(newSegmentEntry) - - // deep compare: - const ingestContentIsEqual = _.isEqual(_.omit(newSegmentEntry, 'rank'), _.omit(oldSegmentEntry, 'rank')) - const rankIsEqual = oldSegment - ? newSegmentEntry.rank === oldSegment.segment._rank - : newSegmentEntry.rank === oldSegmentEntry.rank - - // Compare the modified timestamps: - if (modifiedIsEqual && ingestContentIsEqual && rankIsEqual) { - diff.unchanged[segmentExternalId] = newSegmentEntry - } else { - // Something has changed - diff.changed[segmentExternalId] = newSegmentEntry - - // Check if it's only the rank that has changed: - if (ingestContentIsEqual && !rankIsEqual) { - diff.onlyRankChanged[segmentExternalId] = newSegmentEntry.rank - } - } - } else { - // Segment has been added - diff.added[segmentExternalId] = newSegmentEntry - } - }) - - _.each(oldSegmentEntries, (oldSegmentEntry, segmentExternalId) => { - const newSegmentEntry = newSegmentEntries[segmentExternalId] as IngestSegment | undefined - if (!newSegmentEntry) { - diff.removed[segmentExternalId] = oldSegmentEntry - } - }) - - // Handle when the externalId has change - _.each(diff.removed, (segmentEntry, segmentExternalId) => { - // try finding "it" in the added, using name - let newSegmentEntry = 
_.find(diff.added, (se) => se.name === segmentEntry.name) - if (!newSegmentEntry) { - // second try, match with any parts: - newSegmentEntry = _.find(diff.added, (se) => { - let found = false - _.each(segmentEntry.parts, (part) => { - if (found || _.find(se.parts, (p) => p.externalId === part.externalId)) { - found = true - } - }) - return found - }) - } - if (newSegmentEntry) { - diff.externalIdChanged[segmentExternalId] = newSegmentEntry.externalId - } - }) - - return diff -} diff --git a/packages/job-worker/src/ingest/mosDevice/lib.ts b/packages/job-worker/src/ingest/mosDevice/lib.ts index 07f8b0d4256..b1961a85e70 100644 --- a/packages/job-worker/src/ingest/mosDevice/lib.ts +++ b/packages/job-worker/src/ingest/mosDevice/lib.ts @@ -1,18 +1,7 @@ import { MOS } from '@sofie-automation/corelib' -import { IngestPart } from '@sofie-automation/blueprints-integration' -import { getPartId } from '../lib' -import { PartId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { IngestRundown, IngestSegment } from '@sofie-automation/blueprints-integration' import _ = require('underscore') -export function getPartIdFromMosStory(rundownId: RundownId, partMosId: MOS.IMOSString128 | string): PartId { - if (!partMosId) throw new Error('parameter partMosId missing!') - return getPartId(rundownId, typeof partMosId === 'string' ? 
partMosId : parseMosString(partMosId)) -} - -export function getSegmentExternalId(rundownId: RundownId, ingestPart: IngestPart): string { - return `${rundownId}_${ingestPart.name.split(';')[0]}_${ingestPart.externalId}` -} - export function fixIllegalObject(o: unknown): void { if (_.isArray(o)) { _.each(o, (val, _key) => { @@ -38,3 +27,37 @@ export function parseMosString(str: MOS.IMOSString128): string { if (mosTypes.mosString128.is(str)) return mosTypes.mosString128.stringify(str) return (str as any).toString() } + +export function getMosIngestSegmentId(partExternalId: string): string { + return `segment-${partExternalId}` +} + +export function updateRanksBasedOnOrder(ingestRundown: IngestRundown): void { + ingestRundown.segments.forEach((segment, i) => { + segment.rank = i + + segment.parts.forEach((part, j) => { + part.rank = j + }) + }) +} + +export function mosStoryToIngestSegment(mosStory: MOS.IMOSStory, undefinedPayload: boolean): IngestSegment { + const externalId = parseMosString(mosStory.ID) + + const name = mosStory.Slug ? parseMosString(mosStory.Slug) : '' + return { + externalId: getMosIngestSegmentId(externalId), + name: name, + rank: 0, // Set later + parts: [ + { + externalId: externalId, + name: name, + rank: 0, + payload: undefinedPayload ? 
undefined : {}, + }, + ], + payload: undefined, + } +} diff --git a/packages/job-worker/src/ingest/mosDevice/mosRundownJobs.ts b/packages/job-worker/src/ingest/mosDevice/mosRundownJobs.ts index b0f9e50a90e..445e52c0d05 100644 --- a/packages/job-worker/src/ingest/mosDevice/mosRundownJobs.ts +++ b/packages/job-worker/src/ingest/mosDevice/mosRundownJobs.ts @@ -1,5 +1,4 @@ -import { IngestPart } from '@sofie-automation/blueprints-integration' -import { PartId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { NrcsIngestRundownChangeDetails, IngestPart, IngestChangeType } from '@sofie-automation/blueprints-integration' import { literal } from '@sofie-automation/corelib/dist/lib' import { MosRundownProps, @@ -8,117 +7,95 @@ import { MosRundownReadyToAirProps, } from '@sofie-automation/corelib/dist/worker/ingest' import { JobContext } from '../../jobs' -import { getCurrentTime } from '../../lib' -import _ = require('underscore') -import { LocalIngestRundown } from '../ingestCache' -import { getRundownId, getPartId, canRundownBeUpdated } from '../lib' -import { runIngestJob, CommitIngestData, runWithRundownLock } from '../lock' -import { diffAndUpdateSegmentIds } from './diff' -import { parseMosString } from './lib' -import { groupedPartsToSegments, groupIngestParts, storiesToIngestParts } from './mosToIngest' -import { updateRundownFromIngestData, updateRundownMetadataFromIngestData } from '../generationRundown' +import { getRundownId, canRundownBeUpdated } from '../lib' +import { CommitIngestData, runWithRundownLock } from '../lock' +import { mosStoryToIngestSegment, parseMosString, updateRanksBasedOnOrder } from './lib' +import { GenerateRundownMode, updateRundownFromIngestData } from '../generationRundown' +import { IngestUpdateOperationFunction } from '../runOperation' +import { IngestModel } from '../model/IngestModel' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { 
SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' /** * Insert or update a mos rundown */ -export async function handleMosRundownData(context: JobContext, data: MosRundownProps): Promise { +export function handleMosRundownData( + _context: JobContext, + data: MosRundownProps +): IngestUpdateOperationFunction | null { // Create or update a rundown (ie from rundownCreate or rundownList) if (parseMosString(data.mosRunningOrder.ID) !== data.rundownExternalId) throw new Error('mosRunningOrder.ID and rundownExternalId mismatch!') - await runIngestJob( - context, - data, - (ingestRundown) => { - const rundownId = getRundownId(context.studioId, data.rundownExternalId) - const parts = _.compact( - storiesToIngestParts(context, rundownId, data.mosRunningOrder.Stories || [], data.isUpdateOperation, []) - ) - const groupedStories = groupIngestParts(parts) - - // If this is a reload of a RO, then use cached data to make the change more seamless - if (data.isUpdateOperation && ingestRundown) { - const partCacheMap = new Map() - for (const segment of ingestRundown.segments) { - for (const part of segment.parts) { - partCacheMap.set(getPartId(rundownId, part.externalId), part) - } - } - - for (const annotatedPart of parts) { - const cached = partCacheMap.get(annotatedPart.partId) - if (cached && !annotatedPart.ingest.payload) { - annotatedPart.ingest.payload = cached.payload - } + return (ingestRundown) => { + const ingestSegments = (data.mosRunningOrder.Stories || []).map((story) => + mosStoryToIngestSegment(story, data.isUpdateOperation) + ) + + // If this is a reload of a RO, then use cached data to make the change more seamless + if (data.isUpdateOperation && ingestRundown) { + const partCacheMap = new Map() + for (const segment of ingestRundown.segments) { + for (const part of segment.parts) { + partCacheMap.set(part.externalId, part) } } - const ingestSegments = groupedPartsToSegments(rundownId, groupedStories) - - return 
literal({ - externalId: data.rundownExternalId, - name: parseMosString(data.mosRunningOrder.Slug), - type: 'mos', - segments: ingestSegments, - payload: data.mosRunningOrder, - modified: getCurrentTime(), - }) - }, - async (context, ingestModel, newIngestRundown, oldIngestRundown) => { - if (!newIngestRundown) throw new Error(`handleMosRundownData lost the IngestRundown...`) - - if (!canRundownBeUpdated(ingestModel.rundown, !data.isUpdateOperation)) return null - - let renamedSegments: CommitIngestData['renamedSegments'] = null - if (ingestModel.rundown && oldIngestRundown) { - // If we already have a rundown, update any modified segment ids - renamedSegments = diffAndUpdateSegmentIds(context, ingestModel, oldIngestRundown, newIngestRundown) - } + for (const newIngestSegment of ingestSegments) { + const ingestPart = newIngestSegment.parts[0] + if (!ingestPart) continue - const res = await updateRundownFromIngestData( - context, - ingestModel, - newIngestRundown, - !data.isUpdateOperation, - data.rundownSource - ) - if (res) { - return { - ...res, - renamedSegments: renamedSegments, + const cached = partCacheMap.get(ingestPart.externalId) + if (cached && !ingestPart.payload) { + ingestPart.payload = cached.payload } - } else { - return null } } - ) + + const newIngestRundown = literal({ + externalId: data.rundownExternalId, + name: parseMosString(data.mosRunningOrder.Slug), + type: 'mos', + segments: ingestSegments, + payload: data.mosRunningOrder, + rundownSource: data.rundownSource, + }) + updateRanksBasedOnOrder(newIngestRundown) + + return { + ingestRundown: newIngestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } + } } /** * Update the payload of a mos rundown, without changing any parts or segments */ -export async function handleMosRundownMetadata(context: JobContext, data: MosRundownMetadataProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if 
(ingestRundown) { - ingestRundown.payload = _.extend(ingestRundown.payload, data.mosRunningOrderBase) - ingestRundown.modified = getCurrentTime() - +export function handleMosRundownMetadata( + _context: JobContext, + data: MosRundownMetadataProps +): IngestUpdateOperationFunction | null { + return (ingestRundown) => { + if (ingestRundown) { + ingestRundown.payload = Object.assign(ingestRundown.payload as object, data.mosRunningOrderBase) + + return { // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Payload, + }, } - }, - async (context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error(`handleMosRundownMetadata lost the IngestRundown...`) - - return updateRundownMetadataFromIngestData(context, ingestModel, ingestRundown, data.rundownSource) + } else { + throw new Error(`Rundown "${data.rundownExternalId}" not found`) } - ) + } } /** @@ -143,29 +120,18 @@ export async function handleMosRundownStatus(context: JobContext, data: MosRundo /** * Update the ready to air state of a mos rundown */ -export async function handleMosRundownReadyToAir(context: JobContext, data: MosRundownReadyToAirProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - // No change - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error(`handleMosRundownReadyToAir lost the IngestRundown...`) - - if (!ingestModel.rundown || ingestModel.rundown.airStatus === data.status) return null +export async function handleMosRundownReadyToAir( + context: JobContext, + data: MosRundownReadyToAirProps, + ingestModel: IngestModel, + ingestRundown: SofieIngestRundownWithSource +): Promise { + if (!ingestModel.rundown 
|| ingestModel.rundown.airStatus === data.status) return null - // If rundown is orphaned, then it should be ignored - if (ingestModel.rundown.orphaned) return null + // If rundown is orphaned, then it should be ignored + if (ingestModel.rundown.orphaned) return null - ingestModel.setRundownAirStatus(data.status) + ingestModel.setRundownAirStatus(data.status) - return updateRundownMetadataFromIngestData(context, ingestModel, ingestRundown, ingestModel.rundown.source) - } - ) + return updateRundownFromIngestData(context, ingestModel, ingestRundown, GenerateRundownMode.MetadataChange) } diff --git a/packages/job-worker/src/ingest/mosDevice/mosStoryJobs.ts b/packages/job-worker/src/ingest/mosDevice/mosStoryJobs.ts index 2e4cac9be03..d63e2a382a0 100644 --- a/packages/job-worker/src/ingest/mosDevice/mosStoryJobs.ts +++ b/packages/job-worker/src/ingest/mosDevice/mosStoryJobs.ts @@ -1,4 +1,3 @@ -import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { MosDeleteStoryProps, MosFullStoryProps, @@ -7,298 +6,299 @@ import { MosSwapStoryProps, } from '@sofie-automation/corelib/dist/worker/ingest' import { logger } from '../../logging' -import _ = require('underscore') import { JobContext } from '../../jobs' -import { updateSegmentFromIngestData } from '../generationSegment' -import { LocalIngestRundown } from '../ingestCache' -import { getRundownId } from '../lib' -import { runIngestJob } from '../lock' -import { diffAndApplyChanges } from './diff' -import { fixIllegalObject, parseMosString } from './lib' -import { AnnotatedIngestPart, makeChangeToIngestParts, storiesToIngestParts } from './mosToIngest' - -function getAnnotatedIngestParts(context: JobContext, ingestRundown: LocalIngestRundown): AnnotatedIngestPart[] { - const span = context.startSpan('mosDevice.ingest.getAnnotatedIngestParts') - const ingestParts: AnnotatedIngestPart[] = [] - _.each(ingestRundown.segments, (s) => { - _.each(s.parts, (p) => { - ingestParts.push({ - externalId: 
p.externalId, - partId: protectString(''), // Not used - segmentName: s.name, - ingest: p, - }) - }) - }) - - span?.end() - return ingestParts -} +import { + fixIllegalObject, + getMosIngestSegmentId, + mosStoryToIngestSegment, + parseMosString, + updateRanksBasedOnOrder, +} from './lib' +import { + IngestChangeType, + IngestSegment, + MOS, + NrcsIngestPartChangeDetails, + NrcsIngestSegmentChangeDetails, + NrcsIngestSegmentChangeDetailsEnum, +} from '@sofie-automation/blueprints-integration' +import { IngestUpdateOperationFunction } from '../runOperation' +import { normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' /** * Update the payload of a mos story */ -export async function handleMosFullStory(context: JobContext, data: MosFullStoryProps): Promise { +export function handleMosFullStory( + _context: JobContext, + data: MosFullStoryProps +): IngestUpdateOperationFunction | null { fixIllegalObject(data.story) const partExternalId = parseMosString(data.story.ID) - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestPart = ingestRundown.segments - .map((s) => s.parts) - .flat() - .find((p) => p.externalId === partExternalId) - if (!ingestPart) { - throw new Error( - `handleMosFullStory: Missing MOS Story "${partExternalId}" in Rundown ingest data for "${data.rundownExternalId}"` - ) - } - - // TODO - can the name change during a fullStory? If so then we need to be sure to update the segment groupings too - // ingestPart.name = story.Slug ? 
parseMosString(story.Slug) : '' - ingestPart.payload = data.story - - // We modify in-place - return ingestRundown - } else { - throw new Error(`handleMosFullStory: Missing MOS Rundown "${data.rundownExternalId}"`) - } - }, - async (context, ingestModel, ingestRundown) => { - const ingestSegment = ingestRundown?.segments?.find((s) => - s.parts.find((p) => p.externalId === partExternalId) + return (ingestRundown: IngestRundownWithSource | undefined) => { + if (!ingestRundown) { + throw new Error(`Rundown "${data.rundownExternalId}" not found`) + } + + // It appears that the name can't change during a fullStory. (based on a few years of usage) + // If it can then we need to be sure to update the segment groupings too + + const segmentExternalId = getMosIngestSegmentId(partExternalId) + + const ingestSegment = ingestRundown.segments.find((s) => s.externalId === segmentExternalId) + const ingestPart = ingestSegment?.parts.find((p) => p.externalId === partExternalId) + + if (!ingestPart) + // Part was not found + throw new Error( + `handleMosFullStory: Missing MOS Story "${partExternalId}" in Rundown ingest data for "${data.rundownExternalId}"` ) - if (!ingestSegment) throw new Error(`IngestSegment for story "${partExternalId}" is missing!`) - return updateSegmentFromIngestData(context, ingestModel, ingestSegment, false) + + // We modify in-place + ingestPart.payload = data.story + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [segmentExternalId]: { + partChanges: { + [ingestPart.externalId]: NrcsIngestPartChangeDetails.Updated, + }, + }, + }, + }, } - ) + } } /** * Delete a mos story */ -export async function handleMosDeleteStory(context: JobContext, data: MosDeleteStoryProps): Promise { - if (data.stories.length === 0) return - - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestParts = getAnnotatedIngestParts(context, ingestRundown) - const 
ingestPartIds = new Set(ingestParts.map((part) => part.externalId)) - - const storyIds = data.stories.map(parseMosString) - - logger.debug(`handleMosDeleteStory storyIds: [${storyIds.join(',')}]`) - - const missingIds = storyIds.filter((id) => !ingestPartIds.has(id)) - if (missingIds.length > 0) { - throw new Error( - `Parts ${missingIds.join(', ')} in rundown ${data.rundownExternalId} were not found` - ) - } - - const rundownId = getRundownId(context.studioId, data.rundownExternalId) - ingestRundown.segments = makeChangeToIngestParts(context, rundownId, ingestParts, (rundownParts) => { - const storyIdsSet = new Set(storyIds) - const filteredParts = rundownParts.filter((p) => !storyIdsSet.has(p.externalId)) - - logger.debug( - `handleMosDeleteStory, new part count ${filteredParts.length} (was ${rundownParts.length})` - ) - - return filteredParts - }) - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - diffAndApplyChanges - ) +export function handleMosDeleteStory( + _context: JobContext, + data: MosDeleteStoryProps +): IngestUpdateOperationFunction | null { + if (data.stories.length === 0) return null + + return (ingestRundown: IngestRundownWithSource | undefined) => { + if (!ingestRundown) { + throw new Error(`Rundown "${data.rundownExternalId}" not found`) + } + + const storyIdsToDelete = data.stories.map(parseMosString) + const segmentIdsToDelete = storyIdsToDelete.map(getMosIngestSegmentId) + + logger.debug(`handleMosDeleteStory storyIds: [${storyIdsToDelete.join(',')}]`) + + const ingestSegmentIds = new Set(ingestRundown.segments.map((segment) => segment.externalId)) + + const missingIds = segmentIdsToDelete.filter((id) => !ingestSegmentIds.has(id)) + if (missingIds.length > 0) { + throw new Error(`Parts ${missingIds.join(', ')} in rundown ${data.rundownExternalId} were not found`) + } + + // Remove any segments + const segmentIdsToDeleteSet = new Set(segmentIdsToDelete) + 
ingestRundown.segments = ingestRundown.segments.filter( + (segment) => !segmentIdsToDeleteSet.has(segment.externalId) + ) + + // compute changes + const segmentChanges: Record = {} + for (const segmentId of segmentIdsToDelete) { + segmentChanges[segmentId] = NrcsIngestSegmentChangeDetailsEnum.Deleted + } + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges, + }, + } + } } /** * Insert a mos story before the referenced existing story */ -export async function handleMosInsertStories(context: JobContext, data: MosInsertStoryProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestParts = getAnnotatedIngestParts(context, ingestRundown) - - // The part of which we are about to insert stories after - const insertBeforePartExternalId = data.insertBeforeStoryId - ? parseMosString(data.insertBeforeStoryId) || '' - : '' - const insertIndex = !insertBeforePartExternalId // insert last - ? 
ingestParts.length - : ingestParts.findIndex((p) => p.externalId === insertBeforePartExternalId) - if (insertIndex === -1) { - throw new Error(`Part ${insertBeforePartExternalId} in rundown ${data.rundownExternalId} not found`) - } - - const rundownId = getRundownId(context.studioId, data.rundownExternalId) - const newParts = storiesToIngestParts( - context, - rundownId, - data.newStories || [], - true, - ingestParts - ).filter( - (p): p is AnnotatedIngestPart => !!p // remove falsy values from array - ) - - ingestRundown.segments = makeChangeToIngestParts( - context, - rundownId, - ingestParts, - (ingestPartsToModify) => { - const modifiedIngestParts = [...ingestPartsToModify] // clone - - if (data.replace) { - modifiedIngestParts.splice(insertIndex, 1) // Replace the previous part with new parts - } - - const newPartExtenalIds = new Set(newParts.map((part) => part.externalId)) - const collidingPartIds = modifiedIngestParts - .filter((part) => newPartExtenalIds.has(part.externalId)) - .map((part) => part.externalId) - - if (collidingPartIds.length > 0) { - throw new Error( - `Parts ${collidingPartIds.join(', ')} already exist in rundown ${ - data.rundownExternalId - }` - ) - } - // Update parts list - modifiedIngestParts.splice(insertIndex, 0, ...newParts) - - return modifiedIngestParts - } - ) - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - diffAndApplyChanges - ) +export function handleMosInsertStories( + _context: JobContext, + data: MosInsertStoryProps +): IngestUpdateOperationFunction | null { + if (data.newStories.length === 0) return null + + return (ingestRundown: IngestRundownWithSource | undefined) => { + if (!ingestRundown) { + throw new Error(`Rundown "${data.rundownExternalId}" not found`) + } + + const newIngestSegments = data.newStories.map((story) => mosStoryToIngestSegment(story, true)) + + // The part of which we are about to insert stories after + const 
insertBeforeSegmentExternalId = storyIdToSegmentExternalId(data.insertBeforeStoryId) + const insertIndex = insertBeforeSegmentExternalId // insert last + ? ingestRundown.segments.findIndex((p) => p.externalId === insertBeforeSegmentExternalId) + : ingestRundown.segments.length + if (insertIndex === -1) { + throw new Error(`Part ${insertBeforeSegmentExternalId} in rundown ${data.rundownExternalId} not found`) + } + + const oldSegmentIds = new Set(ingestRundown.segments.map((s) => s.externalId)) + // Allow replacing with itself + if (data.replace && insertBeforeSegmentExternalId) oldSegmentIds.delete(insertBeforeSegmentExternalId) + + const duplicateSegments = newIngestSegments.filter((segment) => oldSegmentIds.has(segment.externalId)) + if (duplicateSegments.length > 0) { + throw new Error( + `Parts ${duplicateSegments.map((s) => s.externalId).join(', ')} already exist in rundown ${ + data.rundownExternalId + }` + ) + } + + // Perform the change + ingestRundown.segments.splice(insertIndex, data.replace ? 
1 : 0, ...newIngestSegments) + updateRanksBasedOnOrder(ingestRundown) + + const segmentChanges: Record = {} + for (const segment of newIngestSegments) { + segmentChanges[segment.externalId] = NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated + } + if (data.replace && insertBeforeSegmentExternalId && !segmentChanges[insertBeforeSegmentExternalId]) { + segmentChanges[insertBeforeSegmentExternalId] = NrcsIngestSegmentChangeDetailsEnum.Deleted + } + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: segmentChanges, + segmentOrderChanged: true, + }, + } + } } /** * Swap positions of two mos stories */ -export async function handleMosSwapStories(context: JobContext, data: MosSwapStoryProps): Promise { +export function handleMosSwapStories( + _context: JobContext, + data: MosSwapStoryProps +): IngestUpdateOperationFunction | null { const story0Str = parseMosString(data.story0) const story1Str = parseMosString(data.story1) - if (story0Str === story1Str) { - throw new Error(`Cannot swap part ${story0Str} with itself in rundown ${data.rundownExternalId}`) - } - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestParts = getAnnotatedIngestParts(context, ingestRundown) - - const rundownId = getRundownId(context.studioId, data.rundownExternalId) - ingestRundown.segments = makeChangeToIngestParts(context, rundownId, ingestParts, (rundownParts) => { - const story0Index = rundownParts.findIndex((p) => p.externalId === story0Str) - if (story0Index === -1) { - throw new Error(`Story ${story0Str} not found in rundown ${data.rundownExternalId}`) - } - const story1Index = rundownParts.findIndex((p) => p.externalId === story1Str) - if (story1Index === -1) { - throw new Error(`Story ${story1Str} not found in rundown ${data.rundownExternalId}`) - } - const tmp = rundownParts[story0Index] - rundownParts[story0Index] = rundownParts[story1Index] - rundownParts[story1Index] = 
tmp - - return rundownParts - }) - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - diffAndApplyChanges - ) + // If the stories are the same, we don't need to do anything + if (story0Str === story1Str) return null + + return (ingestRundown: IngestRundownWithSource | undefined) => { + if (!ingestRundown) { + throw new Error(`Rundown "${data.rundownExternalId}" not found`) + } + + const segment0Id = getMosIngestSegmentId(parseMosString(data.story0)) + const story0Index = ingestRundown.segments.findIndex((s) => s.externalId === segment0Id) + if (story0Index === -1) { + throw new Error(`Story ${story0Str} not found in rundown ${data.rundownExternalId}`) + } + + const segment1Id = getMosIngestSegmentId(parseMosString(data.story1)) + const story1Index = ingestRundown.segments.findIndex((s) => s.externalId === segment1Id) + if (story1Index === -1) { + throw new Error(`Story ${story1Str} not found in rundown ${data.rundownExternalId}`) + } + + // Fetch the values + const story0Segment = ingestRundown.segments[story0Index] + const story1Segment = ingestRundown.segments[story1Index] + + // Store the values + ingestRundown.segments[story0Index] = story1Segment + ingestRundown.segments[story1Index] = story0Segment + + updateRanksBasedOnOrder(ingestRundown) + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: true, + }, + } + } } /** * Move a list of mos stories */ -export async function handleMosMoveStories(context: JobContext, data: MosMoveStoryProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestParts = getAnnotatedIngestParts(context, ingestRundown) - - // Get story data - const storyIds = data.stories.map(parseMosString) - - const rundownId = getRundownId(context.studioId, data.rundownExternalId) - ingestRundown.segments = 
makeChangeToIngestParts(context, rundownId, ingestParts, (rundownParts) => { - // Extract the parts-to-be-moved: - const movingParts = _.sortBy( - rundownParts.filter((p) => storyIds.indexOf(p.externalId) !== -1), - (p) => storyIds.indexOf(p.externalId) - ) - const filteredParts = rundownParts.filter((p) => storyIds.indexOf(p.externalId) === -1) - - // Ensure all stories to move were found - const movingIds = _.map(movingParts, (p) => p.externalId) - const missingIds = _.filter(storyIds, (id) => movingIds.indexOf(id) === -1) - if (missingIds.length > 0) { - throw new Error( - `Parts ${missingIds.join(', ')} were not found in rundown ${data.rundownExternalId}` - ) - } - - // Find insert point - const insertBeforePartExternalId = data.insertBeforeStoryId - ? parseMosString(data.insertBeforeStoryId) || '' - : '' - const insertIndex = !insertBeforePartExternalId // insert last - ? filteredParts.length - : filteredParts.findIndex((p) => p.externalId === insertBeforePartExternalId) - if (insertIndex === -1) { - throw new Error( - `Part ${insertBeforePartExternalId} was not found in rundown ${data.rundownExternalId}` - ) - } - - // Reinsert parts - filteredParts.splice(insertIndex, 0, ...movingParts) - - return filteredParts - }) - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - diffAndApplyChanges - ) +export function handleMosMoveStories( + _context: JobContext, + data: MosMoveStoryProps +): IngestUpdateOperationFunction | null { + if (data.stories.length === 0) return null + + return (ingestRundown: IngestRundownWithSource | undefined) => { + if (!ingestRundown) { + throw new Error(`Rundown "${data.rundownExternalId}" not found`) + } + + const oldIngestSegmentMap = normalizeArrayToMap(ingestRundown.segments, 'externalId') + + const moveStoryIds = data.stories.map(parseMosString) + + const moveIngestSegments: IngestSegment[] = [] + const missingIds: string[] = [] + for (const storyId 
of moveStoryIds) { + const segment = oldIngestSegmentMap.get(getMosIngestSegmentId(storyId)) + if (segment) moveIngestSegments.push(segment) + else missingIds.push(storyId) + } + + if (missingIds.length > 0) { + throw new Error(`Parts ${missingIds.join(', ')} were not found in rundown ${data.rundownExternalId}`) + } + + // remove existing items + const moveIngestSegmentIds = moveIngestSegments.map((s) => s.externalId) + ingestRundown.segments = ingestRundown.segments.filter((s) => !moveIngestSegmentIds.includes(s.externalId)) + + // The part of which we are about to insert stories after + const insertBeforeSegmentExternalId = storyIdToSegmentExternalId(data.insertBeforeStoryId) + const insertIndex = insertBeforeSegmentExternalId // insert last + ? ingestRundown.segments.findIndex((p) => p.externalId === insertBeforeSegmentExternalId) + : ingestRundown.segments.length + if (insertIndex === -1) { + throw new Error(`Part ${insertBeforeSegmentExternalId} in rundown ${data.rundownExternalId} not found`) + } + + // Perform the change + ingestRundown.segments.splice(insertIndex, 0, ...moveIngestSegments) + updateRanksBasedOnOrder(ingestRundown) + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: true, + }, + } + } +} + +function storyIdToSegmentExternalId(storyId: MOS.IMOSString128 | null | undefined): string | undefined { + if (!storyId) return undefined + const partExternalId = parseMosString(storyId) + if (!partExternalId) return undefined + return getMosIngestSegmentId(partExternalId) } diff --git a/packages/job-worker/src/ingest/mosDevice/mosToIngest.ts b/packages/job-worker/src/ingest/mosDevice/mosToIngest.ts deleted file mode 100644 index 3e909a81e72..00000000000 --- a/packages/job-worker/src/ingest/mosDevice/mosToIngest.ts +++ /dev/null @@ -1,135 +0,0 @@ -import { MOS } from '@sofie-automation/corelib' -import { PartId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { 
normalizeArray, literal } from '@sofie-automation/corelib/dist/lib' -import _ = require('underscore') -import { JobContext } from '../../jobs' -import { getCurrentTime } from '../../lib' -import { LocalIngestPart, LocalIngestSegment } from '../ingestCache' -import { parseMosString, getPartIdFromMosStory, getSegmentExternalId } from './lib' - -export interface AnnotatedIngestPart { - externalId: string - partId: PartId - segmentName: string - ingest: LocalIngestPart -} -export function storiesToIngestParts( - context: JobContext, - rundownId: RundownId, - stories: MOS.IMOSStory[], - undefinedPayload: boolean, - existingIngestParts: AnnotatedIngestPart[] -): (AnnotatedIngestPart | null)[] { - const span = context.startSpan('ingest.storiesToIngestParts') - - const existingIngestPartsMap = normalizeArray(existingIngestParts, 'externalId') - - const parts = stories.map((s, i) => { - if (!s) return null - - const externalId = parseMosString(s.ID) - const existingIngestPart = existingIngestPartsMap[externalId] - - const name = s.Slug ? parseMosString(s.Slug) : '' - return { - externalId: externalId, - partId: getPartIdFromMosStory(rundownId, s.ID), - segmentName: name.split(';')[0], - ingest: literal({ - externalId: parseMosString(s.ID), - name: name, - rank: i, - payload: undefinedPayload ? undefined : {}, - modified: existingIngestPart ? 
existingIngestPart.ingest.modified : getCurrentTime(), - }), - } - }) - - span?.end() - return parts -} -/** Group IngestParts together into something that could be Segments */ -export function groupIngestParts(parts: AnnotatedIngestPart[]): { name: string; parts: LocalIngestPart[] }[] { - const groupedParts: { name: string; parts: LocalIngestPart[] }[] = [] - _.each(parts, (part) => { - const lastSegment = _.last(groupedParts) - if (lastSegment && lastSegment.name === part.segmentName) { - lastSegment.parts.push(part.ingest) - } else { - groupedParts.push({ name: part.segmentName, parts: [part.ingest] }) - } - }) - - // Ensure ranks are correct - _.each(groupedParts, (group) => { - for (let i = 0; i < group.parts.length; i++) { - group.parts[i].rank = i - } - }) - - return groupedParts -} -export function groupedPartsToSegments( - rundownId: RundownId, - groupedParts: { name: string; parts: LocalIngestPart[] }[] -): LocalIngestSegment[] { - return _.map(groupedParts, (grp, i) => { - return literal({ - externalId: getSegmentExternalId(rundownId, grp.parts[0]), - name: grp.name, - rank: i, - parts: grp.parts, - modified: Math.max(...grp.parts.map((p) => p.modified)), // pick the latest - }) - }) -} - -/** Takes a list of ingestParts, modify it, then output them grouped together into ingestSegments, keeping track of the modified property */ -export function makeChangeToIngestParts( - context: JobContext, - rundownId: RundownId, - ingestParts: AnnotatedIngestPart[], - modifyFunction: (ingestParts: AnnotatedIngestPart[]) => AnnotatedIngestPart[] -): LocalIngestSegment[] { - const span = context.startSpan('mosDevice.ingest.makeChangeToIngestParts') - - // Before making the modification to ingestParts, create a list of segments from the original data, to use for calculating the - // .modified property below. 
- const referenceIngestSegments = groupPartsIntoIngestSegments(rundownId, ingestParts) - - const modifiedParts = modifyFunction(ingestParts) - - // Compare to reference, to make sure that ingestSegment.modified is updated in case of a change - const newIngestSegments = groupPartsIntoIngestSegments(rundownId, modifiedParts) - - _.each(newIngestSegments, (ingestSegment) => { - if (!ingestSegment.modified) { - ingestSegment.modified = getCurrentTime() - } else { - const ref = referenceIngestSegments.find((s) => s.externalId === ingestSegment.externalId) - if (ref) { - if (ref.parts.length !== ingestSegment.parts.length) { - // A part has been added, or removed - ingestSegment.modified = getCurrentTime() - } else { - // No obvious change. - // (If an individual part has been updated, the segment.modified property has already been updated anyway) - } - } else { - // The reference doesn't exist (can happen for example if a segment has been merged, or split into two) - ingestSegment.modified = getCurrentTime() - } - } - }) - - span?.end() - return newIngestSegments -} -function groupPartsIntoIngestSegments( - rundownId: RundownId, - newIngestParts: AnnotatedIngestPart[] -): LocalIngestSegment[] { - // Group the parts and make them into Segments: - const newGroupedParts = groupIngestParts(newIngestParts) - return groupedPartsToSegments(rundownId, newGroupedParts) -} diff --git a/packages/job-worker/src/ingest/nrcsIngestCache.ts b/packages/job-worker/src/ingest/nrcsIngestCache.ts new file mode 100644 index 00000000000..12e996cb431 --- /dev/null +++ b/packages/job-worker/src/ingest/nrcsIngestCache.ts @@ -0,0 +1,242 @@ +import { RundownId, SegmentId, NrcsIngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + NrcsIngestDataCacheObj, + NrcsIngestCacheType, + NrcsIngestDataCacheObjRundown, + NrcsIngestDataCacheObjSegment, + NrcsIngestDataCacheObjPart, + IngestRundownWithSource, +} from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' 
+import { ProtectedString, protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' +import _ = require('underscore') +import { IngestPart, IngestSegment } from '@sofie-automation/blueprints-integration' +import { JobContext } from '../jobs' +import { getPartId, getSegmentId } from './lib' +import { SetOptional } from 'type-fest' +import { groupByToMap, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import { AnyBulkWriteOperation } from 'mongodb' +import { diffAndReturnLatestObjects } from './model/implementation/utils' +import { ICollection } from '../db' +import { getCurrentTime } from '../lib' + +export class NrcsIngestRundownDataCache { + readonly #changedDocumentIds = new Set() + + private constructor( + private readonly context: JobContext, + private readonly collection: ICollection, + private readonly rundownId: RundownId, + private documents: NrcsIngestDataCacheObj[] + ) {} + + static async create(context: JobContext, rundownId: RundownId): Promise { + const docs = await context.directCollections.NrcsIngestDataCache.findFetch({ rundownId }) + + return new NrcsIngestRundownDataCache(context, context.directCollections.NrcsIngestDataCache, rundownId, docs) + } + + /** + * Fetch the IngestRundown contained in the cache + * Note: This does not deep clone the objects, so the returned object should not be modified + */ + fetchRundown(): IngestRundownWithSource | undefined { + const span = this.context.startSpan('ingest.ingestCache.loadCachedRundownData') + + const cachedRundown = this.documents.find( + (e): e is NrcsIngestDataCacheObjRundown => e.type === NrcsIngestCacheType.RUNDOWN + ) + if (!cachedRundown) { + span?.end() + return undefined + } + + const ingestRundown: IngestRundownWithSource = { + ...cachedRundown.data, + segments: [], + } + + const hasSegmentId = ( + obj: NrcsIngestDataCacheObj + ): obj is NrcsIngestDataCacheObjSegment | NrcsIngestDataCacheObjPart => { + return !!obj.segmentId + } + + const 
segmentMap = groupByToMap(this.documents.filter(hasSegmentId), 'segmentId') + for (const objs of segmentMap.values()) { + const segmentEntry = objs.find( + (e): e is NrcsIngestDataCacheObjSegment => e.type === NrcsIngestCacheType.SEGMENT + ) + if (segmentEntry) { + const ingestSegment: IngestSegment = { + ...segmentEntry.data, + parts: [], + } + + for (const entry of objs) { + if (entry.type === NrcsIngestCacheType.PART) { + ingestSegment.parts.push(entry.data) + } + } + + ingestSegment.parts = _.sortBy(ingestSegment.parts, (s) => s.rank) + ingestRundown.segments.push(ingestSegment) + } + } + + ingestRundown.segments = _.sortBy(ingestRundown.segments, (s) => s.rank) + + span?.end() + return ingestRundown + } + + /** + * Replace the contents of the cache with the given IngestRundown + * This will diff and replace the documents in the cache + * @param ingestRundown The new IngestRundown to store in the cache + */ + replace(ingestRundown: IngestRundownWithSource): void { + const generator = new RundownIngestDataCacheGenerator(this.rundownId) + const cacheEntries: NrcsIngestDataCacheObj[] = generator.generateCacheForRundown(ingestRundown) + + this.documents = diffAndReturnLatestObjects(this.#changedDocumentIds, this.documents, cacheEntries) + } + + /** + * Delete the contents of the cache + */ + delete(): void { + // Mark each document for deletion + for (const doc of this.documents) { + this.#changedDocumentIds.add(doc._id) + } + + this.documents = [] + } + + /** + * Write any changes in the cache to the database + */ + async saveToDatabase(): Promise { + if (this.#changedDocumentIds.size === 0) return + + const documentsMap = normalizeArrayToMap(this.documents, '_id') + + const modifiedTime = getCurrentTime() + + const updates: AnyBulkWriteOperation[] = [] + const removedIds: NrcsIngestDataCacheObjId[] = [] + for (const changedId of this.#changedDocumentIds) { + const newDoc = documentsMap.get(changedId) + if (!newDoc) { + removedIds.push(changedId) + } else { + 
updates.push({ + replaceOne: { + filter: { + _id: changedId, + }, + replacement: { + ...newDoc, + modified: modifiedTime, + }, + upsert: true, + }, + }) + } + } + + if (removedIds.length) { + updates.push({ + deleteMany: { + filter: { + _id: { $in: removedIds as any }, + }, + }, + }) + } + + await this.collection.bulkWrite(updates) + } +} + +class RundownIngestDataCacheGenerator> { + constructor(public readonly rundownId: RundownId) {} + + getPartObjectId(partExternalId: string): TId { + return protectString(`${this.rundownId}_part_${partExternalId}`) + } + getSegmentObjectId(segmentExternalId: string): TId { + return protectString(`${this.rundownId}_segment_${segmentExternalId}`) + } + getRundownObjectId(): TId { + return protectString(unprotectString(this.rundownId)) + } + + generatePartObject(segmentId: SegmentId, part: IngestPart): NrcsIngestDataCacheObjPart { + return { + _id: this.getPartObjectId(part.externalId), + type: NrcsIngestCacheType.PART, + rundownId: this.rundownId, + segmentId: segmentId, + partId: getPartId(this.rundownId, part.externalId), + modified: 0, // Populated when saving + data: part, + } + } + + generateSegmentObject(ingestSegment: SetOptional): NrcsIngestDataCacheObjSegment { + return { + _id: this.getSegmentObjectId(ingestSegment.externalId), + type: NrcsIngestCacheType.SEGMENT, + rundownId: this.rundownId, + segmentId: getSegmentId(this.rundownId, ingestSegment.externalId), + modified: 0, // Populated when saving + data: { + ...ingestSegment, + parts: [], // omit the parts, they come as separate objects + }, + } + } + + generateRundownObject( + ingestRundown: SetOptional + ): NrcsIngestDataCacheObjRundown { + return { + _id: this.getRundownObjectId(), + type: NrcsIngestCacheType.RUNDOWN, + rundownId: this.rundownId, + modified: 0, // Populated when saving + data: { + ...ingestRundown, + segments: [], // omit the segments, they come as separate objects + }, + } + } + + generateCacheForRundown(ingestRundown: IngestRundownWithSource): 
NrcsIngestDataCacheObj[] { + const cacheEntries: NrcsIngestDataCacheObj[] = [] + + const rundown = this.generateRundownObject(ingestRundown) + cacheEntries.push(rundown) + + for (const segment of ingestRundown.segments) { + cacheEntries.push(...this.generateCacheForSegment(segment)) + } + + return cacheEntries + } + + private generateCacheForSegment(ingestSegment: IngestSegment): NrcsIngestDataCacheObj[] { + const cacheEntries: Array = [] + + const segment = this.generateSegmentObject(ingestSegment) + cacheEntries.push(segment) + + const segmentId = getSegmentId(this.rundownId, ingestSegment.externalId) + for (const part of ingestSegment.parts) { + cacheEntries.push(this.generatePartObject(segmentId, part)) + } + + return cacheEntries + } +} diff --git a/packages/job-worker/src/ingest/packageInfo.ts b/packages/job-worker/src/ingest/packageInfo.ts index a669f65d194..728c27069e6 100644 --- a/packages/job-worker/src/ingest/packageInfo.ts +++ b/packages/job-worker/src/ingest/packageInfo.ts @@ -7,9 +7,10 @@ import { import { logger } from '../logging' import { JobContext } from '../jobs' import { regenerateSegmentsFromIngestData } from './generationSegment' -import { UpdateIngestRundownAction, runIngestJob, runWithRundownLock } from './lock' +import { runWithRundownLock } from './lock' import { updateExpectedPackagesForPartModel, updateExpectedPackagesForRundownBaseline } from './expectedPackages' import { loadIngestModelFromRundown } from './model/implementation/LoadIngestModel' +import { runCustomIngestUpdateOperation } from './runOperation' /** * Debug: Regenerate ExpectedPackages for a Rundown @@ -18,7 +19,7 @@ export async function handleExpectedPackagesRegenerate( context: JobContext, data: ExpectedPackagesRegenerateProps ): Promise { - await runWithRundownLock(context, data.rundownId, async (rundown, rundownLock) => { + return runWithRundownLock(context, data.rundownId, async (rundown, rundownLock) => { if (!rundown) throw new Error(`Rundown "${data.rundownId}" 
not found`) const ingestModel = await loadIngestModelFromRundown(context, rundownLock, rundown) @@ -44,74 +45,66 @@ export async function handleUpdatedPackageInfoForRundown( return } - await runIngestJob( - context, - data, - (ingestRundown) => { - if (!ingestRundown) { - logger.error( - `onUpdatedPackageInfoForRundown called but ingestRundown is undefined (rundownExternalId: "${data.rundownExternalId}")` - ) - return UpdateIngestRundownAction.REJECT - } - return ingestRundown // don't mutate any ingest data - }, - async (context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error('onUpdatedPackageInfoForRundown called but ingestRundown is undefined') + await runCustomIngestUpdateOperation(context, data, async (context, ingestModel, ingestRundown) => { + if (!ingestRundown) { + logger.error( + `onUpdatedPackageInfoForRundown called but ingestRundown is undefined (rundownExternalId: "${data.rundownExternalId}")` + ) + return null + } - /** All segments that need updating */ - const segmentsToUpdate = new Set() - let regenerateRundownBaseline = false + /** All segments that need updating */ + const segmentsToUpdate = new Set() + let regenerateRundownBaseline = false - for (const packageId of data.packageIds) { - const pkg = ingestModel.findExpectedPackage(packageId) - if (pkg) { - if ( - pkg.fromPieceType === ExpectedPackageDBType.PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_ACTION - ) { - segmentsToUpdate.add(pkg.segmentId) - } else if ( - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION || - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS - ) { - regenerateRundownBaseline = true - } - } else { - logger.warn(`onUpdatedPackageInfoForRundown: Missing package: "${packageId}"`) + for (const packageId of data.packageIds) { + const pkg = 
ingestModel.findExpectedPackage(packageId) + if (pkg) { + if ( + pkg.fromPieceType === ExpectedPackageDBType.PIECE || + pkg.fromPieceType === ExpectedPackageDBType.ADLIB_PIECE || + pkg.fromPieceType === ExpectedPackageDBType.ADLIB_ACTION + ) { + segmentsToUpdate.add(pkg.segmentId) + } else if ( + pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION || + pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_PIECE || + pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS + ) { + regenerateRundownBaseline = true } + } else { + logger.warn(`onUpdatedPackageInfoForRundown: Missing package: "${packageId}"`) } + } - logger.info( - `onUpdatedPackageInfoForRundown: PackageInfo for "${data.packageIds.join( - ', ' - )}" will trigger update of segments: ${Array.from(segmentsToUpdate).join(', ')}` - ) - - if (regenerateRundownBaseline) { - // trigger a re-generation of the rundown baseline - // TODO - to be implemented. - } + logger.info( + `onUpdatedPackageInfoForRundown: PackageInfo for "${data.packageIds.join( + ', ' + )}" will trigger update of segments: ${Array.from(segmentsToUpdate).join(', ')}` + ) - const { result, skippedSegments } = await regenerateSegmentsFromIngestData( - context, - ingestModel, - ingestRundown, - Array.from(segmentsToUpdate) - ) + if (regenerateRundownBaseline) { + // trigger a re-generation of the rundown baseline + // TODO - to be implemented. + } - if (skippedSegments.length > 0) { - logger.warn( - `onUpdatedPackageInfoForRundown: Some segments were skipped during update: ${skippedSegments.join( - ', ' - )}` - ) - } + const { result, skippedSegments } = await regenerateSegmentsFromIngestData( + context, + ingestModel, + ingestRundown, + Array.from(segmentsToUpdate) + ) - logger.warn(`onUpdatedPackageInfoForRundown: Changed ${result?.changedSegmentIds.length ?? 
0} segments`) - return result + if (skippedSegments.length > 0) { + logger.warn( + `onUpdatedPackageInfoForRundown: Some segments were skipped during update: ${skippedSegments.join( + ', ' + )}` + ) } - ) + + logger.warn(`onUpdatedPackageInfoForRundown: Changed ${result?.changedSegmentIds.length ?? 0} segments`) + return result + }) } diff --git a/packages/job-worker/src/ingest/runOperation.ts b/packages/job-worker/src/ingest/runOperation.ts new file mode 100644 index 00000000000..369aa652269 --- /dev/null +++ b/packages/job-worker/src/ingest/runOperation.ts @@ -0,0 +1,577 @@ +import { IngestModel, IngestModelReadonly } from './model/IngestModel' +import { BeforeIngestOperationPartMap, CommitIngestOperation } from './commit' +import { SofieIngestRundownDataCache, SofieIngestRundownDataCacheGenerator } from './sofieIngestCache' +import { canRundownBeUpdated, getRundownId, getSegmentId } from './lib' +import { JobContext } from '../jobs' +import { IngestPropsBase } from '@sofie-automation/corelib/dist/worker/ingest' +import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' +import { loadIngestModelFromRundownExternalId } from './model/implementation/LoadIngestModel' +import { Complete, clone } from '@sofie-automation/corelib/dist/lib' +import { CommitIngestData, runWithRundownLockWithoutFetchingRundown } from './lock' +import { DatabasePersistedModel } from '../modelBase' +import { + NrcsIngestChangeDetails, + IngestRundown, + UserOperationChange, + SofieIngestSegment, +} from '@sofie-automation/blueprints-integration' +import { MutableIngestRundownImpl } from '../blueprints/ingest/MutableIngestRundownImpl' +import { ProcessIngestDataContext } from '../blueprints/context' +import { PartId, RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { GenerateRundownMode, updateRundownFromIngestData, updateRundownFromIngestDataInner } from './generationRundown' +import { calculateSegmentsAndRemovalsFromIngestData, 
calculateSegmentsFromIngestData } from './generationSegment' +import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' +import { NrcsIngestRundownDataCache } from './nrcsIngestCache' + +export enum ComputedIngestChangeAction { + DELETE = 'delete', + FORCE_DELETE = 'force-delete', +} + +export interface UpdateIngestRundownChange { + ingestRundown: IngestRundownWithSource + changes: NrcsIngestChangeDetails | UserOperationChange +} + +export type UpdateIngestRundownResult = UpdateIngestRundownChange | ComputedIngestChangeAction + +export interface ComputedIngestChangeObject { + ingestRundown: SofieIngestRundownWithSource + + // define what needs regenerating + segmentsToRemove: string[] + segmentsUpdatedRanks: Record // contains the new rank + segmentsToRegenerate: SofieIngestSegment[] + regenerateRundown: boolean // Future: full vs metadata? + + segmentExternalIdChanges: Record // old -> new +} + +export type ComputedIngestChanges = ComputedIngestChangeObject | ComputedIngestChangeAction + +/** + * Perform an 'ingest' update operation which modifies a Rundown without modifying the ingest data + * This will automatically do some post-update data changes, to ensure the playout side (partinstances etc) is updated with the changes + * @param context Context of the job being run + * @param data Ids for the rundown and peripheral device + * @param doWorkFcn Function to run to update the Rundown. Return the blob of data about the change to help the post-update perform its duties. 
Return null to indicate that nothing changed + */ +export async function runCustomIngestUpdateOperation( + context: JobContext, + data: IngestPropsBase, + doWorkFcn: ( + context: JobContext, + ingestModel: IngestModel, + ingestRundown: SofieIngestRundownWithSource + ) => Promise +): Promise { + if (!data.rundownExternalId) { + throw new Error(`Job is missing rundownExternalId`) + } + + const rundownId = getRundownId(context.studioId, data.rundownExternalId) + return runWithRundownLockWithoutFetchingRundown(context, rundownId, async (rundownLock) => { + const span = context.startSpan(`ingestLockFunction.${context.studioId}`) + + // Load the old ingest data + const pIngestModel = loadIngestModelFromRundownExternalId(context, rundownLock, data.rundownExternalId) + pIngestModel.catch(() => null) // Prevent unhandled promise rejection + const sofieIngestObjectCache = await SofieIngestRundownDataCache.create(context, rundownId) + const sofieIngestRundown = sofieIngestObjectCache.fetchRundown() + if (!sofieIngestRundown) throw new Error(`SofieIngestRundown "${rundownId}" not found`) + + let resultingError: UserError | void | undefined + + try { + const ingestModel = await pIngestModel + + // Load any 'before' data for the commit, + const beforeRundown = ingestModel.rundown + const beforePartMap = generatePartMap(ingestModel) + + // Perform the update operation + const commitData = await doWorkFcn(context, ingestModel, sofieIngestRundown) + + if (commitData) { + const commitSpan = context.startSpan('ingest.commit') + // The change is accepted. 
Perform some playout calculations and save it all + resultingError = await CommitIngestOperation( + context, + ingestModel, + beforeRundown, + beforePartMap, + commitData + ) + commitSpan?.end() + } else { + // Should be no changes + ingestModel.assertNoChanges() + } + } finally { + span?.end() + } + + if (resultingError) throw resultingError + + return rundownId + }) +} + +export type IngestUpdateOperationFunction = ( + oldIngestRundown: IngestRundownWithSource | undefined +) => UpdateIngestRundownResult + +/** + * Perform an ingest update operation on a rundown + * This will automatically do some post-update data changes, to ensure the playout side (partinstances etc) is updated with the changes + * @param context Context of the job being run + * @param data Ids for the rundown and peripheral device + * @param updateNrcsIngestModelFcn Function to mutate the ingestData. Throw if the requested change is not valid. Return undefined to indicate the ingestData should be deleted + */ +export async function runIngestUpdateOperation( + context: JobContext, + data: IngestPropsBase, + updateNrcsIngestModelFcn: IngestUpdateOperationFunction +): Promise { + return runIngestUpdateOperationBase(context, data, async (nrcsIngestObjectCache) => + updateNrcsIngestObjects(context, nrcsIngestObjectCache, updateNrcsIngestModelFcn) + ) +} + +/** + * Perform an ingest update operation on a rundown + * This will automatically do some post-update data changes, to ensure the playout side (partinstances etc) is updated with the changes + * @param context Context of the job being run + * @param data Ids for the rundown and peripheral device + * @param updateNrcsIngestModelFcn Function to mutate the ingestData. Throw if the requested change is not valid. 
Return undefined to indicate the ingestData should be deleted + */ +export async function runIngestUpdateOperationBase( + context: JobContext, + data: IngestPropsBase, + executeFcn: (nrcsIngestObjectCache: NrcsIngestRundownDataCache) => Promise +): Promise { + if (!data.rundownExternalId) { + throw new Error(`Job is missing rundownExternalId`) + } + + const rundownId = getRundownId(context.studioId, data.rundownExternalId) + return runWithRundownLockWithoutFetchingRundown(context, rundownId, async (rundownLock) => { + const span = context.startSpan(`ingestLockFunction.${context.studioId}`) + + // Load the old ingest data + const pIngestModel = loadIngestModelFromRundownExternalId(context, rundownLock, data.rundownExternalId) + pIngestModel.catch(() => null) // Prevent unhandled promise rejection + const pSofieIngestObjectCache = SofieIngestRundownDataCache.create(context, rundownId) + pSofieIngestObjectCache.catch(() => null) // Prevent unhandled promise rejection + const nrcsIngestObjectCache = await NrcsIngestRundownDataCache.create(context, rundownId) + const originalNrcsIngestRundown = clone(nrcsIngestObjectCache.fetchRundown()) + + const ingestRundownChanges = await executeFcn(nrcsIngestObjectCache) + + // Start saving the nrcs ingest data + const pSaveNrcsIngestChanges = nrcsIngestObjectCache.saveToDatabase() + pSaveNrcsIngestChanges.catch(() => null) // Prevent unhandled promise rejection + + let resultingError: UserError | void | undefined + + try { + // Update the Sofie ingest view + const sofieIngestObjectCache = await pSofieIngestObjectCache + const computedChanges = await updateSofieIngestRundown( + context, + rundownId, + sofieIngestObjectCache, + ingestRundownChanges, + originalNrcsIngestRundown + ) + + // Start saving the Sofie ingest data + const pSaveSofieIngestChanges = sofieIngestObjectCache.saveToDatabase() + + try { + resultingError = await updateSofieRundownModel(context, pIngestModel, computedChanges) + } finally { + // Ensure we save the 
sofie ingest data + await pSaveSofieIngestChanges + } + } finally { + // Ensure we save the nrcs ingest data + // await pSaveNrcsIngestChanges + + span?.end() + } + + if (resultingError) throw resultingError + + return rundownId + }) +} + +function updateNrcsIngestObjects( + context: JobContext, + nrcsIngestObjectCache: NrcsIngestRundownDataCache, + updateNrcsIngestModelFcn: (oldIngestRundown: IngestRundownWithSource | undefined) => UpdateIngestRundownResult +): UpdateIngestRundownResult { + const updateNrcsIngestModelSpan = context.startSpan('ingest.calcFcn') + const oldNrcsIngestRundown = nrcsIngestObjectCache.fetchRundown() + const updatedIngestRundown = updateNrcsIngestModelFcn(clone(oldNrcsIngestRundown)) + updateNrcsIngestModelSpan?.end() + + switch (updatedIngestRundown) { + // case UpdateIngestRundownAction.REJECT: + // // Reject change + // return + case ComputedIngestChangeAction.DELETE: + case ComputedIngestChangeAction.FORCE_DELETE: + nrcsIngestObjectCache.delete() + break + default: + nrcsIngestObjectCache.replace(updatedIngestRundown.ingestRundown) + break + } + + return updatedIngestRundown +} + +async function updateSofieIngestRundown( + context: JobContext, + rundownId: RundownId, + sofieIngestObjectCache: SofieIngestRundownDataCache, + ingestRundownChanges: UpdateIngestRundownResult, + previousNrcsIngestRundown: IngestRundown | undefined +): Promise { + if ( + ingestRundownChanges === ComputedIngestChangeAction.DELETE || + ingestRundownChanges === ComputedIngestChangeAction.FORCE_DELETE + ) { + // Also delete the Sofie view of the Rundown, so that future ingest calls know it has been deleted + sofieIngestObjectCache.delete() + + return ingestRundownChanges + } else { + const studioBlueprint = context.studioBlueprint.blueprint + + const nrcsIngestRundown = ingestRundownChanges.ingestRundown + const sofieIngestRundown = sofieIngestObjectCache.fetchRundown() + + sortIngestRundown(nrcsIngestRundown) + + const mutableIngestRundown = sofieIngestRundown 
+ ? new MutableIngestRundownImpl(clone(sofieIngestRundown), true) + : new MutableIngestRundownImpl( + { + externalId: nrcsIngestRundown.externalId, + name: nrcsIngestRundown.name, + type: nrcsIngestRundown.type, + segments: [], + payload: undefined, + userEditStates: {}, + rundownSource: nrcsIngestRundown.rundownSource, + } satisfies Complete, + false + ) + + const blueprintContext = new ProcessIngestDataContext( + { + name: 'processIngestData', + identifier: `studio:${context.studioId},blueprint:${studioBlueprint.blueprintId}`, + }, + context.studio, + context.getStudioBlueprintConfig() + ) + + // Let blueprints apply changes to the Sofie ingest data + if (typeof studioBlueprint.processIngestData === 'function') { + await studioBlueprint.processIngestData( + blueprintContext, + mutableIngestRundown, + nrcsIngestRundown, + previousNrcsIngestRundown, + ingestRundownChanges.changes + ) + } else if (ingestRundownChanges.changes.source === 'ingest') { + // Backwards compabible mode: Blueprints has not defined a processIngestData() + + if (nrcsIngestRundown.type === 'mos') { + // MOS has a special flow to group parts into segments + const groupedResult = blueprintContext.groupMosPartsInRundownAndChangesWithSeparator( + nrcsIngestRundown, + previousNrcsIngestRundown, + ingestRundownChanges.changes, + ';' // Backwards compatibility + ) + + blueprintContext.defaultApplyIngestChanges( + mutableIngestRundown, + groupedResult.nrcsIngestRundown, + groupedResult.ingestChanges + ) + } else { + blueprintContext.defaultApplyIngestChanges( + mutableIngestRundown, + nrcsIngestRundown, + ingestRundownChanges.changes + ) + } + } else { + throw new Error(`Blueprint missing processIngestData function`) + } + + // Ensure the rundownSource is propogated + mutableIngestRundown.updateRundownSource(nrcsIngestRundown.rundownSource) + + const ingestObjectGenerator = new SofieIngestRundownDataCacheGenerator(rundownId) + const resultChanges = 
mutableIngestRundown.intoIngestRundown(ingestObjectGenerator) + + // Sync changes to the cache + sofieIngestObjectCache.replaceDocuments(resultChanges.changedCacheObjects) + sofieIngestObjectCache.removeAllOtherDocuments(resultChanges.allCacheObjectIds) + + return resultChanges.computedChanges + } +} + +function sortIngestRundown(rundown: IngestRundown): void { + rundown.segments.sort((a, b) => a.rank - b.rank) + for (const segment of rundown.segments) { + segment.parts.sort((a, b) => a.rank - b.rank) + } +} + +async function updateSofieRundownModel( + context: JobContext, + pIngestModel: Promise, + computedIngestChanges: ComputedIngestChanges | null +) { + const ingestModel = await pIngestModel + + // Load any 'before' data for the commit + const beforeRundown = ingestModel.rundown + const beforePartMap = generatePartMap(ingestModel) + + let commitData: CommitIngestData | null = null + + if ( + computedIngestChanges === ComputedIngestChangeAction.DELETE || + computedIngestChanges === ComputedIngestChangeAction.FORCE_DELETE + ) { + // Get the rundown, and fail if it doesn't exist + const rundown = ingestModel.getRundown() + + // Check if it can be deleted + const canRemove = + computedIngestChanges === ComputedIngestChangeAction.FORCE_DELETE || canRundownBeUpdated(rundown, false) + if (!canRemove) throw UserError.create(UserErrorMessage.RundownRemoveWhileActive, { name: rundown.name }) + + // The rundown has been deleted + commitData = { + changedSegmentIds: [], + removedSegmentIds: [], + renamedSegments: new Map(), + + removeRundown: true, + returnRemoveFailure: true, + } + } else if (computedIngestChanges) { + const calcSpan = context.startSpan('ingest.calcFcn') + commitData = await applyCalculatedIngestChangesToModel(context, ingestModel, computedIngestChanges) + calcSpan?.end() + } + + let resultingError: UserError | void | undefined + + if (commitData) { + const commitSpan = context.startSpan('ingest.commit') + // The change is accepted. 
Perform some playout calculations and save it all + resultingError = await CommitIngestOperation(context, ingestModel, beforeRundown, beforePartMap, commitData) + commitSpan?.end() + } else { + // Should be no changes + ingestModel.assertNoChanges() + } + + return resultingError +} + +async function applyCalculatedIngestChangesToModel( + context: JobContext, + ingestModel: IngestModel, + computedIngestChanges: ComputedIngestChangeObject +): Promise { + const newIngestRundown = computedIngestChanges.ingestRundown + + // Ensure the rundown can be updated + const rundown = ingestModel.rundown + // if (!canRundownBeUpdated(rundown, false)) return null + if (!canRundownBeUpdated(rundown, computedIngestChanges.regenerateRundown)) return null + + const span = context.startSpan('ingest.applyCalculatedIngestChangesToModel') + + if (!rundown || computedIngestChanges.regenerateRundown) { + // Do a full regeneration + + // Perform any segment id changes, to ensure the contents remains correctly linked + const renamedSegments = applyExternalIdDiff(ingestModel, computedIngestChanges, true) + + // perform the regeneration + const result = await updateRundownFromIngestData( + context, + ingestModel, + newIngestRundown, + GenerateRundownMode.Create + ) + + span?.end() + if (result) { + return { + ...result, + renamedSegments, + } + } else { + return { + changedSegmentIds: [], + removedSegmentIds: [], + removeRundown: false, + renamedSegments, + } + } + } else { + // Update segment ranks: + for (const [segmentExternalId, newRank] of Object.entries(computedIngestChanges.segmentsUpdatedRanks)) { + const segment = ingestModel.getSegmentByExternalId(segmentExternalId) + if (segment) { + segment.setRank(newRank) + } + } + + // Updated segments that has had their segment.externalId changed: + const renamedSegments = applyExternalIdDiff(ingestModel, computedIngestChanges, true) + + // If requested, regenerate the rundown in the 'metadata' mode + if (computedIngestChanges.regenerateRundown) 
{ + const regenerateCommitData = await updateRundownFromIngestDataInner( + context, + ingestModel, + newIngestRundown, + GenerateRundownMode.MetadataChange // TODO - full vs metadata? + ) + if (regenerateCommitData?.regenerateAllContents) { + const regeneratedSegmentIds = await calculateSegmentsAndRemovalsFromIngestData( + context, + ingestModel, + newIngestRundown, + regenerateCommitData.allRundownWatchedPackages + ) + + // TODO - should this include the ones which were renamed/updated ranks above? + return { + changedSegmentIds: regeneratedSegmentIds.changedSegmentIds, + removedSegmentIds: regeneratedSegmentIds.removedSegmentIds, + renamedSegments: renamedSegments, + + removeRundown: false, + } satisfies CommitIngestData + } + } + + // Create/Update segments + const changedSegmentIds = await calculateSegmentsFromIngestData( + context, + ingestModel, + computedIngestChanges.segmentsToRegenerate, + null + ) + + const changedSegmentIdsSet = new Set(changedSegmentIds) + for (const segmentId of Object.keys(computedIngestChanges.segmentsUpdatedRanks)) { + changedSegmentIdsSet.add(ingestModel.getSegmentIdFromExternalId(segmentId)) + } + // TODO - include changed external ids? + + // Remove/orphan old segments + const orphanedSegmentIds: SegmentId[] = [] + for (const segmentExternalId of computedIngestChanges.segmentsToRemove) { + const segment = ingestModel.getSegmentByExternalId(segmentExternalId) + if (segment) { + // We orphan it and queue for deletion. 
the commit phase will complete if possible + orphanedSegmentIds.push(segment.segment._id) + segment.setOrphaned(SegmentOrphanedReason.DELETED) + + segment.removeAllParts() + + // It can't also have been changed if it is deleted + changedSegmentIdsSet.delete(segment.segment._id) + } + } + + span?.end() + return { + changedSegmentIds: Array.from(changedSegmentIdsSet), + removedSegmentIds: orphanedSegmentIds, // Only inform about the ones that werent renamed + renamedSegments: renamedSegments, + + removeRundown: false, + } satisfies CommitIngestData + } +} + +/** + * Apply the externalId renames from a DiffSegmentEntries + * @param ingestModel Ingest model of the rundown being updated + * @param segmentDiff Calculated Diff + * @returns Map of the SegmentId changes + */ +function applyExternalIdDiff( + ingestModel: IngestModel, + segmentDiff: Pick, + canDiscardParts: boolean +): CommitIngestData['renamedSegments'] { + // Updated segments that has had their segment.externalId changed: + const renamedSegments = new Map() + for (const [oldSegmentExternalId, newSegmentExternalId] of Object.entries( + segmentDiff.segmentExternalIdChanges + )) { + const oldSegmentId = getSegmentId(ingestModel.rundownId, oldSegmentExternalId) + const newSegmentId = getSegmentId(ingestModel.rundownId, newSegmentExternalId) + + // Track the rename + renamedSegments.set(oldSegmentId, newSegmentId) + + // If the segment doesnt exist (it should), then there isn't a segment to rename + const oldSegment = ingestModel.getSegment(oldSegmentId) + if (!oldSegment) continue + + if (ingestModel.getSegment(newSegmentId)) { + // If the new SegmentId already exists, we need to discard the old one rather than trying to merge it. 
+ // This can only be done if the caller is expecting to regenerate Segments + const canDiscardPartsForSegment = canDiscardParts && !segmentDiff.segmentsUpdatedRanks[oldSegmentExternalId] + if (!canDiscardPartsForSegment) { + throw new Error(`Cannot merge Segments with only rank changes`) + } + + // Remove the old Segment and it's contents, the new one will be generated shortly + ingestModel.removeSegment(oldSegmentId) + } else { + // Perform the rename + ingestModel.changeSegmentId(oldSegmentId, newSegmentId) + } + } + + return renamedSegments +} + +function generatePartMap(ingestModel: IngestModelReadonly): BeforeIngestOperationPartMap { + const rundown = ingestModel.rundown + if (!rundown) return new Map() + + const res = new Map>() + for (const segment of ingestModel.getAllSegments()) { + res.set( + segment.segment._id, + segment.parts.map((p) => ({ id: p.part._id, rank: p.part._rank })) + ) + } + return res +} diff --git a/packages/job-worker/src/ingest/sofieIngestCache.ts b/packages/job-worker/src/ingest/sofieIngestCache.ts new file mode 100644 index 00000000000..576d2eb67c5 --- /dev/null +++ b/packages/job-worker/src/ingest/sofieIngestCache.ts @@ -0,0 +1,263 @@ +import { RundownId, SegmentId, SofieIngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + SofieIngestDataCacheObj, + SofieIngestCacheType, + SofieIngestDataCacheObjRundown, + SofieIngestDataCacheObjSegment, + SofieIngestDataCacheObjPart, + SofieIngestRundownWithSource, +} from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' +import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' +import _ = require('underscore') +import { SofieIngestPart, SofieIngestSegment } from '@sofie-automation/blueprints-integration' +import { JobContext } from '../jobs' +import { getPartId, getSegmentId } from './lib' +import { SetOptional } from 'type-fest' +import { groupByToMap, normalizeArrayToMap } from 
'@sofie-automation/corelib/dist/lib' +import { AnyBulkWriteOperation } from 'mongodb' +import { ICollection } from '../db' +import { getCurrentTime } from '../lib' + +export class SofieIngestRundownDataCache { + readonly #changedDocumentIds = new Set() + + private constructor( + private readonly context: JobContext, + private readonly collection: ICollection, + private documents: SofieIngestDataCacheObj[] + ) {} + + static async create(context: JobContext, rundownId: RundownId): Promise { + const docs = await context.directCollections.SofieIngestDataCache.findFetch({ rundownId }) + + return new SofieIngestRundownDataCache(context, context.directCollections.SofieIngestDataCache, docs) + } + + /** + * Fetch the IngestRundown contained in the cache + * Note: This does not deep clone the objects, so the returned object should not be modified + */ + fetchRundown(): SofieIngestRundownWithSource | undefined { + const span = this.context.startSpan('ingest.ingestCache.loadCachedRundownData') + + const cachedRundown = this.documents.find( + (e): e is SofieIngestDataCacheObjRundown => e.type === SofieIngestCacheType.RUNDOWN + ) + if (!cachedRundown) { + span?.end() + return undefined + } + + const ingestRundown: SofieIngestRundownWithSource = { + ...cachedRundown.data, + segments: [], + } + + const hasSegmentId = ( + obj: SofieIngestDataCacheObj + ): obj is SofieIngestDataCacheObjSegment | SofieIngestDataCacheObjPart => { + return !!obj.segmentId + } + + const segmentMap = groupByToMap(this.documents.filter(hasSegmentId), 'segmentId') + for (const objs of segmentMap.values()) { + const segmentEntry = objs.find( + (e): e is SofieIngestDataCacheObjSegment => e.type === SofieIngestCacheType.SEGMENT + ) + if (segmentEntry) { + const ingestSegment: SofieIngestSegment = { + ...segmentEntry.data, + parts: [], + } + + for (const entry of objs) { + if (entry.type === SofieIngestCacheType.PART) { + ingestSegment.parts.push(entry.data) + } + } + + ingestSegment.parts = 
_.sortBy(ingestSegment.parts, (s) => s.rank) + ingestRundown.segments.push(ingestSegment) + } + } + + ingestRundown.segments = _.sortBy(ingestRundown.segments, (s) => s.rank) + + span?.end() + return ingestRundown + } + + /** + * Delete the contents of the cache + */ + delete(): void { + // Mark each document for deletion + for (const doc of this.documents) { + this.#changedDocumentIds.add(doc._id) + } + + this.documents = [] + } + + /** + * Remove all documents from the cache other than the ids provided + * @param documentIdsToKeep The IDs of the documents to keep in the cache + */ + removeAllOtherDocuments(documentIdsToKeep: SofieIngestDataCacheObjId[]): void { + const documentIdsToKeepSet = new Set(documentIdsToKeep) + + const newDocuments: SofieIngestDataCacheObj[] = [] + for (const document of this.documents) { + if (!documentIdsToKeepSet.has(document._id)) { + this.#changedDocumentIds.add(document._id) + } else { + newDocuments.push(document) + } + } + this.documents = newDocuments + } + + /** + * Replace/insert a set of documents into the cache + * This can be used to insert or update multiple documents at once + * This does not diff the documents, it assumes that has already been done prior to calling this method + * @param changedCacheObjects Documents to store in the cache + */ + replaceDocuments(changedCacheObjects: SofieIngestDataCacheObj[]): void { + const newDocumentsMap = normalizeArrayToMap(this.documents, '_id') + + for (const newDocument of changedCacheObjects) { + this.#changedDocumentIds.add(newDocument._id) + newDocumentsMap.set(newDocument._id, newDocument) + } + + this.documents = Array.from(newDocumentsMap.values()) + } + + /** + * Write any changes in the cache to the database + */ + async saveToDatabase(): Promise { + if (this.#changedDocumentIds.size === 0) return + + const documentsMap = normalizeArrayToMap(this.documents, '_id') + + const modifiedTime = getCurrentTime() + + const updates: AnyBulkWriteOperation[] = [] + const removedIds: 
SofieIngestDataCacheObjId[] = [] + for (const changedId of this.#changedDocumentIds) { + const newDoc = documentsMap.get(changedId) + if (!newDoc) { + removedIds.push(changedId) + } else { + updates.push({ + replaceOne: { + filter: { + _id: changedId, + }, + replacement: { + ...newDoc, + modified: modifiedTime, + }, + upsert: true, + }, + }) + } + } + + if (removedIds.length) { + updates.push({ + deleteMany: { + filter: { + _id: { $in: removedIds as any }, + }, + }, + }) + } + + await this.collection.bulkWrite(updates) + } +} + +export class SofieIngestRundownDataCacheGenerator { + constructor(public readonly rundownId: RundownId) {} + + getPartObjectId(partExternalId: string): SofieIngestDataCacheObjId { + return protectString(`${this.rundownId}_part_${partExternalId}`) + } + getSegmentObjectId(segmentExternalId: string): SofieIngestDataCacheObjId { + return protectString(`${this.rundownId}_segment_${segmentExternalId}`) + } + getRundownObjectId(): SofieIngestDataCacheObjId { + return protectString(unprotectString(this.rundownId)) + } + + generatePartObject(segmentId: SegmentId, part: SofieIngestPart): SofieIngestDataCacheObjPart { + return { + _id: this.getPartObjectId(part.externalId), + type: SofieIngestCacheType.PART, + rundownId: this.rundownId, + segmentId: segmentId, + partId: getPartId(this.rundownId, part.externalId), + modified: 0, // Populated when saving + data: part, + } + } + + generateSegmentObject(ingestSegment: SetOptional): SofieIngestDataCacheObjSegment { + return { + _id: this.getSegmentObjectId(ingestSegment.externalId), + type: SofieIngestCacheType.SEGMENT, + rundownId: this.rundownId, + segmentId: getSegmentId(this.rundownId, ingestSegment.externalId), + modified: 0, // Populated when saving + data: { + ...ingestSegment, + parts: [], // omit the parts, they come as separate objects + }, + } + } + + generateRundownObject( + ingestRundown: SetOptional + ): SofieIngestDataCacheObjRundown { + return { + _id: this.getRundownObjectId(), + type: 
SofieIngestCacheType.RUNDOWN, + rundownId: this.rundownId, + modified: 0, // Populated when saving + data: { + ...ingestRundown, + segments: [], // omit the segments, they come as separate objects + }, + } + } + + generateCacheForRundown(ingestRundown: SofieIngestRundownWithSource): SofieIngestDataCacheObj[] { + const cacheEntries: SofieIngestDataCacheObj[] = [] + + const rundown = this.generateRundownObject(ingestRundown) + cacheEntries.push(rundown) + + for (const segment of ingestRundown.segments) { + cacheEntries.push(...this.generateCacheForSegment(segment)) + } + + return cacheEntries + } + + private generateCacheForSegment(ingestSegment: SofieIngestSegment): SofieIngestDataCacheObj[] { + const cacheEntries: Array = [] + + const segment = this.generateSegmentObject(ingestSegment) + cacheEntries.push(segment) + + const segmentId = getSegmentId(this.rundownId, ingestSegment.externalId) + for (const part of ingestSegment.parts) { + cacheEntries.push(this.generatePartObject(segmentId, part)) + } + + return cacheEntries + } +} diff --git a/packages/job-worker/src/ingest/userOperation.ts b/packages/job-worker/src/ingest/userOperation.ts new file mode 100644 index 00000000000..c3e068715b8 --- /dev/null +++ b/packages/job-worker/src/ingest/userOperation.ts @@ -0,0 +1,23 @@ +import { UserExecuteChangeOperationProps } from '@sofie-automation/corelib/dist/worker/ingest' +import { JobContext } from '../jobs' +import { UpdateIngestRundownResult, runIngestUpdateOperationBase } from './runOperation' +import { IngestChangeType } from '@sofie-automation/blueprints-integration' + +export async function handleUserExecuteChangeOperation( + context: JobContext, + data: UserExecuteChangeOperationProps +): Promise { + await runIngestUpdateOperationBase(context, data, async (nrcsIngestObjectCache) => { + const nrcsIngestRundown = nrcsIngestObjectCache.fetchRundown() + if (!nrcsIngestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) + + return { + 
ingestRundown: nrcsIngestRundown, + changes: { + source: IngestChangeType.User, + operation: data.operation as unknown as any, + operationTarget: data.operationTarget, + }, + } satisfies UpdateIngestRundownResult + }) +} diff --git a/packages/job-worker/src/playout/__tests__/helpers/rundowns.ts b/packages/job-worker/src/playout/__tests__/helpers/rundowns.ts index bc410b2897a..deea4059bc4 100644 --- a/packages/job-worker/src/playout/__tests__/helpers/rundowns.ts +++ b/packages/job-worker/src/playout/__tests__/helpers/rundowns.ts @@ -59,7 +59,6 @@ export async function setupRundownBase( externalId: 'MOCK_SEGMENT_0', rundownId: rundown._id, name: 'Segment 0', - externalModified: 1, } await context.mockCollections.Segments.insertOne(segment0) /* tslint:disable:ter-indent*/ diff --git a/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts b/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts index b1762bce986..4c3cc76bd80 100644 --- a/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts +++ b/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts @@ -86,7 +86,6 @@ describe('getOrderedPartsAfterPlayhead', () => { externalId: 'MOCK_SEGMENT_0', rundownId: rundownId, name: 'Segment 0', - externalModified: 1, }), context.mockCollections.Segments.insertOne({ _id: protectString(rundownId + '_segment01'), @@ -94,7 +93,6 @@ describe('getOrderedPartsAfterPlayhead', () => { externalId: 'MOCK_SEGMENT_1', rundownId: rundownId, name: 'Segment 1', - externalModified: 1, }), context.mockCollections.Segments.insertOne({ _id: protectString(rundownId + '_segment2'), @@ -102,7 +100,6 @@ describe('getOrderedPartsAfterPlayhead', () => { externalId: 'MOCK_SEGMENT_2', rundownId: rundownId, name: 'Segment 2', - externalModified: 1, }), ]) segmentId0 = segmentIds[0] diff --git a/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts b/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts index 15693bf8fb7..fcbf74e4915 100644 
--- a/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts +++ b/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts @@ -6,6 +6,7 @@ import { PartNote } from '@sofie-automation/corelib/dist/dataModel/Notes' import { IBlueprintMutatablePart, PieceLifespan, Time } from '@sofie-automation/blueprints-integration' import { PartCalculatedTimings } from '@sofie-automation/corelib/dist/playout/timings' import { PlayoutPieceInstanceModel } from './PlayoutPieceInstanceModel' +import { CoreUserEditingDefinition } from '@sofie-automation/corelib/dist/dataModel/UserEditingDefinitions' /** * Token returned when making a backup copy of a PlayoutPartInstanceModel @@ -15,6 +16,10 @@ export interface PlayoutPartInstanceModelSnapshot { __isPlayoutPartInstanceModelBackup: true } +export interface PlayoutMutatablePart extends Omit { + userEditOperations?: CoreUserEditingDefinition[] +} + export interface PlayoutPartInstanceModel { /** * The PartInstance properties @@ -207,7 +212,7 @@ export interface PlayoutPartInstanceModel { * @param props New properties for the Part being wrapped * @returns True if any valid properties were provided */ - updatePartProps(props: Partial): boolean + updatePartProps(props: Partial): boolean /** * Ensure that this PartInstance is setup correctly for being in the AdlibTesting Segment diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts index 1970195c1ea..fdeccee75b4 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts @@ -21,7 +21,11 @@ import { PieceLifespan, Time, } from '@sofie-automation/blueprints-integration' -import { PlayoutPartInstanceModel, PlayoutPartInstanceModelSnapshot } from '../PlayoutPartInstanceModel' +import { + PlayoutMutatablePart, + 
PlayoutPartInstanceModel, + PlayoutPartInstanceModelSnapshot, +} from '../PlayoutPartInstanceModel' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { PlayoutPieceInstanceModel } from '../PlayoutPieceInstanceModel' import { PlayoutPieceInstanceModelImpl } from './PlayoutPieceInstanceModelImpl' @@ -525,21 +529,23 @@ export class PlayoutPartInstanceModelImpl implements PlayoutPartInstanceModel { this.#setPartInstanceValue('previousPartEndState', previousPartEndState) } - updatePartProps(props: Partial): boolean { + updatePartProps(props: Partial): boolean { // Future: this could do some better validation // filter the submission to the allowed ones const trimmedProps: Partial = filterPropsToAllowed(props) if (Object.keys(trimmedProps).length === 0) return false - this.#compareAndSetPartInstanceValue( - 'part', - { - ...this.partInstanceImpl.part, - ...trimmedProps, - }, - true - ) + const newPart: DBPart = { + ...this.partInstanceImpl.part, + ...trimmedProps, + userEditOperations: this.partInstanceImpl.part.userEditOperations, // Replaced below if changed + } + + // Only replace `userEditOperations` if new values were provided + if ('userEditOperations' in trimmedProps) newPart.userEditOperations = props.userEditOperations + + this.#compareAndSetPartInstanceValue('part', newPart, true) return true } diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutRundownModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutRundownModelImpl.ts index e5e37fe8ef5..5483307d1f2 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutRundownModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutRundownModelImpl.ts @@ -8,7 +8,6 @@ import { PlayoutSegmentModel } from '../PlayoutSegmentModel' import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { 
getRandomId } from '@sofie-automation/corelib/dist/lib' -import { getCurrentTime } from '../../../lib' import { PlayoutSegmentModelImpl } from './PlayoutSegmentModelImpl' export class PlayoutRundownModelImpl implements PlayoutRundownModel { @@ -74,7 +73,6 @@ export class PlayoutRundownModelImpl implements PlayoutRundownModel { _id: segmentId, _rank: calculateRankForAdlibTestingSegment(this.#segments), externalId: '__adlib-testing__', - externalModified: getCurrentTime(), rundownId: this.rundown._id, orphaned: SegmentOrphanedReason.ADLIB_TESTING, name: '', diff --git a/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutRundownModelImpl.spec.ts b/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutRundownModelImpl.spec.ts index b49b9d0d2a8..410e2ec1798 100644 --- a/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutRundownModelImpl.spec.ts +++ b/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutRundownModelImpl.spec.ts @@ -34,7 +34,6 @@ describe('PlayoutRundownModelImpl', () => { _id: protectString(id), rundownId: protectString('rd0'), externalId: id, - externalModified: 100000, _rank: rank, name: `${id} segment`, } @@ -108,14 +107,12 @@ describe('PlayoutRundownModelImpl', () => { const fixedSegment: ReadonlyDeep = { ...createdSegment.segment, - externalModified: 0, } expect(fixedSegment).toEqual({ _id: expectedId, rundownId: protectString('rd0'), externalId: '__adlib-testing__', - externalModified: 0, _rank: -1, name: '', orphaned: SegmentOrphanedReason.ADLIB_TESTING, diff --git a/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutSegmentModelImpl.spec.ts b/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutSegmentModelImpl.spec.ts index dca9722599f..33e087c5ad7 100644 --- a/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutSegmentModelImpl.spec.ts +++ 
b/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutSegmentModelImpl.spec.ts @@ -9,7 +9,6 @@ describe('PlayoutSegmentModelImpl', () => { _id: protectString('abc'), rundownId: protectString('rd0'), externalId: 'ext1', - externalModified: 100000, _rank: 1, name: 'test segment', } diff --git a/packages/job-worker/src/playout/snapshot.ts b/packages/job-worker/src/playout/snapshot.ts index 9b479f0968d..939b3ce32bc 100644 --- a/packages/job-worker/src/playout/snapshot.ts +++ b/packages/job-worker/src/playout/snapshot.ts @@ -19,16 +19,10 @@ import { RestorePlaylistSnapshotResult, } from '@sofie-automation/corelib/dist/worker/studio' import { getCurrentTime, getSystemVersion } from '../lib' -import _ = require('underscore') import { JobContext } from '../jobs' import { runWithPlaylistLock } from './lock' import { CoreRundownPlaylistSnapshot } from '@sofie-automation/corelib/dist/snapshots' -import { - unprotectString, - ProtectedString, - protectStringArray, - protectString, -} from '@sofie-automation/corelib/dist/protectedString' +import { unprotectString, ProtectedString, protectString } from '@sofie-automation/corelib/dist/protectedString' import { saveIntoDb } from '../db/changes' import { getPartId, getSegmentId } from '../ingest/lib' import { assertNever, getRandomId, literal } from '@sofie-automation/corelib/dist/lib' @@ -36,6 +30,7 @@ import { logger } from '../logging' import { JSONBlobParse, JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { RundownOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { SofieIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' /** * Generate the Playlist owned portions of a Playlist snapshot @@ -53,10 +48,15 @@ export async function handleGeneratePlaylistSnapshot( const rundowns = await 
context.directCollections.Rundowns.findFetch({ playlistId: playlist._id }) const rundownIds = rundowns.map((i) => i._id) - const ingestData = await context.directCollections.IngestDataCache.findFetch( + const ingestData = await context.directCollections.NrcsIngestDataCache.findFetch( { rundownId: { $in: rundownIds } }, { sort: { modified: -1 } } ) // @todo: check sorting order + const sofieIngestData = await context.directCollections.SofieIngestDataCache.findFetch( + { rundownId: { $in: rundownIds } }, + { sort: { modified: -1 } } + ) // @todo: check sorting order + // const userActions = await context.directCollections.UserActionsLog.findFetch({ // args: { // $regex: @@ -121,6 +121,7 @@ export async function handleGeneratePlaylistSnapshot( playlist, rundowns, ingestData, + sofieIngestData, baselineObjs, baselineAdlibs, segments, @@ -403,10 +404,16 @@ export async function handleRestorePlaylistSnapshot( saveIntoDb(context, context.directCollections.Rundowns, { playlistId }, snapshot.rundowns), saveIntoDb( context, - context.directCollections.IngestDataCache, + context.directCollections.NrcsIngestDataCache, { rundownId: { $in: rundownIds } }, updateItemIds(snapshot.ingestData, true) ), + saveIntoDb( + context, + context.directCollections.SofieIngestDataCache, + { rundownId: { $in: rundownIds } }, + updateItemIds(snapshot.sofieIngestData || (snapshot.ingestData as any as SofieIngestDataCacheObj[]), true) + ), saveIntoDb( context, context.directCollections.RundownBaselineObjects, @@ -470,7 +477,7 @@ export async function handleRestorePlaylistSnapshot( saveIntoDb( context, context.directCollections.ExpectedMediaItems, - { partId: { $in: protectStringArray(_.keys(partIdMap)) } }, + { partId: { $in: Array.from(partIdMap.keys()) } }, updateItemIds(snapshot.expectedMediaItems, true) ), saveIntoDb( diff --git a/packages/job-worker/src/rundownPlaylists.ts b/packages/job-worker/src/rundownPlaylists.ts index dce9f538dd0..b77a47d0d79 100644 --- 
a/packages/job-worker/src/rundownPlaylists.ts +++ b/packages/job-worker/src/rundownPlaylists.ts @@ -150,7 +150,8 @@ export async function removeRundownFromDb(context: JobContext, lock: RundownLock context.directCollections.ExpectedMediaItems.remove({ rundownId: rundownId }), context.directCollections.ExpectedPlayoutItems.remove({ rundownId: rundownId }), context.directCollections.ExpectedPackages.remove({ rundownId: rundownId }), - context.directCollections.IngestDataCache.remove({ rundownId: rundownId }), + context.directCollections.SofieIngestDataCache.remove({ rundownId: rundownId }), + context.directCollections.NrcsIngestDataCache.remove({ rundownId: rundownId }), context.directCollections.RundownBaselineAdLibPieces.remove({ rundownId: rundownId }), context.directCollections.Segments.remove({ rundownId: rundownId }), context.directCollections.Parts.remove({ rundownId: rundownId }), diff --git a/packages/job-worker/src/workers/ingest/jobs.ts b/packages/job-worker/src/workers/ingest/jobs.ts index 1fa5a5b9228..2d2ef71a179 100644 --- a/packages/job-worker/src/workers/ingest/jobs.ts +++ b/packages/job-worker/src/workers/ingest/jobs.ts @@ -39,6 +39,13 @@ import { handleBucketRemoveAdlibPiece, } from '../../ingest/bucket/bucketAdlibs' import { handleBucketItemImport, handleBucketItemRegenerate } from '../../ingest/bucket/import' +import { handleUserExecuteChangeOperation } from '../../ingest/userOperation' +import { + wrapCustomIngestJob, + wrapGenericIngestJob, + wrapGenericIngestJobWithPrecheck, + wrapMosIngestJob, +} from '../../ingest/jobWrappers' import { handleCreateAdlibTestingRundownForShowStyleVariant } from '../../ingest/createAdlibTestingRundown' type ExecutableFunction = ( @@ -51,34 +58,35 @@ export type IngestJobHandlers = { } export const ingestJobHandlers: IngestJobHandlers = { - [IngestJobs.RemoveRundown]: handleRemovedRundown, - [IngestJobs.UpdateRundown]: handleUpdatedRundown, - [IngestJobs.UpdateRundownMetaData]: handleUpdatedRundownMetaData, - 
[IngestJobs.RemoveSegment]: handleRemovedSegment, - [IngestJobs.UpdateSegment]: handleUpdatedSegment, - [IngestJobs.UpdateSegmentRanks]: handleUpdatedSegmentRanks, - [IngestJobs.RemovePart]: handleRemovedPart, - [IngestJobs.UpdatePart]: handleUpdatedPart, - [IngestJobs.RegenerateRundown]: handleRegenerateRundown, - [IngestJobs.RegenerateSegment]: handleRegenerateSegment, + [IngestJobs.RemoveRundown]: wrapGenericIngestJob(handleRemovedRundown), + [IngestJobs.UpdateRundown]: wrapGenericIngestJob(handleUpdatedRundown), + [IngestJobs.UpdateRundownMetaData]: wrapGenericIngestJob(handleUpdatedRundownMetaData), + [IngestJobs.RemoveSegment]: wrapGenericIngestJob(handleRemovedSegment), + [IngestJobs.UpdateSegment]: wrapGenericIngestJobWithPrecheck(handleUpdatedSegment), + [IngestJobs.UpdateSegmentRanks]: wrapGenericIngestJob(handleUpdatedSegmentRanks), + [IngestJobs.RemovePart]: wrapGenericIngestJob(handleRemovedPart), + [IngestJobs.UpdatePart]: wrapGenericIngestJob(handleUpdatedPart), + [IngestJobs.RegenerateRundown]: wrapGenericIngestJob(handleRegenerateRundown), + [IngestJobs.RegenerateSegment]: wrapGenericIngestJob(handleRegenerateSegment), - [IngestJobs.RemoveOrphanedSegments]: handleRemoveOrphanedSegemnts, + [IngestJobs.RemoveOrphanedSegments]: wrapCustomIngestJob(handleRemoveOrphanedSegemnts), - [IngestJobs.MosRundown]: handleMosRundownData, - [IngestJobs.MosRundownMetadata]: handleMosRundownMetadata, + [IngestJobs.MosRundown]: wrapMosIngestJob(handleMosRundownData), + [IngestJobs.MosRundownMetadata]: wrapMosIngestJob(handleMosRundownMetadata), [IngestJobs.MosRundownStatus]: handleMosRundownStatus, - [IngestJobs.MosRundownReadyToAir]: handleMosRundownReadyToAir, - [IngestJobs.MosFullStory]: handleMosFullStory, - [IngestJobs.MosDeleteStory]: handleMosDeleteStory, - [IngestJobs.MosInsertStory]: handleMosInsertStories, - [IngestJobs.MosMoveStory]: handleMosMoveStories, - [IngestJobs.MosSwapStory]: handleMosSwapStories, + [IngestJobs.MosRundownReadyToAir]: 
wrapCustomIngestJob(handleMosRundownReadyToAir), + [IngestJobs.MosFullStory]: wrapMosIngestJob(handleMosFullStory), + [IngestJobs.MosDeleteStory]: wrapMosIngestJob(handleMosDeleteStory), + [IngestJobs.MosInsertStory]: wrapMosIngestJob(handleMosInsertStories), + [IngestJobs.MosMoveStory]: wrapMosIngestJob(handleMosMoveStories), + [IngestJobs.MosSwapStory]: wrapMosIngestJob(handleMosSwapStories), [IngestJobs.ExpectedPackagesRegenerate]: handleExpectedPackagesRegenerate, [IngestJobs.PackageInfosUpdatedRundown]: handleUpdatedPackageInfoForRundown, [IngestJobs.UserRemoveRundown]: handleUserRemoveRundown, [IngestJobs.UserUnsyncRundown]: handleUserUnsyncRundown, + [IngestJobs.UserExecuteChangeOperation]: handleUserExecuteChangeOperation, [IngestJobs.BucketItemImport]: handleBucketItemImport, [IngestJobs.BucketItemRegenerate]: handleBucketItemRegenerate, diff --git a/packages/lerna.json b/packages/lerna.json index 124db81cae6..c455cb8562e 100644 --- a/packages/lerna.json +++ b/packages/lerna.json @@ -2,4 +2,4 @@ "version": "1.52.0-in-development", "npmClient": "yarn", "useWorkspaces": true -} \ No newline at end of file +} diff --git a/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts b/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts index 9fe1029f8be..8d371e1c0d9 100644 --- a/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts +++ b/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts @@ -18,7 +18,6 @@ function makeTestSegment(id: string, rank: number, rundownId: string): DBSegment name: `Segment ${id}`, _rank: rank, rundownId: protectString(rundownId), - externalModified: 1695799420147, } } diff --git a/packages/meteor-lib/src/api/userActions.ts b/packages/meteor-lib/src/api/userActions.ts index 91c964bbec9..7bf7c59edc5 100644 --- a/packages/meteor-lib/src/api/userActions.ts +++ b/packages/meteor-lib/src/api/userActions.ts @@ -1,7 +1,7 @@ import { ClientAPI } from './client' import { 
EvaluationBase } from '../collections/Evaluations' import { Bucket } from '../collections/Buckets' -import { IngestAdlib, ActionUserData } from '@sofie-automation/blueprints-integration' +import { IngestAdlib, ActionUserData, UserOperationTarget } from '@sofie-automation/blueprints-integration' import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { AdLibActionCommon } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibAction' @@ -328,6 +328,13 @@ export interface NewUserActionAPI { playlistId: RundownPlaylistId, rundownId: RundownId ): Promise> + executeUserChangeOperation( + userEvent: string, + eventTime: Time, + rundownId: RundownId, + operationTarget: UserOperationTarget, + operation: { id: string; [key: string]: any } + ): Promise> createAdlibTestingRundownForShowStyleVariant( userEvent: string, @@ -427,6 +434,7 @@ export enum UserActionAPIMethods { 'disablePeripheralSubDevice' = 'userAction.system.disablePeripheralSubDevice', + 'executeUserChangeOperation' = 'userAction.executeUserChangeOperation', 'activateAdlibTestingMode' = 'userAction.activateAdlibTestingMode', 'createAdlibTestingRundownForShowStyleVariant' = 'userAction.createAdlibTestingRundownForShowStyleVariant', diff --git a/packages/meteor-lib/src/userAction.ts b/packages/meteor-lib/src/userAction.ts index b9eb3dc2629..b868a3307d4 100644 --- a/packages/meteor-lib/src/userAction.ts +++ b/packages/meteor-lib/src/userAction.ts @@ -50,6 +50,7 @@ export enum UserAction { PERIPHERAL_DEVICE_REFRESH_DEBUG_STATES, ACTIVATE_ADLIB_TESTING, QUEUE_NEXT_SEGMENT, + EXECUTE_USER_OPERATION, CREATE_ADLIB_TESTING_RUNDOWN, SET_QUICK_LOOP_START, SET_QUICK_LOOP_END, diff --git a/packages/shared-lib/src/peripheralDevice/ingest.ts b/packages/shared-lib/src/peripheralDevice/ingest.ts index 5e38b10d0e7..c53739f87e6 100644 --- a/packages/shared-lib/src/peripheralDevice/ingest.ts +++ 
b/packages/shared-lib/src/peripheralDevice/ingest.ts @@ -1,10 +1,10 @@ -export interface IngestPlaylist { +export interface IngestPlaylist { /** Id of the playlist. */ externalId: string /** Ingest cache of rundowns in this playlist. */ - rundowns: IngestRundown[] + rundowns: IngestRundown[] } -export interface IngestRundown { +export interface IngestRundown { /** Id of the rundown as reported by the ingest gateway. Must be unique for each rundown owned by the gateway */ externalId: string /** Name of the rundown */ @@ -14,37 +14,38 @@ export interface IngestRundown { type: string /** Raw payload of rundown metadata. Only used by the blueprints */ - payload?: any + payload: TRundownPayload - /** Array of segmsnts in this rundown */ - segments: IngestSegment[] + /** Array of segments in this rundown */ + segments: IngestSegment[] } -export interface IngestSegment { +export interface IngestSegment { /** Id of the segment as reported by the ingest gateway. Must be unique for each segment in the rundown */ externalId: string /** Name of the segment */ name: string + /** Rank of the segment within the rundown */ rank: number /** Raw payload of segment metadata. Only used by the blueprints */ - payload?: any + payload: TSegmentPayload /** Array of parts in this segment */ - parts: IngestPart[] + parts: IngestPart[] } -export interface IngestPart { +export interface IngestPart { /** Id of the part as reported by the ingest gateway. Must be unique for each part in the rundown */ externalId: string /** Name of the part */ name: string - /** Rank of the part within the segmetn */ + /** Rank of the part within the segment */ rank: number /** Raw payload of the part. Only used by the blueprints */ - payload?: any + payload: TPartPayload } -export interface IngestAdlib { +export interface IngestAdlib { /** Id of the adlib as reported by the ingest source. 
Must be unique for each adlib */ externalId: string /** Name of the adlib */ @@ -53,5 +54,5 @@ export interface IngestAdlib { /** Type of the raw payload. Only used by the blueprints */ payloadType: string /** Raw payload of the adlib. Only used by the blueprints */ - payload?: any + payload: TPayload } diff --git a/packages/webui/src/__mocks__/defaultCollectionObjects.ts b/packages/webui/src/__mocks__/defaultCollectionObjects.ts index 99dfea69b74..af06dffe311 100644 --- a/packages/webui/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/webui/src/__mocks__/defaultCollectionObjects.ts @@ -129,7 +129,6 @@ export function defaultSegment(_id: SegmentId, rundownId: RundownId): DBSegment externalId: unprotectString(_id), rundownId: rundownId, name: 'Default Segment', - externalModified: 1, } } diff --git a/packages/webui/src/__mocks__/helpers/database.ts b/packages/webui/src/__mocks__/helpers/database.ts index 382e789aa09..fbc094fa42a 100644 --- a/packages/webui/src/__mocks__/helpers/database.ts +++ b/packages/webui/src/__mocks__/helpers/database.ts @@ -343,7 +343,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_0', rundownId: rundown._id, name: 'Segment 0', - externalModified: 1, } MongoMock.getInnerMockCollection(Segments).insert(segment0) /* tslint:disable:ter-indent*/ @@ -453,7 +452,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_2', rundownId: rundown._id, name: 'Segment 1', - externalModified: 1, } MongoMock.getInnerMockCollection(Segments).insert(segment1) @@ -496,7 +494,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_2', rundownId: rundown._id, name: 'Segment 2', - externalModified: 1, } MongoMock.getInnerMockCollection(Segments).insert(segment2) diff --git a/packages/webui/src/client/lib/__tests__/__snapshots__/rundown.test.ts.snap b/packages/webui/src/client/lib/__tests__/__snapshots__/rundown.test.ts.snap index 59fcec5d1a6..9af78812d9e 100644 --- 
a/packages/webui/src/client/lib/__tests__/__snapshots__/rundown.test.ts.snap +++ b/packages/webui/src/client/lib/__tests__/__snapshots__/rundown.test.ts.snap @@ -274,7 +274,6 @@ exports[`client/lib/rundown RundownUtils.getResolvedSegment Basic Segment resolu "_id": "randomId9002_segment0", "_rank": 0, "externalId": "MOCK_SEGMENT_0", - "externalModified": 1, "name": "Segment 0", "outputLayers": { "pgm": { diff --git a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts index 145188e74bd..f33f36367f2 100644 --- a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts +++ b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts @@ -71,7 +71,6 @@ function makeMockSegment( _id: protectString(id), name: 'mock-segment', externalId: id, - externalModified: 0, _rank: rank, rundownId: protectString(rundownId), segmentTiming: timing, diff --git a/packages/webui/src/client/lib/clientUserAction.ts b/packages/webui/src/client/lib/clientUserAction.ts index cd75970630c..7ac5e7bf16a 100644 --- a/packages/webui/src/client/lib/clientUserAction.ts +++ b/packages/webui/src/client/lib/clientUserAction.ts @@ -113,6 +113,8 @@ function userActionToLabel(userAction: UserAction, t: i18next.TFunction) { return t('Resetting Playlist to default order') case UserAction.PERIPHERAL_DEVICE_REFRESH_DEBUG_STATES: return t('Refreshing debug states') + case UserAction.EXECUTE_USER_OPERATION: + return t('Execute User Operation') case UserAction.ACTIVATE_ADLIB_TESTING: return t('Rehearsal Mode') case UserAction.CREATE_ADLIB_TESTING_RUNDOWN: diff --git a/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx b/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx index 8dd3bfa7453..646a039c334 100644 --- a/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx +++ b/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx @@ -120,7 +120,6 @@ function useRundownPlaylists(playlistIds: RundownPlaylistId[]) { 
projection: { displayAs: 0, externalId: 0, - externalModified: 0, privateData: 0, notes: 0, segmentTiming: 0, diff --git a/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx b/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx index 756859e2e77..67d8e3ce7c9 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx @@ -14,6 +14,7 @@ import { IContextMenuContext } from '../RundownView' import { PartUi, SegmentUi } from './SegmentTimelineContainer' import { SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' +import { RenderUserEditOperations } from '../UserEditOperations/RenderUserEditOperations' import * as RundownResolver from '../../lib/RundownResolver' interface IProps { @@ -39,6 +40,8 @@ export const SegmentContextMenu = withTranslation()( render(): JSX.Element | null { const { t } = this.props + if (!this.props.studioMode || !this.props.playlist || !this.props.playlist.activationId) return null + const part = this.getPartFromContext() const segment = this.getSegmentFromContext() const timecode = this.getTimePosition() @@ -48,12 +51,17 @@ export const SegmentContextMenu = withTranslation()( (part && this.props.playlist && part.instance._id === this.props.playlist.currentPartInfo?.partInstanceId) || undefined + const isSegmentEditAble = segment?._id !== this.props.playlist.queuedSegmentId + + const isPartEditAble = + isSegmentEditAble && + part?.instance._id !== this.props.playlist.currentPartInfo?.partInstanceId && + part?.instance._id !== this.props.playlist.nextPartInfo?.partInstanceId && + part?.instance._id !== this.props.playlist.previousPartInfo?.partInstanceId + const canSetAsNext = !!this.props.playlist?.activationId - return this.props.studioMode && - this.props.playlist && - this.props.playlist.activationId && - 
segment?.orphaned !== SegmentOrphanedReason.ADLIB_TESTING ? ( + return segment?.orphaned !== SegmentOrphanedReason.ADLIB_TESTING ? ( {part && timecode === null && ( @@ -76,6 +84,18 @@ export const SegmentContextMenu = withTranslation()( {t('Clear queued segment')} )} + {segment && + RenderUserEditOperations( + isSegmentEditAble, + segment.rundownId, + segment.name, + segment.userEditOperations, + { + segmentExternalId: segment?.externalId, + partExternalId: undefined, + pieceExternalId: undefined, + } + )}
)} @@ -145,6 +165,18 @@ export const SegmentContextMenu = withTranslation()( )} )} + + {RenderUserEditOperations( + isPartEditAble, + part.instance.rundownId, + part.instance.part.title, + part.instance.part.userEditOperations, + { + segmentExternalId: segment?.externalId, + partExternalId: part.instance.part.externalId, + pieceExternalId: undefined, + } + )} )}
diff --git a/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.scss b/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.scss index 23b506db6bd..3d20bd3b245 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.scss +++ b/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.scss @@ -25,6 +25,22 @@ $timeline-layer-height: 1em; white-space: nowrap; border-radius: 3px; } +.segment-timeline__title__user-edit-states { + position: absolute; + bottom: 0; + left: 0; + right: 0; + display: flex; + flex-flow: row nowrap; +} + +.segment-timeline__title__user-edit-state { + margin-left: 5px; + margin-bottom: 5px; + width: 20px; + height: 20px; +} + .segment-timeline__part-identifiers { text-align: right; margin: 0 1px 0 2px; diff --git a/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.tsx b/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.tsx index d9102cbb0f6..ab93f820c57 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.tsx @@ -37,7 +37,7 @@ import { wrapPartToTemporaryInstance } from '@sofie-automation/meteor-lib/dist/c import { SegmentTimelineSmallPartFlag } from './SmallParts/SegmentTimelineSmallPartFlag' import { UIStateStorage } from '../../lib/UIStateStorage' import { getPartInstanceTimingId, RundownTimingContext } from '../../lib/rundownTiming' -import { IOutputLayer, ISourceLayer, NoteSeverity } from '@sofie-automation/blueprints-integration' +import { IOutputLayer, ISourceLayer, NoteSeverity, UserEditingType } from '@sofie-automation/blueprints-integration' import { SegmentTimelineZoomButtons } from './SegmentTimelineZoomButtons' import { SegmentViewMode } from '../SegmentContainer/SegmentViewModes' import { SwitchViewModeButton } from '../SegmentContainer/SwitchViewModeButton' @@ -55,6 +55,7 @@ import { import { SegmentTimeAnchorTime } from '../RundownView/RundownTiming/SegmentTimeAnchorTime' import 
{ logger } from '../../lib/logging' import * as RundownResolver from '../../lib/RundownResolver' +import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' interface IProps { id: string @@ -1099,6 +1100,7 @@ export class SegmentTimelineClass extends React.Component )} +
{this.props.playlist && @@ -1231,3 +1233,27 @@ export const SegmentTimeline = withTranslation()( } })(SegmentTimelineClass) ) + +interface HeaderEditStatesProps { + userEditOperations: DBSegment['userEditOperations'] +} +function HeaderEditStates({ userEditOperations }: HeaderEditStatesProps) { + return ( +
+ {userEditOperations && + userEditOperations.map((operation) => { + if (operation.type === UserEditingType.FORM || !operation.svgIcon || !operation.isActive) return null + + return ( +
+ ) + })} +
+ ) +} diff --git a/packages/webui/src/client/ui/UserEditOperations/RenderUserEditOperations.tsx b/packages/webui/src/client/ui/UserEditOperations/RenderUserEditOperations.tsx new file mode 100644 index 00000000000..6b0577d65dc --- /dev/null +++ b/packages/webui/src/client/ui/UserEditOperations/RenderUserEditOperations.tsx @@ -0,0 +1,90 @@ +import React from 'react' +import { assertNever, clone } from '@sofie-automation/corelib/dist/lib' +import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { CoreUserEditingDefinition } from '@sofie-automation/corelib/dist/dataModel/UserEditingDefinitions' +import { JSONBlobParse, UserEditingType, UserOperationTarget } from '@sofie-automation/blueprints-integration' +import { translateMessage } from '@sofie-automation/corelib/dist/TranslatableMessage' +import { MenuItem } from '@jstarpl/react-contextmenu' +import { i18nTranslator } from '../i18n' +import { doModalDialog } from '../../lib/ModalDialog' +import { SchemaFormInPlace } from '../../lib/forms/SchemaFormInPlace' +import { doUserAction, UserAction } from '../../lib/clientUserAction' +import { MeteorCall } from '../../lib/meteorApi' + +export function RenderUserEditOperations( + isFormEditable: boolean, + rundownId: RundownId, + targetName: string, + userEditOperations: CoreUserEditingDefinition[] | undefined, + operationTarget: UserOperationTarget +): React.JSX.Element | null { + const t = i18nTranslator + if (!userEditOperations || userEditOperations.length === 0) return null + return ( + <> +
+ {userEditOperations.map((userEditOperation, i) => { + switch (userEditOperation.type) { + case UserEditingType.ACTION: + return ( + { + doUserAction(t, e, UserAction.EXECUTE_USER_OPERATION, (e, ts) => + MeteorCall.userAction.executeUserChangeOperation(e, ts, rundownId, operationTarget, { + id: userEditOperation.id, + }) + ) + }} + > + { + // ToDo: use CSS to Style state instead of asterix + userEditOperation.isActive ? {'• '} : null + } + {translateMessage(userEditOperation.label, i18nTranslator)} + + ) + case UserEditingType.FORM: + return ( + { + const schema = JSONBlobParse(userEditOperation.schema) + const values = clone(userEditOperation.currentValues) + + // TODO: + doModalDialog({ + title: t(`Edit {{targetName}}`, { targetName }), + message: ( + + ), + // acceptText: 'OK', + yes: t('Save Changes'), + no: t('Cancel'), + onAccept: () => { + doUserAction(t, e, UserAction.EXECUTE_USER_OPERATION, (e, ts) => + MeteorCall.userAction.executeUserChangeOperation(e, ts, rundownId, operationTarget, { + ...values, + id: userEditOperation.id, + }) + ) + }, + }) + }} + > + {translateMessage(userEditOperation.label, i18nTranslator)} + + ) + default: + assertNever(userEditOperation) + return null + } + })} + + ) +} From df35b3d6e01495e01dbc3480f33b8441c4da8901 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 1 Oct 2024 13:21:40 +0100 Subject: [PATCH 15/81] fix: patch `timecode` dependency to work once packaged with vite --- .../.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch | 11 +++++++++++ packages/package.json | 5 ++++- packages/yarn.lock | 7 +++++++ 3 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 packages/.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch diff --git a/packages/.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch b/packages/.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch new file mode 100644 index 00000000000..ab845f486fa --- /dev/null +++ b/packages/.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch @@ -0,0 +1,11 @@ +diff 
--git a/lib/timecode.js b/lib/timecode.js +index ef4028e2ebc3b5480e93d88a38627454553e3502..a521b33cd239c38dfced835410aa57e8be9246ca 100644 +--- a/lib/timecode.js ++++ b/lib/timecode.js +@@ -217,3 +217,5 @@ var exports = exports || window; + + exports.Timecode = Timecode; + ++// Vite needs a more modern export ++module.exports = { Timecode } +\ No newline at end of file diff --git a/packages/package.json b/packages/package.json index a9e4603c2bf..13435f2a653 100644 --- a/packages/package.json +++ b/packages/package.json @@ -68,5 +68,8 @@ "typescript": "~4.9.5" }, "name": "packages", - "packageManager": "yarn@3.5.0" + "packageManager": "yarn@3.5.0", + "resolutions": { + "timecode@0.0.4": "patch:timecode@npm%3A0.0.4#./.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch" + } } diff --git a/packages/yarn.lock b/packages/yarn.lock index 94d104e22c2..1456ec7553f 100644 --- a/packages/yarn.lock +++ b/packages/yarn.lock @@ -26028,6 +26028,13 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard +"timecode@patch:timecode@npm%3A0.0.4#./.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch::locator=packages%40workspace%3A.": + version: 0.0.4 + resolution: "timecode@patch:timecode@npm%3A0.0.4#./.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch::version=0.0.4&hash=4d4986&locator=packages%40workspace%3A." 
+ checksum: da05fb8e2f907a5bdb538c00ccaf216447337a568d9efddcb3835c130929fef2aeb864a5eba0381847e9404dbb412012806e0e02fd881562c82ae23c61ef139c + languageName: node + linkType: hard + "timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240923-122840-58cfbb259.0": version: 9.2.0-nightly-release52-20240923-122840-58cfbb259.0 resolution: "timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240923-122840-58cfbb259.0" From c6f563d0bd7d1fd8d821cda531e1d4dd81fa1fa5 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 1 Oct 2024 13:32:16 +0100 Subject: [PATCH 16/81] fix: missing font once packaged with vite --- .../webui/src/client/ui/SegmentTimeline/TimelineGrid.tsx | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/webui/src/client/ui/SegmentTimeline/TimelineGrid.tsx b/packages/webui/src/client/ui/SegmentTimeline/TimelineGrid.tsx index 3fc47e0a85f..661ce540e99 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/TimelineGrid.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/TimelineGrid.tsx @@ -21,7 +21,10 @@ declare class FontFace { load(): void } -const LABEL_FONT_URL = 'url("/fonts/roboto-gh-pages/fonts/Light/Roboto-Light.woff")' +const LABEL_FONT_URL = `url("${new URL( + '../../../fonts/roboto-gh-pages/fonts/Light/Roboto-Light.woff2', + import.meta.url +)}")` const LABEL_COLOR = 'rgb(80,80,80)' const SHORT_LINE_GRID_COLOR = 'rgb(112,112,112)' const LONG_LINE_GRID_COLOR = 'rgb(80,80,80)' From 9f217cdf0eefc9d5499bfd678a15930be1f83b2b Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 1 Oct 2024 14:34:16 +0100 Subject: [PATCH 17/81] fix: koa not serving app for non-root url --- meteor/server/api/rest/koa.ts | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/meteor/server/api/rest/koa.ts b/meteor/server/api/rest/koa.ts index 00ecd85ac4d..3a8c54dc0a0 100644 --- a/meteor/server/api/rest/koa.ts +++ b/meteor/server/api/rest/koa.ts @@ -18,6 +18,7 @@ declare module 'http' { } 
const rootRouter = new KoaRouter() +const boundRouterPaths: string[] = [] Meteor.startup(() => { const koaApp = new Koa() @@ -51,7 +52,8 @@ Meteor.startup(() => { // serve the webui through koa // This is to avoid meteor injecting anything into the served html - koaApp.use(staticServe(public_dir)) + const webuiServer = staticServe(public_dir) + koaApp.use(webuiServer) logger.debug(`Serving static files from ${public_dir}`) // Serve the meteor runtime config @@ -66,9 +68,31 @@ Meteor.startup(() => { }) koaApp.use(rootRouter.routes()).use(rootRouter.allowedMethods()) + + koaApp.use(async (ctx, next) => { + if (ctx.method !== 'GET') return next() + + // Don't use the fallback for certain paths + if (ctx.path.startsWith('/assets/')) return next() + + // Don't use the fallback for anything handled by another router + // This does not feel efficient, but koa doesn't appear to have any shared state between the router handlers + for (const bindPath of boundRouterPaths) { + if (ctx.path.startsWith(bindPath)) return next() + } + + // fallback to the root file + ctx.path = '/' + return webuiServer(ctx, next) + }) }) export function bindKoaRouter(koaRouter: KoaRouter, bindPath: string): void { + // Track this path as having a router + let bindPathFull = bindPath + if (!bindPathFull.endsWith('/')) bindPathFull += '/' + boundRouterPaths.push(bindPathFull) + rootRouter.use(bindPath, koaRouter.routes()).use(bindPath, koaRouter.allowedMethods()) } From 38f5690f63efaf2c31504945cb3029b3c0c57382 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 1 Oct 2024 16:23:37 +0100 Subject: [PATCH 18/81] fix: some styling bugs --- packages/webui/index.html | 1 + packages/webui/src/client/styles/main.scss | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/webui/index.html b/packages/webui/index.html index 5e02e90cfed..efd40955942 100644 --- a/packages/webui/index.html +++ b/packages/webui/index.html @@ -1,3 +1,4 @@ + Sofie diff --git 
a/packages/webui/src/client/styles/main.scss b/packages/webui/src/client/styles/main.scss index 3538eafd6d5..8b3de38f608 100644 --- a/packages/webui/src/client/styles/main.scss +++ b/packages/webui/src/client/styles/main.scss @@ -30,7 +30,6 @@ input { @import 'prompter'; @import 'rundownList'; @import 'rundownSystemStatus'; -@import 'rundownView'; @import 'settings'; @import 'splitDropdown'; @import 'statusbar'; @@ -104,6 +103,8 @@ input { @import '../ui/RundownView/MediaStatusPopUp/MediaStatusPopUpSegmentRule.scss'; @import '../ui/SegmentAdlibTesting/SegmentAdlibTesting.scss'; +@import 'rundownView'; + body { overscroll-behavior: none; touch-action: pan-x pan-y; From 9ffb1bc5f7deec1c99052e2b96aecc013216f78b Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 2 Oct 2024 13:36:24 +0100 Subject: [PATCH 19/81] fix: handle durations of onRundownChange infinites correctly when spanning into another part --- .../job-worker/src/playout/timeline/rundown.ts | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/packages/job-worker/src/playout/timeline/rundown.ts b/packages/job-worker/src/playout/timeline/rundown.ts index f637c666ef9..0e7a1079865 100644 --- a/packages/job-worker/src/playout/timeline/rundown.ts +++ b/packages/job-worker/src/playout/timeline/rundown.ts @@ -328,8 +328,16 @@ function generateCurrentInfinitePieceObjects( infiniteGroup.enable.duration = infiniteInNextPart.piece.enable.duration } - // If this piece does not continue in the next part, then set it to end with the part it belongs to - if ( + const pieceInstanceWithUpdatedEndCap: PieceInstanceWithTimings = { ...pieceInstance } + // Give the infinite group and end cap when the end of the piece is known + if (pieceInstance.resolvedEndCap) { + // If the cap is a number, it is relative to the part, not the parent group so needs to be handled here + if (typeof pieceInstance.resolvedEndCap === 'number') { + infiniteGroup.enable.end = `#${timingContext.currentPartGroup.id}.start 
+ ${pieceInstance.resolvedEndCap}` + delete pieceInstanceWithUpdatedEndCap.resolvedEndCap + } + } else if ( + // If this piece does not continue in the next part, then set it to end with the part it belongs to !infiniteInNextPart && currentPartInfo.partInstance.part.autoNext && infiniteGroup.enable.duration === undefined && @@ -355,7 +363,7 @@ function generateCurrentInfinitePieceObjects( activePlaylist._id, infiniteGroup, nowInParent, - pieceInstance, + pieceInstanceWithUpdatedEndCap, pieceEnable, 0, groupClasses, From 8384ba6e8026fe9217211895492fad2969471930 Mon Sep 17 00:00:00 2001 From: Mint de Wit Date: Mon, 30 Sep 2024 11:56:23 +0000 Subject: [PATCH 20/81] fix: findMarkerPosition always needs all parts available --- meteor/server/publications/lib/quickLoop.ts | 11 +++++------ .../publications/partInstancesUI/publication.ts | 4 ++-- .../partInstancesUI/reactiveContentCache.ts | 11 +++++++++++ .../partInstancesUI/rundownContentObserver.ts | 14 +++++++++++++- 4 files changed, 31 insertions(+), 9 deletions(-) diff --git a/meteor/server/publications/lib/quickLoop.ts b/meteor/server/publications/lib/quickLoop.ts index 73a3b5f1dd0..10fadc4c3d9 100644 --- a/meteor/server/publications/lib/quickLoop.ts +++ b/meteor/server/publications/lib/quickLoop.ts @@ -118,16 +118,15 @@ export function findMarkerPosition( marker: QuickLoopMarker, fallback: number, segmentCache: ReadonlyObjectDeep>>, - partCache: - | { parts: ReadonlyObjectDeep>> } - | { partInstances: ReadonlyObjectDeep> }, + partCache: { + parts?: ReadonlyObjectDeep>> + partInstances?: ReadonlyObjectDeep> + }, rundownRanks: Record ): MarkerPosition { const part = marker.type === QuickLoopMarkerType.PART - ? 'parts' in partCache - ? partCache.parts.findOne(marker.id) - : partCache.partInstances.findOne({ 'part._id': marker.id })?.part + ? partCache.partInstances?.findOne({ 'part._id': marker.id })?.part ?? partCache.parts?.findOne(marker.id) : undefined const partRank = part?._rank ?? 
fallback diff --git a/meteor/server/publications/partInstancesUI/publication.ts b/meteor/server/publications/partInstancesUI/publication.ts index e85c9b8ec01..0d1d5a37f49 100644 --- a/meteor/server/publications/partInstancesUI/publication.ts +++ b/meteor/server/publications/partInstancesUI/publication.ts @@ -162,7 +162,7 @@ export async function manipulateUIPartInstancesPublicationData( playlist.quickLoop.start, -Infinity, state.contentCache.Segments, - { partInstances: state.contentCache.PartInstances }, + { partInstances: state.contentCache.PartInstances, parts: state.contentCache.Parts }, rundownRanks ) const quickLoopEndPosition = @@ -171,7 +171,7 @@ export async function manipulateUIPartInstancesPublicationData( playlist.quickLoop.end, Infinity, state.contentCache.Segments, - { partInstances: state.contentCache.PartInstances }, + { partInstances: state.contentCache.PartInstances, parts: state.contentCache.Parts }, rundownRanks ) diff --git a/meteor/server/publications/partInstancesUI/reactiveContentCache.ts b/meteor/server/publications/partInstancesUI/reactiveContentCache.ts index b9356fb6a10..b7c01b627ab 100644 --- a/meteor/server/publications/partInstancesUI/reactiveContentCache.ts +++ b/meteor/server/publications/partInstancesUI/reactiveContentCache.ts @@ -5,6 +5,7 @@ import { MongoFieldSpecifierOnesStrict, MongoFieldSpecifierZeroes } from '@sofie import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' +import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' export type RundownPlaylistCompact = Pick export const rundownPlaylistFieldSpecifier = literal>({ @@ -21,6 +22,14 @@ export const segmentFieldSpecifier = literal>>({ + _id: 1, + _rank: 1, + segmentId: 1, + rundownId: 1, +}) + export type PartInstanceOmitedFields = 'part.privateData' export 
const partInstanceFieldSpecifier = literal>({ // @ts-expect-error Mongo typings aren't clever enough yet @@ -36,6 +45,7 @@ export const studioFieldSpecifier = literal> Segments: ReactiveCacheCollection> + Parts: ReactiveCacheCollection> PartInstances: ReactiveCacheCollection> RundownPlaylists: ReactiveCacheCollection } @@ -44,6 +54,7 @@ export function createReactiveContentCache(): ContentCache { const cache: ContentCache = { Studios: new ReactiveCacheCollection>('studios'), Segments: new ReactiveCacheCollection>('segments'), + Parts: new ReactiveCacheCollection>('parts'), PartInstances: new ReactiveCacheCollection>('partInstances'), RundownPlaylists: new ReactiveCacheCollection('rundownPlaylists'), } diff --git a/meteor/server/publications/partInstancesUI/rundownContentObserver.ts b/meteor/server/publications/partInstancesUI/rundownContentObserver.ts index aa405e90713..c8a21b842c2 100644 --- a/meteor/server/publications/partInstancesUI/rundownContentObserver.ts +++ b/meteor/server/publications/partInstancesUI/rundownContentObserver.ts @@ -3,12 +3,13 @@ import { RundownId, RundownPlaylistActivationId, StudioId } from '@sofie-automat import { logger } from '../../logging' import { ContentCache, + partFieldSpecifier, partInstanceFieldSpecifier, rundownPlaylistFieldSpecifier, segmentFieldSpecifier, studioFieldSpecifier, } from './reactiveContentCache' -import { PartInstances, RundownPlaylists, Segments, Studios } from '../../collections' +import { PartInstances, Parts, RundownPlaylists, Segments, Studios } from '../../collections' export class RundownContentObserver { #observers: Meteor.LiveQueryHandle[] = [] @@ -53,6 +54,17 @@ export class RundownContentObserver { projection: segmentFieldSpecifier, } ), + Parts.observeChanges( + { + rundownId: { + $in: rundownIds, + }, + }, + cache.Parts.link(), + { + projection: partFieldSpecifier, + } + ), PartInstances.observeChanges( { rundownId: { From 5ad423d2a713e6d26a4c7f5e0f24d56d7e885e28 Mon Sep 17 00:00:00 2001 From: 
Julian Waller Date: Wed, 2 Oct 2024 16:00:49 +0100 Subject: [PATCH 21/81] fix: segment remaining timer not counting --- .../lib/__tests__/rundownTiming.test.ts | 19 ------------------- .../webui/src/client/lib/rundownTiming.ts | 7 +------ 2 files changed, 1 insertion(+), 25 deletions(-) diff --git a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts index 85415186414..3da1c9e71c1 100644 --- a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts +++ b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts @@ -167,7 +167,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentStartedPlayback: {}, }) ) }) @@ -269,7 +268,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentStartedPlayback: {}, }) ) }) @@ -371,7 +369,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentStartedPlayback: {}, }) ) }) @@ -477,7 +474,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentStartedPlayback: {}, }) ) }) @@ -604,7 +600,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentStartedPlayback: {}, }) ) }) @@ -759,7 +754,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: 2500, rundownsBeforeNextBreak: [], - segmentStartedPlayback: {}, nextRundownAnchor: undefined, }) ) @@ -915,7 +909,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: -4000, 
rundownsBeforeNextBreak: [], - segmentStartedPlayback: {}, nextRundownAnchor: undefined, }) ) @@ -1020,7 +1013,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentStartedPlayback: {}, }) ) }) @@ -1158,7 +1150,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentStartedPlayback: {}, }) ) }) @@ -1279,7 +1270,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentStartedPlayback: {}, }) ) }) @@ -1399,7 +1389,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentStartedPlayback: {}, nextRundownAnchor: undefined, }) ) @@ -1529,7 +1518,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentStartedPlayback: {}, }) ) }) @@ -1675,7 +1663,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: undefined, remainingTimeOnCurrentPart: undefined, rundownsBeforeNextBreak: undefined, - segmentStartedPlayback: {}, }) ) }) @@ -1823,7 +1810,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: 500, rundownsBeforeNextBreak: [], - segmentStartedPlayback: {}, nextRundownAnchor: 2000, }) ) @@ -1972,7 +1958,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: -1500, rundownsBeforeNextBreak: [], - segmentStartedPlayback: {}, nextRundownAnchor: 4000, }) ) @@ -2127,7 +2112,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: 500, rundownsBeforeNextBreak: [], - 
segmentStartedPlayback: {}, nextRundownAnchor: 3000, }) ) @@ -2276,7 +2260,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: 500, rundownsBeforeNextBreak: [], - segmentStartedPlayback: {}, nextRundownAnchor: 2000, }) ) @@ -2425,7 +2408,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: -1500, rundownsBeforeNextBreak: [], - segmentStartedPlayback: {}, nextRundownAnchor: 4000, }) ) @@ -2580,7 +2562,6 @@ describe('rundown Timing Calculator', () => { breakIsLastRundown: false, remainingTimeOnCurrentPart: 500, rundownsBeforeNextBreak: [], - segmentStartedPlayback: {}, nextRundownAnchor: 3000, }) ) diff --git a/packages/webui/src/client/lib/rundownTiming.ts b/packages/webui/src/client/lib/rundownTiming.ts index 855248d5682..6312efd2eac 100644 --- a/packages/webui/src/client/lib/rundownTiming.ts +++ b/packages/webui/src/client/lib/rundownTiming.ts @@ -66,7 +66,6 @@ export class RundownTimingCalculator { private partDisplayDurations: Record = {} private partDisplayDurationsNoPlayback: Record = {} private displayDurationGroups: Record = {} - private segmentStartedPlayback: Record = {} private segmentAsPlayedDurations: Record = {} private breakProps: { props: BreakProps | undefined @@ -124,7 +123,6 @@ export class RundownTimingCalculator { let liveSegmentId: SegmentId | undefined Object.keys(this.displayDurationGroups).forEach((key) => delete this.displayDurationGroups[key]) - Object.keys(this.segmentStartedPlayback).forEach((key) => delete this.segmentStartedPlayback[key]) Object.keys(this.segmentAsPlayedDurations).forEach((key) => delete this.segmentAsPlayedDurations[key]) this.untimedSegments.clear() this.linearParts.length = 0 @@ -167,7 +165,7 @@ export class RundownTimingCalculator { if (liveSegment?.segmentTiming?.countdownType === CountdownType.SEGMENT_BUDGET_DURATION) { remainingBudgetOnCurrentSegment = - 
(this.segmentStartedPlayback[unprotectString(liveSegmentId)] ?? + (playlist.segmentsStartedPlayback?.[unprotectString(liveSegmentId)] ?? lastStartedPlayback ?? now) + (liveSegment.segmentTiming.budgetDuration ?? 0) - @@ -633,7 +631,6 @@ export class RundownTimingCalculator { partDisplayStartsAt: this.partDisplayStartsAt, partExpectedDurations: this.partExpectedDurations, partDisplayDurations: this.partDisplayDurations, - segmentStartedPlayback: this.segmentStartedPlayback, currentTime: now, remainingTimeOnCurrentPart, remainingBudgetOnCurrentSegment, @@ -726,8 +723,6 @@ export interface RundownTimingContext { * if the Part does not have an expected duration. */ partExpectedDurations?: Record - /** Time when selected segments started playback. Contains only the current segment and the segment before, if we've just entered a new one */ - segmentStartedPlayback?: Record /** Remaining time on current part */ remainingTimeOnCurrentPart?: number /** Remaining budget on current segment, if its countdownType === CountdownType.SEGMENT_BUDGET_DURATION */ From 680c3e001becbe34f0c979baf80225018366b423 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Thu, 3 Oct 2024 12:18:53 +0100 Subject: [PATCH 22/81] fix: track segmentStartedPlayback against `SegmentPlayoutId` instead of `SegmentId` --- .../corelib/src/dataModel/RundownPlaylist.ts | 5 +++- .../src/playout/model/PlayoutModel.ts | 5 ++-- .../model/implementation/PlayoutModelImpl.ts | 15 ++++++----- .../src/playout/timings/partPlayback.ts | 7 +++-- .../webui/src/client/lib/rundownTiming.ts | 26 +++++++++---------- 5 files changed, 33 insertions(+), 25 deletions(-) diff --git a/packages/corelib/src/dataModel/RundownPlaylist.ts b/packages/corelib/src/dataModel/RundownPlaylist.ts index 99500fc19f8..a2ba8cccb13 100644 --- a/packages/corelib/src/dataModel/RundownPlaylist.ts +++ b/packages/corelib/src/dataModel/RundownPlaylist.ts @@ -160,7 +160,10 @@ export interface DBRundownPlaylist { lastIncorrectPartPlaybackReported?: Time /** 
Actual time of each rundown starting playback */ rundownsStartedPlayback?: Record - /** Actual time of SOME segments starting playback - usually just the previous and current one */ + /** + * Actual time of SOME segments starting playback - usually just the previous and current one + * This is not using SegmentId, but SegmentPlayoutId + */ segmentsStartedPlayback?: Record /** Time of the last take */ lastTakeTime?: Time diff --git a/packages/job-worker/src/playout/model/PlayoutModel.ts b/packages/job-worker/src/playout/model/PlayoutModel.ts index 10caee9a473..9f380299061 100644 --- a/packages/job-worker/src/playout/model/PlayoutModel.ts +++ b/packages/job-worker/src/playout/model/PlayoutModel.ts @@ -7,6 +7,7 @@ import { RundownPlaylistActivationId, RundownPlaylistId, SegmentId, + SegmentPlayoutId, } from '@sofie-automation/corelib/dist/dataModel/Ids' import { BaseModel } from '../../modelBase' import { @@ -330,10 +331,10 @@ export interface PlayoutModel extends PlayoutModelReadonly, StudioPlayoutModelBa /** * Track a Segment as having started playback - * @param segmentId Id of the Segment + * @param segmentPlayoutId Playout id of the Segment * @param timestamp Timestamp playback started */ - setSegmentStartedPlayback(segmentId: SegmentId, timestamp: number): void + setSegmentStartedPlayback(segmentPlayoutId: SegmentPlayoutId, timestamp: number): void /** * Set or clear a QuickLoop Marker diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts index 6ee433b9dd4..4e522fdb15f 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts @@ -587,6 +587,7 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou delete this.playlistImpl.lastTakeTime delete this.playlistImpl.startedPlayback delete this.playlistImpl.rundownsStartedPlayback + 
delete this.playlistImpl.segmentsStartedPlayback delete this.playlistImpl.previousPersistentState delete this.playlistImpl.trackedAbSessions delete this.playlistImpl.queuedSegmentId @@ -740,21 +741,21 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou this.#playlistHasChanged = true } - setSegmentStartedPlayback(segmentId: SegmentId, timestamp: number): void { - const segmentIdsToKeep: string[] = [] + setSegmentStartedPlayback(segmentPlayoutId: SegmentPlayoutId, timestamp: number): void { + const segmentPlayoutIdsToKeep: string[] = [] if (this.previousPartInstance) { - segmentIdsToKeep.push(unprotectString(this.previousPartInstance.partInstance.segmentId)) + segmentPlayoutIdsToKeep.push(unprotectString(this.previousPartInstance.partInstance.segmentPlayoutId)) } if (this.currentPartInstance) { - segmentIdsToKeep.push(unprotectString(this.currentPartInstance.partInstance.segmentId)) + segmentPlayoutIdsToKeep.push(unprotectString(this.currentPartInstance.partInstance.segmentPlayoutId)) } this.playlistImpl.segmentsStartedPlayback = this.playlistImpl.segmentsStartedPlayback - ? _.pick(this.playlistImpl.segmentsStartedPlayback, segmentIdsToKeep) + ? 
_.pick(this.playlistImpl.segmentsStartedPlayback, segmentPlayoutIdsToKeep) : {} - const segmentIdStr = unprotectString(segmentId) - this.playlistImpl.segmentsStartedPlayback[segmentIdStr] = timestamp + const segmentPlayoutIdStr = unprotectString(segmentPlayoutId) + this.playlistImpl.segmentsStartedPlayback[segmentPlayoutIdStr] = timestamp this.#playlistHasChanged = true } diff --git a/packages/job-worker/src/playout/timings/partPlayback.ts b/packages/job-worker/src/playout/timings/partPlayback.ts index 71aea9a1627..c0dcdd98520 100644 --- a/packages/job-worker/src/playout/timings/partPlayback.ts +++ b/packages/job-worker/src/playout/timings/partPlayback.ts @@ -195,8 +195,11 @@ export function reportPartInstanceHasStarted( playoutModel.setRundownStartedPlayback(partInstance.partInstance.rundownId, timestamp) } - if (partInstance.partInstance.segmentId !== playoutModel.previousPartInstance?.partInstance.segmentId) { - playoutModel.setSegmentStartedPlayback(partInstance.partInstance.segmentId, timestamp) + if ( + partInstance.partInstance.segmentPlayoutId !== + playoutModel.previousPartInstance?.partInstance.segmentPlayoutId + ) { + playoutModel.setSegmentStartedPlayback(partInstance.partInstance.segmentPlayoutId, timestamp) } if (timestampUpdated) { diff --git a/packages/webui/src/client/lib/rundownTiming.ts b/packages/webui/src/client/lib/rundownTiming.ts index 6312efd2eac..9551c1f9211 100644 --- a/packages/webui/src/client/lib/rundownTiming.ts +++ b/packages/webui/src/client/lib/rundownTiming.ts @@ -11,7 +11,7 @@ * without knowing what particular case you are trying to solve. 
*/ -import { PartId, PartInstanceId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PartId, PartInstanceId, SegmentId, SegmentPlayoutId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { literal } from '@sofie-automation/corelib/dist/lib' import { PlaylistTiming } from '@sofie-automation/corelib/dist/playout/rundownTiming' import { calculatePartInstanceExpectedDurationWithTransition } from '@sofie-automation/corelib/dist/playout/timings' @@ -37,7 +37,7 @@ interface BreakProps { type CalculateTimingsPartInstance = Pick< PartInstance, - '_id' | 'isTemporary' | 'segmentId' | 'orphaned' | 'timings' | 'part' + '_id' | 'isTemporary' | 'segmentId' | 'segmentPlayoutId' | 'orphaned' | 'timings' | 'part' > export type TimingId = string @@ -120,7 +120,7 @@ export class RundownTimingCalculator { let rundownsBeforeNextBreak: Rundown[] | undefined let breakIsLastRundown: boolean | undefined - let liveSegmentId: SegmentId | undefined + let liveSegmentIds: [SegmentId, SegmentPlayoutId] | undefined Object.keys(this.displayDurationGroups).forEach((key) => delete this.displayDurationGroups[key]) Object.keys(this.segmentAsPlayedDurations).forEach((key) => delete this.segmentAsPlayedDurations[key]) @@ -130,7 +130,7 @@ export class RundownTimingCalculator { let nextAIndex = -1 let currentAIndex = -1 - let lastSegmentId: SegmentId | undefined = undefined + let lastSegmentIds: [SegmentId, SegmentPlayoutId] | undefined = undefined let nextRundownAnchor: number | undefined = undefined if (playlist) { @@ -158,14 +158,14 @@ export class RundownTimingCalculator { ? 
partInstance.timings?.plannedStartedPlayback : undefined - if (partInstance.segmentId !== lastSegmentId) { + if (!lastSegmentIds || partInstance.segmentId !== lastSegmentIds[0]) { this.untimedSegments.add(partInstance.segmentId) - if (liveSegmentId && lastSegmentId === liveSegmentId) { - const liveSegment = segmentsMap.get(liveSegmentId) + if (liveSegmentIds && lastSegmentIds && lastSegmentIds[0] === liveSegmentIds[0]) { + const liveSegment = segmentsMap.get(liveSegmentIds[0]) if (liveSegment?.segmentTiming?.countdownType === CountdownType.SEGMENT_BUDGET_DURATION) { remainingBudgetOnCurrentSegment = - (playlist.segmentsStartedPlayback?.[unprotectString(liveSegmentId)] ?? + (playlist.segmentsStartedPlayback?.[unprotectString(liveSegmentIds[1])] ?? lastStartedPlayback ?? now) + (liveSegment.segmentTiming.budgetDuration ?? 0) - @@ -177,7 +177,7 @@ export class RundownTimingCalculator { waitAccumulator += segmentBudgetDurationLeft } segmentBudgetDurationLeft = segmentBudget ?? 0 - lastSegmentId = partInstance.segmentId + lastSegmentIds = [partInstance.segmentId, partInstance.segmentPlayoutId] } // add piece to accumulator @@ -189,7 +189,7 @@ export class RundownTimingCalculator { this.nextSegmentId = partInstance.segmentId } else if (playlist.currentPartInfo?.partInstanceId === partInstance._id) { currentAIndex = aIndex - liveSegmentId = partInstance.segmentId + liveSegmentIds = [partInstance.segmentId, partInstance.segmentPlayoutId] } const partCounts = @@ -284,7 +284,7 @@ export class RundownTimingCalculator { partDisplayDuration = Math.max(partDisplayDurationNoPlayback, now - lastStartedPlayback) this.partPlayed[partInstanceOrPartId] = now - lastStartedPlayback const segmentStartedPlayback = - playlist.segmentsStartedPlayback?.[unprotectString(partInstance.segmentId)] ?? + playlist.segmentsStartedPlayback?.[unprotectString(partInstance.segmentPlayoutId)] ?? 
lastStartedPlayback // NOTE: displayDurationGroups are ignored here, when using budgetDuration @@ -563,8 +563,8 @@ export class RundownTimingCalculator { let valToAddToRundownAsPlayedDuration = 0 let valToAddToRundownRemainingDuration = 0 - if (segment._id === liveSegmentId) { - const startedPlayback = playlist.segmentsStartedPlayback?.[unprotectString(segment._id)] + if (liveSegmentIds && segment._id === liveSegmentIds[0]) { + const startedPlayback = playlist.segmentsStartedPlayback?.[unprotectString(liveSegmentIds[1])] valToAddToRundownRemainingDuration = Math.max( 0, segmentBudgetDuration - (startedPlayback ? now - startedPlayback : 0) From 6df166801055d1e0163930170153d6ac8554e6fa Mon Sep 17 00:00:00 2001 From: Mint de Wit Date: Mon, 7 Oct 2024 12:05:20 +0000 Subject: [PATCH 23/81] fix: reset segments when added to an active loop --- .../src/playout/model/PlayoutModel.ts | 2 + .../model/implementation/PlayoutModelImpl.ts | 4 ++ .../model/services/QuickLoopService.ts | 34 ++++++++- .../src/playout/quickLoopMarkers.ts | 70 ++++++++++++++++++- 4 files changed, 108 insertions(+), 2 deletions(-) diff --git a/packages/job-worker/src/playout/model/PlayoutModel.ts b/packages/job-worker/src/playout/model/PlayoutModel.ts index 10caee9a473..88e04e92afd 100644 --- a/packages/job-worker/src/playout/model/PlayoutModel.ts +++ b/packages/job-worker/src/playout/model/PlayoutModel.ts @@ -342,6 +342,8 @@ export interface PlayoutModel extends PlayoutModelReadonly, StudioPlayoutModelBa */ setQuickLoopMarker(type: 'start' | 'end', marker: QuickLoopMarker | null): void + getSegmentsBetweenQuickLoopMarker(start: QuickLoopMarker, end: QuickLoopMarker): SegmentId[] + calculatePartTimings( fromPartInstance: PlayoutPartInstanceModel | null, toPartInstance: PlayoutPartInstanceModel, diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts index 6ee433b9dd4..09651a09b83 100644 --- 
a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts @@ -792,6 +792,10 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou this.#playlistHasChanged = true } + getSegmentsBetweenQuickLoopMarker(start: QuickLoopMarker, end: QuickLoopMarker): SegmentId[] { + return this.quickLoopService.getSegmentsBetweenMarkers(start, end) + } + /** Lifecycle */ /** @deprecated */ diff --git a/packages/job-worker/src/playout/model/services/QuickLoopService.ts b/packages/job-worker/src/playout/model/services/QuickLoopService.ts index 91a6245fc94..54d57ea67a0 100644 --- a/packages/job-worker/src/playout/model/services/QuickLoopService.ts +++ b/packages/job-worker/src/playout/model/services/QuickLoopService.ts @@ -8,7 +8,7 @@ import { } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { ReadonlyObjectDeep } from 'type-fest/source/readonly-deep' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' -import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { PlayoutPartInstanceModel } from '../PlayoutPartInstanceModel' import { JobContext } from '../../../jobs' @@ -149,6 +149,38 @@ export class QuickLoopService { return quickLoopProps } + getSegmentsBetweenMarkers(startMarker: QuickLoopMarker, endMarker: QuickLoopMarker): SegmentId[] { + const orderedParts = this.playoutModel.getAllOrderedParts() + const rundownIds = this.playoutModel.getRundownIds() + + const start = this.findQuickLoopMarkerPosition(startMarker, 'start', orderedParts, rundownIds) + const end = this.findQuickLoopMarkerPosition(endMarker, 'end', orderedParts, rundownIds) + + if (this.areMarkersFlipped(start, end)) return [] + + const segmentIds: Set = new Set() + + for 
(const part of orderedParts) { + const currentSegment = this.playoutModel.findSegment(part.segmentId)?.segment + const currentRundownIndex = rundownIds.findIndex((id) => id === part.rundownId) + + if (!currentSegment) continue // part's segment not found in the model — skip it (TODO: confirm this cannot happen) + + if ( + currentRundownIndex >= start.rundownRank && + currentRundownIndex <= end.rundownRank && + currentSegment._rank >= start.segmentRank && + currentSegment._rank <= end.segmentRank && + part._rank >= start.partRank && + part._rank <= end.partRank + ) { + segmentIds.add(currentSegment._id) + } + } + + return Array.from(segmentIds.values()) + } + private areMarkersFlipped(startPosition: MarkerPosition, endPosition: MarkerPosition) { return compareMarkerPositions(startPosition, endPosition) < 0 } diff --git a/packages/job-worker/src/playout/quickLoopMarkers.ts b/packages/job-worker/src/playout/quickLoopMarkers.ts index 67e35588221..cceeed6b81d 100644 --- a/packages/job-worker/src/playout/quickLoopMarkers.ts +++ b/packages/job-worker/src/playout/quickLoopMarkers.ts @@ -5,6 +5,9 @@ import { runJobWithPlayoutModel } from './lock' import { updateTimeline } from './timeline/generate' import { selectNextPart } from './selectNextPart' import { setNextPart } from './setNext' +import { resetPartInstancesWithPieceInstances } from './lib' +import { QuickLoopMarker, QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' export async function handleSetQuickLoopMarker(context: JobContext, data: SetQuickLoopMarkerProps): Promise { return runJobWithPlayoutModel( @@ -17,9 +20,74 @@ async (playoutModel) => { const playlist = playoutModel.playlist if (!playlist.activationId) throw new Error(`Playlist has no activationId!`) - const wasQuickLoopRunning = playoutModel.playlist.quickLoop?.running + const oldProps = playoutModel.playlist.quickLoop + const
wasQuickLoopRunning = oldProps?.running playoutModel.setQuickLoopMarker(data.type, data.marker) + const markerChanged = ( + markerA: QuickLoopMarker | undefined, + markerB: QuickLoopMarker | undefined + ): boolean => { + if (!markerA || !markerB) return false + + if ( + (markerA.type === QuickLoopMarkerType.RUNDOWN || + markerA.type === QuickLoopMarkerType.SEGMENT || + markerA.type === QuickLoopMarkerType.PART) && + (markerB.type === QuickLoopMarkerType.RUNDOWN || + markerB.type === QuickLoopMarkerType.SEGMENT || + markerB.type === QuickLoopMarkerType.PART) + ) { + return markerA.id !== markerB.id + } + + return false + } + + if (playlist.currentPartInfo) { + // rundown is on air + let segmentsToReset: SegmentId[] = [] + + if ( + playlist.quickLoop?.start && + oldProps?.start && + markerChanged(oldProps.start, playlist.quickLoop.start) + ) { + // start marker changed + segmentsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( + playlist.quickLoop.start, + oldProps.start + ) + } else if ( + playlist.quickLoop?.end && + oldProps?.end && + markerChanged(oldProps.end, playlist.quickLoop.end) + ) { + // end marker changed + segmentsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( + oldProps.end, + playlist.quickLoop.end + ) + } else if (playlist.quickLoop?.start && playlist.quickLoop.end && !(oldProps?.start && oldProps.end)) { + // a new loop was created + segmentsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( + playlist.quickLoop.start, + playlist.quickLoop.end + ) + } + + // reset segments that have been added to the loop and are not on-air + resetPartInstancesWithPieceInstances(context, playoutModel, { + segmentId: { + $in: segmentsToReset.filter( + (segmentId) => + segmentId !== playoutModel.currentPartInstance?.partInstance.segmentId && + segmentId !== playoutModel.nextPartInstance?.partInstance.segmentId + ), + }, + }) + } + if (wasQuickLoopRunning) { const nextPart = selectNextPart( context, From 
632ff26d8ff81f2aab850256a89d5b4f0408f697 Mon Sep 17 00:00:00 2001 From: Mint de Wit Date: Mon, 7 Oct 2024 12:15:48 +0000 Subject: [PATCH 24/81] fix(ui): timing in quickloop --- .../webui/src/client/lib/rundownTiming.ts | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/packages/webui/src/client/lib/rundownTiming.ts b/packages/webui/src/client/lib/rundownTiming.ts index 6312efd2eac..a3c074dd6b9 100644 --- a/packages/webui/src/client/lib/rundownTiming.ts +++ b/packages/webui/src/client/lib/rundownTiming.ts @@ -107,6 +107,8 @@ export class RundownTimingCalculator { let remainingRundownDuration = 0 let asPlayedRundownDuration = 0 let asDisplayedRundownDuration = 0 + // the "wait" for a part is defined as its asPlayedDuration or its displayDuration or its expectedDuration + const waitPerPart: Record = {} let waitAccumulator = 0 let currentRemaining = 0 let startsAtAccumulator = 0 @@ -436,10 +438,13 @@ export class RundownTimingCalculator { 0 } if (segmentUsesBudget) { - waitAccumulator += Math.min(waitDuration, Math.max(segmentBudgetDurationLeft, 0)) + const wait = Math.min(waitDuration, Math.max(segmentBudgetDurationLeft, 0)) + waitAccumulator += wait segmentBudgetDurationLeft -= waitDuration + waitPerPart[unprotectString(partId)] = wait + Math.max(0, segmentBudgetDurationLeft) } else { waitAccumulator += waitDuration + waitPerPart[unprotectString(partId)] = waitDuration } // remaining is the sum of unplayed lines + whatever is left of the current segment @@ -480,7 +485,9 @@ export class RundownTimingCalculator { }) // This is where the waitAccumulator-generated data in the linearSegLines is used to calculate the countdowns. 
+ // at this point the "waitAccumulator" should be the total sum of all the "waits" in the rundown let localAccum = 0 + let timeTillEndLoop: undefined | number = undefined for (let i = 0; i < this.linearParts.length; i++) { if (i < nextAIndex) { // this is a line before next line @@ -517,6 +524,11 @@ export class RundownTimingCalculator { // and add the currentRemaining countdown, since we are currentRemaining + diff between next and // this away from this line. this.linearParts[i][1] = (this.linearParts[i][1] || 0) - localAccum + currentRemaining + + if (!partsInQuickLoop[unprotectString(this.linearParts[i][0])]) { + timeTillEndLoop = timeTillEndLoop ?? this.linearParts[i][1] ?? undefined + } + if (nextRundownAnchor === undefined) { nextRundownAnchor = getSegmentRundownAnchorFromPart( this.linearParts[i][0], @@ -527,13 +539,22 @@ export class RundownTimingCalculator { } } } - // contiunation of linearParts calculations for looping playlists + // at this point the localAccumulator should be the sum of waits before the next line + // continuation of linearParts calculations for looping playlists if (isLoopRunning(playlist)) { + // we track the sum of all the "waits" that happen in the loop + let waitInLoop = 0 + // if timeTillEndLoop was undefined then we can assume the end of the loop is the last line in the rundown + timeTillEndLoop = timeTillEndLoop ?? 
waitAccumulator - localAccum + currentRemaining for (let i = 0; i < nextAIndex; i++) { if (!partsInQuickLoop[unprotectString(this.linearParts[i][0])]) continue - // offset the parts before the on air line by the countdown for the end of the rundown - this.linearParts[i][1] = - (this.linearParts[i][1] || 0) + waitAccumulator - localAccum + currentRemaining + + // this countdown is the wait until the loop ends + whatever waits occur before this part but inside the loop + this.linearParts[i][1] = timeTillEndLoop + waitInLoop + + // add the wait from this part to the waitInLoop (the lookup here should still work by the definition of a "wait") + waitInLoop += waitPerPart[unprotectString(this.linearParts[i][0])] ?? 0 + if (nextRundownAnchor === undefined) { nextRundownAnchor = getSegmentRundownAnchorFromPart( this.linearParts[i][0], From cf2685394e3cc9462fbb83670ee9eb3352c400d7 Mon Sep 17 00:00:00 2001 From: olzzon Date: Tue, 8 Oct 2024 09:48:32 +0200 Subject: [PATCH 25/81] fix: disable-enable heartbeat when using open media hot standby --- packages/mos-gateway/src/mosHandler.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/mos-gateway/src/mosHandler.ts b/packages/mos-gateway/src/mosHandler.ts index 975dada3e37..8fc2d086940 100644 --- a/packages/mos-gateway/src/mosHandler.ts +++ b/packages/mos-gateway/src/mosHandler.ts @@ -488,6 +488,11 @@ export class MosHandler { deviceOptions.primary.heartbeatInterval = deviceOptions.primary.heartbeatInterval || DEFAULT_MOS_HEARTBEAT_INTERVAL + if (deviceOptions.secondary?.id && this._openMediaHotStandby[deviceOptions.secondary.id]) { + //@ts-expect-error this is not yet added to the official mos-connection + deviceOptions.secondary.openMediaHotStandby = true + } + const mosDevice: MosDevice = await this.mos.connect(deviceOptions) this._ownMosDevices[deviceId] = mosDevice From c6777041c773965d461f2f1952c6cef57c0cce39 Mon Sep 17 00:00:00 2001 From: Mint de Wit Date: Tue, 8 Oct 2024 08:40:05 +0000 Subject: [PATCH 
26/81] fix: rundown reset resets quickloop markers --- .../src/playout/model/implementation/PlayoutModelImpl.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts index 09651a09b83..b7f06510972 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts @@ -593,7 +593,9 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou if (regenerateActivationId) this.playlistImpl.activationId = getRandomId() - if (this.playlistImpl.quickLoop?.running) this.playlistImpl.quickLoop.running = false + // reset quickloop: + this.setQuickLoopMarker('start', null) + this.setQuickLoopMarker('end', null) this.#playlistHasChanged = true } From 3863d0b3426e50634e47e6207fc0e6e0d6999f97 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Mon, 14 Oct 2024 11:57:21 +0100 Subject: [PATCH 27/81] fix: ensure timeline is regenerated after switching routeset affecting ab players --- .../src/blueprints/context/adlibActions.ts | 14 ++++++++++--- packages/job-worker/src/playout/debug.ts | 16 +++----------- packages/job-worker/src/playout/lib.ts | 21 +++++++++++++++++++ .../src/playout/model/PlayoutModel.ts | 3 ++- .../model/implementation/PlayoutModelImpl.ts | 4 ++-- .../src/studio/model/StudioBaselineHelper.ts | 13 +++++++++++- .../src/studio/model/StudioPlayoutModel.ts | 3 ++- .../studio/model/StudioPlayoutModelImpl.ts | 4 ++-- packages/job-worker/src/studio/routeSet.ts | 7 ++++++- 9 files changed, 61 insertions(+), 24 deletions(-) diff --git a/packages/job-worker/src/blueprints/context/adlibActions.ts b/packages/job-worker/src/blueprints/context/adlibActions.ts index ec002435c4a..9bbe86103b6 100644 --- a/packages/job-worker/src/blueprints/context/adlibActions.ts +++ 
b/packages/job-worker/src/blueprints/context/adlibActions.ts @@ -60,7 +60,15 @@ export class DatastoreActionExecutionContext /** Actions */ export class ActionExecutionContext extends ShowStyleUserContext implements IActionExecutionContext, IEventContext { - public takeAfterExecute: boolean + /** + * Whether the blueprints requested a take to be performed at the end of this action + * */ + public takeAfterExecute = false + /** + * Whether the blueprints performed an action that explicitly requires the timeline to be regenerated + * This isn't the only indicator that it should be regenerated + */ + public forceRegenerateTimeline = false public get currentPartState(): ActionPartChange { return this.partAndPieceInstanceService.currentPartState @@ -84,7 +92,6 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct private readonly partAndPieceInstanceService: PartAndPieceInstanceActionService ) { super(contextInfo, _context, showStyle, watchedPackages) - this.takeAfterExecute = false } async getPartInstance(part: 'current' | 'next'): Promise { @@ -185,7 +192,8 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct } async switchRouteSet(routeSetId: string, state: boolean | 'toggle'): Promise { - this._playoutModel.switchRouteSet(routeSetId, state) + const affectsTimeline = this._playoutModel.switchRouteSet(routeSetId, state) + this.forceRegenerateTimeline = this.forceRegenerateTimeline || affectsTimeline } async hackGetMediaObjectDuration(mediaId: string): Promise { diff --git a/packages/job-worker/src/playout/debug.ts b/packages/job-worker/src/playout/debug.ts index 596a0941b62..b46cadcad52 100644 --- a/packages/job-worker/src/playout/debug.ts +++ b/packages/job-worker/src/playout/debug.ts @@ -8,7 +8,8 @@ import { logger } from '../logging' import { syncPlayheadInfinitesForNextPartInstance } from './infinites' import { setNextPart } from './setNext' import { runJobWithPlayoutModel } from './lock' -import { 
updateStudioTimeline, updateTimeline } from './timeline/generate' +import { updateTimeline } from './timeline/generate' +import { updateTimelineFromStudioPlayoutModel } from './lib' /** * Ensure that the infinite pieces on the nexted-part are correct @@ -80,17 +81,6 @@ export async function handleDebugCrash(context: JobContext, data: DebugRegenerat */ export async function handleDebugUpdateTimeline(context: JobContext, _data: void): Promise { await runJobWithStudioPlayoutModel(context, async (studioPlayoutModel) => { - const activePlaylists = studioPlayoutModel.getActiveRundownPlaylists() - if (activePlaylists.length > 1) { - throw new Error(`Too many active playlists`) - } else if (activePlaylists.length > 0) { - const playlist = activePlaylists[0] - - await runJobWithPlayoutModel(context, { playlistId: playlist._id }, null, async (playoutModel) => { - await updateTimeline(context, playoutModel) - }) - } else { - await updateStudioTimeline(context, studioPlayoutModel) - } + await updateTimelineFromStudioPlayoutModel(context, studioPlayoutModel) }) } diff --git a/packages/job-worker/src/playout/lib.ts b/packages/job-worker/src/playout/lib.ts index c2589d381df..c2c67bf0e2f 100644 --- a/packages/job-worker/src/playout/lib.ts +++ b/packages/job-worker/src/playout/lib.ts @@ -10,6 +10,9 @@ import { MongoQuery } from '../db' import { mongoWhere } from '@sofie-automation/corelib/dist/mongo' import { setNextPart } from './setNext' import { selectNextPart } from './selectNextPart' +import { StudioPlayoutModel } from '../studio/model/StudioPlayoutModel' +import { runJobWithPlayoutModel } from './lock' +import { updateTimeline, updateStudioTimeline } from './timeline/generate' /** * Reset the rundownPlaylist (all of the rundowns within the playlist): @@ -175,3 +178,21 @@ export function prefixAllObjectIds(objList: T[], p return obj }) } + +export async function updateTimelineFromStudioPlayoutModel( + context: JobContext, + studioPlayoutModel: StudioPlayoutModel +): Promise { 
+ const activePlaylists = studioPlayoutModel.getActiveRundownPlaylists() + if (activePlaylists.length > 1) { + throw new Error(`Too many active playlists`) + } else if (activePlaylists.length > 0) { + const playlist = activePlaylists[0] + + await runJobWithPlayoutModel(context, { playlistId: playlist._id }, null, async (playoutModel) => { + await updateTimeline(context, playoutModel) + }) + } else { + await updateStudioTimeline(context, studioPlayoutModel) + } +} diff --git a/packages/job-worker/src/playout/model/PlayoutModel.ts b/packages/job-worker/src/playout/model/PlayoutModel.ts index ece25bd15f8..00baf444d85 100644 --- a/packages/job-worker/src/playout/model/PlayoutModel.ts +++ b/packages/job-worker/src/playout/model/PlayoutModel.ts @@ -196,8 +196,9 @@ export interface PlayoutModel extends PlayoutModelReadonly, StudioPlayoutModelBa * Update the active state of a RouteSet * @param routeSetId * @param isActive + * @returns Whether the change may affect timeline generation */ - switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): void + switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): boolean /** * Clear the currently selected PartInstances, so that nothing is selected for playback diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts index 64fcaeadc66..3e5d39f757a 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts @@ -480,8 +480,8 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou return partInstance } - switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): void { - this.#baselineHelper.updateRouteSetActive(routeSetId, isActive) + switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): boolean { + return this.#baselineHelper.updateRouteSetActive(routeSetId, 
isActive) } cycleSelectedPartInstances(): void { diff --git a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts index dee0717ce85..35256befb4e 100644 --- a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts +++ b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts @@ -83,7 +83,7 @@ > } - updateRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): void { + updateRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): boolean { const studio = this.#context.studio const routeSets: WrappedOverridableItemNormal[] = getAllCurrentItemsFromOverrides( @@ -106,6 +106,9 @@ } const overrideHelper = new OverrideOpHelperImpl(saveOverrides, this.#overridesRouteSetBuffer) + // Track whether changing this routeset could affect how the timeline is generated, so that it can be regenerated following this update + let mayAffectTimeline = couldRoutesetAffectTimelineGeneration(routeSet) + logger.debug(`switchRouteSet "${studio._id}" "${routeSet.id}"=${isActive}`) overrideHelper.setItemValue(routeSet.id, `active`, isActive) @@ -116,10 +119,18 @@ if (otherRouteSet.computed?.exclusivityGroup === routeSet.computed.exclusivityGroup) { logger.debug(`switchRouteSet Other ID "${studio._id}" "${otherRouteSet.id}"=false`) overrideHelper.setItemValue(otherRouteSet.id, `active`, false) + + mayAffectTimeline = mayAffectTimeline || couldRoutesetAffectTimelineGeneration(otherRouteSet) } } } overrideHelper.commit() + + return mayAffectTimeline } } + +function couldRoutesetAffectTimelineGeneration(routeSet: WrappedOverridableItemNormal): boolean { + return routeSet.computed.abPlayers.length > 0 +} diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModel.ts b/packages/job-worker/src/studio/model/StudioPlayoutModel.ts index b54a4d3af2c..3a7cdb60c05 100644 ---
a/packages/job-worker/src/studio/model/StudioPlayoutModel.ts +++ b/packages/job-worker/src/studio/model/StudioPlayoutModel.ts @@ -73,6 +73,7 @@ export interface StudioPlayoutModel extends StudioPlayoutModelBase, BaseModel { * Update the active state of a RouteSet * @param routeSetId The RouteSet to update * @param isActive The new active state of the RouteSet + * @returns Whether the change may affect timeline generation */ - switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): void + switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): boolean } diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts index 528a15ef3a8..53486ee37c6 100644 --- a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts +++ b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts @@ -101,8 +101,8 @@ export class StudioPlayoutModelImpl implements StudioPlayoutModel { return this.#timeline } - switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): void { - this.#baselineHelper.updateRouteSetActive(routeSetId, isActive) + switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): boolean { + return this.#baselineHelper.updateRouteSetActive(routeSetId, isActive) } /** diff --git a/packages/job-worker/src/studio/routeSet.ts b/packages/job-worker/src/studio/routeSet.ts index 89cb910f24c..eba95040620 100644 --- a/packages/job-worker/src/studio/routeSet.ts +++ b/packages/job-worker/src/studio/routeSet.ts @@ -1,9 +1,14 @@ import { SwitchRouteSetProps } from '@sofie-automation/corelib/dist/worker/studio' import { JobContext } from '../jobs' import { runJobWithStudioPlayoutModel } from './lock' +import { updateTimelineFromStudioPlayoutModel } from '../playout/lib' export async function handleSwitchRouteSet(context: JobContext, data: SwitchRouteSetProps): Promise { await runJobWithStudioPlayoutModel(context, async (studioPlayoutModel) => { - 
studioPlayoutModel.switchRouteSet(data.routeSetId, data.state) + const routesetChangeMayAffectTimeline = studioPlayoutModel.switchRouteSet(data.routeSetId, data.state) + + if (routesetChangeMayAffectTimeline) { + await updateTimelineFromStudioPlayoutModel(context, studioPlayoutModel) + } }) } From 8af9ab714ecb3b1f5acb8fbe0d382b403d1e246f Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Mon, 14 Oct 2024 12:09:02 +0100 Subject: [PATCH 28/81] fix: ensure timeline is regenerated after switching routeset affecting ab players --- packages/job-worker/src/playout/adlibAction.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/job-worker/src/playout/adlibAction.ts b/packages/job-worker/src/playout/adlibAction.ts index 85761622b45..2168ba716c8 100644 --- a/packages/job-worker/src/playout/adlibAction.ts +++ b/packages/job-worker/src/playout/adlibAction.ts @@ -234,6 +234,7 @@ async function applyAnyExecutionSideEffects( if (actionContext.takeAfterExecute) { await performTakeToNextedPart(context, playoutModel, now) } else if ( + actionContext.forceRegenerateTimeline || actionContext.currentPartState !== ActionPartChange.NONE || actionContext.nextPartState !== ActionPartChange.NONE ) { From 83cda21d3ef06b419abd9aecd3d2df23580914c3 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 15 Oct 2024 16:08:49 +0100 Subject: [PATCH 29/81] fix: remove unused meteor ui packages --- meteor/.meteor/packages | 10 ---------- meteor/.meteor/versions | 19 ------------------- meteor/server/main.ts | 15 --------------- 3 files changed, 44 deletions(-) diff --git a/meteor/.meteor/packages b/meteor/.meteor/packages index 32ca94185a1..04d09220d29 100644 --- a/meteor/.meteor/packages +++ b/meteor/.meteor/packages @@ -9,24 +9,14 @@ # but you can also edit it by hand. 
meteor-base@1.5.1 # Packages every Meteor app needs to have -mobile-experience@1.1.0 # Packages for a great mobile UX mongo@1.16.7 # The database Meteor supports right now reactive-var@1.0.12 # Reactive variable for tracker -standard-minifier-css@1.9.2 # CSS minifier run for production mode -standard-minifier-js@2.8.1 # JS minifier run for production mode -es5-shim@4.8.0 # ECMAScript 5 compatibility for older browsers ecmascript@0.16.7 # Enable ECMAScript2015+ syntax in app code typescript@4.9.4 # Enable TypeScript syntax in .ts and .tsx modules shell-server@0.5.0 # Server-side component of the `meteor shell` command -modern-browsers@0.1.9 # Select when to allow use of the "modern" bundle - -static-html@1.3.2 # Define static page content in .html files -react-meteor-data # React higher-order component for reactively tracking Meteor data tracker@1.3.2 # Meteor's client-side reactive programming library -session@1.2.1 -fourseven:scss dynamic-import@0.7.3 ostrio:meteor-root diff --git a/meteor/.meteor/versions b/meteor/.meteor/versions index bd092097f29..d337a7f4b4d 100644 --- a/meteor/.meteor/versions +++ b/meteor/.meteor/versions @@ -6,10 +6,7 @@ babel-compiler@7.10.4 babel-runtime@1.5.1 base64@1.0.12 binary-heap@1.0.11 -blaze-tools@1.1.3 boilerplate-generator@1.7.1 -caching-compiler@1.2.2 -caching-html-compiler@1.2.1 callback-hook@1.5.1 check@1.3.2 ddp@1.4.1 @@ -27,25 +24,17 @@ ejson@1.1.3 email@2.2.5 es5-shim@4.8.0 fetch@0.1.3 -fourseven:scss@4.15.0 geojson-utils@1.0.11 hot-code-push@1.0.4 -html-tools@1.1.3 -htmljs@1.1.1 id-map@1.1.1 inter-process-messaging@0.1.1 julusian:meteor-elastic-apm@2.5.2 kschingiz:meteor-measured@1.0.3 -launch-screen@1.3.0 localstorage@1.2.0 logging@1.3.2 meteor@1.11.3 meteor-base@1.5.1 -minifier-css@1.6.4 -minifier-js@2.7.5 minimongo@1.9.3 -mobile-experience@1.1.0 -mobile-status-bar@1.1.0 modern-browsers@0.1.9 modules@0.19.0 modules-runtime@0.13.1 @@ -60,21 +49,13 @@ promise@0.12.2 random@1.2.1 rate-limit@1.1.1 react-fast-refresh@0.2.7 
-react-meteor-data@2.5.1 -reactive-dict@1.3.1 reactive-var@1.0.12 reload@1.3.1 retry@1.1.0 routepolicy@1.1.1 -session@1.2.1 sha@1.0.9 shell-server@0.5.0 socket-stream-client@0.5.1 -spacebars-compiler@1.3.1 -standard-minifier-css@1.9.2 -standard-minifier-js@2.8.1 -static-html@1.3.2 -templating-tools@1.2.2 tracker@1.3.2 typescript@4.9.4 underscore@1.0.13 diff --git a/meteor/server/main.ts b/meteor/server/main.ts index 7b6b7fa73df..30c7678f5c4 100644 --- a/meteor/server/main.ts +++ b/meteor/server/main.ts @@ -3,26 +3,11 @@ */ import { Meteor } from 'meteor/meteor' -import { setMinimumBrowserVersions } from 'meteor/modern-browsers' Meteor.startup(() => { console.log('startup') }) -setMinimumBrowserVersions( - { - chrome: 80, - firefox: 74, - edge: 80, - ie: Infinity, - mobile_safari: [13, 4], - opera: 67, - safari: [13, 1], - electron: 6, - }, - 'optional chaining' -) - // Import all files that register Meteor methods: import './api/blueprints/api' import './api/blueprints/http' From 7b3411fca086f2d22747ee160747be3bf12b6613 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 16 Oct 2024 16:37:22 +0100 Subject: [PATCH 30/81] chore: fix bad merge --- .../publications/packageManager/expectedPackages/generate.ts | 3 --- 1 file changed, 3 deletions(-) diff --git a/meteor/server/publications/packageManager/expectedPackages/generate.ts b/meteor/server/publications/packageManager/expectedPackages/generate.ts index 3320f89d87a..cdc21bf3538 100644 --- a/meteor/server/publications/packageManager/expectedPackages/generate.ts +++ b/meteor/server/publications/packageManager/expectedPackages/generate.ts @@ -17,7 +17,6 @@ import { CustomPublishCollection } from '../../../lib/customPublication' import { logger } from '../../../logging' import { ExpectedPackagesContentCache } from './contentCache' import type { StudioFields } from './publication' -import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' /** * Regenerate the output for 
the provided ExpectedPackage `regenerateIds`, updating the data in `collection` as needed @@ -39,7 +38,6 @@ export async function updateCollectionForExpectedPackageIds( ): Promise { const updatedDocIds = new Set() const missingExpectedPackageIds = new Set() - const packageContainers = applyAndValidateOverrides(studio.packageContainersWithOverrides).obj for (const packageId of regenerateIds) { const packageDoc = contentCache.ExpectedPackages.findOne(packageId) @@ -110,7 +108,6 @@ export async function updateCollectionForPieceInstanceIds( ): Promise { const updatedDocIds = new Set() const missingPieceInstanceIds = new Set() - const packageContainers = applyAndValidateOverrides(studio.packageContainersWithOverrides).obj for (const pieceInstanceId of regenerateIds) { const pieceInstanceDoc = contentCache.PieceInstances.findOne(pieceInstanceId) From 5b12e59dd836f2dfb512d7085003e4272849155f Mon Sep 17 00:00:00 2001 From: Mint de Wit Date: Thu, 17 Oct 2024 15:11:07 +0000 Subject: [PATCH 31/81] chore: revert countdown in top bar to part time --- packages/webui/src/client/ui/RundownView.tsx | 1 + .../RundownTiming/CurrentPartOrSegmentRemaining.tsx | 3 +++ 2 files changed, 4 insertions(+) diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index f86944eb405..5e8e5fd83aa 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -314,6 +314,7 @@ const TimingDisplay = withTranslation()( {rundownPlaylist.holdState && rundownPlaylist.holdState !== RundownHoldState.COMPLETE ? 
( diff --git a/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx b/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx index a62ca027598..98947b296d2 100644 --- a/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx +++ b/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx @@ -14,6 +14,8 @@ interface IPartRemainingProps { heavyClassName?: string speaking?: boolean vibrating?: boolean + /** This component will show the remaining segment budget when that is available, but in some places in the UI we want it to force it to show remaining Part regardless */ + forceToPartTimer?: boolean } // global variable for remembering last uttered displayTime @@ -37,6 +39,7 @@ export const CurrentPartOrSegmentRemaining = withTiming let displayTimecode = this.props.timingDurations.remainingBudgetOnCurrentSegment ?? this.props.timingDurations.remainingTimeOnCurrentPart + if (this.props.forceToPartTimer) displayTimecode = this.props.timingDurations.remainingTimeOnCurrentPart if (displayTimecode === undefined) return null displayTimecode *= -1 return ( From fe79834f8c732acf08b2f4f3d327d6a70d9c1421 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Thu, 17 Oct 2024 15:56:15 +0100 Subject: [PATCH 32/81] chore: split job-worker context.ts into multiple files --- packages/job-worker/src/workers/caches.ts | 2 +- .../src/workers/context/JobContextImpl.ts | 141 +++++++++++ .../job-worker/src/workers/context/Locks.ts | 67 ++++++ .../StudioCacheContextImpl.ts} | 225 +----------------- .../job-worker/src/workers/context/util.ts | 1 + .../job-worker/src/workers/events/child.ts | 3 +- .../job-worker/src/workers/ingest/child.ts | 3 +- .../job-worker/src/workers/studio/child.ts | 3 +- 8 files changed, 226 insertions(+), 219 deletions(-) create mode 100644 packages/job-worker/src/workers/context/JobContextImpl.ts create mode 100644 
packages/job-worker/src/workers/context/Locks.ts rename packages/job-worker/src/workers/{context.ts => context/StudioCacheContextImpl.ts} (57%) create mode 100644 packages/job-worker/src/workers/context/util.ts diff --git a/packages/job-worker/src/workers/caches.ts b/packages/job-worker/src/workers/caches.ts index f816a8f4076..f252a9a0de6 100644 --- a/packages/job-worker/src/workers/caches.ts +++ b/packages/job-worker/src/workers/caches.ts @@ -16,7 +16,7 @@ import { clone, deepFreeze } from '@sofie-automation/corelib/dist/lib' import { logger } from '../logging' import deepmerge = require('deepmerge') import { ProcessedShowStyleBase, ProcessedShowStyleVariant, StudioCacheContext } from '../jobs' -import { StudioCacheContextImpl } from './context' +import { StudioCacheContextImpl } from './context/StudioCacheContextImpl' /** * A Wrapper to maintain a cache and provide a context using the cache when appropriate diff --git a/packages/job-worker/src/workers/context/JobContextImpl.ts b/packages/job-worker/src/workers/context/JobContextImpl.ts new file mode 100644 index 00000000000..0133b66333f --- /dev/null +++ b/packages/job-worker/src/workers/context/JobContextImpl.ts @@ -0,0 +1,141 @@ +import { IDirectCollections } from '../../db' +import { JobContext } from '../../jobs' +import { WorkerDataCache } from '../caches' +import { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { getIngestQueueName, IngestJobFunc } from '@sofie-automation/corelib/dist/worker/ingest' +import { ApmSpan, ApmTransaction } from '../../profiler' +import { getRandomString } from '@sofie-automation/corelib/dist/lib' +import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { getStudioQueueName, StudioJobFunc } from '@sofie-automation/corelib/dist/worker/studio' +import { LockBase, PlaylistLock, RundownLock } from '../../jobs/lock' +import { logger } from '../../logging' +import { BaseModel } from '../../modelBase' 
+import { LocksManager } from '../locks' +import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' +import { EventsJobFunc, getEventsQueueName } from '@sofie-automation/corelib/dist/worker/events' +import { FastTrackTimelineFunc } from '../../main' +import { TimelineComplete } from '@sofie-automation/corelib/dist/dataModel/Timeline' +import type { QueueJobFunc } from './util' +import { StudioCacheContextImpl } from './StudioCacheContextImpl' +import { PlaylistLockImpl, RundownLockImpl } from './Locks' + +export class JobContextImpl extends StudioCacheContextImpl implements JobContext { + private readonly locks: Array = [] + private readonly caches: Array = [] + + constructor( + directCollections: Readonly, + cacheData: WorkerDataCache, + private readonly locksManager: LocksManager, + private readonly transaction: ApmTransaction | undefined, + private readonly queueJob: QueueJobFunc, + private readonly fastTrackTimeline: FastTrackTimelineFunc | null + ) { + super(directCollections, cacheData) + } + + trackCache(cache: BaseModel): void { + this.caches.push(cache) + } + + async lockPlaylist(playlistId: RundownPlaylistId): Promise { + const span = this.startSpan('lockPlaylist') + if (span) span.setLabel('playlistId', unprotectString(playlistId)) + + const lockId = getRandomString() + logger.silly(`PlaylistLock: Locking "${playlistId}"`) + + const resourceId = `playlist:${playlistId}` + await this.locksManager.aquire(lockId, resourceId) + + const doRelease = async () => { + const span = this.startSpan('unlockPlaylist') + if (span) span.setLabel('playlistId', unprotectString(playlistId)) + + await this.locksManager.release(lockId, resourceId) + + if (span) span.end() + } + const lock = new PlaylistLockImpl(playlistId, doRelease) + this.locks.push(lock) + + logger.silly(`PlaylistLock: Locked "${playlistId}"`) + + if (span) span.end() + + return lock + } + + async lockRundown(rundownId: RundownId): Promise { + const span = 
this.startSpan('lockRundown') + if (span) span.setLabel('rundownId', unprotectString(rundownId)) + + const lockId = getRandomString() + logger.silly(`RundownLock: Locking "${rundownId}"`) + + const resourceId = `rundown:${rundownId}` + await this.locksManager.aquire(lockId, resourceId) + + const doRelease = async () => { + const span = this.startSpan('unlockRundown') + if (span) span.setLabel('rundownId', unprotectString(rundownId)) + + await this.locksManager.release(lockId, resourceId) + + if (span) span.end() + } + const lock = new RundownLockImpl(rundownId, doRelease) + this.locks.push(lock) + + logger.silly(`RundownLock: Locked "${rundownId}"`) + + if (span) span.end() + + return lock + } + + /** Ensure resources are cleaned up after the job completes */ + async cleanupResources(): Promise { + // Ensure all locks are freed + for (const lock of this.locks) { + if (lock.isLocked) { + logger.warn(`Lock never freed: ${lock}`) + await lock.release().catch((e) => { + logger.error(`Lock free failed: ${stringifyError(e)}`) + }) + } + } + + // Ensure all caches were saved/aborted + for (const cache of this.caches) { + try { + cache.assertNoChanges() + } catch (e) { + logger.warn(`${cache.displayName} has unsaved changes: ${stringifyError(e)}`) + } + } + } + + startSpan(spanName: string): ApmSpan | null { + if (this.transaction) return this.transaction.startSpan(spanName) + return null + } + + async queueIngestJob(name: T, data: Parameters[0]): Promise { + await this.queueJob(getIngestQueueName(this.studioId), name, data) + } + async queueStudioJob(name: T, data: Parameters[0]): Promise { + await this.queueJob(getStudioQueueName(this.studioId), name, data) + } + async queueEventJob(name: T, data: Parameters[0]): Promise { + await this.queueJob(getEventsQueueName(this.studioId), name, data) + } + + hackPublishTimelineToFastTrack(newTimeline: TimelineComplete): void { + if (this.fastTrackTimeline) { + this.fastTrackTimeline(newTimeline).catch((e) => { + 
logger.error(`Failed to publish timeline to fast track: ${stringifyError(e)}`) + }) + } + } +} diff --git a/packages/job-worker/src/workers/context/Locks.ts b/packages/job-worker/src/workers/context/Locks.ts new file mode 100644 index 00000000000..55cc72f36d5 --- /dev/null +++ b/packages/job-worker/src/workers/context/Locks.ts @@ -0,0 +1,67 @@ +import type { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PlaylistLock, RundownLock } from '../../jobs/lock' +import { logger } from '../../logging' + +export class PlaylistLockImpl extends PlaylistLock { + #isLocked = true + + public constructor(playlistId: RundownPlaylistId, private readonly doRelease: () => Promise) { + super(playlistId) + } + + get isLocked(): boolean { + return this.#isLocked + } + + async release(): Promise { + if (!this.#isLocked) { + logger.warn(`PlaylistLock: Already released "${this.playlistId}"`) + } else { + logger.silly(`PlaylistLock: Releasing "${this.playlistId}"`) + + this.#isLocked = false + + await this.doRelease() + + logger.silly(`PlaylistLock: Released "${this.playlistId}"`) + + if (this.deferedFunctions.length > 0) { + for (const fcn of this.deferedFunctions) { + await fcn() + } + } + } + } +} + +export class RundownLockImpl extends RundownLock { + #isLocked = true + + public constructor(rundownId: RundownId, private readonly doRelease: () => Promise) { + super(rundownId) + } + + get isLocked(): boolean { + return this.#isLocked + } + + async release(): Promise { + if (!this.#isLocked) { + logger.warn(`RundownLock: Already released "${this.rundownId}"`) + } else { + logger.silly(`RundownLock: Releasing "${this.rundownId}"`) + + this.#isLocked = false + + await this.doRelease() + + logger.silly(`RundownLock: Released "${this.rundownId}"`) + + if (this.deferedFunctions.length > 0) { + for (const fcn of this.deferedFunctions) { + await fcn() + } + } + } + } +} diff --git a/packages/job-worker/src/workers/context.ts 
b/packages/job-worker/src/workers/context/StudioCacheContextImpl.ts similarity index 57% rename from packages/job-worker/src/workers/context.ts rename to packages/job-worker/src/workers/context/StudioCacheContextImpl.ts index 383b7f41a97..dff38b6e883 100644 --- a/packages/job-worker/src/workers/context.ts +++ b/packages/job-worker/src/workers/context/StudioCacheContextImpl.ts @@ -1,48 +1,28 @@ -import { IDirectCollections } from '../db' +import { IDirectCollections } from '../../db' import { ProcessedShowStyleBase, ProcessedShowStyleVariant, - JobContext, ProcessedShowStyleCompound, StudioCacheContext, -} from '../jobs' +} from '../../jobs' import { ReadonlyDeep } from 'type-fest' -import { WorkerDataCache } from './caches' +import { WorkerDataCache } from '../caches' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { - RundownId, - RundownPlaylistId, - ShowStyleBaseId, - ShowStyleVariantId, - StudioId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' -import { getIngestQueueName, IngestJobFunc } from '@sofie-automation/corelib/dist/worker/ingest' -import { parseBlueprintDocument, WrappedShowStyleBlueprint, WrappedStudioBlueprint } from '../blueprints/cache' +import { ShowStyleBaseId, ShowStyleVariantId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { parseBlueprintDocument, WrappedShowStyleBlueprint, WrappedStudioBlueprint } from '../../blueprints/cache' import { ReadonlyObjectDeep } from 'type-fest/source/readonly-deep' -import { ApmSpan, ApmTransaction } from '../profiler' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' -import { clone, deepFreeze, getRandomString } from '@sofie-automation/corelib/dist/lib' -import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { createShowStyleCompound } from '../showStyles' +import { clone, deepFreeze } from '@sofie-automation/corelib/dist/lib' +import { createShowStyleCompound } 
from '../../showStyles' import { BlueprintManifestType } from '@sofie-automation/blueprints-integration' import { preprocessShowStyleConfig, preprocessStudioConfig, ProcessedShowStyleConfig, ProcessedStudioConfig, -} from '../blueprints/config' -import { getStudioQueueName, StudioJobFunc } from '@sofie-automation/corelib/dist/worker/studio' -import { LockBase, PlaylistLock, RundownLock } from '../jobs/lock' -import { logger } from '../logging' -import { BaseModel } from '../modelBase' -import { LocksManager } from './locks' -import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' -import { EventsJobFunc, getEventsQueueName } from '@sofie-automation/corelib/dist/worker/events' -import { FastTrackTimelineFunc } from '../main' -import { TimelineComplete } from '@sofie-automation/corelib/dist/dataModel/Timeline' -import { processShowStyleBase, processShowStyleVariant } from '../jobs/showStyle' - -export type QueueJobFunc = (queueName: string, jobName: string, jobData: unknown) => Promise +} from '../../blueprints/config' + +import { processShowStyleBase, processShowStyleVariant } from '../../jobs/showStyle' export class StudioCacheContextImpl implements StudioCacheContext { constructor( @@ -276,127 +256,6 @@ export class StudioCacheContextImpl implements StudioCacheContext { } } -export class JobContextImpl extends StudioCacheContextImpl implements JobContext { - private readonly locks: Array = [] - private readonly caches: Array = [] - - constructor( - directCollections: Readonly, - cacheData: WorkerDataCache, - private readonly locksManager: LocksManager, - private readonly transaction: ApmTransaction | undefined, - private readonly queueJob: QueueJobFunc, - private readonly fastTrackTimeline: FastTrackTimelineFunc | null - ) { - super(directCollections, cacheData) - } - - trackCache(cache: BaseModel): void { - this.caches.push(cache) - } - - async lockPlaylist(playlistId: RundownPlaylistId): Promise { - const span = 
this.startSpan('lockPlaylist') - if (span) span.setLabel('playlistId', unprotectString(playlistId)) - - const lockId = getRandomString() - logger.silly(`PlaylistLock: Locking "${playlistId}"`) - - const resourceId = `playlist:${playlistId}` - await this.locksManager.aquire(lockId, resourceId) - - const doRelease = async () => { - const span = this.startSpan('unlockPlaylist') - if (span) span.setLabel('playlistId', unprotectString(playlistId)) - - await this.locksManager.release(lockId, resourceId) - - if (span) span.end() - } - const lock = new PlaylistLockImpl(playlistId, doRelease) - this.locks.push(lock) - - logger.silly(`PlaylistLock: Locked "${playlistId}"`) - - if (span) span.end() - - return lock - } - - async lockRundown(rundownId: RundownId): Promise { - const span = this.startSpan('lockRundown') - if (span) span.setLabel('rundownId', unprotectString(rundownId)) - - const lockId = getRandomString() - logger.silly(`RundownLock: Locking "${rundownId}"`) - - const resourceId = `rundown:${rundownId}` - await this.locksManager.aquire(lockId, resourceId) - - const doRelease = async () => { - const span = this.startSpan('unlockRundown') - if (span) span.setLabel('rundownId', unprotectString(rundownId)) - - await this.locksManager.release(lockId, resourceId) - - if (span) span.end() - } - const lock = new RundownLockImpl(rundownId, doRelease) - this.locks.push(lock) - - logger.silly(`RundownLock: Locked "${rundownId}"`) - - if (span) span.end() - - return lock - } - - /** Ensure resources are cleaned up after the job completes */ - async cleanupResources(): Promise { - // Ensure all locks are freed - for (const lock of this.locks) { - if (lock.isLocked) { - logger.warn(`Lock never freed: ${lock}`) - await lock.release().catch((e) => { - logger.error(`Lock free failed: ${stringifyError(e)}`) - }) - } - } - - // Ensure all caches were saved/aborted - for (const cache of this.caches) { - try { - cache.assertNoChanges() - } catch (e) { - 
logger.warn(`${cache.displayName} has unsaved changes: ${stringifyError(e)}`) - } - } - } - - startSpan(spanName: string): ApmSpan | null { - if (this.transaction) return this.transaction.startSpan(spanName) - return null - } - - async queueIngestJob(name: T, data: Parameters[0]): Promise { - await this.queueJob(getIngestQueueName(this.studioId), name, data) - } - async queueStudioJob(name: T, data: Parameters[0]): Promise { - await this.queueJob(getStudioQueueName(this.studioId), name, data) - } - async queueEventJob(name: T, data: Parameters[0]): Promise { - await this.queueJob(getEventsQueueName(this.studioId), name, data) - } - - hackPublishTimelineToFastTrack(newTimeline: TimelineComplete): void { - if (this.fastTrackTimeline) { - this.fastTrackTimeline(newTimeline).catch((e) => { - logger.error(`Failed to publish timeline to fast track: ${stringifyError(e)}`) - }) - } - } -} - async function loadShowStyleBlueprint( collections: IDirectCollections, showStyleBase: Pick, '_id' | 'blueprintId'> @@ -424,67 +283,3 @@ async function loadShowStyleBlueprint( blueprint: blueprintManifest, }) } - -class PlaylistLockImpl extends PlaylistLock { - #isLocked = true - - public constructor(playlistId: RundownPlaylistId, private readonly doRelease: () => Promise) { - super(playlistId) - } - - get isLocked(): boolean { - return this.#isLocked - } - - async release(): Promise { - if (!this.#isLocked) { - logger.warn(`PlaylistLock: Already released "${this.playlistId}"`) - } else { - logger.silly(`PlaylistLock: Releasing "${this.playlistId}"`) - - this.#isLocked = false - - await this.doRelease() - - logger.silly(`PlaylistLock: Released "${this.playlistId}"`) - - if (this.deferedFunctions.length > 0) { - for (const fcn of this.deferedFunctions) { - await fcn() - } - } - } - } -} - -class RundownLockImpl extends RundownLock { - #isLocked = true - - public constructor(rundownId: RundownId, private readonly doRelease: () => Promise) { - super(rundownId) - } - - get isLocked(): 
boolean { - return this.#isLocked - } - - async release(): Promise { - if (!this.#isLocked) { - logger.warn(`RundownLock: Already released "${this.rundownId}"`) - } else { - logger.silly(`RundownLock: Releasing "${this.rundownId}"`) - - this.#isLocked = false - - await this.doRelease() - - logger.silly(`RundownLock: Released "${this.rundownId}"`) - - if (this.deferedFunctions.length > 0) { - for (const fcn of this.deferedFunctions) { - await fcn() - } - } - } - } -} diff --git a/packages/job-worker/src/workers/context/util.ts b/packages/job-worker/src/workers/context/util.ts new file mode 100644 index 00000000000..38ac084220e --- /dev/null +++ b/packages/job-worker/src/workers/context/util.ts @@ -0,0 +1 @@ +export type QueueJobFunc = (queueName: string, jobName: string, jobData: unknown) => Promise diff --git a/packages/job-worker/src/workers/events/child.ts b/packages/job-worker/src/workers/events/child.ts index 55745e5331a..76d95c4c316 100644 --- a/packages/job-worker/src/workers/events/child.ts +++ b/packages/job-worker/src/workers/events/child.ts @@ -11,7 +11,8 @@ import { WorkerDataCache, WorkerDataCacheWrapperImpl, } from '../caches' -import { JobContextImpl, QueueJobFunc } from '../context' +import { JobContextImpl } from '../context/JobContextImpl' +import { QueueJobFunc } from '../context/util' import { AnyLockEvent, LocksManager } from '../locks' import { FastTrackTimelineFunc, LogLineWithSourceFunc } from '../../main' import { interceptLogging, logger } from '../../logging' diff --git a/packages/job-worker/src/workers/ingest/child.ts b/packages/job-worker/src/workers/ingest/child.ts index d00f8a4ae8d..86af4b86343 100644 --- a/packages/job-worker/src/workers/ingest/child.ts +++ b/packages/job-worker/src/workers/ingest/child.ts @@ -5,7 +5,8 @@ import { createMongoConnection, getMongoCollections, IDirectCollections } from ' import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { setupApmAgent, startTransaction } from 
'../../profiler' import { InvalidateWorkerDataCache, invalidateWorkerDataCache, loadWorkerDataCache, WorkerDataCache } from '../caches' -import { JobContextImpl, QueueJobFunc } from '../context' +import { JobContextImpl } from '../context/JobContextImpl' +import { QueueJobFunc } from '../context/util' import { AnyLockEvent, LocksManager } from '../locks' import { FastTrackTimelineFunc, LogLineWithSourceFunc } from '../../main' import { interceptLogging, logger } from '../../logging' diff --git a/packages/job-worker/src/workers/studio/child.ts b/packages/job-worker/src/workers/studio/child.ts index d582e03e80f..40903527c6e 100644 --- a/packages/job-worker/src/workers/studio/child.ts +++ b/packages/job-worker/src/workers/studio/child.ts @@ -5,7 +5,8 @@ import { createMongoConnection, getMongoCollections, IDirectCollections } from ' import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { setupApmAgent, startTransaction } from '../../profiler' import { InvalidateWorkerDataCache, invalidateWorkerDataCache, loadWorkerDataCache, WorkerDataCache } from '../caches' -import { QueueJobFunc, JobContextImpl } from '../context' +import { JobContextImpl } from '../context/JobContextImpl' +import { QueueJobFunc } from '../context/util' import { AnyLockEvent, LocksManager } from '../locks' import { FastTrackTimelineFunc, LogLineWithSourceFunc } from '../../main' import { interceptLogging, logger } from '../../logging' From ce6f6a49673617f582798d1caf7f8a4edcf4002f Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Thu, 17 Oct 2024 15:53:07 +0100 Subject: [PATCH 33/81] feat: reimplement model `switchRouteSet` to perform operation on `context` to simplify persistence and future access of studio --- packages/corelib/src/overrideOpHelper.ts | 15 +- packages/job-worker/src/__mocks__/context.ts | 12 + packages/job-worker/src/jobs/index.ts | 20 + .../model/implementation/PlayoutModelImpl.ts | 3 +- .../src/studio/model/StudioBaselineHelper.ts | 81 +--- 
.../studio/model/StudioPlayoutModelImpl.ts | 8 +- .../src/workers/context/JobContextImpl.ts | 23 + .../workers/context/StudioRouteSetUpdater.ts | 109 +++++ .../__tests__/StudioRouteSetUpdater.spec.ts | 403 ++++++++++++++++++ 9 files changed, 588 insertions(+), 86 deletions(-) create mode 100644 packages/job-worker/src/workers/context/StudioRouteSetUpdater.ts create mode 100644 packages/job-worker/src/workers/context/__tests__/StudioRouteSetUpdater.spec.ts diff --git a/packages/corelib/src/overrideOpHelper.ts b/packages/corelib/src/overrideOpHelper.ts index 5962a8126a1..fd04bc3814b 100644 --- a/packages/corelib/src/overrideOpHelper.ts +++ b/packages/corelib/src/overrideOpHelper.ts @@ -162,12 +162,15 @@ export interface OverrideOpHelperBatcher extends OverrideOpHelperForItemContents export type OverrideOpHelper = () => OverrideOpHelperBatcher export class OverrideOpHelperImpl implements OverrideOpHelperBatcher { - readonly #saveOverrides: SaveOverridesFunction + readonly #saveOverrides: SaveOverridesFunction | null readonly #object: ObjectWithOverrides - constructor(saveOverrides: SaveOverridesFunction, object: ObjectWithOverrides) { + constructor( + saveOverrides: SaveOverridesFunction | null, + object: ObjectWithOverrides | ReadonlyDeep> + ) { this.#saveOverrides = saveOverrides - this.#object = { ...object } + this.#object = { defaults: object.defaults, overrides: [...object.overrides] } } clearItemOverrides = (itemId: string, subPath: string): this => { @@ -314,6 +317,12 @@ export class OverrideOpHelperImpl implements OverrideOpHelperBatcher { } commit = (): void => { + if (!this.#saveOverrides) throw new Error('Cannot commit changes without a save function') + this.#saveOverrides(this.#object.overrides) } + + getPendingOps = (): SomeObjectOverrideOp[] => { + return this.#object.overrides + } } diff --git a/packages/job-worker/src/__mocks__/context.ts b/packages/job-worker/src/__mocks__/context.ts index 9316d943aa5..937c22ca487 100644 --- 
a/packages/job-worker/src/__mocks__/context.ts +++ b/packages/job-worker/src/__mocks__/context.ts @@ -226,6 +226,18 @@ export class MockJobContext implements JobContext { // throw new Error('Method not implemented.') } + setRouteSetActive(_routeSetId: string, _isActive: boolean | 'toggle'): boolean { + throw new Error('Method not implemented.') + } + + async saveRouteSetChanges(): Promise { + throw new Error('Method not implemented.') + } + + discardRouteSetChanges(): void { + throw new Error('Method not implemented.') + } + /** * Mock methods */ diff --git a/packages/job-worker/src/jobs/index.ts b/packages/job-worker/src/jobs/index.ts index c2e71c4ec96..f8ab90e9f4c 100644 --- a/packages/job-worker/src/jobs/index.ts +++ b/packages/job-worker/src/jobs/index.ts @@ -64,6 +64,26 @@ export interface JobContext extends StudioCacheContext { /** Hack: fast-track the timeline out to the playout-gateway. */ hackPublishTimelineToFastTrack(newTimeline: TimelineComplete): void + + /** + * Set whether a routeset for this studio is active. + * Any routeset `exclusivityGroup` will be respected. 
+ * The changes will be immediately visible in subsequent calls to the `studio` getter + * @param routeSetId The routeSetId to change + * @param isActive Whether the routeSet should be active, or toggle + * @returns Whether the change could affect playout + */ + setRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): boolean + + /** + * Save any changes to the routesets for this studio to the database + */ + saveRouteSetChanges(): Promise + + /** + * Discard any unsaved changes to the routesets for this studio + */ + discardRouteSetChanges(): void } /** diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts index 3e5d39f757a..1b36ccad801 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts @@ -481,7 +481,7 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou } switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): boolean { - return this.#baselineHelper.updateRouteSetActive(routeSetId, isActive) + return this.context.setRouteSetActive(routeSetId, isActive) } cycleSelectedPartInstances(): void { @@ -638,6 +638,7 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou ...writePartInstancesAndPieceInstances(this.context, this.allPartInstances), writeAdlibTestingSegments(this.context, this.rundownsImpl), this.#baselineHelper.saveAllToDatabase(), + this.context.saveRouteSetChanges(), ]) this.#playlistHasChanged = false diff --git a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts index 35256befb4e..5b413522483 100644 --- a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts +++ b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts @@ -6,33 +6,19 @@ import { } from 
'@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { saveIntoDb } from '../../db/changes' -import { StudioRouteBehavior, StudioRouteSet } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { logger } from '../../logging' -import { - WrappedOverridableItemNormal, - getAllCurrentItemsFromOverrides, - OverrideOpHelperImpl, -} from '@sofie-automation/corelib/dist/overrideOpHelper' -import { ObjectWithOverrides, SomeObjectOverrideOp } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' export class StudioBaselineHelper { readonly #context: JobContext - #overridesRouteSetBuffer: ObjectWithOverrides> #pendingExpectedPackages: ExpectedPackageDBFromStudioBaselineObjects[] | undefined #pendingExpectedPlayoutItems: ExpectedPlayoutItemStudio[] | undefined - #routeSetChanged: boolean constructor(context: JobContext) { this.#context = context - this.#overridesRouteSetBuffer = { ...context.studio.routeSetsWithOverrides } as ObjectWithOverrides< - Record - > - this.#routeSetChanged = false } hasChanges(): boolean { - return !!this.#pendingExpectedPackages || !!this.#pendingExpectedPlayoutItems || this.#routeSetChanged + return !!this.#pendingExpectedPackages || !!this.#pendingExpectedPlayoutItems } setExpectedPackages(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void { @@ -63,74 +49,9 @@ export class StudioBaselineHelper { this.#pendingExpectedPackages ) : undefined, - this.#routeSetChanged - ? 
this.#context.directCollections.Studios.update( - { - _id: this.#context.studioId, - }, - { - $set: { 'routeSetsWithOverrides.overrides': this.#overridesRouteSetBuffer.overrides }, - } - ) - : undefined, ]) this.#pendingExpectedPlayoutItems = undefined this.#pendingExpectedPackages = undefined - this.#routeSetChanged = false - this.#overridesRouteSetBuffer = { ...this.#context.studio.routeSetsWithOverrides } as ObjectWithOverrides< - Record - > - } - - updateRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): boolean { - const studio = this.#context.studio - - const routeSets: WrappedOverridableItemNormal[] = getAllCurrentItemsFromOverrides( - this.#overridesRouteSetBuffer, - null - ) - - const routeSet = routeSets.find((routeSet) => routeSet.id === routeSetId) - - if (routeSet === undefined) throw new Error(`RouteSet "${routeSetId}" not found!`) - - if (isActive === 'toggle') isActive = !routeSet.computed.active - - if (routeSet.computed?.behavior === StudioRouteBehavior.ACTIVATE_ONLY && isActive === false) - throw new Error(`RouteSet "${routeSet.id}" is ACTIVATE_ONLY`) - - const saveOverrides = (newOps: SomeObjectOverrideOp[]) => { - this.#overridesRouteSetBuffer.overrides = newOps - this.#routeSetChanged = true - } - const overrideHelper = new OverrideOpHelperImpl(saveOverrides, this.#overridesRouteSetBuffer) - - // Track whether changing this routeset could affect how the timeline is generated, so that it can be following this update - let mayAffectTimeline = couldRoutesetAffectTimelineGeneration(routeSet) - - logger.debug(`switchRouteSet "${studio._id}" "${routeSet.id}"=${isActive}`) - overrideHelper.setItemValue(routeSet.id, `active`, isActive) - - // Deactivate other routeSets in the same exclusivity group: - if (routeSet.computed.exclusivityGroup && isActive === true) { - for (const [, otherRouteSet] of Object.entries>(routeSets)) { - if (otherRouteSet.id === routeSet.id) continue - if (otherRouteSet.computed?.exclusivityGroup === 
routeSet.computed.exclusivityGroup) { - logger.debug(`switchRouteSet Other ID "${studio._id}" "${otherRouteSet.id}"=false`) - overrideHelper.setItemValue(otherRouteSet.id, `active`, false) - - mayAffectTimeline = mayAffectTimeline || couldRoutesetAffectTimelineGeneration(otherRouteSet) - } - } - } - - overrideHelper.commit() - - return mayAffectTimeline } } - -function couldRoutesetAffectTimelineGeneration(routeSet: WrappedOverridableItemNormal): boolean { - return routeSet.computed.abPlayers.length > 0 -} diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts index 53486ee37c6..8abd587defe 100644 --- a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts +++ b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts @@ -102,7 +102,7 @@ export class StudioPlayoutModelImpl implements StudioPlayoutModel { } switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): boolean { - return this.#baselineHelper.updateRouteSetActive(routeSetId, isActive) + return this.context.setRouteSetActive(routeSetId, isActive) } /** @@ -125,7 +125,11 @@ export class StudioPlayoutModelImpl implements StudioPlayoutModel { } this.#timelineHasChanged = false - await this.#baselineHelper.saveAllToDatabase() + await Promise.all([ + this.#baselineHelper.saveAllToDatabase(), + this.context.saveRouteSetChanges(), + // + ]) if (span) span.end() } diff --git a/packages/job-worker/src/workers/context/JobContextImpl.ts b/packages/job-worker/src/workers/context/JobContextImpl.ts index 0133b66333f..7be35b55f26 100644 --- a/packages/job-worker/src/workers/context/JobContextImpl.ts +++ b/packages/job-worker/src/workers/context/JobContextImpl.ts @@ -18,11 +18,16 @@ import { TimelineComplete } from '@sofie-automation/corelib/dist/dataModel/Timel import type { QueueJobFunc } from './util' import { StudioCacheContextImpl } from './StudioCacheContextImpl' import { PlaylistLockImpl, RundownLockImpl } 
from './Locks' +import { StudioRouteSetUpdater } from './StudioRouteSetUpdater' +import type { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import type { ReadonlyDeep } from 'type-fest' export class JobContextImpl extends StudioCacheContextImpl implements JobContext { private readonly locks: Array = [] private readonly caches: Array = [] + private readonly studioRouteSetUpdater: StudioRouteSetUpdater + constructor( directCollections: Readonly, cacheData: WorkerDataCache, @@ -32,6 +37,12 @@ export class JobContextImpl extends StudioCacheContextImpl implements JobContext private readonly fastTrackTimeline: FastTrackTimelineFunc | null ) { super(directCollections, cacheData) + + this.studioRouteSetUpdater = new StudioRouteSetUpdater(directCollections, cacheData) + } + + get studio(): ReadonlyDeep { + return this.studioRouteSetUpdater.studioWithChanges ?? super.studio } trackCache(cache: BaseModel): void { @@ -138,4 +149,16 @@ export class JobContextImpl extends StudioCacheContextImpl implements JobContext }) } } + + setRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): boolean { + return this.studioRouteSetUpdater.setRouteSetActive(routeSetId, isActive) + } + + async saveRouteSetChanges(): Promise { + return this.studioRouteSetUpdater.saveRouteSetChanges() + } + + discardRouteSetChanges(): void { + return this.studioRouteSetUpdater.discardRouteSetChanges() + } } diff --git a/packages/job-worker/src/workers/context/StudioRouteSetUpdater.ts b/packages/job-worker/src/workers/context/StudioRouteSetUpdater.ts new file mode 100644 index 00000000000..cea5c9e53b5 --- /dev/null +++ b/packages/job-worker/src/workers/context/StudioRouteSetUpdater.ts @@ -0,0 +1,109 @@ +import { StudioRouteBehavior, StudioRouteSet } from '@sofie-automation/blueprints-integration' +import type { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { deepFreeze } from '@sofie-automation/corelib/dist/lib' +import { + 
getAllCurrentItemsFromOverrides, + OverrideOpHelperImpl, + WrappedOverridableItemNormal, +} from '@sofie-automation/corelib/dist/overrideOpHelper' +import { logger } from '../../logging' +import type { ReadonlyDeep } from 'type-fest' +import type { WorkerDataCache } from '../caches' +import type { IDirectCollections } from '../../db' + +export class StudioRouteSetUpdater { + readonly #directCollections: Readonly + readonly #cacheData: Pick + + constructor(directCollections: Readonly, cacheData: Pick) { + this.#directCollections = directCollections + this.#cacheData = cacheData + } + + // Future: this could store a Map, if the context exposed a simplified view of DBStudio + #studioWithRouteSetChanges: ReadonlyDeep | undefined = undefined + + get studioWithChanges(): ReadonlyDeep | undefined { + return this.#studioWithRouteSetChanges + } + + setRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): boolean { + const currentStudio = this.#studioWithRouteSetChanges ?? this.#cacheData.studio + const currentRouteSets = getAllCurrentItemsFromOverrides(currentStudio.routeSetsWithOverrides, null) + + const routeSet = currentRouteSets.find((routeSet) => routeSet.id === routeSetId) + if (!routeSet) throw new Error(`RouteSet "${routeSetId}" not found!`) + + if (isActive === 'toggle') { + isActive = !routeSet.computed.active + } + + if (routeSet.computed.behavior === StudioRouteBehavior.ACTIVATE_ONLY && !isActive) + throw new Error(`RouteSet "${routeSet.id}" is ACTIVATE_ONLY`) + + const overrideHelper = new OverrideOpHelperImpl(null, currentStudio.routeSetsWithOverrides) + + // Update the pending changes + logger.debug(`switchRouteSet "${this.#cacheData.studio._id}" "${routeSet.id}"=${isActive}`) + overrideHelper.setItemValue(routeSetId, 'active', isActive) + + let mayAffectTimeline = couldRoutesetAffectTimelineGeneration(routeSet) + + // Deactivate other routeSets in the same exclusivity group: + if (routeSet.computed.exclusivityGroup && isActive) { + for (const 
otherRouteSet of Object.values>(currentRouteSets)) { + if (otherRouteSet.id === routeSet.id) continue + if (otherRouteSet.computed?.exclusivityGroup === routeSet.computed.exclusivityGroup) { + logger.debug(`switchRouteSet Other ID "${this.#cacheData.studio._id}" "${otherRouteSet.id}"=false`) + overrideHelper.setItemValue(otherRouteSet.id, 'active', false) + + mayAffectTimeline = mayAffectTimeline || couldRoutesetAffectTimelineGeneration(otherRouteSet) + } + } + } + + const updatedOverrideOps = overrideHelper.getPendingOps() + + // Update the cached studio + this.#studioWithRouteSetChanges = Object.freeze({ + ...currentStudio, + routeSetsWithOverrides: Object.freeze({ + ...currentStudio.routeSetsWithOverrides, + overrides: deepFreeze(updatedOverrideOps), + }), + }) + + return mayAffectTimeline + } + + async saveRouteSetChanges(): Promise { + if (!this.#studioWithRouteSetChanges) return + + // Save the changes to the database + // This is technically a little bit of a race condition, if someone uses the config pages but no more so than the rest of the system + await this.#directCollections.Studios.update( + { + _id: this.#cacheData.studio._id, + }, + { + $set: { + 'routeSetsWithOverrides.overrides': + this.#studioWithRouteSetChanges.routeSetsWithOverrides.overrides, + }, + } + ) + + // Pretend that the studio as reported by the database has changed, this will be fixed after this job by the ChangeStream firing + this.#cacheData.studio = this.#studioWithRouteSetChanges + this.#studioWithRouteSetChanges = undefined + } + + discardRouteSetChanges(): void { + // Discard any pending changes + this.#studioWithRouteSetChanges = undefined + } +} + +function couldRoutesetAffectTimelineGeneration(routeSet: WrappedOverridableItemNormal): boolean { + return routeSet.computed.abPlayers.length > 0 +} diff --git a/packages/job-worker/src/workers/context/__tests__/StudioRouteSetUpdater.spec.ts b/packages/job-worker/src/workers/context/__tests__/StudioRouteSetUpdater.spec.ts new file 
mode 100644 index 00000000000..77692f4072a --- /dev/null +++ b/packages/job-worker/src/workers/context/__tests__/StudioRouteSetUpdater.spec.ts @@ -0,0 +1,403 @@ +import { StudioRouteBehavior, StudioRouteSet } from '@sofie-automation/blueprints-integration' +import { setupDefaultJobEnvironment } from '../../../__mocks__/context' +import { StudioRouteSetUpdater } from '../StudioRouteSetUpdater' +import type { WorkerDataCache } from '../../caches' +import { wrapDefaultObject } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' + +function setupTest(routeSets: Record) { + const context = setupDefaultJobEnvironment() + const mockCache: Pick = { + studio: { + ...context.studio, + routeSetsWithOverrides: wrapDefaultObject(routeSets), + }, + } + const mockCollection = context.mockCollections.Studios + const routeSetHelper = new StudioRouteSetUpdater(context.directCollections, mockCache) + + return { context, mockCache, mockCollection, routeSetHelper } +} + +const SINGLE_ROUTESET: Record = { + one: { + name: 'test', + active: false, + behavior: StudioRouteBehavior.TOGGLE, + routes: [], + abPlayers: [], + }, +} +const SINGLE_ROUTESET_WITH_AB: Record = { + one: { + name: 'test', + active: false, + behavior: StudioRouteBehavior.TOGGLE, + routes: [], + abPlayers: [{ playerId: 'test', poolName: 'test' }], + }, +} +const EXCLUSIVE_ROUTESETS: Record = { + one: { + name: 'test', + active: false, + behavior: StudioRouteBehavior.TOGGLE, + exclusivityGroup: 'main', + routes: [], + abPlayers: [{ playerId: 'test', poolName: 'test' }], + }, + two: { + name: 'test', + active: true, + behavior: StudioRouteBehavior.TOGGLE, + exclusivityGroup: 'main', + routes: [], + abPlayers: [], + }, + activate: { + name: 'test', + active: false, + behavior: StudioRouteBehavior.ACTIVATE_ONLY, + exclusivityGroup: 'main', + routes: [], + abPlayers: [], + }, +} + +describe('StudioRouteSetUpdater', () => { + it('no changes should not save', async () => { + const { mockCollection, 
routeSetHelper } = setupTest(SINGLE_ROUTESET) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(0) + }) + + it('no changes when setting missing routeset', async () => { + const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) + + expect(() => routeSetHelper.setRouteSetActive('missing', true)).toThrow(/not found/) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(0) + }) + + it('change when setting routeset - true', async () => { + const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) + + routeSetHelper.setRouteSetActive('one', true) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toEqual([ + { + type: 'update', + args: [ + { _id: 'mockStudio0' }, + { + $set: { + 'routeSetsWithOverrides.overrides': [ + { + op: 'set', + path: 'one.active', + value: true, + }, + ], + }, + }, + ], + }, + ]) + }) + it('change when setting routeset - false', async () => { + const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) + + routeSetHelper.setRouteSetActive('one', false) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toEqual([ + { + type: 'update', + args: [ + { _id: 'mockStudio0' }, + { + $set: { + 'routeSetsWithOverrides.overrides': [ + { + op: 'set', + path: 'one.active', + value: false, + }, + ], + }, + }, + ], + }, + ]) + }) + it('change when setting routeset - toggle', async () => { + const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) + + routeSetHelper.setRouteSetActive('one', 'toggle') + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toEqual([ + { + type: 
'update', + args: [ + { _id: 'mockStudio0' }, + { + $set: { + 'routeSetsWithOverrides.overrides': [ + { + op: 'set', + path: 'one.active', + value: true, + }, + ], + }, + }, + ], + }, + ]) + }) + it('change when setting routeset - toggle twice', async () => { + const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) + + routeSetHelper.setRouteSetActive('one', 'toggle') + routeSetHelper.setRouteSetActive('one', 'toggle') + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toEqual([ + { + type: 'update', + args: [ + { _id: 'mockStudio0' }, + { + $set: { + 'routeSetsWithOverrides.overrides': [ + { + op: 'set', + path: 'one.active', + value: false, + }, + ], + }, + }, + ], + }, + ]) + }) + + it('discard changes should not save', async () => { + const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) + + routeSetHelper.setRouteSetActive('one', true) + + expect(routeSetHelper.studioWithChanges).toBeTruthy() + + routeSetHelper.discardRouteSetChanges() + + expect(routeSetHelper.studioWithChanges).toBeFalsy() + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(0) + }) + + it('save should update mockCache', async () => { + const { mockCache, mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) + + const studioBefore = mockCache.studio + expect(routeSetHelper.studioWithChanges).toBeFalsy() + + routeSetHelper.setRouteSetActive('one', true) + expect(routeSetHelper.studioWithChanges).toBeTruthy() + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(1) + + // Object should have changed + expect(mockCache.studio).not.toBe(studioBefore) + // Object should not be equal + expect(mockCache.studio).not.toEqual(studioBefore) + expect(routeSetHelper.studioWithChanges).toBeFalsy() + 
}) + + it('no changes should not update mockCache', async () => { + const { mockCache, mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) + + const studioBefore = mockCache.studio + expect(routeSetHelper.studioWithChanges).toBeFalsy() + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(0) + + expect(mockCache.studio).toBe(studioBefore) + expect(routeSetHelper.studioWithChanges).toBeFalsy() + }) + + it('discard changes should not update mockCache', async () => { + const { mockCache, mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) + + const studioBefore = mockCache.studio + expect(routeSetHelper.studioWithChanges).toBeFalsy() + + routeSetHelper.setRouteSetActive('one', true) + expect(routeSetHelper.studioWithChanges).toBeTruthy() + routeSetHelper.discardRouteSetChanges() + expect(routeSetHelper.studioWithChanges).toBeFalsy() + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(0) + + expect(mockCache.studio).toBe(studioBefore) + expect(routeSetHelper.studioWithChanges).toBeFalsy() + }) + + it('ACTIVATE_ONLY routeset can be activated', async () => { + const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS) + + routeSetHelper.setRouteSetActive('activate', true) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(1) + }) + + it('ACTIVATE_ONLY routeset canot be deactivated', async () => { + const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS) + + expect(() => routeSetHelper.setRouteSetActive('activate', false)).toThrow(/ACTIVATE_ONLY/) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(0) + }) + + describe('exclusive groups', () => 
{ + it('deactivate member of exclusive group', async () => { + const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS) + + routeSetHelper.setRouteSetActive('one', false) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toEqual([ + { + type: 'update', + args: [ + { _id: 'mockStudio0' }, + { + $set: { + 'routeSetsWithOverrides.overrides': [ + { + op: 'set', + path: 'one.active', + value: false, + }, + ], + }, + }, + ], + }, + ]) + }) + + it('activate member of exclusive group', async () => { + const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS) + + routeSetHelper.setRouteSetActive('one', true) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toEqual([ + { + type: 'update', + args: [ + { _id: 'mockStudio0' }, + { + $set: { + 'routeSetsWithOverrides.overrides': [ + { + op: 'set', + path: 'one.active', + value: true, + }, + { + op: 'set', + path: 'two.active', + value: false, + }, + { + op: 'set', + path: 'activate.active', + value: false, + }, + ], + }, + }, + ], + }, + ]) + }) + }) + + describe('Return value', () => { + it('update player with ab', async () => { + const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET_WITH_AB) + + expect(routeSetHelper.setRouteSetActive('one', false)).toBe(true) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(1) + }) + + it('update player without ab', async () => { + const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) + + expect(routeSetHelper.setRouteSetActive('one', false)).toBe(false) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(1) + }) + + it('update exclusive group - disabling player without 
ab', async () => { + const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS) + + expect(routeSetHelper.setRouteSetActive('two', false)).toBe(false) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(1) + }) + + it('update exclusive group - disabling player with ab', async () => { + const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS) + + expect(routeSetHelper.setRouteSetActive('one', false)).toBe(true) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(1) + }) + + it('update exclusive group - enabling player without ab', async () => { + const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS) + + expect(routeSetHelper.setRouteSetActive('two', true)).toBe(true) + + expect(mockCollection.operations).toHaveLength(0) + await routeSetHelper.saveRouteSetChanges() + expect(mockCollection.operations).toHaveLength(1) + }) + }) +}) From 17d344b162a7ffd2576aecc031be02b82723162d Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Thu, 17 Oct 2024 16:51:55 +0100 Subject: [PATCH 34/81] fix: tests --- packages/job-worker/src/__mocks__/context.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/job-worker/src/__mocks__/context.ts b/packages/job-worker/src/__mocks__/context.ts index 937c22ca487..57a861bb9ff 100644 --- a/packages/job-worker/src/__mocks__/context.ts +++ b/packages/job-worker/src/__mocks__/context.ts @@ -231,11 +231,11 @@ export class MockJobContext implements JobContext { } async saveRouteSetChanges(): Promise { - throw new Error('Method not implemented.') + // throw new Error('Method not implemented.') } discardRouteSetChanges(): void { - throw new Error('Method not implemented.') + // throw new Error('Method not implemented.') } /** From 8706d186476029c353502a3bfd81549c966515dc Mon Sep 17 
00:00:00 2001 From: Peter C <12292660+PeterC89@users.noreply.github.com> Date: Thu, 17 Oct 2024 17:13:54 +0100 Subject: [PATCH 35/81] Revert "chore: revert countdown in top bar to part time" This reverts commit 5b12e59dd836f2dfb512d7085003e4272849155f. --- packages/webui/src/client/ui/RundownView.tsx | 1 - .../RundownTiming/CurrentPartOrSegmentRemaining.tsx | 3 --- 2 files changed, 4 deletions(-) diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index 5e8e5fd83aa..f86944eb405 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -314,7 +314,6 @@ const TimingDisplay = withTranslation()( {rundownPlaylist.holdState && rundownPlaylist.holdState !== RundownHoldState.COMPLETE ? ( diff --git a/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx b/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx index 98947b296d2..a62ca027598 100644 --- a/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx +++ b/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx @@ -14,8 +14,6 @@ interface IPartRemainingProps { heavyClassName?: string speaking?: boolean vibrating?: boolean - /** This component will show the remaining segment budget when that is available, but in some places in the UI we want it to force it to show remaining Part regardless */ - forceToPartTimer?: boolean } // global variable for remembering last uttered displayTime @@ -39,7 +37,6 @@ export const CurrentPartOrSegmentRemaining = withTiming let displayTimecode = this.props.timingDurations.remainingBudgetOnCurrentSegment ?? 
this.props.timingDurations.remainingTimeOnCurrentPart - if (this.props.forceToPartTimer) displayTimecode = this.props.timingDurations.remainingTimeOnCurrentPart if (displayTimecode === undefined) return null displayTimecode *= -1 return ( From 0180936ae51359b063fa74f4fb7e8b28d12b8f64 Mon Sep 17 00:00:00 2001 From: Mint de Wit Date: Thu, 17 Oct 2024 15:11:07 +0000 Subject: [PATCH 36/81] chore: revert countdown in top bar to part time --- packages/webui/src/client/ui/RundownView.tsx | 1 + .../RundownTiming/CurrentPartOrSegmentRemaining.tsx | 3 +++ 2 files changed, 4 insertions(+) diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index f86944eb405..5e8e5fd83aa 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -314,6 +314,7 @@ const TimingDisplay = withTranslation()( {rundownPlaylist.holdState && rundownPlaylist.holdState !== RundownHoldState.COMPLETE ? ( diff --git a/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx b/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx index a62ca027598..98947b296d2 100644 --- a/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx +++ b/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx @@ -14,6 +14,8 @@ interface IPartRemainingProps { heavyClassName?: string speaking?: boolean vibrating?: boolean + /** This component will show the remaining segment budget when that is available, but in some places in the UI we want it to force it to show remaining Part regardless */ + forceToPartTimer?: boolean } // global variable for remembering last uttered displayTime @@ -37,6 +39,7 @@ export const CurrentPartOrSegmentRemaining = withTiming let displayTimecode = this.props.timingDurations.remainingBudgetOnCurrentSegment ?? 
this.props.timingDurations.remainingTimeOnCurrentPart + if (this.props.forceToPartTimer) displayTimecode = this.props.timingDurations.remainingTimeOnCurrentPart if (displayTimecode === undefined) return null displayTimecode *= -1 return ( From 320df23e3aef4e2e4a861ae90ad91aafcb8439ee Mon Sep 17 00:00:00 2001 From: Mint de Wit Date: Fri, 18 Oct 2024 08:27:39 +0000 Subject: [PATCH 37/81] chore: swap segment and part countdown --- packages/webui/src/client/ui/RundownView.tsx | 2 +- .../RundownTiming/CurrentPartOrSegmentRemaining.tsx | 10 ++++------ packages/webui/src/client/ui/Shelf/PartTimingPanel.tsx | 1 + 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index 5e8e5fd83aa..8b0f49441a6 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -314,7 +314,7 @@ const TimingDisplay = withTranslation()( {rundownPlaylist.holdState && rundownPlaylist.holdState !== RundownHoldState.COMPLETE ? 
( diff --git a/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx b/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx index 98947b296d2..f90232f199b 100644 --- a/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx +++ b/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx @@ -14,8 +14,8 @@ interface IPartRemainingProps { heavyClassName?: string speaking?: boolean vibrating?: boolean - /** This component will show the remaining segment budget when that is available, but in some places in the UI we want it to force it to show remaining Part regardless */ - forceToPartTimer?: boolean + /** Use the segment budget instead of the part duration */ + useSegmentTime?: boolean } // global variable for remembering last uttered displayTime @@ -36,10 +36,8 @@ export const CurrentPartOrSegmentRemaining = withTiming render(): JSX.Element | null { if (!this.props.timingDurations || !this.props.timingDurations.currentTime) return null if (this.props.timingDurations.currentPartInstanceId !== this.props.currentPartInstanceId) return null - let displayTimecode = - this.props.timingDurations.remainingBudgetOnCurrentSegment ?? 
- this.props.timingDurations.remainingTimeOnCurrentPart - if (this.props.forceToPartTimer) displayTimecode = this.props.timingDurations.remainingTimeOnCurrentPart + let displayTimecode = this.props.timingDurations.remainingTimeOnCurrentPart + if (this.props.useSegmentTime) displayTimecode = this.props.timingDurations.remainingBudgetOnCurrentSegment if (displayTimecode === undefined) return null displayTimecode *= -1 return ( diff --git a/packages/webui/src/client/ui/Shelf/PartTimingPanel.tsx b/packages/webui/src/client/ui/Shelf/PartTimingPanel.tsx index 3c17e4e9d52..83558746d2a 100644 --- a/packages/webui/src/client/ui/Shelf/PartTimingPanel.tsx +++ b/packages/webui/src/client/ui/Shelf/PartTimingPanel.tsx @@ -59,6 +59,7 @@ class PartTimingPanelInner extends React.Component< vibrating={getAllowVibrating() && panel.speakCountDown} heavyClassName="overtime" className="part-remaining" + useSegmentTime={true} /> ) : ( From 1c6df60e25dfe1c98153d2a60acec2189be1cf11 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Fri, 18 Oct 2024 10:17:16 +0100 Subject: [PATCH 38/81] fix: use segment budget duration for segment duration counter (#15) --- .../webui/src/client/lib/rundownTiming.ts | 6 +++ .../CurrentPartOrSegmentRemaining.tsx | 2 +- .../RundownTiming/SegmentDuration.tsx | 45 ++++++++++++------- 3 files changed, 35 insertions(+), 18 deletions(-) diff --git a/packages/webui/src/client/lib/rundownTiming.ts b/packages/webui/src/client/lib/rundownTiming.ts index b9491f40d0b..403513f5e6a 100644 --- a/packages/webui/src/client/lib/rundownTiming.ts +++ b/packages/webui/src/client/lib/rundownTiming.ts @@ -610,6 +610,7 @@ export class RundownTimingCalculator { let remainingTimeOnCurrentPart: number | undefined = undefined let currentPartWillAutoNext = false + let currentSegmentId: SegmentId | null | undefined if (currentAIndex >= 0) { const currentLivePartInstance = partInstances[currentAIndex] const currentLivePart = currentLivePartInstance.part @@ -635,10 +636,13 @@ export class 
RundownTimingCalculator { : onAirPartDuration currentPartWillAutoNext = !!(currentLivePart.autoNext && currentLivePart.expectedDuration) + + currentSegmentId = currentLivePart.segmentId } return literal({ currentPartInstanceId: playlist ? playlist.currentPartInfo?.partInstanceId ?? null : undefined, + currentSegmentId: currentSegmentId, totalPlaylistDuration: totalRundownDuration, remainingPlaylistDuration: remainingRundownDuration, asDisplayedPlaylistDuration: asDisplayedRundownDuration, @@ -708,6 +712,8 @@ export class RundownTimingCalculator { export interface RundownTimingContext { /** This stores the part instance that was active when this timing information was generated. */ currentPartInstanceId?: PartInstanceId | null + /** This stores the id of the segment that was active when this timing information was generated. */ + currentSegmentId?: SegmentId | null /** This is the total duration of the playlist as planned (using expectedDurations). */ totalPlaylistDuration?: number /** This is the content remaining to be played in the playlist (based on the expectedDurations). 
*/ diff --git a/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx b/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx index f90232f199b..c33e0e25a3a 100644 --- a/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx +++ b/packages/webui/src/client/ui/RundownView/RundownTiming/CurrentPartOrSegmentRemaining.tsx @@ -32,7 +32,7 @@ export const CurrentPartOrSegmentRemaining = withTiming tickResolution: TimingTickResolution.Synced, dataResolution: TimingDataResolution.Synced, })( - class CurrentPartRemaining extends React.Component> { + class CurrentPartOrSegmentRemaining extends React.Component> { render(): JSX.Element | null { if (!this.props.timingDurations || !this.props.timingDurations.currentTime) return null if (this.props.timingDurations.currentPartInstanceId !== this.props.currentPartInstanceId) return null diff --git a/packages/webui/src/client/ui/RundownView/RundownTiming/SegmentDuration.tsx b/packages/webui/src/client/ui/RundownView/RundownTiming/SegmentDuration.tsx index 9fa40064390..b521e39fd07 100644 --- a/packages/webui/src/client/ui/RundownView/RundownTiming/SegmentDuration.tsx +++ b/packages/webui/src/client/ui/RundownView/RundownTiming/SegmentDuration.tsx @@ -6,6 +6,7 @@ import { PartUi } from '../../SegmentTimeline/SegmentTimelineContainer' import { calculatePartInstanceExpectedDurationWithTransition } from '@sofie-automation/corelib/dist/playout/timings' import { getPartInstanceTimingId } from '../../../lib/rundownTiming' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' +import { CountdownType } from '@sofie-automation/blueprints-integration' interface ISegmentDurationProps { segment: DBSegment @@ -28,30 +29,40 @@ export const SegmentDuration = withTiming()(function props: WithTiming ) { let duration: number | undefined = undefined - let budget = 0 let playedOut = 0 const segmentBudgetDuration = 
props.segment.segmentTiming?.budgetDuration + const segmentTimingType = props.segment.segmentTiming?.countdownType ?? CountdownType.PART_EXPECTED_DURATION - if (segmentBudgetDuration !== undefined) { - budget = segmentBudgetDuration - } - if (props.parts && props.timingDurations.partPlayed) { - const { partPlayed } = props.timingDurations - if (segmentBudgetDuration === undefined) { + let budget = segmentBudgetDuration ?? 0 + let hardFloor = false + + if (segmentTimingType === CountdownType.SEGMENT_BUDGET_DURATION) { + hardFloor = true + + if (props.timingDurations.currentSegmentId === props.segment._id) { + duration = props.timingDurations.remainingBudgetOnCurrentSegment ?? segmentBudgetDuration ?? 0 + } else { + duration = segmentBudgetDuration ?? 0 + } + } else { + if (props.parts && props.timingDurations.partPlayed) { + const { partPlayed } = props.timingDurations + if (segmentBudgetDuration === undefined) { + props.parts.forEach((part) => { + budget += + part.instance.orphaned || part.instance.part.untimed + ? 0 + : calculatePartInstanceExpectedDurationWithTransition(part.instance) || 0 + }) + } props.parts.forEach((part) => { - budget += - part.instance.orphaned || part.instance.part.untimed - ? 0 - : calculatePartInstanceExpectedDurationWithTransition(part.instance) || 0 + playedOut += (!part.instance.part.untimed ? partPlayed[getPartInstanceTimingId(part.instance)] : 0) || 0 }) } - props.parts.forEach((part) => { - playedOut += (!part.instance.part.untimed ? 
partPlayed[getPartInstanceTimingId(part.instance)] : 0) || 0 - }) - } - duration = budget - playedOut + duration = budget - playedOut + } const showNegativeStyling = !props.fixed && !props.countUp @@ -72,7 +83,7 @@ export const SegmentDuration = withTiming()(function })} role="timer" > - {RundownUtils.formatDiffToTimecode(value, false, false, true, false, true, '+')} + {RundownUtils.formatDiffToTimecode(value, false, false, true, false, true, '+', false, hardFloor)} ) From 41f3d33cd3aaffa8799c3f8b912a643209d307fd Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Fri, 18 Oct 2024 12:40:07 +0100 Subject: [PATCH 39/81] chore: refactor --- .../job-worker/src/blueprints/context/lib.ts | 2 +- .../PlayoutPartInstanceModelImpl.ts | 35 +++++++------------ 2 files changed, 14 insertions(+), 23 deletions(-) diff --git a/packages/job-worker/src/blueprints/context/lib.ts b/packages/job-worker/src/blueprints/context/lib.ts index 13cb6a45094..7175dce3b34 100644 --- a/packages/job-worker/src/blueprints/context/lib.ts +++ b/packages/job-worker/src/blueprints/context/lib.ts @@ -99,7 +99,7 @@ export const IBlueprintPieceObjectsSampleKeys = allKeysOfObject }) // Compile a list of the keys which are allowed to be set -export const IBlueprintMutatablePartSampleKeys = allKeysOfObject({ +export const PlayoutMutatablePartSampleKeys = allKeysOfObject({ title: true, prompterTitle: true, privateData: true, diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts index fdeccee75b4..9c295d32fee 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts @@ -15,12 +15,7 @@ import { PartCalculatedTimings, } from '@sofie-automation/corelib/dist/playout/timings' import { PartNote } from '@sofie-automation/corelib/dist/dataModel/Notes' -import { 
- IBlueprintMutatablePart, - IBlueprintPieceType, - PieceLifespan, - Time, -} from '@sofie-automation/blueprints-integration' +import { IBlueprintPieceType, PieceLifespan, Time } from '@sofie-automation/blueprints-integration' import { PlayoutMutatablePart, PlayoutPartInstanceModel, @@ -32,7 +27,7 @@ import { PlayoutPieceInstanceModelImpl } from './PlayoutPieceInstanceModelImpl' import { EmptyPieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import _ = require('underscore') import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' -import { IBlueprintMutatablePartSampleKeys } from '../../../blueprints/context/lib' +import { PlayoutMutatablePartSampleKeys } from '../../../blueprints/context/lib' import { QuickLoopService } from '../services/QuickLoopService' /** @@ -533,19 +528,17 @@ export class PlayoutPartInstanceModelImpl implements PlayoutPartInstanceModel { // Future: this could do some better validation // filter the submission to the allowed ones - const trimmedProps: Partial = filterPropsToAllowed(props) + const trimmedProps: Partial = filterPropsToAllowed(props) if (Object.keys(trimmedProps).length === 0) return false - const newPart: DBPart = { - ...this.partInstanceImpl.part, - ...trimmedProps, - userEditOperations: this.partInstanceImpl.part.userEditOperations, // Replaced below if changed - } - - // Only replace `userEditOperations` if new values were provided - if ('userEditOperations' in trimmedProps) newPart.userEditOperations = props.userEditOperations - - this.#compareAndSetPartInstanceValue('part', newPart, true) + this.#compareAndSetPartInstanceValue( + 'part', + { + ...this.partInstanceImpl.part, + ...trimmedProps, + }, + true + ) return true } @@ -581,8 +574,6 @@ export class PlayoutPartInstanceModelImpl implements PlayoutPartInstanceModel { } } -function filterPropsToAllowed( - props: Partial> -): Partial> { - return _.pick(props, [...IBlueprintMutatablePartSampleKeys]) +function 
filterPropsToAllowed(props: Partial): Partial { + return _.pick(props, [...PlayoutMutatablePartSampleKeys]) } From 638837917947f8126ef6d9073746b42a51a93df5 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Fri, 18 Oct 2024 12:47:30 +0100 Subject: [PATCH 40/81] chore: backport fix --- packages/job-worker/src/playout/abPlayback/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/job-worker/src/playout/abPlayback/index.ts b/packages/job-worker/src/playout/abPlayback/index.ts index bd1e2facc0e..d803ecac460 100644 --- a/packages/job-worker/src/playout/abPlayback/index.ts +++ b/packages/job-worker/src/playout/abPlayback/index.ts @@ -26,7 +26,7 @@ import { abPoolFilterDisabled, findPlayersInRouteSets } from './routeSetDisablin * @param abSessionHelper Helper for generation sessionId * @param blueprint Blueprint of the currently playing ShowStyle * @param showStyle The currently playing ShowStyle - * @param playoutModel The current playout model + * @param playlist The currently playing Playlist * @param resolvedPieces All the PieceInstances on the timeline, resolved to have 'accurate' playback timings * @param timelineObjects The current timeline * @returns New AB assignments to be persisted on the playlist for the next call From 78f2b7fe76e61d7c8b6868c7f97ee7b951cd344a Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Mon, 21 Oct 2024 12:01:51 +0100 Subject: [PATCH 41/81] chore: fix tests --- .../src/topics/__tests__/segmentsTopic.spec.ts | 1 - .../webui/src/client/lib/__tests__/rundownTiming.test.ts | 8 ++++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts b/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts index 781e4b4adc8..c26f1f17637 100644 --- a/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts +++ b/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts @@ -18,7 +18,6 @@ function 
makeTestSegment(id: string, rank: number, rundownId: string, segmentPro name: `Segment ${id}`, _rank: rank, rundownId: protectString(rundownId), - externalModified: 1695799420147, ...segmentProps, } } diff --git a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts index 67bcb0d92be..67599df91cc 100644 --- a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts +++ b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts @@ -699,6 +699,7 @@ describe('rundown Timing Calculator', () => { asDisplayedPlaylistDuration: 7000, asPlayedPlaylistDuration: 7000, currentPartWillAutoNext: false, + currentSegmentId: protectString(segmentId1), currentTime: 3500, rundownExpectedDurations: { [rundownId1]: 7000, @@ -854,6 +855,7 @@ describe('rundown Timing Calculator', () => { asDisplayedPlaylistDuration: 11000, asPlayedPlaylistDuration: 11000, currentPartWillAutoNext: false, + currentSegmentId: protectString(segmentId1), currentTime: 10000, rundownExpectedDurations: { [rundownId1]: 7000, @@ -1757,6 +1759,7 @@ describe('rundown Timing Calculator', () => { asDisplayedPlaylistDuration: 4000, asPlayedPlaylistDuration: 4000, currentPartWillAutoNext: false, + currentSegmentId: protectString(segmentId1), currentTime: 1500, rundownExpectedDurations: { [rundownId1]: 4000, @@ -1905,6 +1908,7 @@ describe('rundown Timing Calculator', () => { asDisplayedPlaylistDuration: 5500, asPlayedPlaylistDuration: 5500, currentPartWillAutoNext: false, + currentSegmentId: protectString(segmentId1), currentTime: 3500, rundownExpectedDurations: { [rundownId1]: 4000, @@ -2059,6 +2063,7 @@ describe('rundown Timing Calculator', () => { asDisplayedPlaylistDuration: 4000, asPlayedPlaylistDuration: 4000, currentPartWillAutoNext: false, + currentSegmentId: protectString(segmentId2), currentTime: 2500, rundownExpectedDurations: { [rundownId1]: 4000, @@ -2207,6 +2212,7 @@ describe('rundown Timing Calculator', () => { 
asDisplayedPlaylistDuration: 4000, asPlayedPlaylistDuration: 4000, currentPartWillAutoNext: false, + currentSegmentId: protectString(segmentId1), currentTime: 1500, rundownExpectedDurations: { [rundownId1]: 4000, @@ -2355,6 +2361,7 @@ describe('rundown Timing Calculator', () => { asDisplayedPlaylistDuration: 5500, asPlayedPlaylistDuration: 5500, currentPartWillAutoNext: false, + currentSegmentId: protectString(segmentId1), currentTime: 3500, rundownExpectedDurations: { [rundownId1]: 4000, @@ -2509,6 +2516,7 @@ describe('rundown Timing Calculator', () => { asDisplayedPlaylistDuration: 4000, asPlayedPlaylistDuration: 4000, currentPartWillAutoNext: false, + currentSegmentId: protectString(segmentId2), currentTime: 2500, rundownExpectedDurations: { [rundownId1]: 4000, From d839a5de96dedeffde1d4457f63e0ec4e4875fe1 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Mon, 21 Oct 2024 12:34:02 +0100 Subject: [PATCH 42/81] chore: fix test --- .../src/playout/__tests__/__snapshots__/playout.test.ts.snap | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap b/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap index 570be55e514..96eca8b8923 100644 --- a/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap +++ b/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap @@ -75,6 +75,11 @@ exports[`Playout API Basic rundown control 4`] = ` "nextTimeOffset": null, "organizationId": null, "previousPartInfo": null, + "quickLoop": { + "forceAutoNext": "disabled", + "locked": false, + "running": false, + }, "rehearsal": false, "resetTime": 0, "rundownIdsInOrder": [], From 44a9bc037477791580b3cd476538e646ef325e82 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 22 Oct 2024 10:39:34 +0100 Subject: [PATCH 43/81] chore: fix tests --- .../lib/__tests__/rundownTiming.test.ts | 54 +++++++++---------- 1 file changed, 27 insertions(+), 27 
deletions(-) diff --git a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts index 67599df91cc..67f3ec342f5 100644 --- a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts +++ b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts @@ -9,7 +9,7 @@ import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { literal, protectString, unprotectString } from '../tempLib' import { RundownTimingCalculator, RundownTimingContext, findPartInstancesInQuickLoop } from '../rundownTiming' -import { IBlueprintPieceType, PlaylistTimingType } from '@sofie-automation/blueprints-integration' +import { IBlueprintPieceType, PlaylistTimingType, SegmentTimingInfo } from '@sofie-automation/blueprints-integration' import { PartId, RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { CalculateTimingsPiece } from '@sofie-automation/corelib/dist/playout/timings' @@ -40,12 +40,7 @@ function makeMockPart( rank: number, rundownId: string, segmentId: string, - durations: { - budgetDuration?: number - displayDuration?: number - displayDurationGroup?: string - expectedDuration?: number - } + durations: Pick ): DBPart { return literal({ _id: protectString(id), @@ -59,16 +54,7 @@ function makeMockPart( }) } -function makeMockSegment( - id: string, - rank: number, - rundownId: string, - timing?: { - expectedStart?: number - expectedEnd?: number - budgetDuration?: number - } -): DBSegment { +function makeMockSegment(id: string, rank: number, rundownId: string, timing?: SegmentTimingInfo): DBSegment { return literal({ _id: protectString(id), name: 'mock-segment', @@ -1408,24 +1394,31 @@ describe('rundown Timing Calculator', () => { const segmentId1 = 'segment1' const segmentId2 = 'segment2' const segmentsMap: Map = new Map() - segmentsMap.set(protectString(segmentId1), 
makeMockSegment(segmentId1, 0, rundownId1)) - segmentsMap.set(protectString(segmentId2), makeMockSegment(segmentId2, 0, rundownId1)) + segmentsMap.set( + protectString(segmentId1), + makeMockSegment(segmentId1, 0, rundownId1, { + budgetDuration: 5000, + }) + ) + segmentsMap.set( + protectString(segmentId2), + makeMockSegment(segmentId2, 0, rundownId1, { + budgetDuration: 3000, + }) + ) const parts: DBPart[] = [] parts.push( makeMockPart('part1', 0, rundownId1, segmentId1, { - budgetDuration: 2000, expectedDuration: 1000, }) ) parts.push( makeMockPart('part2', 0, rundownId1, segmentId1, { - budgetDuration: 3000, expectedDuration: 1000, }) ) parts.push( makeMockPart('part3', 0, rundownId1, segmentId2, { - budgetDuration: 3000, expectedDuration: 1000, }) ) @@ -1537,24 +1530,31 @@ describe('rundown Timing Calculator', () => { const segmentId1 = 'segment1' const segmentId2 = 'segment2' const segmentsMap: Map = new Map() - segmentsMap.set(protectString(segmentId1), makeMockSegment(segmentId1, 0, rundownId1)) - segmentsMap.set(protectString(segmentId2), makeMockSegment(segmentId2, 0, rundownId1)) + segmentsMap.set( + protectString(segmentId1), + makeMockSegment(segmentId1, 0, rundownId1, { + budgetDuration: 5000, + }) + ) + segmentsMap.set( + protectString(segmentId2), + makeMockSegment(segmentId2, 0, rundownId1, { + budgetDuration: 3000, + }) + ) const parts: DBPart[] = [] parts.push( makeMockPart('part1', 0, rundownId1, segmentId1, { - budgetDuration: 2000, expectedDuration: 2000, }) ) parts.push( makeMockPart('part2', 0, rundownId1, segmentId1, { - budgetDuration: 3000, expectedDuration: 2000, }) ) parts.push( makeMockPart('part3', 0, rundownId1, segmentId2, { - budgetDuration: 3000, expectedDuration: 1000, }) ) From 24a4819b1b5ace8e3bbfde6d37e6e05ac4238e44 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 22 Oct 2024 12:02:34 +0100 Subject: [PATCH 44/81] chore: remove test from bad merge --- .../lib/__tests__/rundownTiming.test.ts | 123 +----------------- 1 file 
changed, 1 insertion(+), 122 deletions(-) diff --git a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts index 67f3ec342f5..262fe8922c6 100644 --- a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts +++ b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts @@ -9,9 +9,8 @@ import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { literal, protectString, unprotectString } from '../tempLib' import { RundownTimingCalculator, RundownTimingContext, findPartInstancesInQuickLoop } from '../rundownTiming' -import { IBlueprintPieceType, PlaylistTimingType, SegmentTimingInfo } from '@sofie-automation/blueprints-integration' +import { PlaylistTimingType, SegmentTimingInfo } from '@sofie-automation/blueprints-integration' import { PartId, RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { CalculateTimingsPiece } from '@sofie-automation/corelib/dist/playout/timings' const DEFAULT_DURATION = 0 const DEFAULT_NONZERO_DURATION = 4000 @@ -1262,126 +1261,6 @@ describe('rundown Timing Calculator', () => { ) }) - it('Adds Piece preroll to Part durations', () => { - const timing = new RundownTimingCalculator() - const playlist: DBRundownPlaylist = makeMockPlaylist() - playlist.timing = { - type: 'forward-time' as any, - expectedStart: 0, - expectedDuration: 40000, - } - const rundownId = 'rundown1' - const segmentId1 = 'segment1' - const segmentId2 = 'segment2' - const segmentsMap: Map = new Map() - segmentsMap.set(protectString(segmentId1), makeMockSegment(segmentId1, 0, rundownId)) - segmentsMap.set(protectString(segmentId2), makeMockSegment(segmentId2, 0, rundownId)) - const parts: DBPart[] = [] - parts.push(makeMockPart('part1', 0, rundownId, segmentId1, { expectedDuration: 1000 })) - parts.push(makeMockPart('part2', 0, rundownId, segmentId1, { 
expectedDuration: 1000 })) - parts.push(makeMockPart('part3', 0, rundownId, segmentId2, { expectedDuration: 1000 })) - parts.push(makeMockPart('part4', 0, rundownId, segmentId2, { expectedDuration: 1000 })) - const partInstances = convertPartsToPartInstances(parts) - const piecesMap: Map = new Map() - piecesMap.set(protectString('part1'), [ - literal({ - enable: { - start: 0, - }, - prerollDuration: 5000, - pieceType: IBlueprintPieceType.Normal, - }), - ]) - piecesMap.set(protectString('part2'), [ - literal({ - enable: { - start: 0, - }, - prerollDuration: 240, - pieceType: IBlueprintPieceType.Normal, - }), - ]) - const partInstancesMap: Map = new Map() - const rundown = makeMockRundown(rundownId, playlist) - const rundowns = [rundown] - const result = timing.updateDurations( - 0, - false, - playlist, - rundowns, - undefined, - partInstances, - partInstancesMap, - segmentsMap, - DEFAULT_DURATION, - {} - ) - expect(result).toEqual( - literal({ - isLowResolution: false, - asDisplayedPlaylistDuration: 9240, - asPlayedPlaylistDuration: 9240, - currentPartInstanceId: null, - currentPartWillAutoNext: false, - currentTime: 0, - rundownExpectedDurations: { - [rundownId]: 4000, - }, - rundownAsPlayedDurations: { - [rundownId]: 9240, - }, - partCountdown: { - part1: 0, - part2: 6000, - part3: 7240, - part4: 8240, - }, - partDisplayDurations: { - part1: 6000, - part2: 1240, - part3: 1000, - part4: 1000, - }, - partDisplayStartsAt: { - part1: 0, - part2: 6000, - part3: 7240, - part4: 8240, - }, - partDurations: { - part1: 6000, - part2: 1240, - part3: 1000, - part4: 1000, - }, - partExpectedDurations: { - part1: 6000, - part2: 1240, - part3: 1000, - part4: 1000, - }, - partPlayed: { - part1: 0, - part2: 0, - part3: 0, - part4: 0, - }, - partStartsAt: { - part1: 0, - part2: 6000, - part3: 7240, - part4: 8240, - }, - remainingPlaylistDuration: 9240, - totalPlaylistDuration: 9240, - breakIsLastRundown: undefined, - remainingTimeOnCurrentPart: undefined, - rundownsBeforeNextBreak: 
undefined, - nextRundownAnchor: undefined, - }) - ) - }) - it('Handles part with autonext', () => { const timing = new RundownTimingCalculator() const playlist: DBRundownPlaylist = makeMockPlaylist() From c0a3acf7812092da7a7f4b4b49e1887f78cd609d Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 23 Oct 2024 11:43:28 +0100 Subject: [PATCH 45/81] feat: quickloop improvements SOFIE-153 (#16) --- meteor/server/api/userActions.ts | 20 +++ .../src/context/eventContext.ts | 4 + .../src/context/onSetAsNextContext.ts | 12 ++ .../src/context/partsAndPieceActionContext.ts | 4 + .../src/context/quickLoopInfo.ts | 6 + packages/corelib/src/worker/studio.ts | 7 + .../blueprints/context/OnSetAsNextContext.ts | 26 ++++ .../src/blueprints/context/OnTakeContext.ts | 5 + .../context/OnTimelineGenerateContext.ts | 7 +- .../src/blueprints/context/adlibActions.ts | 11 +- .../job-worker/src/blueprints/context/lib.ts | 11 ++ .../PartAndPieceInstanceActionService.ts | 6 + .../job-worker/src/playout/moveNextPart.ts | 18 +-- .../src/playout/quickLoopMarkers.ts | 108 ++++++++++---- packages/job-worker/src/playout/setNext.ts | 137 ++++++++++++------ .../job-worker/src/playout/setNextJobs.ts | 10 +- .../job-worker/src/workers/studio/jobs.ts | 3 +- .../api/schemas/activePlaylist.yaml | 38 +++++ .../src/liveStatusServer.ts | 1 + .../topics/__tests__/activePlaylist.spec.ts | 2 + .../src/topics/activePlaylistTopic.ts | 97 ++++++++++++- packages/meteor-lib/src/api/userActions.ts | 6 + packages/meteor-lib/src/userAction.ts | 1 + .../webui/src/client/lib/RundownResolver.ts | 4 + .../webui/src/client/lib/clientUserAction.ts | 2 + packages/webui/src/client/ui/RundownView.tsx | 19 +++ 26 files changed, 472 insertions(+), 93 deletions(-) create mode 100644 packages/blueprints-integration/src/context/quickLoopInfo.ts diff --git a/meteor/server/api/userActions.ts b/meteor/server/api/userActions.ts index 3fb8a5a38e2..1f4935625ed 100644 --- a/meteor/server/api/userActions.ts +++ 
b/meteor/server/api/userActions.ts @@ -1270,6 +1270,26 @@ class ServerUserActionAPI ) } + async clearQuickLoop( + userEvent: string, + eventTime: number, + playlistId: RundownPlaylistId + ): Promise> { + return ServerClientAPI.runUserActionInLogForPlaylistOnWorker( + this, + userEvent, + eventTime, + playlistId, + () => { + check(playlistId, String) + }, + StudioJobs.ClearQuickLoopMarkers, + { + playlistId, + } + ) + } + async executeUserChangeOperation( userEvent: string, eventTime: Time, diff --git a/packages/blueprints-integration/src/context/eventContext.ts b/packages/blueprints-integration/src/context/eventContext.ts index 271ba20871a..e5fe6e234d8 100644 --- a/packages/blueprints-integration/src/context/eventContext.ts +++ b/packages/blueprints-integration/src/context/eventContext.ts @@ -2,6 +2,7 @@ import type { OnGenerateTimelineObj, TSR } from '../timeline' import type { IBlueprintPartInstance, IBlueprintPieceInstance, IBlueprintSegmentDB } from '../documents' import type { IRundownContext } from './rundownContext' import type { IBlueprintExternalMessageQueueObj } from '../message' +import { BlueprintQuickLookInfo } from './quickLoopInfo' export interface IEventContext { getCurrentTime(): number @@ -12,6 +13,9 @@ export interface ITimelineEventContext extends IEventContext, IRundownContext { readonly nextPartInstance: Readonly | undefined readonly previousPartInstance: Readonly | undefined + /** Information about the current loop, if there is one */ + readonly quickLoopInfo: BlueprintQuickLookInfo | null + /** * Get the full session id for an ab playback session. 
* Note: sessionName should be unique within the segment unless pieces want to share a session diff --git a/packages/blueprints-integration/src/context/onSetAsNextContext.ts b/packages/blueprints-integration/src/context/onSetAsNextContext.ts index 0b28371aa23..da6afe52ae5 100644 --- a/packages/blueprints-integration/src/context/onSetAsNextContext.ts +++ b/packages/blueprints-integration/src/context/onSetAsNextContext.ts @@ -9,12 +9,16 @@ import { IEventContext, IShowStyleUserContext, } from '..' +import { BlueprintQuickLookInfo } from './quickLoopInfo' /** * Context in which 'current' is the part currently on air, and 'next' is the partInstance being set as Next * This is similar to `IPartAndPieceActionContext`, but has more limits on what is allowed to be changed. */ export interface IOnSetAsNextContext extends IShowStyleUserContext, IEventContext { + /** Information about the current loop, if there is one */ + readonly quickLoopInfo: BlueprintQuickLookInfo | null + /** * Data fetching */ @@ -65,4 +69,12 @@ export interface IOnSetAsNextContext extends IShowStyleUserContext, IEventContex */ /** Remove piecesInstances by id. Returns ids of piecesInstances that were removed. Note: For now we only allow removing from the next, but this might change to include current if there is justification */ removePieceInstances(part: 'next', pieceInstanceIds: string[]): Promise + + /** + * Move the next part through the rundown. Can move by either a number of parts, or segments in either direction. + * This will result in the `onSetAsNext` callback being called again following the current call, with the new PartInstance. + * Multiple calls of this inside one call to `onSetAsNext` will replace earlier calls. 
+ * @returns Whether a new Part was found using the provided offset + */ + moveNextPart(partDelta: number, segmentDelta: number): Promise } diff --git a/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts b/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts index 4c68a254f86..d7a213f599d 100644 --- a/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts +++ b/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts @@ -8,8 +8,12 @@ import { IBlueprintResolvedPieceInstance, Time, } from '..' +import { BlueprintQuickLookInfo } from './quickLoopInfo' export interface IPartAndPieceActionContext { + /** Information about the current loop, if there is one */ + readonly quickLoopInfo: BlueprintQuickLookInfo | null + /** * Data fetching */ diff --git a/packages/blueprints-integration/src/context/quickLoopInfo.ts b/packages/blueprints-integration/src/context/quickLoopInfo.ts new file mode 100644 index 00000000000..7c486b8e2d7 --- /dev/null +++ b/packages/blueprints-integration/src/context/quickLoopInfo.ts @@ -0,0 +1,6 @@ +export type BlueprintQuickLookInfo = Readonly<{ + /** Whether there is a loop running */ + running: boolean + /** Whether the loop is locked from user editing */ + locked: boolean +}> diff --git a/packages/corelib/src/worker/studio.ts b/packages/corelib/src/worker/studio.ts index c86324ed4a2..73c271a99f3 100644 --- a/packages/corelib/src/worker/studio.ts +++ b/packages/corelib/src/worker/studio.ts @@ -194,6 +194,11 @@ export enum StudioJobs { */ SetQuickLoopMarker = 'setQuickLoopMarker', + /** + * Clear all QuickLoop markers + */ + ClearQuickLoopMarkers = 'clearQuickLoopMarkers', + /** * Switch the route of the studio * for use in ad.lib actions and other triggers @@ -350,6 +355,7 @@ export interface SetQuickLoopMarkerProps extends RundownPlayoutPropsBase { type: 'start' | 'end' marker: QuickLoopMarker | null } +export type ClearQuickLoopMarkersProps = RundownPlayoutPropsBase 
export interface SwitchRouteSetProps { routeSetId: string @@ -409,6 +415,7 @@ export type StudioJobFunc = { [StudioJobs.ActivateAdlibTesting]: (data: ActivateAdlibTestingProps) => void [StudioJobs.SetQuickLoopMarker]: (data: SetQuickLoopMarkerProps) => void + [StudioJobs.ClearQuickLoopMarkers]: (data: ClearQuickLoopMarkersProps) => void [StudioJobs.SwitchRouteSet]: (data: SwitchRouteSetProps) => void } diff --git a/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts b/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts index 624e0a68d4d..403fc2fbf3c 100644 --- a/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts +++ b/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts @@ -22,11 +22,16 @@ import { PlayoutModel } from '../../playout/model/PlayoutModel' import { ReadonlyDeep } from 'type-fest' import { getCurrentTime } from '../../lib' import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' +import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' +import { selectNewPartWithOffsets } from '../../playout/moveNextPart' export class OnSetAsNextContext extends ShowStyleUserContext implements IOnSetAsNextContext, IEventContext, IPartAndPieceInstanceActionContext { + public pendingMoveNextPart: { selectedPart: ReadonlyDeep | null } | undefined = undefined + constructor( contextInfo: UserContextInfo, context: JobContext, @@ -38,6 +43,10 @@ export class OnSetAsNextContext super(contextInfo, context, showStyle, watchedPackages) } + public get quickLoopInfo(): BlueprintQuickLookInfo | null { + return this.partAndPieceInstanceService.quickLoopInfo + } + public get nextPartState(): ActionPartChange { return this.partAndPieceInstanceService.nextPartState } @@ -112,6 +121,23 @@ export class OnSetAsNextContext return this.partAndPieceInstanceService.removePieceInstances('next', 
pieceInstanceIds) } + async moveNextPart(partDelta: number, segmentDelta: number): Promise { + if (typeof partDelta !== 'number') throw new Error('partDelta must be a number') + if (typeof segmentDelta !== 'number') throw new Error('segmentDelta must be a number') + + // Values of 0 mean discard the pending change + if (partDelta === 0 && segmentDelta === 0) { + this.pendingMoveNextPart = undefined + return true + } + + this.pendingMoveNextPart = { + selectedPart: selectNewPartWithOffsets(this.jobContext, this.playoutModel, partDelta, segmentDelta), + } + + return !!this.pendingMoveNextPart.selectedPart + } + getCurrentTime(): number { return getCurrentTime() } diff --git a/packages/job-worker/src/blueprints/context/OnTakeContext.ts b/packages/job-worker/src/blueprints/context/OnTakeContext.ts index ce0e31979da..8fef14c7532 100644 --- a/packages/job-worker/src/blueprints/context/OnTakeContext.ts +++ b/packages/job-worker/src/blueprints/context/OnTakeContext.ts @@ -23,10 +23,15 @@ import { getCurrentTime } from '../../lib' import { JobContext, ProcessedShowStyleCompound } from '../../jobs' import { executePeripheralDeviceAction, listPlayoutDevices } from '../../peripheralDevice' import { ActionPartChange, PartAndPieceInstanceActionService } from './services/PartAndPieceInstanceActionService' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' export class OnTakeContext extends ShowStyleUserContext implements IOnTakeContext, IEventContext { public isTakeAborted: boolean + public get quickLoopInfo(): BlueprintQuickLookInfo | null { + return this.partAndPieceInstanceService.quickLoopInfo + } + public get currentPartState(): ActionPartChange { return this.partAndPieceInstanceService.currentPartState } diff --git a/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts b/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts index 9b7af560d33..6c3f8cd30d8 100644 --- 
a/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts +++ b/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts @@ -15,17 +15,20 @@ import { PieceInstance, ResolvedPieceInstance } from '@sofie-automation/corelib/ import { ProcessedStudioConfig, ProcessedShowStyleConfig } from '../config' import _ = require('underscore') import { ProcessedShowStyleCompound } from '../../jobs' -import { convertPartInstanceToBlueprints } from './lib' +import { convertPartInstanceToBlueprints, createBlueprintQuickLoopInfo } from './lib' import { RundownContext } from './RundownContext' import { AbSessionHelper } from '../../playout/abPlayback/abSessionHelper' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' export class OnTimelineGenerateContext extends RundownContext implements ITimelineEventContext { readonly currentPartInstance: Readonly | undefined readonly nextPartInstance: Readonly | undefined readonly previousPartInstance: Readonly | undefined + readonly quickLoopInfo: BlueprintQuickLookInfo | null + readonly abSessionsHelper: AbSessionHelper readonly #pieceInstanceCache = new Map>() @@ -57,6 +60,8 @@ export class OnTimelineGenerateContext extends RundownContext implements ITimeli this.nextPartInstance = nextPartInstance && convertPartInstanceToBlueprints(nextPartInstance) this.previousPartInstance = previousPartInstance && convertPartInstanceToBlueprints(previousPartInstance) + this.quickLoopInfo = createBlueprintQuickLoopInfo(playlist) + const partInstances = _.compact([previousPartInstance, currentPartInstance, nextPartInstance]) for (const pieceInstance of pieceInstances) { diff --git a/packages/job-worker/src/blueprints/context/adlibActions.ts b/packages/job-worker/src/blueprints/context/adlibActions.ts index 
9bbe86103b6..a0b6dfe31bd 100644 --- a/packages/job-worker/src/blueprints/context/adlibActions.ts +++ b/packages/job-worker/src/blueprints/context/adlibActions.ts @@ -22,12 +22,14 @@ import { ShowStyleUserContext } from './ShowStyleUserContext' import { WatchedPackagesHelper } from './watchedPackages' import { getCurrentTime } from '../../lib' import { JobContext, ProcessedShowStyleCompound } from '../../jobs' -import { moveNextPart } from '../../playout/moveNextPart' +import { selectNewPartWithOffsets } from '../../playout/moveNextPart' import { ProcessedShowStyleConfig } from '../config' import { DatastorePersistenceMode } from '@sofie-automation/shared-lib/dist/core/model/TimelineDatastore' import { removeTimelineDatastoreValue, setTimelineDatastoreValue } from '../../playout/datastore' import { executePeripheralDeviceAction, listPlayoutDevices } from '../../peripheralDevice' import { ActionPartChange, PartAndPieceInstanceActionService } from './services/PartAndPieceInstanceActionService' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' +import { setNextPartFromPart } from '../../playout/setNext' export class DatastoreActionExecutionContext extends ShowStyleUserContext @@ -70,6 +72,10 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct */ public forceRegenerateTimeline = false + public get quickLoopInfo(): BlueprintQuickLookInfo | null { + return this.partAndPieceInstanceService.quickLoopInfo + } + public get currentPartState(): ActionPartChange { return this.partAndPieceInstanceService.currentPartState } @@ -151,7 +157,8 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct } async moveNextPart(partDelta: number, segmentDelta: number): Promise { - await moveNextPart(this._context, this._playoutModel, partDelta, segmentDelta) + const selectedPart = selectNewPartWithOffsets(this._context, this._playoutModel, partDelta, segmentDelta) + if 
(selectedPart) await setNextPartFromPart(this._context, this._playoutModel, selectedPart, true) } async updatePartInstance( diff --git a/packages/job-worker/src/blueprints/context/lib.ts b/packages/job-worker/src/blueprints/context/lib.ts index 7175dce3b34..f0fb167a197 100644 --- a/packages/job-worker/src/blueprints/context/lib.ts +++ b/packages/job-worker/src/blueprints/context/lib.ts @@ -59,6 +59,7 @@ import { UserEditingType, } from '@sofie-automation/blueprints-integration/dist/userEditing' import type { PlayoutMutatablePart } from '../../playout/model/PlayoutPartInstanceModel' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' /** * Convert an object to have all the values of all keys (including optionals) be 'true' @@ -581,3 +582,13 @@ export function convertPartialBlueprintMutablePartToCore( return playoutUpdatePart } + +export function createBlueprintQuickLoopInfo(playlist: ReadonlyDeep): BlueprintQuickLookInfo | null { + const playlistLoopProps = playlist.quickLoop + if (!playlistLoopProps) return null + + return { + running: playlistLoopProps.running, + locked: playlistLoopProps.locked, + } +} diff --git a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts index 45d96f383c4..849b95c2b0a 100644 --- a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts +++ b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts @@ -22,6 +22,7 @@ import { convertPieceInstanceToBlueprints, convertPieceToBlueprints, convertResolvedPieceInstanceToBlueprints, + createBlueprintQuickLoopInfo, getMediaObjectDuration, } from '../lib' import { getResolvedPiecesForCurrentPartInstance } from '../../../playout/resolvedPieces' @@ -55,6 +56,7 @@ import { syncPlayheadInfinitesForNextPartInstance } from '../../../playout/infin import { 
validateAdlibTestingPartInstanceProperties } from '../../../playout/adlibTesting' import { DBPart, isPartPlayable } from '@sofie-automation/corelib/dist/dataModel/Part' import { PlayoutRundownModel } from '../../../playout/model/PlayoutRundownModel' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' export enum ActionPartChange { NONE = 0, @@ -71,6 +73,10 @@ export class PartAndPieceInstanceActionService { private readonly _playoutModel: PlayoutModel readonly showStyleCompound: ReadonlyDeep + public get quickLoopInfo(): BlueprintQuickLookInfo | null { + return createBlueprintQuickLoopInfo(this._playoutModel.playlist) + } + /** To be set by any mutation methods on this context. Indicates to core how extensive the changes are to the current partInstance */ public currentPartState: ActionPartChange = ActionPartChange.NONE /** To be set by any mutation methods on this context. Indicates to core how extensive the changes are to the next partInstance */ diff --git a/packages/job-worker/src/playout/moveNextPart.ts b/packages/job-worker/src/playout/moveNextPart.ts index 14d64aaaaea..ca6a3e4e9c3 100644 --- a/packages/job-worker/src/playout/moveNextPart.ts +++ b/packages/job-worker/src/playout/moveNextPart.ts @@ -1,20 +1,18 @@ import { groupByToMap } from '@sofie-automation/corelib/dist/lib' import { DBPart, isPartPlayable } from '@sofie-automation/corelib/dist/dataModel/Part' import { JobContext } from '../jobs' -import { PartId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PlayoutModel } from './model/PlayoutModel' +import { PlayoutModelReadonly } from './model/PlayoutModel' import { sortPartsInSortedSegments } from '@sofie-automation/corelib/dist/playout/playlist' -import { setNextPartFromPart } from './setNext' import { logger } from '../logging' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { ReadonlyDeep } from 'type-fest' -export async 
function moveNextPart( - context: JobContext, - playoutModel: PlayoutModel, +export function selectNewPartWithOffsets( + _context: JobContext, + playoutModel: PlayoutModelReadonly, partDelta: number, segmentDelta: number -): Promise { +): ReadonlyDeep | null { const playlist = playoutModel.playlist const currentPartInstance = playoutModel.currentPartInstance?.partInstance @@ -69,8 +67,7 @@ export async function moveNextPart( // TODO - looping playlists if (selectedPart) { // Switch to that part - await setNextPartFromPart(context, playoutModel, selectedPart, true) - return selectedPart._id + return selectedPart } else { // Nothing looked valid so do nothing // Note: we should try and a smaller delta if it is not -1/1 @@ -101,8 +98,7 @@ export async function moveNextPart( if (targetPart) { // Switch to that part - await setNextPartFromPart(context, playoutModel, targetPart, true) - return targetPart._id + return targetPart } else { // Nothing looked valid so do nothing // Note: we should try and a smaller delta if it is not -1/1 diff --git a/packages/job-worker/src/playout/quickLoopMarkers.ts b/packages/job-worker/src/playout/quickLoopMarkers.ts index cceeed6b81d..7b046545147 100644 --- a/packages/job-worker/src/playout/quickLoopMarkers.ts +++ b/packages/job-worker/src/playout/quickLoopMarkers.ts @@ -1,5 +1,5 @@ import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' -import { SetQuickLoopMarkerProps } from '@sofie-automation/corelib/dist/worker/studio' +import { ClearQuickLoopMarkersProps, SetQuickLoopMarkerProps } from '@sofie-automation/corelib/dist/worker/studio' import { JobContext } from '../jobs' import { runJobWithPlayoutModel } from './lock' import { updateTimeline } from './timeline/generate' @@ -8,6 +8,7 @@ import { setNextPart } from './setNext' import { resetPartInstancesWithPieceInstances } from './lib' import { QuickLoopMarker, QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { 
SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PlayoutModel } from './model/PlayoutModel' export async function handleSetQuickLoopMarker(context: JobContext, data: SetQuickLoopMarkerProps): Promise { return runJobWithPlayoutModel( @@ -20,8 +21,10 @@ export async function handleSetQuickLoopMarker(context: JobContext, data: SetQui async (playoutModel) => { const playlist = playoutModel.playlist if (!playlist.activationId) throw new Error(`Playlist has no activationId!`) + const oldProps = playoutModel.playlist.quickLoop const wasQuickLoopRunning = oldProps?.running + playoutModel.setQuickLoopMarker(data.type, data.marker) const markerChanged = ( @@ -46,7 +49,7 @@ export async function handleSetQuickLoopMarker(context: JobContext, data: SetQui if (playlist.currentPartInfo) { // rundown is on air - let segmentsToReset: SegmentId[] = [] + let segmentIdsToReset: SegmentId[] = [] if ( playlist.quickLoop?.start && @@ -54,7 +57,7 @@ export async function handleSetQuickLoopMarker(context: JobContext, data: SetQui markerChanged(oldProps.start, playlist.quickLoop.start) ) { // start marker changed - segmentsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( + segmentIdsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( playlist.quickLoop.start, oldProps.start ) @@ -64,45 +67,100 @@ export async function handleSetQuickLoopMarker(context: JobContext, data: SetQui markerChanged(oldProps.end, playlist.quickLoop.end) ) { // end marker changed - segmentsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( + segmentIdsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( oldProps.end, playlist.quickLoop.end ) } else if (playlist.quickLoop?.start && playlist.quickLoop.end && !(oldProps?.start && oldProps.end)) { // a new loop was created - segmentsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( + segmentIdsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( playlist.quickLoop.start, playlist.quickLoop.end ) } // reset 
segments that have been added to the loop and are not on-air - resetPartInstancesWithPieceInstances(context, playoutModel, { - segmentId: { - $in: segmentsToReset.filter( - (segmentId) => - segmentId !== playoutModel.currentPartInstance?.partInstance.segmentId && - segmentId !== playoutModel.nextPartInstance?.partInstance.segmentId - ), - }, - }) + resetPartInstancesWithPieceInstancesForAffectedSegments(context, playoutModel, segmentIdsToReset) } if (wasQuickLoopRunning) { - const nextPart = selectNextPart( - context, - playoutModel.playlist, - playoutModel.currentPartInstance?.partInstance ?? null, - playoutModel.nextPartInstance?.partInstance ?? null, - playoutModel.getAllOrderedSegments(), - playoutModel.getAllOrderedParts(), - { ignoreUnplayable: true, ignoreQuickLoop: false } + await updateNextedPartAfterQuickLoopMarkerChange(context, playoutModel) + } + await updateTimeline(context, playoutModel) + } + ) +} + +export async function handleClearQuickLoopMarkers( + context: JobContext, + data: ClearQuickLoopMarkersProps +): Promise { + return runJobWithPlayoutModel( + context, + data, + async (playoutModel) => { + const playlist = playoutModel.playlist + if (!playlist.activationId) throw UserError.create(UserErrorMessage.InactiveRundown) + }, + async (playoutModel) => { + const playlist = playoutModel.playlist + if (!playlist.activationId) throw new Error(`Playlist has no activationId!`) + + const wasQuickLoopRunning = playoutModel.playlist.quickLoop?.running + + // a new loop was created + if (playlist.quickLoop?.start && playlist.quickLoop.end) { + const segmentIdsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( + playlist.quickLoop.start, + playlist.quickLoop.end ) - if (nextPart?.part._id !== playoutModel.nextPartInstance?.partInstance.part._id) { - await setNextPart(context, playoutModel, nextPart, false) - } + + // reset segments that have been added to the loop and are not on-air + resetPartInstancesWithPieceInstancesForAffectedSegments(context, 
playoutModel, segmentIdsToReset) + } + + playoutModel.setQuickLoopMarker('start', null) + playoutModel.setQuickLoopMarker('end', null) + + if (wasQuickLoopRunning) { + await updateNextedPartAfterQuickLoopMarkerChange(context, playoutModel) } await updateTimeline(context, playoutModel) } ) } + +async function updateNextedPartAfterQuickLoopMarkerChange(context: JobContext, playoutModel: PlayoutModel) { + const nextPart = selectNextPart( + context, + playoutModel.playlist, + playoutModel.currentPartInstance?.partInstance ?? null, + playoutModel.nextPartInstance?.partInstance ?? null, + playoutModel.getAllOrderedSegments(), + playoutModel.getAllOrderedParts(), + { ignoreUnplayable: true, ignoreQuickLoop: false } + ) + if (nextPart?.part._id !== playoutModel.nextPartInstance?.partInstance.part._id) { + await setNextPart(context, playoutModel, nextPart, false) + } +} + +function resetPartInstancesWithPieceInstancesForAffectedSegments( + context: JobContext, + playoutModel: PlayoutModel, + segmentIdsToReset: SegmentId[] +) { + const segmentIdsExceptTheCurrent = segmentIdsToReset.filter( + (segmentId) => + segmentId !== playoutModel.currentPartInstance?.partInstance.segmentId && + segmentId !== playoutModel.nextPartInstance?.partInstance.segmentId + ) + if (segmentIdsExceptTheCurrent.length === 0) return + + // reset segments that have been added to the loop and are not on-air + resetPartInstancesWithPieceInstances(context, playoutModel, { + segmentId: { + $in: segmentIdsExceptTheCurrent, + }, + }) +} diff --git a/packages/job-worker/src/playout/setNext.ts b/packages/job-worker/src/playout/setNext.ts index a6178d33475..de53256f54e 100644 --- a/packages/job-worker/src/playout/setNext.ts +++ b/packages/job-worker/src/playout/setNext.ts @@ -47,10 +47,57 @@ export async function setNextPart( ): Promise { const span = context.startSpan('setNextPart') - const rundownIds = playoutModel.getRundownIds() - const currentPartInstance = playoutModel.currentPartInstance - const 
nextPartInstance = playoutModel.nextPartInstance + const attemptedPartIds = new Set() + if (rawNextPart && 'part' in rawNextPart) attemptedPartIds.add(rawNextPart.part._id) + let moveNextToPart = await setNextPartAndCheckForPendingMoveNextPart( + context, + playoutModel, + rawNextPart, + setManually, + nextTimeOffset + ) + while (moveNextToPart) { + // Ensure that we aren't stuck in an infinite loop. If this while loop is being run for a part twice, then the blueprints are behaving oddly and will likely get stuck + // Instead of throwing and causing a larger failure, we can stop processing here, and leave something as next + const nextedId = moveNextToPart.selectedPart?._id ?? null + if (attemptedPartIds.has(nextedId)) { + logger.error(`Blueprint onSetAsNext callback moved the next part ${attemptedPartIds.size}, forming a loop`) + break + } + attemptedPartIds.add(nextedId) + + moveNextToPart = await setNextPartAndCheckForPendingMoveNextPart( + context, + playoutModel, + moveNextToPart.selectedPart + ? 
{ + part: moveNextToPart.selectedPart, + consumesQueuedSegmentId: false, + } + : null, + true + ) + } + + playoutModel.removeUntakenPartInstances() + + resetPartInstancesWhenChangingSegment(context, playoutModel) + + playoutModel.updateQuickLoopState() + + await cleanupOrphanedItems(context, playoutModel) + + if (span) span.end() +} + +async function setNextPartAndCheckForPendingMoveNextPart( + context: JobContext, + playoutModel: PlayoutModel, + rawNextPart: ReadonlyDeep> | PlayoutPartInstanceModel | null, + setManually: boolean, + nextTimeOffset?: number | undefined +) { if (rawNextPart) { if (!playoutModel.playlist.activationId) throw new Error(`RundownPlaylist "${playoutModel.playlist._id}" is not active`) @@ -64,7 +111,7 @@ export async function setNextPart( throw new Error('Part is marked as invalid, cannot set as next.') } - if (!rundownIds.includes(inputPartInstance.partInstance.rundownId)) { + if (!playoutModel.getRundown(inputPartInstance.partInstance.rundownId)) { throw new Error( `PartInstance "${inputPartInstance.partInstance._id}" of rundown "${inputPartInstance.partInstance.rundownId}" not part of RundownPlaylist "${playoutModel.playlist._id}"` ) @@ -78,7 +125,7 @@ export async function setNextPart( throw new Error('Part is marked as invalid, cannot set as next.') } - if (!rundownIds.includes(selectedPart.part.rundownId)) { + if (!playoutModel.getRundown(selectedPart.part.rundownId)) { throw new Error( `Part "${selectedPart.part._id}" of rundown "${selectedPart.part.rundownId}" not part of RundownPlaylist "${playoutModel.playlist._id}"` ) @@ -86,6 +133,9 @@ export async function setNextPart( consumesQueuedSegmentId = selectedPart.consumesQueuedSegmentId ?? 
false + const currentPartInstance = playoutModel.currentPartInstance + const nextPartInstance = playoutModel.nextPartInstance + if (nextPartInstance && nextPartInstance.partInstance.part._id === selectedPart.part._id) { // Re-use existing @@ -120,22 +170,13 @@ export async function setNextPart( playoutModel.setPartInstanceAsNext(newPartInstance, setManually, consumesQueuedSegmentId, nextTimeOffset) - await executeOnSetAsNextCallback(playoutModel, newPartInstance, context) + return executeOnSetAsNextCallback(playoutModel, newPartInstance, context) } else { // Set to null playoutModel.setPartInstanceAsNext(null, setManually, false, nextTimeOffset) + return undefined } - - playoutModel.removeUntakenPartInstances() - - resetPartInstancesWhenChangingSegment(context, playoutModel) - - playoutModel.updateQuickLoopState() - - await cleanupOrphanedItems(context, playoutModel) - - if (span) span.end() } async function executeOnSetAsNextCallback( @@ -144,38 +185,40 @@ async function executeOnSetAsNextCallback( context: JobContext ) { const rundownOfNextPart = playoutModel.getRundown(newPartInstance.partInstance.rundownId) - if (rundownOfNextPart) { - const blueprint = await context.getShowStyleBlueprint(rundownOfNextPart.rundown.showStyleBaseId) - if (blueprint.blueprint.onSetAsNext) { - const showStyle = await context.getShowStyleCompound( - rundownOfNextPart.rundown.showStyleVariantId, - rundownOfNextPart.rundown.showStyleBaseId - ) - const watchedPackagesHelper = WatchedPackagesHelper.empty(context) - const onSetAsNextContext = new OnSetAsNextContext( - { - name: `${rundownOfNextPart.rundown.name}(${playoutModel.playlist.name})`, - identifier: `playlist=${playoutModel.playlist._id},rundown=${ - rundownOfNextPart.rundown._id - },currentPartInstance=${ - playoutModel.playlist.currentPartInfo?.partInstanceId - },execution=${getRandomId()}`, - tempSendUserNotesIntoBlackHole: true, // TODO-CONTEXT store these notes - }, - context, - playoutModel, - showStyle, - 
watchedPackagesHelper, - new PartAndPieceInstanceActionService(context, playoutModel, showStyle, rundownOfNextPart) - ) - try { - await blueprint.blueprint.onSetAsNext(onSetAsNextContext) - await applyOnSetAsNextSideEffects(context, playoutModel, onSetAsNextContext) - } catch (err) { - logger.error(`Error in showStyleBlueprint.onSetAsNext: ${stringifyError(err)}`) - } - } + if (!rundownOfNextPart) return null + + const blueprint = await context.getShowStyleBlueprint(rundownOfNextPart.rundown.showStyleBaseId) + if (!blueprint.blueprint.onSetAsNext) return null + + const showStyle = await context.getShowStyleCompound( + rundownOfNextPart.rundown.showStyleVariantId, + rundownOfNextPart.rundown.showStyleBaseId + ) + + const watchedPackagesHelper = WatchedPackagesHelper.empty(context) + const onSetAsNextContext = new OnSetAsNextContext( + { + name: `${rundownOfNextPart.rundown.name}(${playoutModel.playlist.name})`, + identifier: `playlist=${playoutModel.playlist._id},rundown=${ + rundownOfNextPart.rundown._id + },currentPartInstance=${playoutModel.playlist.currentPartInfo?.partInstanceId},execution=${getRandomId()}`, + tempSendUserNotesIntoBlackHole: true, // TODO-CONTEXT store these notes + }, + context, + playoutModel, + showStyle, + watchedPackagesHelper, + new PartAndPieceInstanceActionService(context, playoutModel, showStyle, rundownOfNextPart) + ) + + try { + await blueprint.blueprint.onSetAsNext(onSetAsNextContext) + await applyOnSetAsNextSideEffects(context, playoutModel, onSetAsNextContext) + } catch (err) { + logger.error(`Error in showStyleBlueprint.onSetAsNext: ${stringifyError(err)}`) } + + return onSetAsNextContext.pendingMoveNextPart } async function applyOnSetAsNextSideEffects( diff --git a/packages/job-worker/src/playout/setNextJobs.ts b/packages/job-worker/src/playout/setNextJobs.ts index eaaec419498..fe297ae3133 100644 --- a/packages/job-worker/src/playout/setNextJobs.ts +++ b/packages/job-worker/src/playout/setNextJobs.ts @@ -12,7 +12,7 @@ import { 
import { JobContext } from '../jobs' import { runJobWithPlayoutModel } from './lock' import { setNextPartFromPart, setNextSegment, queueNextSegment } from './setNext' -import { moveNextPart } from './moveNextPart' +import { selectNewPartWithOffsets } from './moveNextPart' import { updateTimeline } from './timeline/generate' import { PlayoutSegmentModel } from './model/PlayoutSegmentModel' import { ReadonlyDeep } from 'type-fest' @@ -68,11 +68,13 @@ export async function handleMoveNextPart(context: JobContext, data: MoveNextPart } }, async (playoutModel) => { - const newPartId = await moveNextPart(context, playoutModel, data.partDelta, data.segmentDelta) + const selectedPart = selectNewPartWithOffsets(context, playoutModel, data.partDelta, data.segmentDelta) + if (!selectedPart) return null - if (newPartId) await updateTimeline(context, playoutModel) + await setNextPartFromPart(context, playoutModel, selectedPart, true) + await updateTimeline(context, playoutModel) - return newPartId + return selectedPart._id } ) } diff --git a/packages/job-worker/src/workers/studio/jobs.ts b/packages/job-worker/src/workers/studio/jobs.ts index a7741e6e56e..4cc371d5dfb 100644 --- a/packages/job-worker/src/workers/studio/jobs.ts +++ b/packages/job-worker/src/workers/studio/jobs.ts @@ -44,7 +44,7 @@ import { import { handleTimelineTriggerTime, handleOnPlayoutPlaybackChanged } from '../../playout/timings' import { handleExecuteAdlibAction } from '../../playout/adlibAction' import { handleTakeNextPart } from '../../playout/take' -import { handleSetQuickLoopMarker } from '../../playout/quickLoopMarkers' +import { handleClearQuickLoopMarkers, handleSetQuickLoopMarker } from '../../playout/quickLoopMarkers' import { handleActivateAdlibTesting } from '../../playout/adlibTesting' import { handleExecuteBucketAdLibOrAction } from '../../playout/bucketAdlibJobs' import { handleSwitchRouteSet } from '../../studio/routeSet' @@ -107,6 +107,7 @@ export const studioJobHandlers: StudioJobHandlers = { 
[StudioJobs.ActivateAdlibTesting]: handleActivateAdlibTesting, [StudioJobs.SetQuickLoopMarker]: handleSetQuickLoopMarker, + [StudioJobs.ClearQuickLoopMarkers]: handleClearQuickLoopMarkers, [StudioJobs.SwitchRouteSet]: handleSwitchRouteSet, } diff --git a/packages/live-status-gateway/api/schemas/activePlaylist.yaml b/packages/live-status-gateway/api/schemas/activePlaylist.yaml index f0ad1349fac..ae9b551995f 100644 --- a/packages/live-status-gateway/api/schemas/activePlaylist.yaml +++ b/packages/live-status-gateway/api/schemas/activePlaylist.yaml @@ -29,6 +29,23 @@ $defs: $ref: '#/$defs/part' publicData: description: Optional arbitrary data + quickLoop: + description: Information about the current quickLoop, if any + type: object + properties: + locked: + description: Whether the user is allowed to make alterations to the Start/End markers + type: boolean + running: + description: Whether the loop has two valid markers and is currently running + type: boolean + start: + description: The start of the loop + $ref: '#/$defs/quickLoopMarker' + end: + description: The end of the loop + $ref: '#/$defs/quickLoopMarker' + required: [locked, running] required: [event, id, name, rundownIds, currentPart, currentSegment, nextPart] additionalProperties: false examples: @@ -191,3 +208,24 @@ $defs: tags: ['camera'] publicData: switcherSource: 1 + quickLoopMarker: + type: object + properties: + markerType: + description: The type of entity the marker is locked to + type: string + enum: + - playlist + - rundown + - segment + - part + rundownId: + description: The rundown that this marker references. This will be set for rundown, segment and part markers + type: string + segmentId: + description: The segment that this marker references. This will be set for segment and part markers + type: string + partId: + description: The part that this marker references. 
This will be set for only part markers + type: string + required: [markerType] diff --git a/packages/live-status-gateway/src/liveStatusServer.ts b/packages/live-status-gateway/src/liveStatusServer.ts index 2998d151b8e..90bd64c4713 100644 --- a/packages/live-status-gateway/src/liveStatusServer.ts +++ b/packages/live-status-gateway/src/liveStatusServer.ts @@ -109,6 +109,7 @@ export class LiveStatusServer { await partsHandler.subscribe(activePlaylistTopic) await pieceInstancesHandler.subscribe(activePlaylistTopic) await segmentHandler.subscribe(activePlaylistTopic) + await segmentsHandler.subscribe(activePlaylistTopic) await playlistHandler.subscribe(activePiecesTopic) await showStyleBaseHandler.subscribe(activePiecesTopic) diff --git a/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts b/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts index d26fedebb08..c7fe838ee23 100644 --- a/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts +++ b/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts @@ -49,6 +49,7 @@ describe('ActivePlaylistTopic', () => { currentSegment: null, rundownIds: unprotectStringArray(playlist.rundownIdsInOrder), publicData: undefined, + quickLoop: undefined, } // eslint-disable-next-line @typescript-eslint/unbound-method @@ -139,6 +140,7 @@ describe('ActivePlaylistTopic', () => { }, rundownIds: unprotectStringArray(playlist.rundownIdsInOrder), publicData: { a: 'b' }, + quickLoop: undefined, } // eslint-disable-next-line @typescript-eslint/unbound-method diff --git a/packages/live-status-gateway/src/topics/activePlaylistTopic.ts b/packages/live-status-gateway/src/topics/activePlaylistTopic.ts index 0f3acc01283..7bafb39871f 100644 --- a/packages/live-status-gateway/src/topics/activePlaylistTopic.ts +++ b/packages/live-status-gateway/src/topics/activePlaylistTopic.ts @@ -1,9 +1,13 @@ import { Logger } from 'winston' import { WebSocket } from 'ws' import { 
unprotectString } from '@sofie-automation/shared-lib/dist/lib/protectedString' -import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { + DBRundownPlaylist, + QuickLoopMarker, + QuickLoopMarkerType, +} from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' -import { literal } from '@sofie-automation/shared-lib/dist/lib/lib' +import { assertNever, literal } from '@sofie-automation/shared-lib/dist/lib/lib' import { WebSocketTopicBase, WebSocketTopic, CollectionObserver } from '../wsHandler' import { SelectedPartInstances, PartInstancesHandler } from '../collections/partInstancesHandler' import { PlaylistHandler } from '../collections/playlistHandler' @@ -17,6 +21,8 @@ import { SelectedPieceInstances, PieceInstancesHandler, PieceInstanceMin } from import { PieceStatus, toPieceStatus } from './helpers/pieceStatus' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { SegmentHandler } from '../collections/segmentHandler' +import { SegmentsHandler } from '../collections/segmentsHandler' +import { normalizeArray } from '@sofie-automation/corelib/dist/lib' const THROTTLE_PERIOD_MS = 100 @@ -38,6 +44,20 @@ interface CurrentSegmentStatus { timing: CurrentSegmentTiming } +interface ActivePlaylistQuickLoopMarker { + type: 'playlist' | 'rundown' | 'segment' | 'part' + rundownId: string | undefined + segmentId: string | undefined + partId: string | undefined +} + +interface ActivePlaylistQuickLoopStatus { + locked: boolean + running: boolean + start: ActivePlaylistQuickLoopMarker | undefined + end: ActivePlaylistQuickLoopMarker | undefined +} + export interface ActivePlaylistStatus { event: string id: string | null @@ -46,6 +66,7 @@ export interface ActivePlaylistStatus { currentPart: CurrentPartStatus | null currentSegment: CurrentSegmentStatus | null nextPart: PartStatus | null + quickLoop: 
ActivePlaylistQuickLoopStatus | undefined publicData: unknown } @@ -67,6 +88,8 @@ export class ActivePlaylistTopic private _firstInstanceInSegmentPlayout: DBPartInstance | undefined private _partInstancesInCurrentSegment: DBPartInstance[] = [] private _partsBySegmentId: Record = {} + private _partsById: Record = {} + private _segmentsById: Record = {} private _pieceInstancesInCurrentPartInstance: PieceInstanceMin[] | undefined private _pieceInstancesInNextPartInstance: PieceInstanceMin[] | undefined private _showStyleBaseExt: ShowStyleBaseExt | undefined @@ -145,6 +168,7 @@ export class ActivePlaylistTopic publicData: nextPart.publicData, }) : null, + quickLoop: this.transformQuickLoopStatus(), publicData: this._activePlaylist.publicData, }) : literal({ @@ -155,12 +179,73 @@ export class ActivePlaylistTopic currentPart: null, currentSegment: null, nextPart: null, + quickLoop: undefined, publicData: undefined, }) this.sendMessage(subscribers, message) } + private transformQuickLoopStatus(): ActivePlaylistQuickLoopStatus | undefined { + if (!this._activePlaylist) return + + const quickLoopProps = this._activePlaylist.quickLoop + if (!quickLoopProps) return undefined + + return { + locked: quickLoopProps.locked, + running: quickLoopProps.running, + start: this.transformQuickLoopMarkerStatus(quickLoopProps.start), + end: this.transformQuickLoopMarkerStatus(quickLoopProps.end), + } + } + + private transformQuickLoopMarkerStatus( + marker: QuickLoopMarker | undefined + ): ActivePlaylistQuickLoopMarker | undefined { + if (!marker) return undefined + + switch (marker.type) { + case QuickLoopMarkerType.PLAYLIST: + return { + type: 'playlist', + rundownId: undefined, + segmentId: undefined, + partId: undefined, + } + case QuickLoopMarkerType.RUNDOWN: + return { + type: 'rundown', + rundownId: unprotectString(marker.id), + segmentId: undefined, + partId: undefined, + } + case QuickLoopMarkerType.SEGMENT: { + const segment = this._segmentsById[unprotectString(marker.id)] + + 
return { + type: 'segment', + rundownId: unprotectString(segment?.rundownId), + segmentId: unprotectString(marker.id), + partId: undefined, + } + } + case QuickLoopMarkerType.PART: { + const part = this._partsById[unprotectString(marker.id)] + + return { + type: 'part', + rundownId: unprotectString(part?.rundownId), + segmentId: unprotectString(part?.segmentId), + partId: unprotectString(marker.id), + } + } + default: + assertNever(marker) + return undefined + } + } + private isDataInconsistent() { return ( this._currentPartInstance?._id !== this._activePlaylist?.currentPartInfo?.partInstanceId || @@ -182,6 +267,7 @@ export class ActivePlaylistTopic | DBPart[] | SelectedPieceInstances | DBSegment + | DBSegment[] | undefined ): Promise { let hasAnythingChanged = false @@ -219,6 +305,7 @@ export class ActivePlaylistTopic break } case PartsHandler.name: { + this._partsById = normalizeArray(data as DBPart[], '_id') this._partsBySegmentId = _.groupBy(data as DBPart[], 'segmentId') this.logUpdateReceived('parts', source) hasAnythingChanged = true // TODO: can this be smarter? @@ -243,6 +330,12 @@ export class ActivePlaylistTopic hasAnythingChanged = true break } + case SegmentsHandler.name: { + this._segmentsById = normalizeArray(data as DBSegment[], '_id') + this.logUpdateReceived('segments', source) + hasAnythingChanged = true // TODO: can this be smarter? 
+ break + } default: throw new Error(`${this._name} received unsupported update from ${source}}`) } diff --git a/packages/meteor-lib/src/api/userActions.ts b/packages/meteor-lib/src/api/userActions.ts index 7bf7c59edc5..91f521b617f 100644 --- a/packages/meteor-lib/src/api/userActions.ts +++ b/packages/meteor-lib/src/api/userActions.ts @@ -355,6 +355,11 @@ export interface NewUserActionAPI { rundownPlaylistId: RundownPlaylistId, marker: QuickLoopMarker | null ): Promise> + clearQuickLoop( + userEvent: string, + eventTime: Time, + rundownPlaylistId: RundownPlaylistId + ): Promise> } export enum UserActionAPIMethods { @@ -441,6 +446,7 @@ export enum UserActionAPIMethods { 'setQuickLoopStart' = 'userAction.setQuickLoopStart', 'setQuickLoopEnd' = 'userAction.setQuickLoopEnd', + 'clearQuickLoop' = 'userAction.clearQuickLoop', } export interface ReloadRundownPlaylistResponse { diff --git a/packages/meteor-lib/src/userAction.ts b/packages/meteor-lib/src/userAction.ts index b868a3307d4..43b54388a2b 100644 --- a/packages/meteor-lib/src/userAction.ts +++ b/packages/meteor-lib/src/userAction.ts @@ -54,4 +54,5 @@ export enum UserAction { CREATE_ADLIB_TESTING_RUNDOWN, SET_QUICK_LOOP_START, SET_QUICK_LOOP_END, + CLEAR_QUICK_LOOP, } diff --git a/packages/webui/src/client/lib/RundownResolver.ts b/packages/webui/src/client/lib/RundownResolver.ts index d58e5573db4..78abccf470b 100644 --- a/packages/webui/src/client/lib/RundownResolver.ts +++ b/packages/webui/src/client/lib/RundownResolver.ts @@ -299,6 +299,10 @@ export function isLoopDefined(playlist: DBRundownPlaylist | undefined): boolean return playlist?.quickLoop?.start != null && playlist?.quickLoop?.end != null } +export function isAnyLoopMarkerDefined(playlist: DBRundownPlaylist | undefined): boolean { + return playlist?.quickLoop?.start != null || playlist?.quickLoop?.end != null +} + export function isLoopRunning(playlist: DBRundownPlaylist | undefined): boolean { return !!playlist?.quickLoop?.running } diff --git 
a/packages/webui/src/client/lib/clientUserAction.ts b/packages/webui/src/client/lib/clientUserAction.ts index 7ac5e7bf16a..b507a7c63d4 100644 --- a/packages/webui/src/client/lib/clientUserAction.ts +++ b/packages/webui/src/client/lib/clientUserAction.ts @@ -123,6 +123,8 @@ function userActionToLabel(userAction: UserAction, t: i18next.TFunction) { return t('Setting as QuickLoop Start') case UserAction.SET_QUICK_LOOP_END: return t('Setting as QuickLoop End') + case UserAction.CLEAR_QUICK_LOOP: + return t('Clear QuickLoop') default: assertNever(userAction) } diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index 8b0f49441a6..cb8c94e7462 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -166,6 +166,7 @@ import { isEntirePlaylistLooping, isLoopRunning } from '../lib/RundownResolver' import { useRundownAndShowStyleIdsForPlaylist } from './util/useRundownAndShowStyleIdsForPlaylist' import { RundownPlaylistClientUtil } from '../lib/rundownPlaylistUtil' import { UserPermissionsContext, UserPermissions } from './UserPermissions' +import * as RundownResolver from '../lib/RundownResolver' import { MAGIC_TIME_SCALE_FACTOR } from './SegmentTimeline/Constants' @@ -555,6 +556,15 @@ const RundownHeader = withTranslation()( } } + clearQuickLoop = (e: any) => { + const { t } = this.props + if (this.props.userPermissions.studio && this.props.playlist.activationId) { + doUserAction(t, e, UserAction.CLEAR_QUICK_LOOP, (e, ts) => + MeteorCall.userAction.clearQuickLoop(e, ts, this.props.playlist._id) + ) + } + } + holdUndo = (e: any) => { const { t } = this.props if ( @@ -1004,6 +1014,12 @@ const RundownHeader = withTranslation()( render(): JSX.Element { const { t } = this.props + + const canClearQuickLoop = + !!this.props.studio.settings.enableQuickLoop && + !RundownResolver.isLoopLocked(this.props.playlist) && + RundownResolver.isAnyLoopMarkerDefined(this.props.playlist) 
+ return ( <> @@ -1037,6 +1053,9 @@ const RundownHeader = withTranslation()( {this.props.playlist.activationId ? ( this.hold(e)}>{t('Hold')} ) : null} + {this.props.playlist.activationId && canClearQuickLoop ? ( + this.clearQuickLoop(e)}>{t('Clear QuickLoop')} + ) : null} {!( this.props.playlist.activationId && !this.props.playlist.rehearsal && From f116019ea5d0cd5d59be02d0dfb38161f3051d2d Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 23 Oct 2024 13:58:21 +0100 Subject: [PATCH 46/81] fix: prefer field_order from package info stream over deepscan --- .../__tests__/checkPieceContentStatus.test.ts | 138 ++++++++++++++---- .../checkPieceContentStatus.ts | 16 +- .../src/package-manager/packageInfo.ts | 1 + 3 files changed, 125 insertions(+), 30 deletions(-) diff --git a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts index f0d34355f54..8dbb0916751 100644 --- a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts +++ b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts @@ -41,37 +41,119 @@ import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/cor const mockMediaObjectsCollection = MongoMock.getInnerMockCollection(MediaObjects) describe('lib/mediaObjects', () => { - test('buildFormatString', () => { - const format1 = buildFormatString( - PackageInfo.FieldOrder.TFF, - literal({ - width: 1920, - height: 1080, - codec_time_base: '1/25', - }) - ) - expect(format1).toEqual('1920x1080i2500tff') + describe('buildFormatString', () => { + it('deepscan tff, stream unknown', () => { + const format1 = buildFormatString( + PackageInfo.FieldOrder.TFF, + literal({ + width: 1920, + height: 1080, + codec_time_base: '1/25', + }) + ) + expect(format1).toEqual('1920x1080i2500tff') + }) - const format2 = buildFormatString( - 
PackageInfo.FieldOrder.Progressive, + it('deepscan progressive, stream unknown', () => { + const format2 = buildFormatString( + PackageInfo.FieldOrder.Progressive, + literal({ + width: 1280, + height: 720, + codec_time_base: '1001/60000', + }) + ) + expect(format2).toEqual('1280x720p5994') + }) - literal({ - width: 1280, - height: 720, - codec_time_base: '1001/60000', - }) - ) - expect(format2).toEqual('1280x720p5994') + it('deepscan bff, stream unknown', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.BFF, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + }) + ) + expect(format3).toEqual('720x576i2500bff') + }) - const format3 = buildFormatString( - PackageInfo.FieldOrder.BFF, - literal({ - width: 720, - height: 576, - codec_time_base: '1/25', - }) - ) - expect(format3).toEqual('720x576i2500bff') + it('deepscan tff, stream bff', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.TFF, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + field_order: PackageInfo.FieldOrder.BFF, + }) + ) + expect(format3).toEqual('720x576i2500bff') + }) + + it('deepscan bff, stream tff', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.BFF, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + field_order: PackageInfo.FieldOrder.TFF, + }) + ) + expect(format3).toEqual('720x576i2500tff') + }) + + it('deepscan progressive, stream tff', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.Progressive, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + field_order: PackageInfo.FieldOrder.TFF, + }) + ) + expect(format3).toEqual('720x576i2500tff') + }) + + it('deepscan bff, stream progressive', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.BFF, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + field_order: PackageInfo.FieldOrder.Progressive, + }) + ) + expect(format3).toEqual('720x576i2500bff') + 
}) + + it('deepscan unknown, stream progressive', () => { + const format3 = buildFormatString( + undefined, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + field_order: PackageInfo.FieldOrder.Progressive, + }) + ) + expect(format3).toEqual('720x576p2500') + }) + + it('r_frame_rate', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.Progressive, + literal({ + width: 720, + height: 576, + r_frame_rate: '25/1', + }) + ) + expect(format3).toEqual('720x576p2500') + }) }) test('acceptFormat', () => { diff --git a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts index 987d8650315..309d99441b8 100644 --- a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts +++ b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts @@ -60,9 +60,21 @@ interface ScanInfoForPackage { * formatted string */ export function buildFormatString( - field_order: PackageInfo.FieldOrder | undefined, + scan_field_order: PackageInfo.FieldOrder | undefined, stream: PieceContentStreamInfo ): string { + let field_order: PackageInfo.FieldOrder + if (stream.field_order === PackageInfo.FieldOrder.BFF || stream.field_order === PackageInfo.FieldOrder.TFF) { + // If the stream says it is interlaced, trust that + field_order = stream.field_order + } else if (scan_field_order && scan_field_order !== PackageInfo.FieldOrder.Unknown) { + // Then try the scan if it gave a value + field_order = scan_field_order + } else { + // Fallback to whatever the stream has + field_order = stream.field_order || PackageInfo.FieldOrder.Unknown + } + let format = `${stream.width || 0}x${stream.height || 0}` switch (field_order) { case PackageInfo.FieldOrder.Progressive: @@ -877,7 +889,7 @@ function getPackageWarningMessage( export type PieceContentStreamInfo = Pick< PackageInfo.FFProbeScanStream, - 'width' | 'height' | 'time_base' | 'codec_type' | 
'codec_time_base' | 'channels' | 'r_frame_rate' + 'width' | 'height' | 'time_base' | 'codec_type' | 'codec_time_base' | 'channels' | 'r_frame_rate' | 'field_order' > function checkStreamFormatsAndCounts( messages: Array, diff --git a/packages/shared-lib/src/package-manager/packageInfo.ts b/packages/shared-lib/src/package-manager/packageInfo.ts index ca036caa2b2..03c5b50cedb 100644 --- a/packages/shared-lib/src/package-manager/packageInfo.ts +++ b/packages/shared-lib/src/package-manager/packageInfo.ts @@ -44,6 +44,7 @@ export namespace PackageInfo { display_aspect_ratio?: string // Example: '16:9' pix_fmt?: string // Example: 'yuv420p' bits_per_raw_sample?: string // Example: '8' + field_order?: FieldOrder // audio sample_fmt?: string From fcc85e8a1d9a6b1f445258b0d65a08b97bd390c4 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Thu, 24 Oct 2024 13:13:22 +0100 Subject: [PATCH 47/81] chore: fix bad type --- .../publications/pieceContentStatusUI/checkPieceContentStatus.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts index 309d99441b8..536c41fbed9 100644 --- a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts +++ b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts @@ -349,6 +349,7 @@ async function checkPieceContentMediaObjectStatus( codec_time_base: stream.codec.time_base, channels: stream.channels, r_frame_rate: undefined, + field_order: undefined, }) ), (stream) => buildFormatString(mediainfo.field_order, stream), From 734eb982e275c7f1ab065c752bf4289d1c3c173d Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Thu, 24 Oct 2024 13:30:07 +0100 Subject: [PATCH 48/81] chore: update snapshot --- .../src/playout/__tests__/__snapshots__/playout.test.ts.snap | 5 ----- 1 file changed, 5 deletions(-) diff --git 
a/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap b/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap index 96eca8b8923..570be55e514 100644 --- a/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap +++ b/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap @@ -75,11 +75,6 @@ exports[`Playout API Basic rundown control 4`] = ` "nextTimeOffset": null, "organizationId": null, "previousPartInfo": null, - "quickLoop": { - "forceAutoNext": "disabled", - "locked": false, - "running": false, - }, "rehearsal": false, "resetTime": 0, "rundownIdsInOrder": [], From 7c907cc0bc5d2a8a979fd8847309321e82b388cc Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Thu, 24 Oct 2024 14:24:00 +0100 Subject: [PATCH 49/81] feat: Server-side notifications SOFIE-152 (#20) --- meteor/server/api/__tests__/cleanup.test.ts | 17 + meteor/server/api/cleanup.ts | 30 + meteor/server/api/studio/api.ts | 2 + meteor/server/collections/index.ts | 21 + meteor/server/publications/lib/quickLoop.ts | 2 +- meteor/server/publications/system.ts | 37 +- packages/corelib/src/dataModel/Collections.ts | 1 + packages/corelib/src/dataModel/Ids.ts | 3 + packages/corelib/src/dataModel/Notes.ts | 5 + .../corelib/src/dataModel/Notifications.ts | 124 ++++ packages/corelib/src/dataModel/Rundown.ts | 2 +- .../corelib/src/dataModel/RundownPlaylist.ts | 4 + packages/corelib/src/lib.ts | 14 + .../job-worker/src/__mocks__/collection.ts | 3 + .../src/blueprints/context/CommonContext.ts | 3 - .../blueprints/context/GetRundownContext.ts | 4 +- .../blueprints/context/OnSetAsNextContext.ts | 4 +- .../src/blueprints/context/OnTakeContext.ts | 4 +- .../context/ShowStyleUserContext.ts | 60 +- .../context/StudioBaselineContext.ts | 4 +- .../blueprints/context/StudioUserContext.ts | 24 +- .../src/blueprints/context/adlibActions.ts | 6 +- packages/job-worker/src/db/collections.ts | 3 + .../__tests__/selectShowStyleVariant.test.ts | 1 - 
.../job-worker/src/ingest/bucket/import.ts | 32 +- packages/job-worker/src/ingest/commit.ts | 29 +- .../src/ingest/createAdlibTestingRundown.ts | 23 +- .../src/ingest/generationRundown.ts | 38 +- .../src/ingest/model/IngestModel.ts | 10 +- .../model/implementation/IngestModelImpl.ts | 31 +- .../__snapshots__/mosIngest.test.ts.snap | 15 + .../src/ingest/syncChangesToPartInstance.ts | 30 +- .../src/notifications/NotificationsModel.ts | 68 ++ .../notifications/NotificationsModelHelper.ts | 321 +++++++++ .../NotificationsModelHelper.spec.ts | 672 ++++++++++++++++++ packages/job-worker/src/notifications/util.ts | 13 + .../__tests__/routeSetDisabling.spec.ts | 29 + .../src/playout/abPlayback/index.ts | 37 +- .../playout/abPlayback/routeSetDisabling.ts | 11 +- .../job-worker/src/playout/adlibAction.ts | 118 ++- packages/job-worker/src/playout/lib.ts | 7 + .../src/playout/model/PlayoutModel.ts | 3 +- .../playout/model/PlayoutPartInstanceModel.ts | 8 - .../model/implementation/PlayoutModelImpl.ts | 50 ++ .../PlayoutPartInstanceModelImpl.ts | 5 - packages/job-worker/src/playout/setNext.ts | 46 +- packages/job-worker/src/playout/take.ts | 44 +- .../src/playout/timeline/generate.ts | 12 +- packages/job-worker/src/rundownPlaylists.ts | 76 +- packages/job-worker/src/studio/cleanup.ts | 5 +- packages/meteor-lib/src/api/pubsub.ts | 15 + packages/meteor-lib/src/lib.ts | 15 - .../triggers/actionFilterChainCompilers.ts | 3 +- .../webui/src/client/collections/index.ts | 3 + .../client/lib/notifications/notifications.ts | 2 +- packages/webui/src/client/lib/tempLib.ts | 3 +- .../src/client/lib/uncaughtErrorHandler.ts | 3 + packages/webui/src/client/ui/RundownView.tsx | 27 +- .../client/ui/RundownView/RundownNotifier.tsx | 109 ++- .../getReactivePieceNoteCountsForSegment.tsx | 37 +- .../Studio/Routings/RouteSetAbPlayers.tsx | 8 +- packages/webui/vite.config.mts | 7 + 62 files changed, 2103 insertions(+), 240 deletions(-) create mode 100644 packages/corelib/src/dataModel/Notifications.ts 
create mode 100644 packages/job-worker/src/notifications/NotificationsModel.ts create mode 100644 packages/job-worker/src/notifications/NotificationsModelHelper.ts create mode 100644 packages/job-worker/src/notifications/__tests__/NotificationsModelHelper.spec.ts create mode 100644 packages/job-worker/src/notifications/util.ts diff --git a/meteor/server/api/__tests__/cleanup.test.ts b/meteor/server/api/__tests__/cleanup.test.ts index 6aecf74097c..ae7dd14fcb7 100644 --- a/meteor/server/api/__tests__/cleanup.test.ts +++ b/meteor/server/api/__tests__/cleanup.test.ts @@ -46,10 +46,12 @@ import { PackageContainerStatuses, TimelineDatastore, SofieIngestDataCache, + Notifications, } from '../../collections' import { Collections } from '../../collections/lib' import { generateTranslationBundleOriginId } from '../translationsBundles' import { CollectionCleanupResult } from '@sofie-automation/meteor-lib/dist/api/system' +import { DBNotificationTargetType } from '@sofie-automation/corelib/dist/dataModel/Notifications' describe('Cleanup', () => { let env: DefaultEnvironment @@ -446,6 +448,21 @@ async function setDefaultDatatoDB(env: DefaultEnvironment, now: number) { type: '' as any, }) + await Notifications.insertAsync({ + _id: getRandomId(), + category: '', + created: now, + localId: '', + message: {} as any, + severity: 0 as any, + modified: now, + relatedTo: { + type: DBNotificationTargetType.RUNDOWN, + studioId, + rundownId, + }, + }) + // Ensure that we have added one of everything: for (const [collectionName, collection] of Collections.entries()) { if ( diff --git a/meteor/server/api/cleanup.ts b/meteor/server/api/cleanup.ts index a15f3b49f16..2f733c9b8e0 100644 --- a/meteor/server/api/cleanup.ts +++ b/meteor/server/api/cleanup.ts @@ -70,10 +70,12 @@ import { Workers, WorkerThreadStatuses, SofieIngestDataCache, + Notifications, } from '../collections' import { AsyncOnlyMongoCollection, AsyncOnlyReadOnlyMongoCollection } from '../collections/collection' import { 
getCollectionKey } from '../collections/lib' import { generateTranslationBundleOriginId } from './translationsBundles' +import { DBNotificationTargetType } from '@sofie-automation/corelib/dist/dataModel/Notifications' /** * If actuallyCleanup=true, cleans up old data. Otherwise just checks what old data there is @@ -449,6 +451,34 @@ export async function cleanupOldDataInner(actuallyCleanup = false): Promise { diff --git a/meteor/server/api/studio/api.ts b/meteor/server/api/studio/api.ts index 70e7699f80a..de6b0b93a43 100644 --- a/meteor/server/api/studio/api.ts +++ b/meteor/server/api/studio/api.ts @@ -11,6 +11,7 @@ import { ExpectedPackageWorkStatuses, ExternalMessageQueue, MediaObjects, + Notifications, PackageContainerPackageStatuses, PackageInfos, PeripheralDevices, @@ -102,6 +103,7 @@ async function removeStudio(context: MethodContext, studioId: StudioId): Promise ExpectedPackageWorkStatuses.removeAsync({ studioId: studio._id }), PackageInfos.removeAsync({ studioId: studio._id }), PackageContainerPackageStatuses.removeAsync({ studioId: studio._id }), + Notifications.removeAsync({ 'relatedTo.studioId': studio._id }), ]) } diff --git a/meteor/server/collections/index.ts b/meteor/server/collections/index.ts index 112093c97b0..5aead39e278 100644 --- a/meteor/server/collections/index.ts +++ b/meteor/server/collections/index.ts @@ -46,6 +46,7 @@ import { allowAccessToStudio, } from '../security/lib/security' import { SystemWriteAccess } from '../security/system' +import type { DBNotificationObj } from '@sofie-automation/corelib/dist/dataModel/Notifications' export * from './bucket' export * from './packages-media' @@ -102,6 +103,26 @@ registerIndex(ExternalMessageQueue, { rundownId: 1, }) +export const Notifications = createAsyncOnlyMongoCollection(CollectionName.Notifications, false) +// For NotificationsModelHelper.getAllNotifications +registerIndex(Notifications, { + // @ts-expect-error nested property + 'relatedTo.studioId': 1, + catgory: 1, +}) +// For 
MeteorPubSub.notificationsForRundownPlaylist +registerIndex(Notifications, { + // @ts-expect-error nested property + 'relatedTo.studioId': 1, + 'relatedTo.playlistId': 1, +}) +// For MeteorPubSub.notificationsForRundown +registerIndex(Notifications, { + // @ts-expect-error nested property + 'relatedTo.studioId': 1, + 'relatedTo.rundownId': 1, +}) + export const Organizations = createAsyncOnlyMongoCollection(CollectionName.Organizations, { async update(userId, doc, fields, _modifier) { const access = await allowAccessToOrganization({ userId: userId }, doc._id) diff --git a/meteor/server/publications/lib/quickLoop.ts b/meteor/server/publications/lib/quickLoop.ts index 10fadc4c3d9..967d4ac745b 100644 --- a/meteor/server/publications/lib/quickLoop.ts +++ b/meteor/server/publications/lib/quickLoop.ts @@ -9,7 +9,7 @@ import { MarkerPosition, compareMarkerPositions } from '@sofie-automation/coreli import { ProtectedString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { DEFAULT_FALLBACK_PART_DURATION } from '@sofie-automation/shared-lib/dist/core/constants' import { getCurrentTime } from '../../lib/lib' -import { generateTranslation } from '@sofie-automation/meteor-lib/dist/lib' +import { generateTranslation } from '@sofie-automation/corelib/dist/lib' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' diff --git a/meteor/server/publications/system.ts b/meteor/server/publications/system.ts index bc74aa1ca62..ed36f597054 100644 --- a/meteor/server/publications/system.ts +++ b/meteor/server/publications/system.ts @@ -3,9 +3,11 @@ import { meteorPublish } from './lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { SystemReadAccess } from '../security/system' import { OrganizationReadAccess } from 
'../security/organization' -import { CoreSystem, Users } from '../collections' +import { CoreSystem, Notifications, Users } from '../collections' import { SYSTEM_ID } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' -import { OrganizationId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { OrganizationId, RundownId, RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/lib/securityVerify' +import { check } from 'meteor/check' meteorPublish(MeteorPubSub.coreSystem, async function (token: string | undefined) { if (await SystemReadAccess.coreSystem({ userId: this.userId, token })) { @@ -74,3 +76,34 @@ meteorPublish( return null } ) + +meteorPublish(MeteorPubSub.notificationsForRundown, async function (studioId: StudioId, rundownId: RundownId) { + // HACK: This should do real auth + triggerWriteAccessBecauseNoCheckNecessary() + + check(studioId, String) + check(rundownId, String) + + return Notifications.findWithCursor({ + // Loosely match any notifications related to this rundown + 'relatedTo.studioId': studioId, + 'relatedTo.rundownId': rundownId, + }) +}) + +meteorPublish( + MeteorPubSub.notificationsForRundownPlaylist, + async function (studioId: StudioId, playlistId: RundownPlaylistId) { + // HACK: This should do real auth + triggerWriteAccessBecauseNoCheckNecessary() + + check(studioId, String) + check(playlistId, String) + + return Notifications.findWithCursor({ + // Loosely match any notifications related to this playlist + 'relatedTo.studioId': studioId, + 'relatedTo.playlistId': playlistId, + }) + } +) diff --git a/packages/corelib/src/dataModel/Collections.ts b/packages/corelib/src/dataModel/Collections.ts index 670bdfcd442..6560aab026a 100644 --- a/packages/corelib/src/dataModel/Collections.ts +++ b/packages/corelib/src/dataModel/Collections.ts @@ -18,6 +18,7 @@ export enum CollectionName { MediaObjects = 'mediaObjects', 
MediaWorkFlows = 'mediaWorkFlows', MediaWorkFlowSteps = 'mediaWorkFlowSteps', + Notifications = 'notifications', Organizations = 'organizations', PartInstances = 'partInstances', PackageInfos = 'packageInfos', diff --git a/packages/corelib/src/dataModel/Ids.ts b/packages/corelib/src/dataModel/Ids.ts index 1e4e544cca8..37d77d3bf8c 100644 --- a/packages/corelib/src/dataModel/Ids.ts +++ b/packages/corelib/src/dataModel/Ids.ts @@ -41,6 +41,9 @@ export type NrcsIngestDataCacheObjId = ProtectedString<'NrcsIngestDataCacheObjId /** A string, identifying a SofieIngestDataCacheObj */ export type SofieIngestDataCacheObjId = ProtectedString<'SofieIngestDataCacheObjId'> +/** A string, identifying a DBNotificationObj */ +export type NotificationId = ProtectedString<'NotificationId'> + /** A string, identifying a Organization */ export type OrganizationId = ProtectedString<'OrganizationId'> diff --git a/packages/corelib/src/dataModel/Notes.ts b/packages/corelib/src/dataModel/Notes.ts index 280a421a19a..7d097323b2d 100644 --- a/packages/corelib/src/dataModel/Notes.ts +++ b/packages/corelib/src/dataModel/Notes.ts @@ -24,6 +24,11 @@ export interface GenericNote extends INoteBase { name: string } } +export interface RundownPlaylistNote extends INoteBase { + origin: { + name: string + } +} export interface RundownNote extends INoteBase { origin: { name: string diff --git a/packages/corelib/src/dataModel/Notifications.ts b/packages/corelib/src/dataModel/Notifications.ts new file mode 100644 index 00000000000..da6d9c11577 --- /dev/null +++ b/packages/corelib/src/dataModel/Notifications.ts @@ -0,0 +1,124 @@ +import type { NoteSeverity } from '@sofie-automation/blueprints-integration' +import type { NotificationId, PartInstanceId, PieceInstanceId, RundownId, RundownPlaylistId, StudioId } from './Ids' +import type { ITranslatableMessage } from '../TranslatableMessage' + +/** + * This describes a notification that should be shown to a user + * These can come from various sources, and are 
added and removed dynamically during system usage + */ +export interface DBNotificationObj { + _id: NotificationId + + /** + * Used to group a certain group of notifications + * Each source of these notifications should use its own value, so that it can find and cleanup after itself when appropriate + * Typically, a method will clear all previous notifications for a category when it is called, and then possibly add new ones + * This is a technical value, not intended to be conusmed outside of the generation/update logic + */ + category: string + + /** + * Unique id for this notification within the category + */ + localId: string + + severity: NoteSeverity + message: ITranslatableMessage + // type: 'event' | 'persistent' + + /** Description of what the notification is related to */ + relatedTo: DBNotificationTarget + + created: number // unix timestamp + modified: number // unix timestamp + + // /** + // * When set, the notification will be automatically dismissed after this time + // * For events, this is typically set to less than a minute + // * For persistent notifications, this is never set + // */ + // autoTimeout?: number // unix timestamp +} + +export type DBNotificationTarget = + // | DBNotificationTargetEverywhere + // | DBNotificationTargetStudio + | DBNotificationTargetRundown + // | DBNotificationTargetSegment + // | DBNotificationTargetPart + // | DBNotificationTargetPiece + | DBNotificationTargetRundownPlaylist + | DBNotificationTargetPartInstance + | DBNotificationTargetPieceInstance + +export enum DBNotificationTargetType { + // EVERYWHERE = 'everywhere', + // STUDIO = 'studio', + RUNDOWN = 'rundown', + // SEGMENT = 'segment', + // PART = 'part', + // PIECE = 'piece', + PLAYLIST = 'playlist', + PARTINSTANCE = 'partInstance', + PIECEINSTANCE = 'pieceInstance', +} + +// export interface DBNotificationTargetEverywhere { +// type: DBNotificationTargetType.EVERYWHERE +// } + +// export interface DBNotificationTargetStudio { +// type: 
DBNotificationTargetType.STUDIO +// studioId: StudioId +// } + +export interface DBNotificationTargetRundown { + type: DBNotificationTargetType.RUNDOWN + studioId: StudioId + rundownId: RundownId +} + +// export interface DBNotificationTargetSegment { +// type: DBNotificationTargetType.SEGMENT +// studioId: StudioId +// rundownId: RundownId +// segmentId: SegmentId +// } + +// export interface DBNotificationTargetPart { +// type: DBNotificationTargetType.PART +// studioId: StudioId +// rundownId: RundownId +// // segmentId: SegmentId +// partId: PartId +// } + +// export interface DBNotificationTargetPiece { +// type: DBNotificationTargetType.PIECE +// studioId: StudioId +// rundownId: RundownId +// // segmentId: SegmentId +// partId: PartId +// pieceId: PieceId +// } + +export interface DBNotificationTargetRundownPlaylist { + type: DBNotificationTargetType.PLAYLIST + studioId: StudioId + playlistId: RundownPlaylistId +} + +export interface DBNotificationTargetPartInstance { + type: DBNotificationTargetType.PARTINSTANCE + studioId: StudioId + rundownId: RundownId + partInstanceId: PartInstanceId +} + +export interface DBNotificationTargetPieceInstance { + type: DBNotificationTargetType.PIECEINSTANCE + studioId: StudioId + rundownId: RundownId + partInstanceId: PartInstanceId + pieceInstanceId: PieceInstanceId +} diff --git a/packages/corelib/src/dataModel/Rundown.ts b/packages/corelib/src/dataModel/Rundown.ts index a4fd75f3559..2359297f155 100644 --- a/packages/corelib/src/dataModel/Rundown.ts +++ b/packages/corelib/src/dataModel/Rundown.ts @@ -60,7 +60,7 @@ export interface Rundown { /** Last sent storyStatus to ingestDevice (MOS) */ notifiedCurrentPlayingPartExternalId?: string - /** Holds notes (warnings / errors) thrown by the blueprints during creation, or appended after */ + /** Holds notes (warnings / errors) thrown by the blueprints during creation */ notes?: Array externalId: string diff --git a/packages/corelib/src/dataModel/RundownPlaylist.ts 
b/packages/corelib/src/dataModel/RundownPlaylist.ts index a2ba8cccb13..241e0c38959 100644 --- a/packages/corelib/src/dataModel/RundownPlaylist.ts +++ b/packages/corelib/src/dataModel/RundownPlaylist.ts @@ -10,6 +10,7 @@ import { StudioId, RundownId, } from './Ids' +import { RundownPlaylistNote } from './Notes' /** Details of an ab-session requested by the blueprints in onTimelineGenerate */ export interface ABSessionInfo { @@ -152,6 +153,9 @@ export interface DBRundownPlaylist { */ queuedSegmentId?: SegmentId + /** Holds notes (warnings / errors) thrown by the blueprints during creation */ + notes?: Array + quickLoop?: QuickLoopProps /** Actual time of playback starting */ diff --git a/packages/corelib/src/lib.ts b/packages/corelib/src/lib.ts index c32833abe08..399db4fead5 100644 --- a/packages/corelib/src/lib.ts +++ b/packages/corelib/src/lib.ts @@ -7,6 +7,7 @@ import { Timecode } from 'timecode' import { iterateDeeply, iterateDeeplyEnum, Time } from '@sofie-automation/blueprints-integration' import { IStudioSettings } from './dataModel/Studio' import { customAlphabet as createNanoid } from 'nanoid' +import type { ITranslatableMessage } from './TranslatableMessage' /** * Limited character set to use for id generation @@ -455,3 +456,16 @@ export function stringifyObjects(objs: unknown): string { return objs + '' } } + +/** Generate the translation for a string, to be applied later when it gets rendered */ +export function generateTranslation( + key: string, + args?: { [k: string]: any }, + namespaces?: string[] +): ITranslatableMessage { + return { + key, + args, + namespaces, + } +} diff --git a/packages/job-worker/src/__mocks__/collection.ts b/packages/job-worker/src/__mocks__/collection.ts index 4b2a71b25cd..8956336d04c 100644 --- a/packages/job-worker/src/__mocks__/collection.ts +++ b/packages/job-worker/src/__mocks__/collection.ts @@ -42,6 +42,7 @@ import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataMod import { ExternalMessageQueueObj 
} from '@sofie-automation/corelib/dist/dataModel/ExternalMessageQueue' import { MediaObjects } from '@sofie-automation/corelib/dist/dataModel/MediaObjects' import { PackageInfoDB } from '@sofie-automation/corelib/dist/dataModel/PackageInfos' +import type { DBNotificationObj } from '@sofie-automation/corelib/dist/dataModel/Notifications' export interface CollectionOperation { type: string @@ -285,6 +286,7 @@ export function getMockCollections(): { ExpectedMediaItems: new MockMongoCollection(CollectionName.ExpectedMediaItems), ExpectedPlayoutItems: new MockMongoCollection(CollectionName.ExpectedPlayoutItems), SofieIngestDataCache: new MockMongoCollection(CollectionName.SofieIngestDataCache), + Notifications: new MockMongoCollection(CollectionName.Notifications), NrcsIngestDataCache: new MockMongoCollection(CollectionName.NrcsIngestDataCache), Parts: new MockMongoCollection(CollectionName.Parts), PartInstances: new MockMongoCollection(CollectionName.PartInstances), @@ -342,6 +344,7 @@ export interface IMockCollections { ExpectedMediaItems: MockMongoCollection ExpectedPlayoutItems: MockMongoCollection SofieIngestDataCache: MockMongoCollection + Notifications: MockMongoCollection NrcsIngestDataCache: MockMongoCollection Parts: MockMongoCollection PartInstances: MockMongoCollection diff --git a/packages/job-worker/src/blueprints/context/CommonContext.ts b/packages/job-worker/src/blueprints/context/CommonContext.ts index b2d3382bb05..7a0c9299419 100644 --- a/packages/job-worker/src/blueprints/context/CommonContext.ts +++ b/packages/job-worker/src/blueprints/context/CommonContext.ts @@ -8,9 +8,6 @@ export interface ContextInfo { /** Full identifier info for the context. 
Should be able to identify the rundown/studio/blueprint etc being executed */ identifier: string } -export interface UserContextInfo extends ContextInfo { - tempSendUserNotesIntoBlackHole?: boolean // TODO-CONTEXT remove this -} /** Common */ diff --git a/packages/job-worker/src/blueprints/context/GetRundownContext.ts b/packages/job-worker/src/blueprints/context/GetRundownContext.ts index 1bab034433f..694fff768b9 100644 --- a/packages/job-worker/src/blueprints/context/GetRundownContext.ts +++ b/packages/job-worker/src/blueprints/context/GetRundownContext.ts @@ -7,7 +7,7 @@ import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/Rund import { WatchedPackagesHelper } from './watchedPackages' import { JobContext, ProcessedShowStyleCompound } from '../../jobs' import { ReadonlyObjectDeep } from 'type-fest/source/readonly-deep' -import { UserContextInfo } from './CommonContext' +import { ContextInfo } from './CommonContext' import { ShowStyleUserContext } from './ShowStyleUserContext' import { convertRundownPlaylistToBlueprints } from './lib' @@ -15,7 +15,7 @@ export class GetRundownContext extends ShowStyleUserContext implements IGetRundo private cachedPlaylistsInStudio: Promise[]> | undefined constructor( - contextInfo: UserContextInfo, + contextInfo: ContextInfo, context: JobContext, showStyleCompound: ReadonlyDeep, watchedPackages: WatchedPackagesHelper, diff --git a/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts b/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts index 403fc2fbf3c..7de1cf88ac7 100644 --- a/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts +++ b/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts @@ -1,5 +1,5 @@ import { JobContext, ProcessedShowStyleCompound } from '../../jobs' -import { UserContextInfo } from './CommonContext' +import { ContextInfo } from './CommonContext' import { ShowStyleUserContext } from './ShowStyleUserContext' import { IBlueprintMutatablePart, 
@@ -33,7 +33,7 @@ export class OnSetAsNextContext public pendingMoveNextPart: { selectedPart: ReadonlyDeep | null } | undefined = undefined constructor( - contextInfo: UserContextInfo, + contextInfo: ContextInfo, context: JobContext, private playoutModel: PlayoutModel, showStyle: ReadonlyDeep, diff --git a/packages/job-worker/src/blueprints/context/OnTakeContext.ts b/packages/job-worker/src/blueprints/context/OnTakeContext.ts index 8fef14c7532..a8ee539c910 100644 --- a/packages/job-worker/src/blueprints/context/OnTakeContext.ts +++ b/packages/job-worker/src/blueprints/context/OnTakeContext.ts @@ -16,7 +16,7 @@ import { import { PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReadonlyDeep } from 'type-fest' import { PlayoutModel } from '../../playout/model/PlayoutModel' -import { UserContextInfo } from './CommonContext' +import { ContextInfo } from './CommonContext' import { ShowStyleUserContext } from './ShowStyleUserContext' import { WatchedPackagesHelper } from './watchedPackages' import { getCurrentTime } from '../../lib' @@ -40,7 +40,7 @@ export class OnTakeContext extends ShowStyleUserContext implements IOnTakeContex } constructor( - contextInfo: UserContextInfo, + contextInfo: ContextInfo, private readonly _context: JobContext, private readonly _playoutModel: PlayoutModel, showStyle: ReadonlyDeep, diff --git a/packages/job-worker/src/blueprints/context/ShowStyleUserContext.ts b/packages/job-worker/src/blueprints/context/ShowStyleUserContext.ts index 900d5685305..be6d264c1f1 100644 --- a/packages/job-worker/src/blueprints/context/ShowStyleUserContext.ts +++ b/packages/job-worker/src/blueprints/context/ShowStyleUserContext.ts @@ -3,18 +3,17 @@ import { ReadonlyDeep } from 'type-fest' import { WatchedPackagesHelper } from './watchedPackages' import { INoteBase } from '@sofie-automation/corelib/dist/dataModel/Notes' import { JobContext, ProcessedShowStyleCompound } from '../../jobs' -import { UserContextInfo } from 
'./CommonContext' +import { ContextInfo } from './CommonContext' import { ShowStyleContext } from './ShowStyleContext' import { getMediaObjectDuration } from './lib' export class ShowStyleUserContext extends ShowStyleContext implements IShowStyleUserContext { public readonly notes: INoteBase[] = [] - private readonly tempSendNotesIntoBlackHole: boolean protected readonly jobContext: JobContext constructor( - contextInfo: UserContextInfo, + contextInfo: ContextInfo, context: JobContext, showStyleCompound: ReadonlyDeep, private readonly watchedPackages: WatchedPackagesHelper @@ -26,49 +25,36 @@ export class ShowStyleUserContext extends ShowStyleContext implements IShowStyle showStyleCompound, context.getShowStyleBlueprintConfig(showStyleCompound) ) - this.tempSendNotesIntoBlackHole = contextInfo.tempSendUserNotesIntoBlackHole ?? false this.jobContext = context } notifyUserError(message: string, params?: { [key: string]: any }): void { - if (this.tempSendNotesIntoBlackHole) { - this.logError(`UserNotes: "${message}", ${JSON.stringify(params)}`) - } else { - this.notes.push({ - type: NoteSeverity.ERROR, - message: { - key: message, - args: params, - }, - }) - } + this.notes.push({ + type: NoteSeverity.ERROR, + message: { + key: message, + args: params, + }, + }) } notifyUserWarning(message: string, params?: { [key: string]: any }): void { - if (this.tempSendNotesIntoBlackHole) { - this.logWarning(`UserNotes: "${message}", ${JSON.stringify(params)}`) - } else { - this.notes.push({ - type: NoteSeverity.WARNING, - message: { - key: message, - args: params, - }, - }) - } + this.notes.push({ + type: NoteSeverity.WARNING, + message: { + key: message, + args: params, + }, + }) } notifyUserInfo(message: string, params?: { [key: string]: any }): void { - if (this.tempSendNotesIntoBlackHole) { - this.logInfo(`UserNotes: "${message}", ${JSON.stringify(params)}`) - } else { - this.notes.push({ - type: NoteSeverity.INFO, - message: { - key: message, - args: params, - }, - }) - } + 
this.notes.push({ + type: NoteSeverity.INFO, + message: { + key: message, + args: params, + }, + }) } getPackageInfo(packageId: string): Readonly> { diff --git a/packages/job-worker/src/blueprints/context/StudioBaselineContext.ts b/packages/job-worker/src/blueprints/context/StudioBaselineContext.ts index b2481d3bc7e..db93eb3cc30 100644 --- a/packages/job-worker/src/blueprints/context/StudioBaselineContext.ts +++ b/packages/job-worker/src/blueprints/context/StudioBaselineContext.ts @@ -1,7 +1,7 @@ import { PackageInfo, IStudioBaselineContext } from '@sofie-automation/blueprints-integration' import { WatchedPackagesHelper } from './watchedPackages' import { JobContext } from '../../jobs' -import { UserContextInfo } from './CommonContext' +import { ContextInfo } from './CommonContext' import { StudioContext } from './StudioContext' import { getMediaObjectDuration } from './lib' @@ -9,7 +9,7 @@ export class StudioBaselineContext extends StudioContext implements IStudioBasel private readonly jobContext: JobContext constructor( - contextInfo: UserContextInfo, + contextInfo: ContextInfo, context: JobContext, private readonly watchedPackages: WatchedPackagesHelper ) { diff --git a/packages/job-worker/src/blueprints/context/StudioUserContext.ts b/packages/job-worker/src/blueprints/context/StudioUserContext.ts index 1ed5a483cb6..be2c471dc46 100644 --- a/packages/job-worker/src/blueprints/context/StudioUserContext.ts +++ b/packages/job-worker/src/blueprints/context/StudioUserContext.ts @@ -3,20 +3,18 @@ import { ReadonlyDeep } from 'type-fest' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { ProcessedStudioConfig } from '../config' import { INoteBase } from '@sofie-automation/corelib/dist/dataModel/Notes' -import { UserContextInfo } from './CommonContext' +import { ContextInfo } from './CommonContext' import { StudioContext } from './StudioContext' export class StudioUserContext extends StudioContext implements IStudioUserContext { public 
readonly notes: INoteBase[] = [] - private readonly tempSendNotesIntoBlackHole: boolean constructor( - contextInfo: UserContextInfo, + contextInfo: ContextInfo, studio: ReadonlyDeep, studioBlueprintConfig: ProcessedStudioConfig ) { super(contextInfo, studio, studioBlueprintConfig) - this.tempSendNotesIntoBlackHole = contextInfo.tempSendUserNotesIntoBlackHole ?? false } notifyUserError(message: string, params?: { [key: string]: any }): void { @@ -30,16 +28,12 @@ export class StudioUserContext extends StudioContext implements IStudioUserConte this.addNote(NoteSeverity.INFO, message, params) } private addNote(type: NoteSeverity, message: string, params?: { [key: string]: any }) { - if (this.tempSendNotesIntoBlackHole) { - this.logNote(`UserNotes: "${message}", ${JSON.stringify(params)}`, type) - } else { - this.notes.push({ - type: type, - message: { - key: message, - args: params, - }, - }) - } + this.notes.push({ + type: type, + message: { + key: message, + args: params, + }, + }) } } diff --git a/packages/job-worker/src/blueprints/context/adlibActions.ts b/packages/job-worker/src/blueprints/context/adlibActions.ts index a0b6dfe31bd..274299ff592 100644 --- a/packages/job-worker/src/blueprints/context/adlibActions.ts +++ b/packages/job-worker/src/blueprints/context/adlibActions.ts @@ -17,7 +17,7 @@ import { import { PartInstanceId, PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReadonlyDeep } from 'type-fest' import { PlayoutModel } from '../../playout/model/PlayoutModel' -import { UserContextInfo } from './CommonContext' +import { ContextInfo } from './CommonContext' import { ShowStyleUserContext } from './ShowStyleUserContext' import { WatchedPackagesHelper } from './watchedPackages' import { getCurrentTime } from '../../lib' @@ -38,7 +38,7 @@ export class DatastoreActionExecutionContext protected readonly _context: JobContext constructor( - contextInfo: UserContextInfo, + contextInfo: ContextInfo, context: JobContext, showStyle: 
ReadonlyDeep, watchedPackages: WatchedPackagesHelper @@ -89,7 +89,7 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct } constructor( - contextInfo: UserContextInfo, + contextInfo: ContextInfo, private readonly _context: JobContext, private readonly _playoutModel: PlayoutModel, showStyle: ReadonlyDeep, diff --git a/packages/job-worker/src/db/collections.ts b/packages/job-worker/src/db/collections.ts index 1b9b7142227..4e38e4019d3 100644 --- a/packages/job-worker/src/db/collections.ts +++ b/packages/job-worker/src/db/collections.ts @@ -44,6 +44,7 @@ import { ReadonlyDeep } from 'type-fest' import { ExternalMessageQueueObj } from '@sofie-automation/corelib/dist/dataModel/ExternalMessageQueue' import { MediaObjects } from '@sofie-automation/corelib/dist/dataModel/MediaObjects' import EventEmitter = require('eventemitter3') +import type { DBNotificationObj } from '@sofie-automation/corelib/dist/dataModel/Notifications' export type MongoQuery = Filter export type MongoModifier = UpdateFilter @@ -99,6 +100,7 @@ export interface IDirectCollections { ExpectedMediaItems: ICollection ExpectedPlayoutItems: ICollection SofieIngestDataCache: ICollection + Notifications: ICollection NrcsIngestDataCache: ICollection Parts: ICollection PartInstances: ICollection @@ -164,6 +166,7 @@ export function getMongoCollections( database.collection(CollectionName.SofieIngestDataCache), allowWatchers ), + Notifications: wrapMongoCollection(database.collection(CollectionName.Notifications), allowWatchers), NrcsIngestDataCache: wrapMongoCollection( database.collection(CollectionName.NrcsIngestDataCache), allowWatchers diff --git a/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts b/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts index 095a4ef0975..2c6e7d8ce4d 100644 --- a/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts +++ 
b/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts @@ -24,7 +24,6 @@ describe('selectShowStyleVariant', () => { { name: 'test', identifier: 'test', - tempSendUserNotesIntoBlackHole: true, }, context.studio, context.getStudioBlueprintConfig() diff --git a/packages/job-worker/src/ingest/bucket/import.ts b/packages/job-worker/src/ingest/bucket/import.ts index c7216ef48fc..56188a8e562 100644 --- a/packages/job-worker/src/ingest/bucket/import.ts +++ b/packages/job-worker/src/ingest/bucket/import.ts @@ -1,6 +1,11 @@ import { RundownImportVersions } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { ShowStyleUserContext } from '../../blueprints/context' -import { IBlueprintActionManifest, IBlueprintAdLibPiece, IngestAdlib } from '@sofie-automation/blueprints-integration' +import { + IBlueprintActionManifest, + IBlueprintAdLibPiece, + IngestAdlib, + NoteSeverity, +} from '@sofie-automation/blueprints-integration' import { WatchedPackagesHelper } from '../../blueprints/context/watchedPackages' import { JobContext, ProcessedShowStyleCompound } from '../../jobs' import { getSystemVersion } from '../../lib' @@ -28,6 +33,7 @@ import { WrappedShowStyleBlueprint } from '../../blueprints/cache' import { ReadonlyDeep } from 'type-fest' import { BucketId, ShowStyleBaseId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { interpollateTranslation } from '@sofie-automation/corelib/dist/TranslatableMessage' export async function handleBucketItemImport(context: JobContext, data: BucketItemImportProps): Promise { await regenerateBucketItemFromIngestInfo( @@ -255,6 +261,8 @@ async function generateBucketAdlibForVariant( // pieceId: BucketAdLibId | BucketAdLibActionId, payload: IngestAdlib ): Promise { + if (!blueprint.blueprint.getAdlibItem) return null + const watchedPackages = await WatchedPackagesHelper.create(context, { // We don't know 
what the `pieceId` will be, but we do know the `externalId` pieceExternalId: payload.externalId, @@ -267,7 +275,6 @@ async function generateBucketAdlibForVariant( { name: `Bucket Ad-Lib`, identifier: `studioId=${context.studioId},showStyleBaseId=${showStyleCompound._id},showStyleVariantId=${showStyleCompound.showStyleVariantId}`, - tempSendUserNotesIntoBlackHole: true, // TODO-CONTEXT }, context, showStyleCompound, @@ -275,9 +282,26 @@ async function generateBucketAdlibForVariant( ) try { - if (blueprint.blueprint.getAdlibItem) { - return blueprint.blueprint.getAdlibItem(contextForVariant, payload) + const adlibItem = blueprint.blueprint.getAdlibItem(contextForVariant, payload) + + // Log any notes + // Future: This should either be a context which doesn't support notes, or should do something better with them + for (const note of contextForVariant.notes) { + switch (note.type) { + case NoteSeverity.ERROR: + contextForVariant.logError(`UserNote: ${interpollateTranslation(note.message)}`) + break + case NoteSeverity.WARNING: + contextForVariant.logWarning(`UserNote: ${interpollateTranslation(note.message)}`) + break + case NoteSeverity.INFO: + default: + contextForVariant.logInfo(`UserNote: ${interpollateTranslation(note.message)}`) + break + } } + + return adlibItem } catch (err) { logger.error(`Error in showStyleBlueprint.getShowStyleVariantId: ${stringifyError(err)}`) } diff --git a/packages/job-worker/src/ingest/commit.ts b/packages/job-worker/src/ingest/commit.ts index 069e96aea06..937000d55d7 100644 --- a/packages/job-worker/src/ingest/commit.ts +++ b/packages/job-worker/src/ingest/commit.ts @@ -15,6 +15,7 @@ import { updatePartInstanceRanksAndOrphanedState } from '../updatePartInstanceRa import { getPlaylistIdFromExternalId, produceRundownPlaylistInfoFromRundown, + removePlaylistFromDb, removeRundownFromDb, } from '../rundownPlaylists' import { ReadonlyDeep } from 'type-fest' @@ -108,6 +109,8 @@ export async function CommitIngestOperation( // The rundown is 
safe to simply move or remove trappedInPlaylistId = undefined + updateNotificationForTrappedInPlaylist(ingestModel, false) + await removeRundownFromPlaylistAndUpdatePlaylist( context, ingestModel.rundownId, @@ -496,7 +499,7 @@ export async function regeneratePlaylistAndRundownOrder( return newPlaylist } else { // Playlist is empty and should be removed - await context.directCollections.RundownPlaylists.remove(oldPlaylist._id) + await removePlaylistFromDb(context, lock) return null } @@ -518,7 +521,7 @@ export async function updatePlayoutAfterChangingRundownInPlaylist( throw new Error(`RundownPlaylist "${playoutModel.playlistId}" has no contents but is active...`) // Remove an empty playlist - await context.directCollections.RundownPlaylists.remove({ _id: playoutModel.playlistId }) + await removePlaylistFromDb(context, playlistLock) playoutModel.assertNoChanges() return @@ -641,15 +644,29 @@ function setRundownAsTrappedInPlaylist( if (rundownIsToBeRemoved) { // Orphan the deleted rundown ingestModel.setRundownOrphaned(RundownOrphanedReason.DELETED) + + updateNotificationForTrappedInPlaylist(ingestModel, false) } else { + updateNotificationForTrappedInPlaylist(ingestModel, true) + } +} + +function updateNotificationForTrappedInPlaylist(ingestModel: IngestModel, isTrapped: boolean) { + const notificationCategory = `trappedInPlaylist:${ingestModel.rundownId}` + + if (isTrapped) { // The rundown is still synced, but is in the wrong playlist. 
Notify the user - ingestModel.appendRundownNotes({ - type: NoteSeverity.WARNING, + ingestModel.setNotification(notificationCategory, { + id: `trappedInPlaylist`, + severity: NoteSeverity.WARNING, message: getTranslatedMessage(ServerTranslatedMesssages.PLAYLIST_ON_AIR_CANT_MOVE_RUNDOWN), - origin: { - name: 'Data update', + relatedTo: { + type: 'rundown', + rundownId: ingestModel.rundownId, }, }) + } else { + ingestModel.clearAllNotifications(notificationCategory) } } diff --git a/packages/job-worker/src/ingest/createAdlibTestingRundown.ts b/packages/job-worker/src/ingest/createAdlibTestingRundown.ts index e19c6810055..d5412443d73 100644 --- a/packages/job-worker/src/ingest/createAdlibTestingRundown.ts +++ b/packages/job-worker/src/ingest/createAdlibTestingRundown.ts @@ -16,6 +16,9 @@ import { logger } from '../logging' import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { handleUpdatedRundown } from './ingestRundownJobs' import { runIngestUpdateOperation } from './runOperation' +import { NotificationsModelHelper } from '../notifications/NotificationsModelHelper' +import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' +import { convertNoteToNotification } from '../notifications/util' export async function handleCreateAdlibTestingRundownForShowStyleVariant( context: JobContext, @@ -31,7 +34,6 @@ export async function handleCreateAdlibTestingRundownForShowStyleVariant( { name: `Create Adlib Testing Rundown`, identifier: `studioId=${context.studioId},showStyleBaseId=${showStyleCompound._id},showStyleVariantId=${showStyleCompound.showStyleVariantId}`, - tempSendUserNotesIntoBlackHole: true, // TODO-CONTEXT }, context, showStyleCompound, @@ -62,9 +64,26 @@ export async function handleCreateAdlibTestingRundownForShowStyleVariant( showStyleVariantId: showStyleVariant._id, }, } - return runIngestUpdateOperation(context, updateData, (ingestRundown) => + const createdRundownId = await runIngestUpdateOperation(context, 
updateData, (ingestRundown) => handleUpdatedRundown(context, updateData, ingestRundown) ) + + // Store the notes as notifications. This is necessary, as any stored on the Rundown will be lost when the rundown is regenerated, without regenerating these notes + const notificationCategory = unprotectString(createdRundownId) + const notificationsHelper = new NotificationsModelHelper(context, 'adlibTestingRundown', null) + notificationsHelper.clearAllNotifications(notificationCategory) + for (const note of blueprintContext.notes) { + notificationsHelper.setNotification(notificationCategory, { + ...convertNoteToNotification(note, [showStyleBlueprint.blueprintId]), + relatedTo: { + type: 'rundown', + rundownId: createdRundownId, + }, + }) + } + await notificationsHelper.saveAllToDatabase() + + return createdRundownId } function fallbackBlueprintMethod( diff --git a/packages/job-worker/src/ingest/generationRundown.ts b/packages/job-worker/src/ingest/generationRundown.ts index 42c12b41e20..c327ded88f0 100644 --- a/packages/job-worker/src/ingest/generationRundown.ts +++ b/packages/job-worker/src/ingest/generationRundown.ts @@ -115,13 +115,11 @@ export async function updateRundownFromIngestDataInner( { name: 'selectShowStyleVariant', identifier: `studioId=${context.studio._id},rundownId=${ingestModel.rundownId},ingestRundownId=${ingestModel.rundownExternalId}`, - tempSendUserNotesIntoBlackHole: true, }, context.studio, context.getStudioBlueprintConfig() ) - // TODO-CONTEXT save any user notes from selectShowStyleContext const showStyle = await selectShowStyleVariant( context, selectShowStyleContext, @@ -138,6 +136,14 @@ export async function updateRundownFromIngestDataInner( const showStyleBlueprint = await context.getShowStyleBlueprint(showStyle.base._id) const allRundownWatchedPackages = await pAllRundownWatchedPackages + const extraRundownNotes: RundownNote[] = selectShowStyleContext.notes.map((note) => ({ + type: note.type, + message: 
wrapTranslatableMessageFromBlueprints(note.message, [showStyleBlueprint.blueprintId]), + origin: { + name: 'selectShowStyleVariant', + }, + })) + // Call blueprints, get rundown const dbRundown = await regenerateRundownAndBaselineFromIngestData( context, @@ -146,7 +152,8 @@ export async function updateRundownFromIngestDataInner( ingestRundown.rundownSource, showStyle, showStyleBlueprint, - allRundownWatchedPackages + allRundownWatchedPackages, + extraRundownNotes ) if (!dbRundown) { // We got no rundown, abort: @@ -183,6 +190,7 @@ export async function updateRundownFromIngestDataInner( * @param showStyle ShowStyle to regenerate for * @param showStyleBlueprint ShowStyle Blueprint to regenerate with * @param allRundownWatchedPackages WatchedPackagesHelper for all packages belonging to the rundown + * @param extraRundownNotes Additional notes to add to the Rundown, produced earlier in the ingest process * @returns Generated documents or null if Blueprints reject the Rundown */ export async function regenerateRundownAndBaselineFromIngestData( @@ -192,7 +200,8 @@ export async function regenerateRundownAndBaselineFromIngestData( rundownSource: RundownSource, showStyle: SelectedShowStyleVariant, showStyleBlueprint: ReadonlyDeep, - allRundownWatchedPackages: WatchedPackagesHelper + allRundownWatchedPackages: WatchedPackagesHelper, + extraRundownNotes: RundownNote[] ): Promise | null> { const rundownBaselinePackages = allRundownWatchedPackages.filter( context, @@ -255,15 +264,18 @@ export async function regenerateRundownAndBaselineFromIngestData( } // Ensure the ids in the notes are clean - const rundownNotes = blueprintContext.notes.map((note) => - literal({ - type: note.type, - message: wrapTranslatableMessageFromBlueprints(note.message, translationNamespaces), - origin: { - name: `${showStyle.base.name}-${showStyle.variant.name}`, - }, - }) - ) + const rundownNotes = [ + ...extraRundownNotes, + ...blueprintContext.notes.map((note) => + literal({ + type: note.type, + 
message: wrapTranslatableMessageFromBlueprints(note.message, translationNamespaces), + origin: { + name: `${showStyle.base.name}-${showStyle.variant.name}`, + }, + }) + ), + ] ingestModel.setRundownData( rundownRes.rundown, diff --git a/packages/job-worker/src/ingest/model/IngestModel.ts b/packages/job-worker/src/ingest/model/IngestModel.ts index f58ad024393..bfd015e210a 100644 --- a/packages/job-worker/src/ingest/model/IngestModel.ts +++ b/packages/job-worker/src/ingest/model/IngestModel.ts @@ -31,6 +31,7 @@ import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { ProcessedShowStyleBase, ProcessedShowStyleVariant } from '../../jobs/showStyle' import { WrappedShowStyleBlueprint } from '../../blueprints/cache' import { IBlueprintRundown } from '@sofie-automation/blueprints-integration' +import type { INotificationsModel } from '../../notifications/NotificationsModel' export type ExpectedPackageForIngestModelBaseline = | ExpectedPackageDBFromBaselineAdLibAction @@ -148,7 +149,7 @@ export interface IngestModelReadonly { findExpectedPackage(packageId: ExpectedPackageId): ReadonlyDeep | undefined } -export interface IngestModel extends IngestModelReadonly, BaseModel { +export interface IngestModel extends IngestModelReadonly, BaseModel, INotificationsModel { /** * Search for a Part through the whole Rundown * @param id Id of the Part @@ -270,13 +271,6 @@ export interface IngestModel extends IngestModelReadonly, BaseModel { * @param status Rundown air status */ setRundownAirStatus(status: string | undefined): void - - /** - * Add some user facing notes for this Rundown - * Future: it is only possible to add these, there is no way to 'replace' or remove them - * @param notes New notes to add - */ - appendRundownNotes(...notes: RundownNote[]): void } export type IngestReplaceSegmentType = Omit diff --git a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts 
b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts index 747da75b21c..15f33f777d9 100644 --- a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts +++ b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts @@ -61,6 +61,7 @@ import { SaveIngestModelHelper } from './SaveIngestModel' import { generateWriteOpsForLazyDocuments } from './DocumentChangeTracker' import { IS_PRODUCTION } from '../../../environment' import { logger } from '../../../logging' +import { NotificationsModelHelper } from '../../../notifications/NotificationsModelHelper' export interface IngestModelImplExistingData { rundown: DBRundown @@ -104,6 +105,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { readonly #rundownBaselineAdLibPieces: LazyInitialise readonly #rundownBaselineAdLibActions: LazyInitialise + readonly #notificationsHelper: NotificationsModelHelper + public get rundownId(): RundownId { return this.#rundownImpl?._id ?? getRundownId(this.context.studioId, this.rundownExternalId) } @@ -255,6 +258,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { this.#rundownBaselineAdLibPieces = new LazyInitialise(async () => []) this.#rundownBaselineAdLibActions = new LazyInitialise(async () => []) } + + this.#notificationsHelper = new NotificationsModelHelper(context, `ingest:${this.rundownId}`, null) } getRundown(): ReadonlyDeep { @@ -555,12 +560,27 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { } } - appendRundownNotes(...notes: RundownNote[]): void { - // Future: this doesnt allow for removing notes - if (!this.#rundownImpl) throw new Error(`Rundown "${this.rundownId}" ("${this.rundownExternalId}") not found`) + /** Notifications */ - this.#rundownImpl.notes = [...(this.#rundownImpl.notes ?? 
[]), ...clone(notes)] - this.#rundownHasChanged = true + async getAllNotifications( + ...args: Parameters + ): ReturnType { + return this.#notificationsHelper.getAllNotifications(...args) + } + clearNotification( + ...args: Parameters + ): ReturnType { + return this.#notificationsHelper.clearNotification(...args) + } + setNotification( + ...args: Parameters + ): ReturnType { + return this.#notificationsHelper.setNotification(...args) + } + clearAllNotifications( + ...args: Parameters + ): ReturnType { + return this.#notificationsHelper.clearAllNotifications(...args) } /** BaseModel */ @@ -676,6 +696,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { this.context.directCollections.RundownBaselineAdLibPieces.bulkWrite(baselineAdLibPiecesOps), this.context.directCollections.RundownBaselineAdLibActions.bulkWrite(baselineAdLibActionsOps), ...saveHelper.commit(this.context), + this.#notificationsHelper.saveAllToDatabase(), ]) this.#rundownHasChanged = false diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap b/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap index 0678fb3875d..b234b72d5f3 100644 --- a/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap +++ b/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap @@ -9,6 +9,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -302,6 +303,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -587,6 +589,7 @@ exports[`Test recieved mos ingest payloads 
mosRoFullStory: Valid data 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -893,6 +896,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -1189,6 +1193,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -1483,6 +1488,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 1`] "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -1745,6 +1751,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 1`] = "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -2052,6 +2059,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -2367,6 +2375,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -2665,6 +2674,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 1`] = "modified": 0, "name": "All effect1 into clip combinations", 
"nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -2963,6 +2973,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 1`] = "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -3260,6 +3271,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -3550,6 +3562,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -3872,6 +3885,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -4170,6 +4184,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 1 "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ diff --git a/packages/job-worker/src/ingest/syncChangesToPartInstance.ts b/packages/job-worker/src/ingest/syncChangesToPartInstance.ts index ac90df10dbc..67a0bb37c69 100644 --- a/packages/job-worker/src/ingest/syncChangesToPartInstance.ts +++ b/packages/job-worker/src/ingest/syncChangesToPartInstance.ts @@ -9,9 +9,7 @@ import { PlayoutPartInstanceModel } from '../playout/model/PlayoutPartInstanceMo import { IngestModelReadonly } from './model/IngestModel' import { DBPart } from 
'@sofie-automation/corelib/dist/dataModel/Part' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' -import { PartNote, SegmentNote } from '@sofie-automation/corelib/dist/dataModel/Notes' import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' -import { literal } from '@sofie-automation/corelib/dist/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { logger } from '../logging' import { @@ -31,6 +29,7 @@ import { import { validateAdlibTestingPartInstanceProperties } from '../playout/adlibTesting' import { ReadonlyDeep } from 'type-fest' import { convertIngestModelToPlayoutRundownWithSegments } from './commit' +import { convertNoteToNotification } from '../notifications/util' type PlayStatus = 'previous' | 'current' | 'next' type SyncedInstance = { @@ -195,25 +194,20 @@ export async function syncChangesToPartInstances( } // Save notes: - const newNotes: PartNote[] = [] + const notificationCategory = `syncIngestUpdateToPartInstance:${existingPartInstance.partInstance._id}` + playoutModel.clearAllNotifications(notificationCategory) for (const note of syncContext.notes) { - newNotes.push( - literal({ - type: note.type, - message: note.message, - origin: { - name: '', // TODO - }, - }) - ) + playoutModel.setNotification(notificationCategory, { + ...convertNoteToNotification(note, [blueprint.blueprintId]), + relatedTo: { + type: 'partInstance', + rundownId: existingPartInstance.partInstance.part.rundownId, + partInstanceId: existingPartInstance.partInstance._id, + }, + }) } - if (newNotes.length) { - // TODO - these dont get shown to the user currently - // TODO - old notes from the sync may need to be pruned, or we will end up with duplicates and 'stuck' notes?+ - existingPartInstance.appendNotes(newNotes) - validateAdlibTestingPartInstanceProperties(context, playoutModel, existingPartInstance) - } + validateAdlibTestingPartInstanceProperties(context, playoutModel, 
existingPartInstance) if (existingPartInstance.partInstance._id === playoutModel.playlist.currentPartInfo?.partInstanceId) { // This should be run after 'current', before 'next': diff --git a/packages/job-worker/src/notifications/NotificationsModel.ts b/packages/job-worker/src/notifications/NotificationsModel.ts new file mode 100644 index 00000000000..7227bb8b553 --- /dev/null +++ b/packages/job-worker/src/notifications/NotificationsModel.ts @@ -0,0 +1,68 @@ +import type { NoteSeverity } from '@sofie-automation/blueprints-integration' +import { RundownId, PartInstanceId, PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import type { ITranslatableMessage } from '@sofie-automation/corelib/dist/TranslatableMessage' + +export interface INotification { + id: string + + severity: NoteSeverity + message: ITranslatableMessage +} + +export interface INotificationWithTarget extends INotification { + relatedTo: INotificationTarget +} + +export type INotificationTarget = + | INotificationTargetPlaylist + | INotificationTargetRundown + | INotificationTargetPartInstance + | INotificationTargetPieceInstance +export interface INotificationTargetPlaylist { + type: 'playlist' +} +export interface INotificationTargetRundown { + type: 'rundown' + rundownId: RundownId +} + +export interface INotificationTargetPartInstance { + type: 'partInstance' + rundownId: RundownId + partInstanceId: PartInstanceId +} +export interface INotificationTargetPieceInstance { + type: 'pieceInstance' + rundownId: RundownId + partInstanceId: PartInstanceId + pieceInstanceId: PieceInstanceId +} + +export interface INotificationsModel { + /** + * Get the current notifications for a category + * This may fetch the notifications from the database if they are not already loaded + * @param category category of notifications to get + */ + getAllNotifications(category: string): Promise + + /** + * Remove a notification from the list + * @param category category of the notification + * @param 
notificationId id of the notification to remove + */ + clearNotification(category: string, notificationId: string): void + + /** + * Add/replace a notification to the list + * @param category category of the notification + * @param notification notification to add + */ + setNotification(category: string, notification: INotificationWithTarget): void + + /** + * Clear all notifications for a category + * @param category category of notifications to clear + */ + clearAllNotifications(category: string): void +} diff --git a/packages/job-worker/src/notifications/NotificationsModelHelper.ts b/packages/job-worker/src/notifications/NotificationsModelHelper.ts new file mode 100644 index 00000000000..3dc27293160 --- /dev/null +++ b/packages/job-worker/src/notifications/NotificationsModelHelper.ts @@ -0,0 +1,321 @@ +import { getCurrentTime } from '../lib' +import type { JobContext } from '../jobs' +import type { INotificationsModel, INotificationTarget, INotificationWithTarget } from './NotificationsModel' +import { + DBNotificationTarget, + DBNotificationTargetType, + type DBNotificationObj, +} from '@sofie-automation/corelib/dist/dataModel/Notifications' +import { getHash } from '@sofie-automation/corelib/dist/hash' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { assertNever, flatten, omit, type Complete } from '@sofie-automation/corelib/dist/lib' +import { type AnyBulkWriteOperation } from 'mongodb' +import { StudioId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { isEqual } from 'underscore' + +interface NotificationsLoadState { + dbNotifications: ReadonlyMap | null + updatedNotifications: Map | null> + // createdTimestamps: Map + + removeAllMissing: boolean +} + +export class NotificationsModelHelper implements INotificationsModel { + readonly #context: JobContext + readonly #categoryPrefix: string + readonly #playlistId: RundownPlaylistId | null + + readonly #notificationsByCategory = new Map() 
+ + constructor(context: JobContext, categoryPrefix: string, playlistId: RundownPlaylistId | null) { + this.#context = context + this.#categoryPrefix = categoryPrefix + this.#playlistId = playlistId + } + + #getFullCategoryName(category: string): string { + return `${this.#categoryPrefix}:${category}` + } + + async #getAllNotificationsRaw( + notificationsForCategory: NotificationsLoadState, + category: string + ): Promise | null> { + if (!notificationsForCategory.dbNotifications) { + const dbNotifications = await this.#context.directCollections.Notifications.findFetch({ + // Ensure notifiations are owned by the current studio + 'relatedTo.studioId': this.#context.studioId, + // Limit to the current category + category: this.#getFullCategoryName(category), + }) + + const dbNotificationMap = new Map() + + // Interleave into the store, for any which haven't already been updated + for (const dbNotification of dbNotifications) { + dbNotificationMap.set(dbNotification.localId, dbNotification) + } + + // Indicate that this is now fully loaded in memory + notificationsForCategory.dbNotifications = dbNotificationMap + + return dbNotificationMap + } + + return null + } + + async getAllNotifications(category: string): Promise { + const notificationsForCategory = this.#getOrCategoryEntry(category) + + await this.#getAllNotificationsRaw(notificationsForCategory, category) + + const allLocalIds = new Set([ + ...Array.from(notificationsForCategory.updatedNotifications.keys()), + ...(notificationsForCategory.dbNotifications + ? Array.from(notificationsForCategory.dbNotifications.keys()) + : []), + ]) + + const allNotifications: INotificationWithTarget[] = [] + for (const localId of allLocalIds) { + const notification = notificationsForCategory.updatedNotifications.has(localId) + ? 
notificationsForCategory.updatedNotifications.get(localId) + : notificationsForCategory.dbNotifications?.get(localId) + + const relatedTo = notification && translateRelatedToFromDbType(notification.relatedTo) + if (!relatedTo) continue + + allNotifications.push({ + id: notification.localId, + severity: notification.severity, + message: notification.message, + relatedTo: relatedTo, + }) + } + + return allNotifications + } + + clearNotification(category: string, notificationId: string): void { + const notificationsForCategory = this.#getOrCategoryEntry(category) + + // The notification may or may not be loaded, but this indicates that to the saving that we intend to delete it + notificationsForCategory.updatedNotifications.set(notificationId, null) + } + + setNotification(category: string, notification: INotificationWithTarget): void { + const notificationsForCategory = this.#getOrCategoryEntry(category) + + const fullCategory = this.#getFullCategoryName(category) + notificationsForCategory.updatedNotifications.set(notification.id, { + _id: protectString(getHash(`${this.#context.studioId}:${fullCategory}:${notification.id}`)), + category: fullCategory, + localId: notification.id, + severity: notification.severity, + message: notification.message, + relatedTo: translateRelatedToIntoDbType(this.#context.studioId, this.#playlistId, notification.relatedTo), + } satisfies Complete>) + } + + clearAllNotifications(category: string): void { + const notificationsForCategory = this.#getOrCategoryEntry(category) + + // Tell this store that any documents not in the `updatedNotifications` should be deleted + notificationsForCategory.removeAllMissing = true + + // Clear any known in memory notifications + notificationsForCategory.updatedNotifications.clear() + } + + #getOrCategoryEntry(category: string): NotificationsLoadState { + let notificationsForCategory = this.#notificationsByCategory.get(category) + if (!notificationsForCategory) { + notificationsForCategory = { + 
dbNotifications: null, + updatedNotifications: new Map(), + + removeAllMissing: false, + } + this.#notificationsByCategory.set(category, notificationsForCategory) + } + return notificationsForCategory + } + + async saveAllToDatabase(): Promise { + // Quick return if there is nothing to save + if (this.#notificationsByCategory.size === 0) return + + const now = getCurrentTime() + + const allUpdates = flatten( + await Promise.all( + Array.from(this.#notificationsByCategory).map(async ([category, notificationsForCategory]) => { + /** + * This isn't the most efficient, to be loading all the notifications for every modified category, + * but it's a lot simpler than an optimal solution. The documents should be small and compact, so it should be quick enough. + */ + + const dbNotifications = + notificationsForCategory.dbNotifications ?? + (await this.#getAllNotificationsRaw(notificationsForCategory, category)) + + const allLocalIds = new Set([ + ...Array.from(notificationsForCategory.updatedNotifications.keys()), + ...(dbNotifications ? 
Array.from(dbNotifications.keys()) : []), + ]) + + const updates: AnyBulkWriteOperation[] = [] + const localIdsToKeep: string[] = [] + const localIdsToDelete: string[] = [] + for (const localId of allLocalIds) { + const updatedNotification = notificationsForCategory.updatedNotifications.get(localId) + const dbNotification = dbNotifications?.get(localId) + + // Marked for deletion + if (updatedNotification === null) { + if (dbNotification) { + // This notification has been deleted + localIdsToDelete.push(localId) + } + continue + } + + // No change made, keep it + if (updatedNotification === undefined) { + if (!notificationsForCategory.removeAllMissing) { + localIdsToKeep.push(localId) + } + continue + } + + localIdsToKeep.push(localId) + + if ( + !dbNotification || + !isEqual(omit(dbNotification, 'created', 'modified'), updatedNotification) + ) { + updates.push({ + replaceOne: { + filter: { + category: this.#getFullCategoryName(category), + localId: localId, + 'relatedTo.studioId': this.#context.studioId, + }, + replacement: { + ...updatedNotification, + created: dbNotification?.created ?? 
now, + modified: now, + }, + upsert: true, + }, + }) + } + } + + if (notificationsForCategory.removeAllMissing) { + updates.push({ + deleteMany: { + filter: { + category: this.#getFullCategoryName(category), + localId: { $nin: localIdsToKeep }, + 'relatedTo.studioId': this.#context.studioId, + }, + }, + }) + } else if (localIdsToDelete.length > 0) { + // Some documents were deleted + updates.push({ + deleteMany: { + filter: { + category: this.#getFullCategoryName(category), + localId: { $in: localIdsToDelete }, + 'relatedTo.studioId': this.#context.studioId, + }, + }, + }) + } + + return updates + }) + ) + ) + + this.#notificationsByCategory.clear() + + if (allUpdates.length > 0) { + await this.#context.directCollections.Notifications.bulkWrite(allUpdates) + } + } +} + +function translateRelatedToIntoDbType( + studioId: StudioId, + playlistId: RundownPlaylistId | null, + relatedTo: INotificationTarget +): DBNotificationTarget { + switch (relatedTo.type) { + case 'playlist': + if (!playlistId) throw new Error('Cannot create a playlist related notification without a playlist') + return { type: DBNotificationTargetType.PLAYLIST, studioId, playlistId } + case 'rundown': + return { + type: DBNotificationTargetType.RUNDOWN, + studioId, + rundownId: relatedTo.rundownId, + } + case 'partInstance': + return { + type: DBNotificationTargetType.PARTINSTANCE, + studioId, + rundownId: relatedTo.rundownId, + partInstanceId: relatedTo.partInstanceId, + } + case 'pieceInstance': + return { + type: DBNotificationTargetType.PIECEINSTANCE, + studioId, + rundownId: relatedTo.rundownId, + partInstanceId: relatedTo.partInstanceId, + pieceInstanceId: relatedTo.pieceInstanceId, + } + default: + assertNever(relatedTo) + throw new Error(`Unknown relatedTo type: ${relatedTo}`) + } +} + +function translateRelatedToFromDbType(relatedTo: DBNotificationTarget): INotificationTarget | null { + switch (relatedTo.type) { + case DBNotificationTargetType.PLAYLIST: + return { type: 'playlist' } + case 
DBNotificationTargetType.RUNDOWN: + return { + type: 'rundown', + rundownId: relatedTo.rundownId, + } + case DBNotificationTargetType.PARTINSTANCE: + return { + type: 'partInstance', + rundownId: relatedTo.rundownId, + partInstanceId: relatedTo.partInstanceId, + } + case DBNotificationTargetType.PIECEINSTANCE: + return { + type: 'pieceInstance', + rundownId: relatedTo.rundownId, + partInstanceId: relatedTo.partInstanceId, + pieceInstanceId: relatedTo.pieceInstanceId, + } + // case DBNotificationTargetType.EVERYWHERE: + // case DBNotificationTargetType.STUDIO: + // case DBNotificationTargetType.SEGMENT: + // case DBNotificationTargetType.PART: + // case DBNotificationTargetType.PIECE: + // return null + default: + assertNever(relatedTo) + return null + } +} diff --git a/packages/job-worker/src/notifications/__tests__/NotificationsModelHelper.spec.ts b/packages/job-worker/src/notifications/__tests__/NotificationsModelHelper.spec.ts new file mode 100644 index 00000000000..e0c44ed13a9 --- /dev/null +++ b/packages/job-worker/src/notifications/__tests__/NotificationsModelHelper.spec.ts @@ -0,0 +1,672 @@ +import { + DBNotificationObj, + DBNotificationTarget, + DBNotificationTargetType, +} from '@sofie-automation/corelib/dist/dataModel/Notifications' +import { setupDefaultJobEnvironment } from '../../__mocks__/context' +import { NotificationsModelHelper } from '../NotificationsModelHelper' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { NoteSeverity } from '@sofie-automation/blueprints-integration' +import { INotificationWithTarget } from '../NotificationsModel' +import { generateTranslation } from '@sofie-automation/corelib/dist/lib' + +describe('NotificationsModelHelper', () => { + it('no changes has no mongo write', async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const helper = new NotificationsModelHelper(context, 'test', null) + 
expect(notificationsCollection.operations).toHaveLength(0) + + await helper.saveAllToDatabase() + expect(notificationsCollection.operations).toHaveLength(0) + }) + + describe('from empty', () => { + it('do nothing', async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const helper = new NotificationsModelHelper(context, 'test', null) + + expect(notificationsCollection.operations).toHaveLength(0) + + await helper.saveAllToDatabase() + expect(notificationsCollection.operations).toHaveLength(0) + }) + + it('clearNotification', async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const helper = new NotificationsModelHelper(context, 'test', null) + + helper.clearNotification('my-category', 'id0') + + expect(notificationsCollection.operations).toHaveLength(0) + + await helper.saveAllToDatabase() + expect(notificationsCollection.operations).toEqual([ + { + type: 'findFetch', + args: [ + { + category: 'test:my-category', + 'relatedTo.studioId': context.studioId, + }, + ], + }, + ]) + }) + + it('set then clear Notification', async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const helper = new NotificationsModelHelper(context, 'test', protectString('playlist0')) + + helper.setNotification('my-category', { + id: 'id0', + message: generateTranslation('test'), + severity: NoteSeverity.INFO, + relatedTo: { type: 'playlist' }, + }) + + helper.clearNotification('my-category', 'id0') + + expect(notificationsCollection.operations).toHaveLength(0) + + await helper.saveAllToDatabase() + expect(notificationsCollection.operations).toEqual([ + { + type: 'findFetch', + args: [ + { + category: 'test:my-category', + 'relatedTo.studioId': context.studioId, + }, + ], + }, + ]) + }) + + it('getAllNotifications - empty db', async () => { + 
const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const helper = new NotificationsModelHelper(context, 'test', null) + + const notifications = await helper.getAllNotifications('my-category') + expect(notifications).toHaveLength(0) + + expect(notificationsCollection.operations).toEqual([ + { + type: 'findFetch', + args: [ + { + category: 'test:my-category', + 'relatedTo.studioId': context.studioId, + }, + ], + }, + ]) + + // Save performs some cleanup + notificationsCollection.clearOpLog() + await helper.saveAllToDatabase() + expect(notificationsCollection.operations).toHaveLength(0) + }) + + it('getAllNotifications - with documents', async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + await notificationsCollection.insertOne({ + _id: protectString('id0'), + category: 'test:my-category', + relatedTo: { + type: DBNotificationTargetType.PLAYLIST, + studioId: context.studioId, + playlistId: protectString('test'), + }, + created: 1, + modified: 2, + message: generateTranslation('test'), + severity: NoteSeverity.INFO, + localId: 'abc', + }) + notificationsCollection.clearOpLog() + + const helper = new NotificationsModelHelper(context, 'test', null) + + const notifications = await helper.getAllNotifications('my-category') + expect(notifications).toEqual([ + { + id: 'abc', + message: generateTranslation('test'), + severity: NoteSeverity.INFO, + relatedTo: { type: 'playlist' }, + } satisfies INotificationWithTarget, + ]) + + expect(notificationsCollection.operations).toEqual([ + { + type: 'findFetch', + args: [ + { + category: 'test:my-category', + 'relatedTo.studioId': context.studioId, + }, + ], + }, + ]) + + // Save performs some cleanup + notificationsCollection.clearOpLog() + await helper.saveAllToDatabase() + expect(notificationsCollection.operations).toHaveLength(0) + }) + + it('setNotification', async () => { + 
const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const helper = new NotificationsModelHelper(context, 'test', protectString('playlist0')) + + helper.setNotification('my-category', { + id: 'abc', + message: generateTranslation('test'), + severity: NoteSeverity.INFO, + relatedTo: { type: 'playlist' }, + }) + + expect(notificationsCollection.operations).toHaveLength(0) + + await helper.saveAllToDatabase() + expect(notificationsCollection.operations).toEqual([ + { + type: 'findFetch', + args: [ + { + category: 'test:my-category', + 'relatedTo.studioId': context.studioId, + }, + ], + }, + { + type: 'bulkWrite', + args: [1], + }, + { + type: 'replace', + args: ['b8ynzcdIk5RXEAkIHXShWJ26FTQ_'], + }, + ]) + }) + + it('clearAllNotifications', async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const helper = new NotificationsModelHelper(context, 'test', null) + + helper.clearAllNotifications('my-category') + + expect(notificationsCollection.operations).toHaveLength(0) + + await helper.saveAllToDatabase() + expect(notificationsCollection.operations).toEqual([ + { + type: 'findFetch', + args: [ + { + category: 'test:my-category', + 'relatedTo.studioId': context.studioId, + }, + ], + }, + { + type: 'bulkWrite', + args: [1], + }, + { + type: 'remove', + args: [ + { + category: 'test:my-category', + localId: { $nin: [] }, + 'relatedTo.studioId': 'mockStudio0', + }, + ], + }, + ]) + }) + }) + + describe('created timestamp persisted', () => { + function runTest(runGetAllNotifications: boolean) { + it(`loading existing: ${runGetAllNotifications}`, async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + const playlistId = protectString('playlist0') + + const expectedNotificationId = protectString('b8ynzcdIk5RXEAkIHXShWJ26FTQ_') // Taken from a 
previous run + + await notificationsCollection.insertOne({ + _id: expectedNotificationId, + category: 'test:my-category', + created: 12345, + localId: 'abc', + message: { + key: 'test2', + }, + modified: 6789, + relatedTo: { + playlistId: playlistId, + studioId: context.studioId, + type: DBNotificationTargetType.PLAYLIST, + }, + severity: NoteSeverity.WARNING, + }) + notificationsCollection.clearOpLog() + + { + const updateHelper = new NotificationsModelHelper(context, 'test', playlistId) + + if (runGetAllNotifications) { + // eslint-disable-next-line jest/no-conditional-expect + expect(await updateHelper.getAllNotifications('my-category')).toHaveLength(1) + } + + updateHelper.setNotification('my-category', { + id: 'abc', + message: generateTranslation('test2'), + severity: NoteSeverity.WARNING, + relatedTo: { type: 'playlist' }, + }) + await updateHelper.saveAllToDatabase() + expect(notificationsCollection.operations).toHaveLength(3) + notificationsCollection.clearOpLog() + } + + // Check what was in the db + expect(await notificationsCollection.findFetch()).toEqual([ + { + _id: expectedNotificationId, + category: 'test:my-category', + created: 12345, + localId: 'abc', + message: { + key: 'test2', + }, + modified: expect.any(Number), + relatedTo: { + playlistId: playlistId, + studioId: context.studioId, + type: DBNotificationTargetType.PLAYLIST, + }, + severity: NoteSeverity.WARNING, + }, + ] satisfies DBNotificationObj[]) + }) + } + + runTest(true) + runTest(false) + }) + + describe('notifications with different relatedTo', () => { + it(`type: playlist`, async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const playlistId = protectString('playlist0') + + const helper = new NotificationsModelHelper(context, 'test', playlistId) + + helper.setNotification('my-category', { + id: 'abc', + message: generateTranslation('test'), + severity: NoteSeverity.INFO, + relatedTo: { type: 
'playlist' }, + }) + + await helper.saveAllToDatabase() + + const doc = await notificationsCollection.findOne(protectString('b8ynzcdIk5RXEAkIHXShWJ26FTQ_')) + expect(doc).toBeTruthy() + expect(doc?.relatedTo).toEqual({ + type: DBNotificationTargetType.PLAYLIST, + studioId: context.studioId, + playlistId, + } satisfies DBNotificationTarget) + }) + + it(`type: playlist without it`, async () => { + const context = setupDefaultJobEnvironment() + + const helper = new NotificationsModelHelper(context, 'test', null) + + expect(() => + helper.setNotification('my-category', { + id: 'abc', + message: generateTranslation('test'), + severity: NoteSeverity.INFO, + relatedTo: { type: 'playlist' }, + }) + ).toThrow(/without a playlist/) + }) + + it(`type: rundown`, async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const rundownId = protectString('rundown0') + + const helper = new NotificationsModelHelper(context, 'test', null) + + helper.setNotification('my-category', { + id: 'abc', + message: generateTranslation('test'), + severity: NoteSeverity.INFO, + relatedTo: { type: 'rundown', rundownId }, + }) + + await helper.saveAllToDatabase() + + const doc = await notificationsCollection.findOne(protectString('b8ynzcdIk5RXEAkIHXShWJ26FTQ_')) + expect(doc).toBeTruthy() + expect(doc?.relatedTo).toEqual({ + type: DBNotificationTargetType.RUNDOWN, + studioId: context.studioId, + rundownId, + } satisfies DBNotificationTarget) + }) + + it(`type: partInstance`, async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const rundownId = protectString('rundown0') + const partInstanceId = protectString('partInstance0') + + const helper = new NotificationsModelHelper(context, 'test', null) + + helper.setNotification('my-category', { + id: 'abc', + message: generateTranslation('test'), + severity: NoteSeverity.INFO, + relatedTo: { + 
type: 'partInstance', + rundownId, + partInstanceId, + }, + }) + + await helper.saveAllToDatabase() + + const doc = await notificationsCollection.findOne(protectString('b8ynzcdIk5RXEAkIHXShWJ26FTQ_')) + expect(doc).toBeTruthy() + expect(doc?.relatedTo).toEqual({ + type: DBNotificationTargetType.PARTINSTANCE, + studioId: context.studioId, + rundownId, + partInstanceId, + } satisfies DBNotificationTarget) + }) + + it(`type: pieceInstance`, async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const rundownId = protectString('rundown0') + const partInstanceId = protectString('partInstance0') + const pieceInstanceId = protectString('pieceInstance0') + + const helper = new NotificationsModelHelper(context, 'test', null) + + helper.setNotification('my-category', { + id: 'abc', + message: generateTranslation('test'), + severity: NoteSeverity.INFO, + relatedTo: { + type: 'pieceInstance', + rundownId, + partInstanceId, + pieceInstanceId, + }, + }) + + await helper.saveAllToDatabase() + + const doc = await notificationsCollection.findOne(protectString('b8ynzcdIk5RXEAkIHXShWJ26FTQ_')) + expect(doc).toBeTruthy() + expect(doc?.relatedTo).toEqual({ + type: DBNotificationTargetType.PIECEINSTANCE, + studioId: context.studioId, + rundownId, + partInstanceId, + pieceInstanceId, + } satisfies DBNotificationTarget) + }) + + it('retrieve docs', async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + await notificationsCollection.insertOne({ + _id: protectString('id1'), + category: 'test:my-category', + relatedTo: { + type: DBNotificationTargetType.PLAYLIST, + studioId: context.studioId, + playlistId: protectString('playlist0'), + }, + created: 1, + modified: 2, + message: generateTranslation('test playlist'), + severity: NoteSeverity.INFO, + localId: 'test-playlist', + }) + await notificationsCollection.insertOne({ + _id: 
protectString('id2'), + category: 'test:my-category', + relatedTo: { + type: DBNotificationTargetType.RUNDOWN, + studioId: context.studioId, + rundownId: protectString('rundown0'), + }, + created: 1, + modified: 2, + message: generateTranslation('test rundown'), + severity: NoteSeverity.WARNING, + localId: 'test-rundown', + }) + await notificationsCollection.insertOne({ + _id: protectString('id3'), + category: 'test:my-category', + relatedTo: { + type: DBNotificationTargetType.PARTINSTANCE, + studioId: context.studioId, + rundownId: protectString('rundown0'), + partInstanceId: protectString('partInstance0'), + }, + created: 1, + modified: 2, + message: generateTranslation('test partInstance'), + severity: NoteSeverity.ERROR, + localId: 'test-partInstance', + }) + await notificationsCollection.insertOne({ + _id: protectString('id4'), + category: 'test:my-category', + relatedTo: { + type: DBNotificationTargetType.PIECEINSTANCE, + studioId: context.studioId, + rundownId: protectString('rundown0'), + partInstanceId: protectString('partInstance0'), + pieceInstanceId: protectString('pieceInstance0'), + }, + created: 1, + modified: 2, + message: generateTranslation('test pieceInstance'), + severity: NoteSeverity.INFO, + localId: 'test-pieceInstance', + }) + notificationsCollection.clearOpLog() + + const helper = new NotificationsModelHelper(context, 'test', null) + + const notifications = await helper.getAllNotifications('my-category') + expect(notifications).toHaveLength(4) + + expect(notifications).toMatchObject([ + { + id: 'test-playlist', + relatedTo: { type: 'playlist' }, + }, + { + id: 'test-rundown', + relatedTo: { + type: 'rundown', + rundownId: protectString('rundown0'), + }, + }, + { + id: 'test-partInstance', + relatedTo: { + type: 'partInstance', + rundownId: protectString('rundown0'), + partInstanceId: protectString('partInstance0'), + }, + }, + { + id: 'test-pieceInstance', + relatedTo: { + type: 'pieceInstance', + rundownId: protectString('rundown0'), + 
partInstanceId: protectString('partInstance0'), + pieceInstanceId: protectString('pieceInstance0'), + }, + }, + ] satisfies Partial[]) + }) + }) + + it('setNotification and clearAllNotifications', async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const helper = new NotificationsModelHelper(context, 'test', protectString('playlist0')) + + helper.setNotification('my-category', { + id: 'abc', + message: generateTranslation('test'), + severity: NoteSeverity.INFO, + relatedTo: { type: 'playlist' }, + }) + + expect(notificationsCollection.operations).toHaveLength(0) + + await helper.saveAllToDatabase() + expect(notificationsCollection.operations).toEqual([ + { + type: 'findFetch', + args: [ + { + category: 'test:my-category', + 'relatedTo.studioId': context.studioId, + }, + ], + }, + { + type: 'bulkWrite', + args: [1], + }, + { + type: 'replace', + args: ['b8ynzcdIk5RXEAkIHXShWJ26FTQ_'], + }, + ]) + notificationsCollection.clearOpLog() + + helper.clearAllNotifications('my-category') + + expect(notificationsCollection.operations).toHaveLength(0) + + await helper.saveAllToDatabase() + expect(notificationsCollection.operations).toEqual([ + { + type: 'findFetch', + args: [ + { + category: 'test:my-category', + 'relatedTo.studioId': context.studioId, + }, + ], + }, + { + type: 'bulkWrite', + args: [1], + }, + { + type: 'remove', + args: [ + { + category: 'test:my-category', + localId: { $nin: [] }, + 'relatedTo.studioId': 'mockStudio0', + }, + ], + }, + ]) + }) + + it('clearAllNotifications then setNotification', async () => { + const context = setupDefaultJobEnvironment() + const notificationsCollection = context.mockCollections.Notifications + + const helper = new NotificationsModelHelper(context, 'test', protectString('playlist0')) + + helper.clearAllNotifications('my-category') + helper.setNotification('my-category', { + id: 'abc', + message: generateTranslation('test'), + severity: 
NoteSeverity.INFO, + relatedTo: { type: 'playlist' }, + }) + + expect(notificationsCollection.operations).toHaveLength(0) + + await helper.saveAllToDatabase() + expect(notificationsCollection.operations).toEqual([ + { + type: 'findFetch', + args: [ + { + category: 'test:my-category', + 'relatedTo.studioId': context.studioId, + }, + ], + }, + { + type: 'bulkWrite', + args: [2], + }, + { + type: 'replace', + args: ['b8ynzcdIk5RXEAkIHXShWJ26FTQ_'], + }, + { + type: 'remove', + args: [ + { + category: 'test:my-category', + localId: { $nin: ['abc'] }, + 'relatedTo.studioId': 'mockStudio0', + }, + ], + }, + ]) + }) +}) diff --git a/packages/job-worker/src/notifications/util.ts b/packages/job-worker/src/notifications/util.ts new file mode 100644 index 00000000000..c75bd00d06b --- /dev/null +++ b/packages/job-worker/src/notifications/util.ts @@ -0,0 +1,13 @@ +import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage' +import type { INotification } from './NotificationsModel' +import { INoteBase } from '@sofie-automation/corelib/dist/dataModel/Notes' +import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { getHash } from '@sofie-automation/corelib/dist/hash' + +export function convertNoteToNotification(note: INoteBase, blueprintIds: BlueprintId[]): INotification { + return { + id: getHash(JSON.stringify(note.message)), // Notes don't have an id, so fake one from the message + severity: note.type, + message: wrapTranslatableMessageFromBlueprints(note.message, blueprintIds), + } +} diff --git a/packages/job-worker/src/playout/abPlayback/__tests__/routeSetDisabling.spec.ts b/packages/job-worker/src/playout/abPlayback/__tests__/routeSetDisabling.spec.ts index ca88fa1bff0..88228e04ec9 100644 --- a/packages/job-worker/src/playout/abPlayback/__tests__/routeSetDisabling.spec.ts +++ b/packages/job-worker/src/playout/abPlayback/__tests__/routeSetDisabling.spec.ts @@ -26,6 +26,35 @@ describe('route set 
disabling ab players', () => { expect(result).toEqual(DEFAULT_PLAYERS) }) + test('mismatch of playerId types', () => { + const routesets: Record = { + route1: { + name: '', + active: false, + behavior: StudioRouteBehavior.TOGGLE, + routes: [], + abPlayers: [ + { + poolName: POOL_NAME, + playerId: '1', // because ui field is always a string + }, + ], + }, + } + + const players: ABPlayerDefinition[] = [ + { + playerId: 1, // number because blueprint defined it as such + }, + { playerId: 2 }, + ] + + const result = runDisablePlayersFiltering(routesets, players) + + const expectedPlayers = players.filter((p) => p.playerId !== 1) + expect(result).toEqual(expectedPlayers) + }) + describe('single routeset per player', () => { const ROUTESETS_SEPARATE: Record = { pl1: { diff --git a/packages/job-worker/src/playout/abPlayback/index.ts b/packages/job-worker/src/playout/abPlayback/index.ts index d803ecac460..9ab7534d06b 100644 --- a/packages/job-worker/src/playout/abPlayback/index.ts +++ b/packages/job-worker/src/playout/abPlayback/index.ts @@ -16,10 +16,16 @@ import { applyAbPlayerObjectAssignments } from './applyAssignments' import { AbSessionHelper } from './abSessionHelper' import { ShowStyleContext } from '../../blueprints/context' import { logger } from '../../logging' -import { ABPlayerDefinition } from '@sofie-automation/blueprints-integration' +import { ABPlayerDefinition, NoteSeverity } from '@sofie-automation/blueprints-integration' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { abPoolFilterDisabled, findPlayersInRouteSets } from './routeSetDisabling' +import type { INotification } from '../../notifications/NotificationsModel' +import { generateTranslation } from '@sofie-automation/corelib/dist/lib' +export interface ABPlaybackResult { + assignments: Record + notifications: INotification[] +} /** * Resolve and apply AB-playback for the given timeline * @param context Context of the job @@ -39,8 
+45,12 @@ export function applyAbPlaybackForTimeline( playlist: ReadonlyDeep, resolvedPieces: ResolvedPieceInstance[], timelineObjects: OnGenerateTimelineObjExt[] -): Record { - if (!blueprint.blueprint.getAbResolverConfiguration) return {} +): ABPlaybackResult { + if (!blueprint.blueprint.getAbResolverConfiguration) + return { + assignments: {}, + notifications: [], + } const blueprintContext = new ShowStyleContext( { @@ -72,6 +82,8 @@ export function applyAbPlaybackForTimeline( const now = getCurrentTime() + const notifications: INotification[] = [] + const abConfiguration = blueprint.blueprint.getAbResolverConfiguration(blueprintContext) const routeSetMembers = findPlayersInRouteSets(applyAndValidateOverrides(context.studio.routeSetsWithOverrides).obj) @@ -107,6 +119,13 @@ export function applyAbPlaybackForTimeline( logger.warn( `ABPlayback failed to assign sessions for "${poolName}": ${JSON.stringify(assignments.failedRequired)}` ) + notifications.push({ + id: `failedRequired-${poolName}`, + severity: NoteSeverity.ERROR, + message: generateTranslation('Failed to assign players for {{count}} sessions', { + count: assignments.failedRequired.length, + }), + }) } if (assignments.failedOptional.length > 0) { logger.info( @@ -114,6 +133,13 @@ export function applyAbPlaybackForTimeline( assignments.failedOptional )}` ) + notifications.push({ + id: `failedOptional-${poolName}`, + severity: NoteSeverity.WARNING, + message: generateTranslation('Failed to assign players for {{count}} non-critical sessions', { + count: assignments.failedOptional.length, + }), + }) } newAbSessionsResult[poolName] = applyAbPlayerObjectAssignments( @@ -130,5 +156,8 @@ export function applyAbPlaybackForTimeline( sendTrace(endTrace(influxTrace)) if (span) span.end() - return newAbSessionsResult + return { + assignments: newAbSessionsResult, + notifications, + } } diff --git a/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts 
b/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts index 64f9ccd0d86..d6be0a4ab65 100644 --- a/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts +++ b/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts @@ -1,11 +1,12 @@ -import type { ABPlayerDefinition, AbPlayerId } from '@sofie-automation/blueprints-integration' +import type { ABPlayerDefinition } from '@sofie-automation/blueprints-integration' import type { StudioRouteSet } from '@sofie-automation/corelib/dist/dataModel/Studio' import { logger } from '../../logging' /** * Map> + * Note: this explicitly uses a string for the playerId, to avoid issues with types for values from the ui */ -type MembersOfRouteSets = Map> +type MembersOfRouteSets = Map> export function findPlayersInRouteSets(routeSets: Record): MembersOfRouteSets { const routeSetEnabledPlayers: MembersOfRouteSets = new Map() @@ -18,8 +19,8 @@ export function findPlayersInRouteSets(routeSets: Record } // Make sure player is marked as enabled - const currentState = poolEntry.get(abPlayer.playerId) - poolEntry.set(abPlayer.playerId, currentState || routeSet.active) + const currentState = poolEntry.get(String(abPlayer.playerId)) + poolEntry.set(String(abPlayer.playerId), currentState || routeSet.active) } } return routeSetEnabledPlayers @@ -35,7 +36,7 @@ export function abPoolFilterDisabled( // Filter out any disabled players: return players.filter((player) => { - const playerState = poolRouteSetEnabledPlayers.get(player.playerId) + const playerState = poolRouteSetEnabledPlayers.get(String(player.playerId)) if (playerState === false) { logger.silly(`AB Pool ${poolName} playerId : ${player.playerId} are disabled`) return false diff --git a/packages/job-worker/src/playout/adlibAction.ts b/packages/job-worker/src/playout/adlibAction.ts index 2168ba716c8..e0907953fc7 100644 --- a/packages/job-worker/src/playout/adlibAction.ts +++ b/packages/job-worker/src/playout/adlibAction.ts @@ -15,10 +15,11 @@ import { 
runJobWithPlaylistLock } from './lock' import { updateTimeline } from './timeline/generate' import { performTakeToNextedPart } from './take' import { ActionUserData } from '@sofie-automation/blueprints-integration' -import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { DBRundownPlaylist, SelectedPartInstance } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { logger } from '../logging' import { AdLibActionId, + BlueprintId, BucketAdLibActionId, RundownBaselineAdLibActionId, } from '@sofie-automation/corelib/dist/dataModel/Ids' @@ -29,6 +30,10 @@ import { PartAndPieceInstanceActionService, applyActionSideEffects, } from '../blueprints/context/services/PartAndPieceInstanceActionService' +import { convertNoteToNotification } from '../notifications/util' +import type { INoteBase } from '@sofie-automation/corelib/dist/dataModel/Notes' +import { NotificationsModelHelper } from '../notifications/NotificationsModelHelper' +import type { INotificationsModel } from '../notifications/NotificationsModel' /** * Execute an AdLib Action @@ -105,7 +110,7 @@ export async function executeAdlibActionAndSaveModel( } try { - await executeDataStoreAction( + const dataStoreActionNotes = await executeDataStoreAction( context, playlist, rundown, @@ -114,6 +119,23 @@ export async function executeAdlibActionAndSaveModel( watchedPackages, actionParameters ) + + // Save the notes immediately, as they are not dependent on the action and want to be saved even if the action fails + if (dataStoreActionNotes.length > 0) { + const notificationHelper = new NotificationsModelHelper(context, `playout:${playlist._id}`, playlist._id) + storeNotificationsForCategory( + notificationHelper, + `dataStoreAction:${getRandomId()}`, // Always append and leave existing notes + blueprint.blueprintId, + dataStoreActionNotes, + playlist.currentPartInfo ?? 
playlist.nextPartInfo + ) + + // Save the notifications asynchonously + notificationHelper.saveAllToDatabase().catch((err) => { + logger.error(`Saving notifications from executeDatastoreAction failed: ${stringifyError(err)}`) + }) + } } catch (err) { logger.error(`Error in showStyleBlueprint.executeDatastoreAction: ${stringifyError(err)}`) } @@ -182,7 +204,6 @@ export async function executeActionInner( identifier: `playlist=${playlist._id},rundown=${rundown.rundown._id},currentPartInstance=${ playlist.currentPartInfo?.partInstanceId },execution=${getRandomId()}`, - tempSendUserNotesIntoBlackHole: true, // TODO-CONTEXT store these notes }, context, playoutModel, @@ -215,6 +236,15 @@ export async function executeActionInner( throw UserError.fromUnknown(err) } + // Store any notes generated by the action + storeNotificationsForCategory( + playoutModel, + `adlibAction:${getRandomId()}`, // Always append and leave existing notes + blueprint.blueprintId, + actionContext.notes, + playlist.currentPartInfo ?? 
playlist.nextPartInfo + ) + await applyAnyExecutionSideEffects(context, playoutModel, actionContext, now) return { @@ -250,37 +280,61 @@ async function executeDataStoreAction( blueprint: ReadonlyDeep, watchedPackages: WatchedPackagesHelper, actionParameters: ExecuteActionParameters -) { +): Promise { const executeDataStoreAction = blueprint.blueprint.executeDataStoreAction - if (executeDataStoreAction) { - // now we can execute any datastore actions - const actionContext = new DatastoreActionExecutionContext( - { - name: `${rundown.name}(${playlist.name})`, - identifier: `playlist=${playlist._id},rundown=${rundown._id},currentPartInstance=${ - playlist.currentPartInfo?.partInstanceId - },execution=${getRandomId()}`, - tempSendUserNotesIntoBlackHole: true, // TODO-CONTEXT store these notes - }, - context, - showStyle, - watchedPackages - ) - logger.info(`Executing Datastore AdlibAction "${actionParameters.actionId}"`) - logger.silly( - `Datastore AdlibAction "${actionParameters.actionId}" Payload: ${JSON.stringify(actionParameters.userData)}` + if (!executeDataStoreAction) return [] + + // now we can execute any datastore actions + const actionContext = new DatastoreActionExecutionContext( + { + name: `${rundown.name}(${playlist.name})`, + identifier: `playlist=${playlist._id},rundown=${rundown._id},currentPartInstance=${ + playlist.currentPartInfo?.partInstanceId + },execution=${getRandomId()}`, + }, + context, + showStyle, + watchedPackages + ) + logger.info(`Executing Datastore AdlibAction "${actionParameters.actionId}"`) + logger.silly( + `Datastore AdlibAction "${actionParameters.actionId}" Payload: ${JSON.stringify(actionParameters.userData)}` + ) + + try { + await executeDataStoreAction( + actionContext, + actionParameters.actionId, + actionParameters.userData, + actionParameters.triggerMode ) - try { - await executeDataStoreAction( - actionContext, - actionParameters.actionId, - actionParameters.userData, - actionParameters.triggerMode - ) - } catch (err) { 
- logger.error(`Error in showStyleBlueprint.executeDatastoreAction: ${stringifyError(err)}`) - throw err - } + return actionContext.notes + } catch (err) { + logger.error(`Error in showStyleBlueprint.executeDatastoreAction: ${stringifyError(err)}`) + throw err + } +} + +function storeNotificationsForCategory( + notificationHelper: INotificationsModel, + notificationCategory: string, + blueprintId: BlueprintId, + notes: INoteBase[], + partInstanceInfo: SelectedPartInstance | null +) { + for (const note of notes) { + notificationHelper.setNotification(notificationCategory, { + ...convertNoteToNotification(note, [blueprintId]), + relatedTo: partInstanceInfo + ? { + type: 'partInstance', + rundownId: partInstanceInfo.rundownId, + partInstanceId: partInstanceInfo.partInstanceId, + } + : { + type: 'playlist', + }, + }) } } diff --git a/packages/job-worker/src/playout/lib.ts b/packages/job-worker/src/playout/lib.ts index c2c67bf0e2f..54b0fd1b79f 100644 --- a/packages/job-worker/src/playout/lib.ts +++ b/packages/job-worker/src/playout/lib.ts @@ -120,6 +120,13 @@ export function resetPartInstancesWithPieceInstances( } ) : undefined, + allToReset.length > 0 + ? 
context.directCollections.Notifications.remove({ + 'relatedTo.studioId': context.studioId, + 'relatedTo.rundownId': { $in: rundownIds }, + 'relatedTo.partInstanceId': { $in: allToReset }, + }) + : undefined, ]) }) } diff --git a/packages/job-worker/src/playout/model/PlayoutModel.ts b/packages/job-worker/src/playout/model/PlayoutModel.ts index a48d17ce9f7..8b02c1404f8 100644 --- a/packages/job-worker/src/playout/model/PlayoutModel.ts +++ b/packages/job-worker/src/playout/model/PlayoutModel.ts @@ -30,6 +30,7 @@ import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { PlayoutPieceInstanceModel } from './PlayoutPieceInstanceModel' import { PieceInstanceWithTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' import { PartCalculatedTimings } from '@sofie-automation/corelib/dist/playout/timings' +import type { INotificationsModel } from '../../notifications/NotificationsModel' export type DeferredFunction = (playoutModel: PlayoutModel) => void | Promise export type DeferredAfterSaveFunction = (playoutModel: PlayoutModelReadonly) => void | Promise @@ -179,7 +180,7 @@ export interface PlayoutModelReadonly extends StudioPlayoutModelBaseReadonly { /** * A view of a `RundownPlaylist` and its content in a `Studio` */ -export interface PlayoutModel extends PlayoutModelReadonly, StudioPlayoutModelBase, BaseModel { +export interface PlayoutModel extends PlayoutModelReadonly, StudioPlayoutModelBase, BaseModel, INotificationsModel { /** * Temporary hack for debug logging */ diff --git a/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts b/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts index fcbf74e4915..4894a18d78c 100644 --- a/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts +++ b/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts @@ -2,7 +2,6 @@ import { PieceId, PieceInstanceId, RundownPlaylistActivationId } from '@sofie-au import { ReadonlyDeep } from 'type-fest' 
import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { PieceInstance, PieceInstancePiece } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' -import { PartNote } from '@sofie-automation/corelib/dist/dataModel/Notes' import { IBlueprintMutatablePart, PieceLifespan, Time } from '@sofie-automation/blueprints-integration' import { PartCalculatedTimings } from '@sofie-automation/corelib/dist/playout/timings' import { PlayoutPieceInstanceModel } from './PlayoutPieceInstanceModel' @@ -45,13 +44,6 @@ export interface PlayoutPartInstanceModel { */ snapshotRestore(snapshot: PlayoutPartInstanceModelSnapshot): void - /** - * Add some user facing notes for this PartInstance - * Future: it is only possible to add these, there is no way to 'replace' or remove them - * @param notes New notes to add - */ - appendNotes(notes: PartNote[]): void - /** * Block a take out of this PartInstance from happening until the specified timestamp * This can be necessary when an uninteruptable Piece is being played out diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts index fd778dabecb..6c9aa7a2180 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts @@ -61,6 +61,7 @@ import { QuickLoopService } from '../services/QuickLoopService' import { calculatePartTimings, PartCalculatedTimings } from '@sofie-automation/corelib/dist/playout/timings' import { PieceInstanceWithTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { NotificationsModelHelper } from '../../../notifications/NotificationsModelHelper' export class PlayoutModelReadonlyImpl implements PlayoutModelReadonly { public readonly playlistId: RundownPlaylistId @@ 
-262,6 +263,7 @@ export class PlayoutModelReadonlyImpl implements PlayoutModelReadonly { */ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements PlayoutModel, DatabasePersistedModel { readonly #baselineHelper: StudioBaselineHelper + readonly #notificationsHelper: NotificationsModelHelper #deferredBeforeSaveFunctions: DeferredFunction[] = [] #deferredAfterSaveFunctions: DeferredAfterSaveFunction[] = [] @@ -306,6 +308,7 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou context.trackCache(this) this.#baselineHelper = new StudioBaselineHelper(context) + this.#notificationsHelper = new NotificationsModelHelper(context, `playout:${playlist._id}`, playlistId) } public get displayName(): string { @@ -562,16 +565,39 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou rundownId: { $in: rundownIds }, }) : undefined, + allToRemove.length > 0 + ? this.context.directCollections.Notifications.remove({ + 'relatedTo.studioId': this.context.studioId, + 'relatedTo.rundownId': { $in: rundownIds }, + 'relatedTo.partInstanceId': { $in: allToRemove }, + }) + : undefined, ]) }) } removeUntakenPartInstances(): void { + const removedPartInstanceIds: PartInstanceId[] = [] + for (const partInstance of this.olderPartInstances) { if (!partInstance.partInstance.isTaken) { this.allPartInstances.set(partInstance.partInstance._id, null) + removedPartInstanceIds.push(partInstance.partInstance._id) } } + + // Ensure there are no notifications left for these partInstances + if (removedPartInstanceIds.length > 0) { + this.deferAfterSave(async (playoutModel) => { + const rundownIds = playoutModel.getRundownIds() + + await this.context.directCollections.Notifications.remove({ + 'relatedTo.studioId': this.context.studioId, + 'relatedTo.rundownId': { $in: rundownIds }, + 'relatedTo.partInstanceId': { $in: removedPartInstanceIds }, + }) + }) + } } /** @@ -641,6 +667,7 @@ export class PlayoutModelImpl extends 
PlayoutModelReadonlyImpl implements Playou writeAdlibTestingSegments(this.context, this.rundownsImpl), this.#baselineHelper.saveAllToDatabase(), this.context.saveRouteSetChanges(), + this.#notificationsHelper.saveAllToDatabase(), ]) this.#playlistHasChanged = false @@ -802,6 +829,29 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou return this.quickLoopService.getSegmentsBetweenMarkers(start, end) } + /** Notifications */ + + async getAllNotifications( + ...args: Parameters + ): ReturnType { + return this.#notificationsHelper.getAllNotifications(...args) + } + clearNotification( + ...args: Parameters + ): ReturnType { + return this.#notificationsHelper.clearNotification(...args) + } + setNotification( + ...args: Parameters + ): ReturnType { + return this.#notificationsHelper.setNotification(...args) + } + clearAllNotifications( + ...args: Parameters + ): ReturnType { + return this.#notificationsHelper.clearAllNotifications(...args) + } + /** Lifecycle */ /** @deprecated */ diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts index 9c295d32fee..fbc77a4b69d 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts @@ -14,7 +14,6 @@ import { calculatePartExpectedDurationWithTransition, PartCalculatedTimings, } from '@sofie-automation/corelib/dist/playout/timings' -import { PartNote } from '@sofie-automation/corelib/dist/dataModel/Notes' import { IBlueprintPieceType, PieceLifespan, Time } from '@sofie-automation/blueprints-integration' import { PlayoutMutatablePart, @@ -214,10 +213,6 @@ export class PlayoutPartInstanceModelImpl implements PlayoutPartInstanceModel { } } - appendNotes(notes: PartNote[]): void { - this.#setPartValue('notes', [...(this.partInstanceImpl.part.notes 
?? []), ...clone(notes)]) - } - blockTakeUntil(timestamp: Time | null): void { this.#compareAndSetPartInstanceValue('blockTakeUntil', timestamp ?? undefined) } diff --git a/packages/job-worker/src/playout/setNext.ts b/packages/job-worker/src/playout/setNext.ts index de53256f54e..a49b21dfae9 100644 --- a/packages/job-worker/src/playout/setNext.ts +++ b/packages/job-worker/src/playout/setNext.ts @@ -1,4 +1,4 @@ -import { assertNever, getRandomId } from '@sofie-automation/corelib/dist/lib' +import { assertNever, getRandomId, generateTranslation } from '@sofie-automation/corelib/dist/lib' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { DBPart, isPartPlayable } from '@sofie-automation/corelib/dist/dataModel/Part' import { JobContext } from '../jobs' @@ -29,6 +29,8 @@ import { PartAndPieceInstanceActionService, applyActionSideEffects, } from '../blueprints/context/services/PartAndPieceInstanceActionService' +import { NoteSeverity } from '@sofie-automation/blueprints-integration' +import { convertNoteToNotification } from '../notifications/util' /** * Set or clear the nexted part, from a given PartInstance, or SelectNextPartResult @@ -184,6 +186,8 @@ async function executeOnSetAsNextCallback( newPartInstance: PlayoutPartInstanceModel, context: JobContext ) { + const NOTIFICATION_CATEGORY = 'onSetAsNext' + const rundownOfNextPart = playoutModel.getRundown(newPartInstance.partInstance.rundownId) if (!rundownOfNextPart) return null @@ -195,14 +199,16 @@ async function executeOnSetAsNextCallback( rundownOfNextPart.rundown.showStyleBaseId ) + const rundownId = rundownOfNextPart.rundown._id + const partInstanceId = playoutModel.playlist.nextPartInfo?.partInstanceId + const watchedPackagesHelper = WatchedPackagesHelper.empty(context) const onSetAsNextContext = new OnSetAsNextContext( { name: `${rundownOfNextPart.rundown.name}(${playoutModel.playlist.name})`, - identifier: `playlist=${playoutModel.playlist._id},rundown=${ - 
rundownOfNextPart.rundown._id - },currentPartInstance=${playoutModel.playlist.currentPartInfo?.partInstanceId},execution=${getRandomId()}`, - tempSendUserNotesIntoBlackHole: true, // TODO-CONTEXT store these notes + identifier: `playlist=${playoutModel.playlist._id},rundown=${rundownId},currentPartInstance=${ + playoutModel.playlist.currentPartInfo?.partInstanceId + },nextPartInstance=${partInstanceId},execution=${getRandomId()}`, }, context, playoutModel, @@ -211,11 +217,41 @@ async function executeOnSetAsNextCallback( new PartAndPieceInstanceActionService(context, playoutModel, showStyle, rundownOfNextPart) ) + // Clear any existing notifications for this partInstance. This will clear any from the previous setAsNext + playoutModel.clearAllNotifications(NOTIFICATION_CATEGORY) + try { await blueprint.blueprint.onSetAsNext(onSetAsNextContext) await applyOnSetAsNextSideEffects(context, playoutModel, onSetAsNextContext) + + for (const note of onSetAsNextContext.notes) { + // Update the notifications. Even though these are related to a partInstance, they will be cleared on the next take + playoutModel.setNotification(NOTIFICATION_CATEGORY, { + ...convertNoteToNotification(note, [blueprint.blueprintId]), + relatedTo: partInstanceId + ? { + type: 'partInstance', + rundownId, + partInstanceId, + } + : { type: 'playlist' }, + }) + } } catch (err) { logger.error(`Error in showStyleBlueprint.onSetAsNext: ${stringifyError(err)}`) + + playoutModel.setNotification(NOTIFICATION_CATEGORY, { + id: 'onSetNextError', + severity: NoteSeverity.ERROR, + message: generateTranslation('An error while setting the next Part, playout may be impacted'), + relatedTo: partInstanceId + ? 
{ + type: 'partInstance', + rundownId, + partInstanceId, + } + : { type: 'playlist' }, + }) } return onSetAsNextContext.pendingMoveNextPart diff --git a/packages/job-worker/src/playout/take.ts b/packages/job-worker/src/playout/take.ts index e8e704296f1..d1cc8d721c1 100644 --- a/packages/job-worker/src/playout/take.ts +++ b/packages/job-worker/src/playout/take.ts @@ -10,11 +10,11 @@ import { resetPartInstancesWithPieceInstances } from './lib' import { selectNextPart } from './selectNextPart' import { setNextPart } from './setNext' import { getCurrentTime } from '../lib' -import { PartEndState, VTContent } from '@sofie-automation/blueprints-integration' +import { NoteSeverity, PartEndState, VTContent } from '@sofie-automation/blueprints-integration' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { ReadonlyDeep } from 'type-fest' import { getResolvedPiecesForCurrentPartInstance } from './resolvedPieces' -import { clone, getRandomId } from '@sofie-automation/corelib/dist/lib' +import { clone, generateTranslation, getRandomId } from '@sofie-automation/corelib/dist/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { updateTimeline } from './timeline/generate' import { OnTakeContext, PartEventContext, RundownContext } from '../blueprints/context' @@ -33,6 +33,7 @@ import { applyActionSideEffects, } from '../blueprints/context/services/PartAndPieceInstanceActionService' import { PlayoutRundownModel } from './model/PlayoutRundownModel' +import { convertNoteToNotification } from '../notifications/util' /** * Take the currently Next:ed Part (start playing it) @@ -288,18 +289,24 @@ async function executeOnTakeCallback( blueprint: ReadonlyObjectDeep, currentRundown: PlayoutRundownModel ): Promise<{ isTakeAborted: boolean }> { + const NOTIFICATION_CATEGORY = 'onTake' + let isTakeAborted = false if (blueprint.blueprint.onTake) { + const rundownId = currentRundown.rundown._id + const 
partInstanceId = playoutModel.playlist.nextPartInfo?.partInstanceId + if (!partInstanceId) throw new Error('Cannot call blueprint onTake when there is no next partInstance!') + + // Clear any existing notifications for this partInstance. This will clear any from the previous take + playoutModel.clearAllNotifications(NOTIFICATION_CATEGORY) + const watchedPackagesHelper = WatchedPackagesHelper.empty(context) const onSetAsNextContext = new OnTakeContext( { name: `${currentRundown.rundown.name}(${playoutModel.playlist.name})`, - identifier: `playlist=${playoutModel.playlist._id},rundown=${ - currentRundown.rundown._id - },currentPartInstance=${ + identifier: `playlist=${playoutModel.playlist._id},rundown=${rundownId},currentPartInstance=${ playoutModel.playlist.currentPartInfo?.partInstanceId - },execution=${getRandomId()}`, - tempSendUserNotesIntoBlackHole: true, // TODO-CONTEXT store these notes + },nextPartInstance=${partInstanceId},execution=${getRandomId()}`, }, context, playoutModel, @@ -311,8 +318,31 @@ async function executeOnTakeCallback( await blueprint.blueprint.onTake(onSetAsNextContext) await applyOnTakeSideEffects(context, playoutModel, onSetAsNextContext) isTakeAborted = onSetAsNextContext.isTakeAborted + + for (const note of onSetAsNextContext.notes) { + // Update the notifications. 
Even though these are related to a partInstance, they will be cleared on the next take + playoutModel.setNotification(NOTIFICATION_CATEGORY, { + ...convertNoteToNotification(note, [blueprint.blueprintId]), + relatedTo: { + type: 'partInstance', + rundownId, + partInstanceId, + }, + }) + } } catch (err) { logger.error(`Error in showStyleBlueprint.onTake: ${stringifyError(err)}`) + + playoutModel.setNotification(NOTIFICATION_CATEGORY, { + id: 'onTakeError', + severity: NoteSeverity.ERROR, + message: generateTranslation('An error while performing the take, playout may be impacted'), + relatedTo: { + type: 'partInstance', + rundownId, + partInstanceId, + }, + }) } } return { isTakeAborted } diff --git a/packages/job-worker/src/playout/timeline/generate.ts b/packages/job-worker/src/playout/timeline/generate.ts index 77856edf601..8c71749fd68 100644 --- a/packages/job-worker/src/playout/timeline/generate.ts +++ b/packages/job-worker/src/playout/timeline/generate.ts @@ -384,6 +384,16 @@ async function getTimelineRundown( timelineObjs ) + // Store the new notes in the model + const notificationCategory = 'abPlayback' + playoutModel.clearAllNotifications(notificationCategory) + for (const notification of newAbSessionsResult.notifications) { + playoutModel.setNotification(notificationCategory, { + ...notification, + relatedTo: { type: 'playlist' }, + }) + } + let tlGenRes: BlueprintResultTimeline | undefined if (blueprint.blueprint.onTimelineGenerate) { const span = context.startSpan('blueprint.onTimelineGenerate') @@ -408,7 +418,7 @@ async function getTimelineRundown( playoutModel.setOnTimelineGenerateResult( tlGenRes?.persistentState, - newAbSessionsResult, + newAbSessionsResult.assignments, blueprintContext.abSessionsHelper.knownSessions ) } catch (err) { diff --git a/packages/job-worker/src/rundownPlaylists.ts b/packages/job-worker/src/rundownPlaylists.ts index b77a47d0d79..9306cc458a8 100644 --- a/packages/job-worker/src/rundownPlaylists.ts +++ 
b/packages/job-worker/src/rundownPlaylists.ts @@ -5,7 +5,13 @@ import { ForceQuickLoopAutoNext, QuickLoopMarkerType, } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { clone, getHash, getRandomString, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import { + clone, + getHash, + getRandomString, + normalizeArrayToMap, + generateTranslation, +} from '@sofie-automation/corelib/dist/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' @@ -16,7 +22,11 @@ import { RemovePlaylistProps, } from '@sofie-automation/corelib/dist/worker/studio' import { ReadonlyDeep } from 'type-fest' -import { BlueprintResultRundownPlaylist, IBlueprintRundown } from '@sofie-automation/blueprints-integration' +import { + BlueprintResultRundownPlaylist, + IBlueprintRundown, + NoteSeverity, +} from '@sofie-automation/blueprints-integration' import { JobContext } from './jobs' import { logger } from './logging' import { resetRundownPlaylist } from './playout/lib' @@ -34,18 +44,20 @@ import { import { allowedToMoveRundownOutOfPlaylist } from './rundown' import { PlaylistTiming } from '@sofie-automation/corelib/dist/playout/rundownTiming' import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' -import { RundownLock } from './jobs/lock' +import { PlaylistLock, RundownLock } from './jobs/lock' import { runWithRundownLock } from './ingest/lock' import { convertRundownToBlueprints } from './blueprints/context/lib' import { sortRundownIDsInPlaylist } from '@sofie-automation/corelib/dist/playout/playlist' +import { INoteBase } from '@sofie-automation/corelib/dist/dataModel/Notes' /** * Debug: Remove a Playlist and all its contents */ export async function handleRemoveRundownPlaylist(context: JobContext, data: RemovePlaylistProps): 
Promise { - const removed = await runJobWithPlaylistLock(context, data, async (playlist) => { + const removed = await runJobWithPlaylistLock(context, data, async (playlist, lock) => { if (playlist) { - await context.directCollections.RundownPlaylists.remove(playlist._id) + await removePlaylistFromDb(context, lock) + return true } else { return false @@ -160,6 +172,19 @@ export async function removeRundownFromDb(context: JobContext, lock: RundownLock context.directCollections.PieceInstances.remove({ rundownId: rundownId }), context.directCollections.RundownBaselineAdLibActions.remove({ rundownId: rundownId }), context.directCollections.RundownBaselineObjects.remove({ rundownId: rundownId }), + context.directCollections.Notifications.remove({ 'relatedTo.rundownId': rundownId }), + ]) +} + +export async function removePlaylistFromDb(context: JobContext, lock: PlaylistLock): Promise { + if (!lock.isLocked) throw new Error(`Can't delete Playlist without lock: ${lock.toString()}`) + + const playlistId = lock.playlistId + + await Promise.allSettled([ + context.directCollections.RundownPlaylists.remove({ _id: playlistId }), + + context.directCollections.Notifications.remove({ 'relatedTo.playlistId': playlistId }), ]) } @@ -172,27 +197,38 @@ export function produceRundownPlaylistInfoFromRundown( rundowns: ReadonlyDeep> ): DBRundownPlaylist { let playlistInfo: BlueprintResultRundownPlaylist | null = null + + let notes: INoteBase[] = [] + try { if (studioBlueprint?.blueprint?.getRundownPlaylistInfo) { + const blueprintContext = new StudioUserContext( + { + name: 'produceRundownPlaylistInfoFromRundown', + identifier: `studioId=${context.studioId},playlistId=${playlistId},rundownIds=${rundowns + .map((r) => r._id) + .join(',')}`, + }, + context.studio, + context.getStudioBlueprintConfig() + ) + playlistInfo = studioBlueprint.blueprint.getRundownPlaylistInfo( - new StudioUserContext( - { - name: 'produceRundownPlaylistInfoFromRundown', - identifier: 
`studioId=${context.studioId},playlistId=${playlistId},rundownIds=${rundowns - .map((r) => r._id) - .join(',')}`, - tempSendUserNotesIntoBlackHole: true, - }, - context.studio, - context.getStudioBlueprintConfig() - ), + blueprintContext, rundowns.map(convertRundownToBlueprints), playlistExternalId ) + + notes = blueprintContext.notes } } catch (err) { logger.error(`Error in studioBlueprint.getRundownPlaylistInfo: ${stringifyError(err)}`) playlistInfo = null + + notes.push({ + type: NoteSeverity.ERROR, + message: generateTranslation(`Internal Error generating RundownPlaylist`), + }) } const rundownsInDefaultOrder = sortDefaultRundownInPlaylistOrder(rundowns) @@ -241,6 +277,14 @@ export function produceRundownPlaylistInfoFromRundown( } } + // Update the notes on the playlist + newPlaylist.notes = notes.map((note) => ({ + ...note, + origin: { + name: 'produceRundownPlaylistInfoFromRundown', + }, + })) + if (!newPlaylist.rundownRanksAreSetInSofie) { if (playlistInfo?.order) { // The blueprints gave us an order diff --git a/packages/job-worker/src/studio/cleanup.ts b/packages/job-worker/src/studio/cleanup.ts index 5dcb3e94694..c433b07f5cb 100644 --- a/packages/job-worker/src/studio/cleanup.ts +++ b/packages/job-worker/src/studio/cleanup.ts @@ -2,6 +2,7 @@ import { runJobWithPlaylistLock } from '../playout/lock' import { JobContext } from '../jobs' import { runJobWithStudioPlayoutModel } from './lock' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { removePlaylistFromDb } from '../rundownPlaylists' /** * Cleanup any RundownPlaylists that contain no Rundowns @@ -15,14 +16,14 @@ export async function handleRemoveEmptyPlaylists(context: JobContext, _data: voi await Promise.allSettled( tmpPlaylists.map(async (tmpPlaylist) => // Take the playlist lock, to ensure we don't fight something else - runJobWithPlaylistLock(context, { playlistId: tmpPlaylist._id }, async (playlist) => { + runJobWithPlaylistLock(context, { playlistId: 
tmpPlaylist._id }, async (playlist, playlistLock) => { if (playlist) { const rundowns: Pick[] = await context.directCollections.Rundowns.findFetch( { playlistId: playlist._id }, { projection: { _id: 1 } } ) if (rundowns.length === 0) { - await context.directCollections.RundownPlaylists.remove({ _id: playlist._id }) + await removePlaylistFromDb(context, playlistLock) } } }) diff --git a/packages/meteor-lib/src/api/pubsub.ts b/packages/meteor-lib/src/api/pubsub.ts index e7e3fb6382d..42670c8df0e 100644 --- a/packages/meteor-lib/src/api/pubsub.ts +++ b/packages/meteor-lib/src/api/pubsub.ts @@ -36,6 +36,7 @@ import { CorelibPubSub, CorelibPubSubCollections, CorelibPubSubTypes } from '@so import { CollectionName } from '@sofie-automation/corelib/dist/dataModel/Collections' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { PartInstance } from '../collections/PartInstances' +import type { DBNotificationObj } from '@sofie-automation/corelib/dist/dataModel/Notifications' /** * Ids of possible DDP subscriptions for the UI only @@ -110,6 +111,14 @@ export enum MeteorPubSub { * Fetch all translation bundles */ translationsBundles = 'translationsBundles', + /** + * Fetch notifications for playlist + */ + notificationsForRundownPlaylist = 'notificationsForRundownPlaylist', + /** + * Fetch notifications for rundown + */ + notificationsForRundown = 'notificationsForRundown', // custom publications: @@ -214,6 +223,11 @@ export interface MeteorPubSubTypes { [MeteorPubSub.organization]: (organizationId: OrganizationId | null, token?: string) => CollectionName.Organizations [MeteorPubSub.buckets]: (studioId: StudioId, bucketId: BucketId | null, token?: string) => CollectionName.Buckets [MeteorPubSub.translationsBundles]: (token?: string) => CollectionName.TranslationsBundles + [MeteorPubSub.notificationsForRundown]: (studioId: StudioId, rundownId: RundownId) => CollectionName.Notifications + [MeteorPubSub.notificationsForRundownPlaylist]: ( + studioId: 
StudioId, + playlistId: RundownPlaylistId + ) => CollectionName.Notifications // custom publications: @@ -286,6 +300,7 @@ export type MeteorPubSubCollections = { [CollectionName.TranslationsBundles]: TranslationsBundle [CollectionName.Users]: DBUser [CollectionName.ExpectedPlayoutItems]: ExpectedPlayoutItem + [CollectionName.Notifications]: DBNotificationObj [CollectionName.MediaWorkFlows]: MediaWorkFlow [CollectionName.MediaWorkFlowSteps]: MediaWorkFlowStep diff --git a/packages/meteor-lib/src/lib.ts b/packages/meteor-lib/src/lib.ts index c2af24b1f4a..bbe5a9daa6a 100644 --- a/packages/meteor-lib/src/lib.ts +++ b/packages/meteor-lib/src/lib.ts @@ -1,5 +1,3 @@ -import { ITranslatableMessage } from '@sofie-automation/corelib/dist/TranslatableMessage' - export enum LogLevel { SILLY = 'silly', DEBUG = 'debug', @@ -9,16 +7,3 @@ export enum LogLevel { ERROR = 'error', NONE = 'crit', } - -/** Generate the translation for a string, to be applied later when it gets rendered */ -export function generateTranslation( - key: string, - args?: { [k: string]: any }, - namespaces?: string[] -): ITranslatableMessage { - return { - key, - args, - namespaces, - } -} diff --git a/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts b/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts index e63e456f05c..eb2b9e47cbe 100644 --- a/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts +++ b/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts @@ -23,8 +23,7 @@ import { ReactivePlaylistActionContext } from './actionFactory' import { PartId, RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { IWrappedAdLibBase } from '@sofie-automation/shared-lib/dist/input-gateway/deviceTriggerPreviews' import { MountedAdLibTriggerType } from '../api/MountedTriggers' -import { assertNever } from '@sofie-automation/corelib/dist/lib' -import { generateTranslation } from '../lib' +import { assertNever, generateTranslation } from 
'@sofie-automation/corelib/dist/lib' import { FindOptions } from '../collections/lib' import { TriggersContext } from './triggersContext' diff --git a/packages/webui/src/client/collections/index.ts b/packages/webui/src/client/collections/index.ts index ef7784e023a..0a8e5dd5bd6 100644 --- a/packages/webui/src/client/collections/index.ts +++ b/packages/webui/src/client/collections/index.ts @@ -41,6 +41,7 @@ import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataM import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' +import { DBNotificationObj } from '@sofie-automation/corelib/dist/dataModel/Notifications' export const AdLibActions = createSyncReadOnlyMongoCollection(CollectionName.AdLibActions) @@ -76,6 +77,8 @@ export const MediaWorkFlowSteps = createSyncReadOnlyMongoCollection(CollectionName.Notifications) + export const PackageContainerStatuses = createSyncReadOnlyMongoCollection( CollectionName.PackageContainerStatuses ) diff --git a/packages/webui/src/client/lib/notifications/notifications.ts b/packages/webui/src/client/lib/notifications/notifications.ts index e1df7695e4f..0d1a0aff330 100644 --- a/packages/webui/src/client/lib/notifications/notifications.ts +++ b/packages/webui/src/client/lib/notifications/notifications.ts @@ -135,7 +135,7 @@ export class NotifierHandle { } } -type NotificationsSource = RundownId | SegmentId | string | undefined +export type NotificationsSource = RundownId | SegmentId | string | undefined /** * Singleton handling all the notifications. 
* diff --git a/packages/webui/src/client/lib/tempLib.ts b/packages/webui/src/client/lib/tempLib.ts index b173dabb98b..494b3d6ce7b 100644 --- a/packages/webui/src/client/lib/tempLib.ts +++ b/packages/webui/src/client/lib/tempLib.ts @@ -28,6 +28,7 @@ export { groupByToMapFunc, formatDurationAsTimecode, formatDateAsTimecode, + generateTranslation, } from '@sofie-automation/corelib/dist/lib' export type { Complete } from '@sofie-automation/corelib/dist/lib' -export { LogLevel, generateTranslation } from '@sofie-automation/meteor-lib/dist/lib' +export { LogLevel } from '@sofie-automation/meteor-lib/dist/lib' diff --git a/packages/webui/src/client/lib/uncaughtErrorHandler.ts b/packages/webui/src/client/lib/uncaughtErrorHandler.ts index fec2309fe76..509f2939ff7 100644 --- a/packages/webui/src/client/lib/uncaughtErrorHandler.ts +++ b/packages/webui/src/client/lib/uncaughtErrorHandler.ts @@ -47,6 +47,9 @@ function uncaughtErrorHandler(errorObj: any, context: string) { stringContent += stringifyError(errorObj) } + // Ignore any react deprecation warnings, as they add a lot of noise + if (stringContent.includes('will be removed in the next major release')) return + const caughtErrorStack = new Error('') if (caughtErrorStack.stack) { stringContent += `\nCaught stack: ${caughtErrorStack.stack}` diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index cb8c94e7462..c19a275eb62 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -27,7 +27,7 @@ import { QuickLoopMarker, RundownHoldState, } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { Rundown, getRundownNrcsName } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { DBRundown, Rundown, getRundownNrcsName } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { DBSegment, SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { 
StudioRouteSet } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' @@ -1342,6 +1342,31 @@ export function RundownView(props: Readonly): JSX.Element { } }, [playlistId]) + auxSubsReady.push( + useSubscriptionIfEnabled( + MeteorPubSub.notificationsForRundownPlaylist, + !!playlistId && !!playlistStudioId, + playlistStudioId || protectString(''), + playlistId + ) + ) + + useTracker(() => { + const rundowns = Rundowns.find( + { playlistId }, + { + fields: { + _id: 1, + studioId: 1, + }, + } + ).fetch() as Pick[] + + for (const rundown of rundowns) { + meteorSubscribe(MeteorPubSub.notificationsForRundown, rundown.studioId, rundown._id) + } + }, [playlistId]) + const subsReady = requiredSubsReady.findIndex((ready) => !ready) === -1 return } diff --git a/packages/webui/src/client/ui/RundownView/RundownNotifier.tsx b/packages/webui/src/client/ui/RundownView/RundownNotifier.tsx index 803c25f7425..56999eca0e8 100644 --- a/packages/webui/src/client/ui/RundownView/RundownNotifier.tsx +++ b/packages/webui/src/client/ui/RundownView/RundownNotifier.tsx @@ -9,6 +9,7 @@ import { Notification, NoticeLevel, getNoticeLevelForPieceStatus, + NotificationsSource, } from '../../lib/notifications/notifications' import { WithManagedTracker } from '../../lib/reactiveData/reactiveDataHelper' import { reactiveData } from '../../lib/reactiveData/reactiveData' @@ -30,7 +31,7 @@ import { UIPieceContentStatus, UISegmentPartNote } from '@sofie-automation/meteo import { isTranslatableMessage, translateMessage } from '@sofie-automation/corelib/dist/TranslatableMessage' import { NoteSeverity, StatusCode } from '@sofie-automation/blueprints-integration' import { getIgnorePieceContentStatus } from '../../lib/localStorage' -import { RundownPlaylists } from '../../collections' +import { Notifications, RundownPlaylists } from '../../collections' import { UIStudio } from '@sofie-automation/meteor-lib/dist/api/studios' import { 
PartId, @@ -42,11 +43,14 @@ import { SegmentId, StudioId, } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { UIPieceContentStatuses, UISegmentPartNotes } from '../Collections' +import { UIPartInstances, UIPieceContentStatuses, UISegmentPartNotes } from '../Collections' import { RundownPlaylistCollectionUtil } from '../../collections/rundownPlaylistUtil' import { logger } from '../../lib/logging' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' import { UserPermissionsContext, UserPermissions } from '../UserPermissions' +import { PartInstance } from '@sofie-automation/meteor-lib/dist/collections/PartInstances' +import { assertNever } from '@sofie-automation/corelib/dist/lib' +import { DBNotificationTargetType } from '@sofie-automation/corelib/dist/dataModel/Notifications' export const onRONotificationClick = new ReactiveVar<((e: RONotificationEvent) => void) | undefined>(undefined) export const reloadRundownPlaylistClick = new ReactiveVar<((e: any) => void) | undefined>(undefined) @@ -91,6 +95,9 @@ class RundownViewNotifier extends WithManagedTracker { private _rundownStatus: Record = {} private _rundownStatusDep: Tracker.Dependency + private _dbNotifications: Record = {} + private _dbNotificationsDep: Tracker.Dependency + private _deviceStatus: Record = {} private _deviceStatusDep: Tracker.Dependency @@ -110,6 +117,7 @@ class RundownViewNotifier extends WithManagedTracker { this._notificationList = new NotificationList([]) this._mediaStatusDep = new Tracker.Dependency() this._rundownStatusDep = new Tracker.Dependency() + this._dbNotificationsDep = new Tracker.Dependency() this._deviceStatusDep = new Tracker.Dependency() this._rundownImportVersionStatusDep = new Tracker.Dependency() this._unsentExternalMessageStatusDep = new Tracker.Dependency() @@ -123,6 +131,7 @@ class RundownViewNotifier extends WithManagedTracker { if (playlistId) { this.reactiveRundownStatus(playlistId) this.reactiveVersionAndConfigStatus(playlistId) + 
this.reactiveDbNotifications(playlistId) this.autorun(() => { if (studio) { @@ -150,6 +159,7 @@ class RundownViewNotifier extends WithManagedTracker { this._mediaStatusDep.depend() this._deviceStatusDep.depend() this._rundownStatusDep.depend() + this._dbNotificationsDep.depend() this._notesDep.depend() this._rundownImportVersionStatusDep.depend() this._unsentExternalMessageStatusDep.depend() @@ -159,6 +169,7 @@ class RundownViewNotifier extends WithManagedTracker { ...Object.values(this._deviceStatus), ...Object.values(this._notes), ...Object.values(this._rundownStatus), + ...Object.values(this._dbNotifications), this._rundownImportVersionStatus, this._rundownStudioConfigStatus, ...Object.values(this._rundownShowStyleConfigStatuses), @@ -195,6 +206,28 @@ class RundownViewNotifier extends WithManagedTracker { const playlist = RundownPlaylists.findOne(playlistId) const rundowns = rRundowns.get() + if (playlist?.notes) { + const playlistNotesId = playlist._id + '_playlistnotes_' + playlist.notes.forEach((note) => { + const noteId = playlistNotesId + note.origin.name + '_' + note.message + '_' + note.type + const notificationFromNote = new Notification( + noteId, + getNoticeLevelForNoteSeverity(note.type), + note.message, + 'RundownPlaylist', + getCurrentTime(), + true, + [], + -1 + ) + if (!Notification.isEqual(this._rundownStatus[noteId], notificationFromNote)) { + this._rundownStatus[noteId] = notificationFromNote + this._rundownStatusDep.changed() + } + newNoteIds.push(noteId) + }) + } + if (playlist && rundowns) { rundowns.forEach((rundown) => { const unsyncedId = rundown._id + '_unsynced' @@ -293,6 +326,78 @@ class RundownViewNotifier extends WithManagedTracker { }) } + private reactiveDbNotifications(playlistId: RundownPlaylistId) { + let oldNoteIds: Array = [] + + const rRundowns = reactiveData.getRRundowns(playlistId, { + fields: { + _id: 1, + }, + }) as ReactiveVar[]> + this.autorun(() => { + const newNoteIds: Array = [] + + const dbNotifications = 
Notifications.find({ + $or: [ + { + 'relatedTo.rundownId': { $in: rRundowns.get().map((r) => r._id) }, + }, + { + 'relatedTo.playlistId': playlistId, + }, + ], + }).fetch() + + for (const dbNotification of dbNotifications) { + let source: NotificationsSource + + const relatedTo = dbNotification.relatedTo + switch (relatedTo.type) { + case DBNotificationTargetType.RUNDOWN: + source = relatedTo.rundownId + break + case DBNotificationTargetType.PARTINSTANCE: + case DBNotificationTargetType.PIECEINSTANCE: { + const partInstanceDoc = UIPartInstances.findOne(relatedTo.partInstanceId, { + fields: { segmentId: 1 }, + }) as Pick | undefined + source = partInstanceDoc?.segmentId ?? relatedTo.rundownId + break + } + case DBNotificationTargetType.PLAYLIST: + // No mapping + source = undefined + break + default: + assertNever(relatedTo) + break + } + + const id = `db_notification_${dbNotification._id}` + const uiNotification = new Notification( + id, + getNoticeLevelForNoteSeverity(dbNotification.severity), + dbNotification.message, + source, + dbNotification.created, + true, + [] + ) + newNoteIds.push(id) + + if (!Notification.isEqual(this._dbNotifications[id], uiNotification)) { + this._dbNotifications[id] = uiNotification + this._dbNotificationsDep.changed() + } + } + _.difference(oldNoteIds, newNoteIds).forEach((item) => { + delete this._dbNotifications[item] + this._dbNotificationsDep.changed() + }) + oldNoteIds = newNoteIds + }) + } + private reactivePeripheralDeviceStatus(studioId: StudioId | undefined) { let oldDevItemIds: PeripheralDeviceId[] = [] let reactivePeripheralDevices: diff --git a/packages/webui/src/client/ui/SegmentContainer/getReactivePieceNoteCountsForSegment.tsx b/packages/webui/src/client/ui/SegmentContainer/getReactivePieceNoteCountsForSegment.tsx index c47c9e40f91..b96d9e01795 100644 --- a/packages/webui/src/client/ui/SegmentContainer/getReactivePieceNoteCountsForSegment.tsx +++ 
b/packages/webui/src/client/ui/SegmentContainer/getReactivePieceNoteCountsForSegment.tsx @@ -4,8 +4,11 @@ import { MongoFieldSpecifierOnes } from '@sofie-automation/corelib/dist/mongo' import { UIPieceContentStatus, UISegmentPartNote } from '@sofie-automation/meteor-lib/dist/api/rundownNotifications' import { PieceStatusCode } from '@sofie-automation/corelib/dist/dataModel/Piece' import { getIgnorePieceContentStatus } from '../../lib/localStorage' -import { UIPieceContentStatuses, UISegmentPartNotes } from '../Collections' +import { UIPartInstances, UIPieceContentStatuses, UISegmentPartNotes } from '../Collections' import { SegmentNoteCounts, SegmentUi } from './withResolvedSegment' +import { Notifications } from '../../collections' +import { PartInstance } from '@sofie-automation/meteor-lib/dist/collections/PartInstances' +import { DBNotificationObj } from '@sofie-automation/corelib/dist/dataModel/Notifications' export function getReactivePieceNoteCountsForSegment(segment: SegmentUi): SegmentNoteCounts { const segmentNoteCounts: SegmentNoteCounts = { @@ -64,5 +67,37 @@ export function getReactivePieceNoteCountsForSegment(segment: SegmentUi): Segmen } } + // Find any relevant notifications + const partInstancesForSegment = UIPartInstances.find( + { segmentId: segment._id, reset: { $ne: true } }, + { + fields: { + _id: 1, + }, + } + ).fetch() as Array> + const rawNotifications = Notifications.find( + { + $or: [ + { 'relatedTo.segmentId': segment._id }, + { + 'relatedTo.partInstanceId': { $in: partInstancesForSegment.map((p) => p._id) }, + }, + ], + }, + { + fields: { + severity: 1, + }, + } + ).fetch() as Array> + for (const notification of rawNotifications) { + if (notification.severity === NoteSeverity.ERROR) { + segmentNoteCounts.criticial++ + } else if (notification.severity === NoteSeverity.WARNING) { + segmentNoteCounts.warning++ + } + } + return segmentNoteCounts } diff --git a/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx 
b/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx index e2c1c5025dc..f6dc204d505 100644 --- a/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx @@ -115,9 +115,9 @@ function AbPlayerRow({
@@ -131,9 +131,9 @@ function AbPlayerRow({ )} diff --git a/packages/webui/vite.config.mts b/packages/webui/vite.config.mts index 86666f92e3e..69b894f48e5 100644 --- a/packages/webui/vite.config.mts +++ b/packages/webui/vite.config.mts @@ -35,6 +35,13 @@ export default defineConfig({ // Add all sofie paths, ensuring they use unix path syntax ...commonJsPaths.map((p) => p.replaceAll('\\', '/')), + // Commonjs monorepo dependencies + '@sofie-automation/blueprints-integration', + ], + exclude: [ + // Add all sofie paths, ensuring they use unix path syntax + ...commonJsPaths.map((p) => p.replaceAll('\\', '/')), + // Commonjs monorepo dependencies '@sofie-automation/blueprints-integration', ], From 62d693b7f72831d7098c93dc740b18f29f0db38c Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 30 Oct 2024 09:54:15 +0000 Subject: [PATCH 50/81] chore: fix type error --- .../src/topics/__tests__/activePlaylist.spec.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts b/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts index a64d1e5a357..352df3cff26 100644 --- a/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts +++ b/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts @@ -241,6 +241,7 @@ describe('ActivePlaylistTopic', () => { timing: { timingMode: PlaylistTimingType.None, }, + quickLoop: undefined, } // eslint-disable-next-line @typescript-eslint/unbound-method From e885d66b85a8544f2233d98e41461d04a4a157e4 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Thu, 31 Oct 2024 13:29:01 +0000 Subject: [PATCH 51/81] feat: meteor 2.16 (#26) --- meteor/.meteor/packages | 10 +- meteor/.meteor/release | 2 +- meteor/.meteor/versions | 50 ++-- .../server/__tests__/_testEnvironment.test.ts | 2 +- meteor/server/api/ExternalMessageQueue.ts | 27 +- meteor/server/api/client.ts | 2 + .../deviceTriggers/RundownContentObserver.ts | 54 ++-- 
.../api/deviceTriggers/RundownsObserver.ts | 59 ++-- .../api/deviceTriggers/StudioObserver.ts | 126 ++++---- meteor/server/api/deviceTriggers/observer.ts | 6 +- .../mosDevice/__tests__/actions.test.ts | 6 +- meteor/server/api/ingest/rundownInput.ts | 48 ++- .../api/peripheralDevice/executeFunction.ts | 2 +- meteor/server/api/rest/v0/index.ts | 2 +- meteor/server/api/studio/api.ts | 31 +- meteor/server/collections/collection.ts | 4 +- .../implementations/asyncCollection.ts | 12 +- .../collections/implementations/mock.ts | 8 +- .../implementations/readonlyWrapper.ts | 8 +- meteor/server/collections/lib.ts | 2 +- meteor/server/coreSystem/index.ts | 4 +- meteor/server/lib/ReactiveStore.ts | 128 -------- meteor/server/lib/customPublication/index.ts | 2 +- .../optimizedObserverArray.ts | 5 +- .../optimizedObserverBase.ts | 9 +- .../optimizedObserverCollection.ts | 5 +- .../server/lib/customPublication/publish.ts | 2 +- meteor/server/lib/lib.ts | 13 +- meteor/server/publications/_publications.ts | 4 +- .../blueprintUpgradeStatus/publication.ts | 6 +- .../upgradesContentObserver.ts | 19 +- meteor/server/publications/buckets.ts | 2 +- .../publications/deviceTriggersPreview.ts | 4 +- .../lib/__tests__/debounce.test.ts | 273 ++++++++++++++++++ .../publications/lib/__tests__/lib.test.ts | 63 ++++ .../lib/__tests__/observerGroup.test.ts | 6 +- .../lib/__tests__/rundownsObserver.test.ts | 16 +- meteor/server/publications/lib/debounce.ts | 129 +++++++++ meteor/server/publications/{ => lib}/lib.ts | 43 ++- .../server/publications/lib/observerChain.ts | 17 +- .../server/publications/lib/observerGroup.ts | 6 +- .../publications/lib/rundownsObserver.ts | 64 ++-- meteor/server/publications/organization.ts | 2 +- .../expectedPackages/contentObserver.ts | 69 +++-- .../expectedPackages/publication.ts | 5 +- .../packageManager/packageContainers.ts | 9 +- .../packageManager/playoutContext.ts | 9 +- .../partInstancesUI/publication.ts | 13 +- .../partInstancesUI/rundownContentObserver.ts | 
22 +- .../publications/partsUI/publication.ts | 8 +- .../partsUI/rundownContentObserver.ts | 24 +- .../server/publications/peripheralDevice.ts | 2 +- .../publications/peripheralDeviceForDevice.ts | 9 +- .../bucket/bucketContentObserver.ts | 88 +++--- .../bucket/publication.ts | 6 +- .../rundown/publication.ts | 8 +- .../rundown/rundownContentObserver.ts | 110 ++++--- meteor/server/publications/rundown.ts | 2 +- meteor/server/publications/rundownPlaylist.ts | 2 +- .../segmentPartNotesUI/publication.ts | 8 +- .../rundownContentObserver.ts | 19 +- meteor/server/publications/showStyle.ts | 2 +- meteor/server/publications/showStyleUI.ts | 12 +- meteor/server/publications/studio.ts | 5 +- meteor/server/publications/studioUI.ts | 4 +- meteor/server/publications/system.ts | 2 +- meteor/server/publications/timeline.ts | 5 +- .../publications/translationsBundles.ts | 2 +- .../server/publications/triggeredActionsUI.ts | 4 +- packages/meteor-lib/src/collections/lib.ts | 14 + 70 files changed, 1145 insertions(+), 601 deletions(-) delete mode 100644 meteor/server/lib/ReactiveStore.ts create mode 100644 meteor/server/publications/lib/__tests__/debounce.test.ts create mode 100644 meteor/server/publications/lib/__tests__/lib.test.ts create mode 100644 meteor/server/publications/lib/debounce.ts rename meteor/server/publications/{ => lib}/lib.ts (77%) diff --git a/meteor/.meteor/packages b/meteor/.meteor/packages index 04d09220d29..4e5355b0700 100644 --- a/meteor/.meteor/packages +++ b/meteor/.meteor/packages @@ -9,18 +9,18 @@ # but you can also edit it by hand. 
meteor-base@1.5.1 # Packages every Meteor app needs to have -mongo@1.16.7 # The database Meteor supports right now +mongo@1.16.10 # The database Meteor supports right now reactive-var@1.0.12 # Reactive variable for tracker -ecmascript@0.16.7 # Enable ECMAScript2015+ syntax in app code -typescript@4.9.4 # Enable TypeScript syntax in .ts and .tsx modules +ecmascript@0.16.8 # Enable ECMAScript2015+ syntax in app code +typescript@4.9.5 # Enable TypeScript syntax in .ts and .tsx modules shell-server@0.5.0 # Server-side component of the `meteor shell` command -tracker@1.3.2 # Meteor's client-side reactive programming library +tracker@1.3.3 # Meteor's client-side reactive programming library dynamic-import@0.7.3 ostrio:meteor-root -accounts-password@2.3.4 +accounts-password@2.4.0 julusian:meteor-elastic-apm@2.5.2 zodern:types diff --git a/meteor/.meteor/release b/meteor/.meteor/release index 6641d0478a1..5152abe9d58 100644 --- a/meteor/.meteor/release +++ b/meteor/.meteor/release @@ -1 +1 @@ -METEOR@2.13.3 +METEOR@2.16 diff --git a/meteor/.meteor/versions b/meteor/.meteor/versions index d337a7f4b4d..23b868e06f6 100644 --- a/meteor/.meteor/versions +++ b/meteor/.meteor/versions @@ -1,29 +1,29 @@ -accounts-base@2.2.8 -accounts-password@2.3.4 +accounts-base@2.2.11 +accounts-password@2.4.0 allow-deny@1.1.1 autoupdate@1.8.0 -babel-compiler@7.10.4 +babel-compiler@7.10.5 babel-runtime@1.5.1 base64@1.0.12 binary-heap@1.0.11 -boilerplate-generator@1.7.1 +boilerplate-generator@1.7.2 callback-hook@1.5.1 -check@1.3.2 +check@1.4.1 ddp@1.4.1 -ddp-client@2.6.1 -ddp-common@1.4.0 -ddp-rate-limiter@1.2.0 -ddp-server@2.6.2 +ddp-client@2.6.2 +ddp-common@1.4.1 +ddp-rate-limiter@1.2.1 +ddp-server@2.7.1 diff-sequence@1.1.2 dynamic-import@0.7.3 -ecmascript@0.16.7 +ecmascript@0.16.8 ecmascript-runtime@0.8.1 ecmascript-runtime-client@0.12.1 ecmascript-runtime-server@0.11.0 ejson@1.1.3 -email@2.2.5 +email@2.2.6 es5-shim@4.8.0 -fetch@0.1.3 +fetch@0.1.4 geojson-utils@1.0.11 hot-code-push@1.0.4 
id-map@1.1.1 @@ -31,35 +31,35 @@ inter-process-messaging@0.1.1 julusian:meteor-elastic-apm@2.5.2 kschingiz:meteor-measured@1.0.3 localstorage@1.2.0 -logging@1.3.2 -meteor@1.11.3 +logging@1.3.4 +meteor@1.11.5 meteor-base@1.5.1 -minimongo@1.9.3 -modern-browsers@0.1.9 -modules@0.19.0 +minimongo@1.9.4 +modern-browsers@0.1.10 +modules@0.20.0 modules-runtime@0.13.1 -mongo@1.16.7 +mongo@1.16.10 mongo-decimal@0.1.3 mongo-dev-server@1.1.0 mongo-id@1.0.8 -npm-mongo@4.16.0 +npm-mongo@4.17.2 ordered-dict@1.1.0 ostrio:meteor-root@1.1.1 promise@0.12.2 random@1.2.1 rate-limit@1.1.1 -react-fast-refresh@0.2.7 +react-fast-refresh@0.2.8 reactive-var@1.0.12 reload@1.3.1 retry@1.1.0 routepolicy@1.1.1 sha@1.0.9 shell-server@0.5.0 -socket-stream-client@0.5.1 -tracker@1.3.2 -typescript@4.9.4 -underscore@1.0.13 +socket-stream-client@0.5.2 +tracker@1.3.3 +typescript@4.9.5 +underscore@1.6.1 url@1.3.2 -webapp@1.13.5 +webapp@1.13.8 webapp-hashing@1.1.1 zodern:types@1.0.9 diff --git a/meteor/server/__tests__/_testEnvironment.test.ts b/meteor/server/__tests__/_testEnvironment.test.ts index 9fde690091b..549cba2c9b7 100644 --- a/meteor/server/__tests__/_testEnvironment.test.ts +++ b/meteor/server/__tests__/_testEnvironment.test.ts @@ -153,7 +153,7 @@ describe('Basic test of test environment', () => { const studios = await Studios.findFetchAsync({}) expect(studios).toHaveLength(1) - const observer = Studios.observeChanges({ _id: protectString('abc') }, {}) + const observer = await Studios.observeChanges({ _id: protectString('abc') }, {}) expect(observer).toBeTruthy() await Studios.insertAsync({ diff --git a/meteor/server/api/ExternalMessageQueue.ts b/meteor/server/api/ExternalMessageQueue.ts index 8f1fe9c284b..51e08009856 100644 --- a/meteor/server/api/ExternalMessageQueue.ts +++ b/meteor/server/api/ExternalMessageQueue.ts @@ -1,7 +1,7 @@ import { Meteor } from 'meteor/meteor' import { check } from '../lib/check' import { StatusCode } from '@sofie-automation/blueprints-integration' -import { 
deferAsync, getCurrentTime } from '../lib/lib' +import { deferAsync, getCurrentTime, MeteorStartupAsync } from '../lib/lib' import { registerClassToMeteorMethods } from '../methods' import { NewExternalMessageQueueAPI, @@ -50,18 +50,19 @@ function updateExternalMessageQueueStatus(): void { } } -ExternalMessageQueue.observeChanges( - { - sent: { $not: { $gt: 0 } }, - tryCount: { $gt: 3 }, - }, - { - added: updateExternalMessageQueueStatus, - changed: updateExternalMessageQueueStatus, - removed: updateExternalMessageQueueStatus, - } -) -Meteor.startup(() => { +MeteorStartupAsync(async () => { + await ExternalMessageQueue.observeChanges( + { + sent: { $not: { $gt: 0 } }, + tryCount: { $gt: 3 }, + }, + { + added: updateExternalMessageQueueStatus, + changed: updateExternalMessageQueueStatus, + removed: updateExternalMessageQueueStatus, + } + ) + updateExternalMessageQueueStatus() // triggerdoMessageQueue(5000) }) diff --git a/meteor/server/api/client.ts b/meteor/server/api/client.ts index 9aeb3a71567..38839f55029 100644 --- a/meteor/server/api/client.ts +++ b/meteor/server/api/client.ts @@ -422,6 +422,8 @@ export namespace ServerClientAPI { class ServerClientAPIClass extends MethodContextAPI implements NewClientAPI { async clientLogger(type: string, ...args: string[]): Promise { + triggerWriteAccessBecauseNoCheckNecessary() + const loggerFunction: LeveledLogMethodFixed = (logger as any)[type] || logger.log loggerFunction(args.join(', ')) diff --git a/meteor/server/api/deviceTriggers/RundownContentObserver.ts b/meteor/server/api/deviceTriggers/RundownContentObserver.ts index 37581603203..04e73f99876 100644 --- a/meteor/server/api/deviceTriggers/RundownContentObserver.ts +++ b/meteor/server/api/deviceTriggers/RundownContentObserver.ts @@ -23,6 +23,7 @@ import { rundownPlaylistFieldSpecifier, segmentFieldSpecifier, } from './reactiveContentCache' +import { waitForAllObserversReady } from '../../publications/lib/lib' const REACTIVITY_DEBOUNCE = 20 @@ -37,13 +38,7 @@ export 
class RundownContentObserver { } #disposed = false - constructor( - rundownPlaylistId: RundownPlaylistId, - showStyleBaseId: ShowStyleBaseId, - rundownIds: RundownId[], - onChanged: ChangedHandler - ) { - logger.silly(`Creating RundownContentObserver for playlist "${rundownPlaylistId}"`) + private constructor(onChanged: ChangedHandler) { const { cache, cancel: cancelCache } = createReactiveContentCache(() => { this.#cleanup = onChanged(cache) if (this.#disposed) this.#cleanup() @@ -51,19 +46,40 @@ export class RundownContentObserver { this.#cache = cache this.#cancelCache = cancelCache + } + + static async create( + rundownPlaylistId: RundownPlaylistId, + showStyleBaseId: ShowStyleBaseId, + rundownIds: RundownId[], + onChanged: ChangedHandler + ): Promise { + logger.silly(`Creating RundownContentObserver for playlist "${rundownPlaylistId}"`) + + const observer = new RundownContentObserver(onChanged) + + await observer.initObservers(rundownPlaylistId, showStyleBaseId, rundownIds) - this.#observers = [ - RundownPlaylists.observeChanges(rundownPlaylistId, cache.RundownPlaylists.link(), { + return observer + } + + private async initObservers( + rundownPlaylistId: RundownPlaylistId, + showStyleBaseId: ShowStyleBaseId, + rundownIds: RundownId[] + ) { + this.#observers = await waitForAllObserversReady([ + RundownPlaylists.observeChanges(rundownPlaylistId, this.#cache.RundownPlaylists.link(), { projection: rundownPlaylistFieldSpecifier, }), - ShowStyleBases.observeChanges(showStyleBaseId, cache.ShowStyleBases.link()), + ShowStyleBases.observeChanges(showStyleBaseId, this.#cache.ShowStyleBases.link()), TriggeredActions.observeChanges( { showStyleBaseId: { $in: [showStyleBaseId, null], }, }, - cache.TriggeredActions.link() + this.#cache.TriggeredActions.link() ), Segments.observeChanges( { @@ -71,7 +87,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.Segments.link(), + this.#cache.Segments.link(), { projection: segmentFieldSpecifier, } @@ -82,7 +98,7 
@@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.Parts.link(), + this.#cache.Parts.link(), { projection: partFieldSpecifier, } @@ -96,7 +112,7 @@ export class RundownContentObserver { $ne: true, }, }, - cache.PartInstances.link(), + this.#cache.PartInstances.link(), { projection: partInstanceFieldSpecifier, } @@ -107,7 +123,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.RundownBaselineAdLibActions.link(), + this.#cache.RundownBaselineAdLibActions.link(), { projection: adLibActionFieldSpecifier, } @@ -118,7 +134,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.RundownBaselineAdLibPieces.link(), + this.#cache.RundownBaselineAdLibPieces.link(), { projection: adLibPieceFieldSpecifier, } @@ -129,7 +145,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.AdLibActions.link(), + this.#cache.AdLibActions.link(), { projection: adLibActionFieldSpecifier, } @@ -140,12 +156,12 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.AdLibPieces.link(), + this.#cache.AdLibPieces.link(), { projection: adLibPieceFieldSpecifier, } ), - ] + ]) } public get cache(): ContentCache { diff --git a/meteor/server/api/deviceTriggers/RundownsObserver.ts b/meteor/server/api/deviceTriggers/RundownsObserver.ts index 9c915d7f848..5a70b27ba5e 100644 --- a/meteor/server/api/deviceTriggers/RundownsObserver.ts +++ b/meteor/server/api/deviceTriggers/RundownsObserver.ts @@ -1,14 +1,14 @@ import { Meteor } from 'meteor/meteor' import { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import _ from 'underscore' import { Rundowns } from '../../collections' import { literal } from '@sofie-automation/corelib/dist/lib' import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mongo' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { PromiseDebounce } from '../../publications/lib/debounce' const 
REACTIVITY_DEBOUNCE = 20 -type ChangedHandler = (rundownIds: RundownId[]) => () => void +type ChangedHandler = (rundownIds: RundownId[]) => Promise<() => void> type RundownFields = '_id' const rundownFieldSpecifier = literal>>({ @@ -16,53 +16,68 @@ const rundownFieldSpecifier = literal = new Set() #changed: ChangedHandler | undefined #cleanup: (() => void) | undefined - constructor(activePlaylistId: RundownPlaylistId, onChanged: ChangedHandler) { + #disposed = false + + readonly #triggerUpdateRundownContent = new PromiseDebounce(async () => { + if (this.#disposed) return + + if (!this.#changed) return + this.#cleanup?.() + + const changed = this.#changed + this.#cleanup = await changed(this.rundownIds) + + if (this.#disposed) this.#cleanup?.() + }, REACTIVITY_DEBOUNCE) + + private constructor(onChanged: ChangedHandler) { this.#changed = onChanged - this.#rundownsLiveQuery = Rundowns.observeChanges( + } + + static async create(playlistId: RundownPlaylistId, onChanged: ChangedHandler): Promise { + const observer = new RundownsObserver(onChanged) + + await observer.init(playlistId) + + return observer + } + + private async init(activePlaylistId: RundownPlaylistId) { + this.#rundownsLiveQuery = await Rundowns.observeChanges( { playlistId: activePlaylistId, }, { added: (rundownId) => { this.#rundownIds.add(rundownId) - this.updateRundownContent() + this.#triggerUpdateRundownContent.trigger() }, removed: (rundownId) => { this.#rundownIds.delete(rundownId) - this.updateRundownContent() + this.#triggerUpdateRundownContent.trigger() }, }, { projection: rundownFieldSpecifier, } ) - this.updateRundownContent() + + this.#triggerUpdateRundownContent.trigger() } public get rundownIds(): RundownId[] { return Array.from(this.#rundownIds) } - private innerUpdateRundownContent = () => { - if (!this.#changed) return - this.#cleanup?.() - - const changed = this.#changed - this.#cleanup = changed(this.rundownIds) - } - - public updateRundownContent = _.debounce( - 
Meteor.bindEnvironment(this.innerUpdateRundownContent), - REACTIVITY_DEBOUNCE - ) - public stop = (): void => { - this.updateRundownContent.cancel() + this.#disposed = true + + this.#triggerUpdateRundownContent.cancelWaiting() this.#rundownsLiveQuery.stop() this.#changed = undefined this.#cleanup?.() diff --git a/meteor/server/api/deviceTriggers/StudioObserver.ts b/meteor/server/api/deviceTriggers/StudioObserver.ts index a6b38c239b9..0305c68a88e 100644 --- a/meteor/server/api/deviceTriggers/StudioObserver.ts +++ b/meteor/server/api/deviceTriggers/StudioObserver.ts @@ -20,6 +20,7 @@ import { ContentCache } from './reactiveContentCache' import { RundownContentObserver } from './RundownContentObserver' import { RundownsObserver } from './RundownsObserver' import { RundownPlaylists, Rundowns, ShowStyleBases } from '../../collections' +import { PromiseDebounce } from '../../publications/lib/debounce' type ChangedHandler = (showStyleBaseId: ShowStyleBaseId, cache: ContentCache) => () => void @@ -66,6 +67,8 @@ export class StudioObserver extends EventEmitter { #changed: ChangedHandler + #disposed = false + constructor(studioId: StudioId, onChanged: ChangedHandler) { super() this.#changed = onChanged @@ -93,6 +96,8 @@ export class StudioObserver extends EventEmitter { activePlaylist: Pick } | null ): void => { + if (this.#disposed) return + const activePlaylistId = state?.activePlaylist?._id const activationId = state?.activePlaylist?.activationId const currentRundownId = @@ -146,73 +151,72 @@ export class StudioObserver extends EventEmitter { ) as Promise>>) : null ) - .end(this.updateShowStyle) + .end(this.updateShowStyle.call) } - private updateShowStyle = _.debounce( - Meteor.bindEnvironment( - ( - state: { - currentRundown: Pick - showStyleBase: Pick - } | null - ) => { - const showStyleBaseId = state?.showStyleBase._id - - if ( - showStyleBaseId === undefined || - !this.nextProps?.activePlaylistId || - !this.nextProps?.activationId - ) { - this.currentProps = 
undefined - this.#rundownsLiveQuery?.stop() - this.#rundownsLiveQuery = undefined - this.showStyleBaseId = showStyleBaseId - return - } - - if ( - showStyleBaseId === this.showStyleBaseId && - this.nextProps?.activationId === this.currentProps?.activationId && - this.nextProps?.activePlaylistId === this.currentProps?.activePlaylistId && - this.nextProps?.currentRundownId === this.currentProps?.currentRundownId - ) - return - - this.#rundownsLiveQuery?.stop() - this.#rundownsLiveQuery = undefined - - this.currentProps = this.nextProps - this.nextProps = undefined - - const { activePlaylistId } = this.currentProps - - this.showStyleBaseId = showStyleBaseId - - let cleanupChanges: (() => void) | undefined = undefined - - this.#rundownsLiveQuery = new RundownsObserver(activePlaylistId, (rundownIds) => { - logger.silly(`Creating new RundownContentObserver`) - const obs1 = new RundownContentObserver(activePlaylistId, showStyleBaseId, rundownIds, (cache) => { - cleanupChanges = this.#changed(showStyleBaseId, cache) - - return () => { - void 0 - } - }) - - return () => { - obs1.stop() - cleanupChanges?.() - } - }) + private readonly updateShowStyle = new PromiseDebounce< + void, + [ + { + currentRundown: Pick + showStyleBase: Pick + } | null + ] + >(async (state): Promise => { + if (this.#disposed) return + + const showStyleBaseId = state?.showStyleBase._id + + if (showStyleBaseId === undefined || !this.nextProps?.activePlaylistId || !this.nextProps?.activationId) { + this.currentProps = undefined + this.#rundownsLiveQuery?.stop() + this.#rundownsLiveQuery = undefined + this.showStyleBaseId = showStyleBaseId + return + } + + if ( + showStyleBaseId === this.showStyleBaseId && + this.nextProps?.activationId === this.currentProps?.activationId && + this.nextProps?.activePlaylistId === this.currentProps?.activePlaylistId && + this.nextProps?.currentRundownId === this.currentProps?.currentRundownId + ) + return + + this.#rundownsLiveQuery?.stop() + this.#rundownsLiveQuery = 
undefined + + this.currentProps = this.nextProps + this.nextProps = undefined + + const { activePlaylistId } = this.currentProps + + this.showStyleBaseId = showStyleBaseId + + this.#rundownsLiveQuery = await RundownsObserver.create(activePlaylistId, async (rundownIds) => { + logger.silly(`Creating new RundownContentObserver`) + + const obs1 = await RundownContentObserver.create(activePlaylistId, showStyleBaseId, rundownIds, (cache) => { + return this.#changed(showStyleBaseId, cache) + }) + + return () => { + obs1.stop() } - ), - REACTIVITY_DEBOUNCE - ) + }) + + if (this.#disposed) { + // If we were disposed of while waiting for the observer to be created, stop it immediately + this.#rundownsLiveQuery.stop() + } + }, REACTIVITY_DEBOUNCE) public stop = (): void => { + this.#disposed = true + + this.updateShowStyle.cancelWaiting() this.#playlistInStudioLiveQuery.stop() this.updatePlaylistInStudio.cancel() + this.#rundownsLiveQuery?.stop() } } diff --git a/meteor/server/api/deviceTriggers/observer.ts b/meteor/server/api/deviceTriggers/observer.ts index b23ba371ae4..30e7a0f42f1 100644 --- a/meteor/server/api/deviceTriggers/observer.ts +++ b/meteor/server/api/deviceTriggers/observer.ts @@ -18,14 +18,14 @@ import { StudioObserver } from './StudioObserver' import { Studios } from '../../collections' import { ReactiveCacheCollection } from '../../publications/lib/ReactiveCacheCollection' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { MeteorStartupAsync } from '../../lib/lib' type ObserverAndManager = { observer: StudioObserver manager: StudioDeviceTriggerManager } -Meteor.startup(() => { - if (!Meteor.isServer) return +MeteorStartupAsync(async () => { const studioObserversAndManagers = new Map() const jobQueue = new JobQueueWithClasses({ autoStart: true, @@ -69,7 +69,7 @@ Meteor.startup(() => { } } - Studios.observeChanges( + await Studios.observeChanges( {}, { added: (studioId) => { diff --git 
a/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts b/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts index ba2d1587972..5de794580a8 100644 --- a/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts +++ b/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts @@ -54,7 +54,7 @@ describe('Test sending mos actions', () => { const fakeRundown = { _id: rundownId, externalId: getRandomString(), studioId: studioId } // Listen for changes - observer = PeripheralDeviceCommands.observeChanges( + observer = await PeripheralDeviceCommands.observeChanges( { deviceId: device._id }, { added: (id: PeripheralDeviceCommandId) => { @@ -96,7 +96,7 @@ describe('Test sending mos actions', () => { } // Listen for changes - observer = PeripheralDeviceCommands.observeChanges( + observer = await PeripheralDeviceCommands.observeChanges( { deviceId: device._id }, { added: (id: PeripheralDeviceCommandId) => { @@ -153,7 +153,7 @@ describe('Test sending mos actions', () => { } // Listen for changes - observer = PeripheralDeviceCommands.observeChanges( + observer = await PeripheralDeviceCommands.observeChanges( { deviceId: device._id }, { added: (id: PeripheralDeviceCommandId) => { diff --git a/meteor/server/api/ingest/rundownInput.ts b/meteor/server/api/ingest/rundownInput.ts index 5b1cf69940b..6534daaf347 100644 --- a/meteor/server/api/ingest/rundownInput.ts +++ b/meteor/server/api/ingest/rundownInput.ts @@ -3,7 +3,7 @@ import { check } from '../../lib/check' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { NrcsIngestDataCache, MediaObjects, Parts, Rundowns, Segments } from '../../collections' import { literal } from '../../lib/tempLib' -import { lazyIgnore } from '../../lib/lib' +import { lazyIgnore, MeteorStartupAsync } from '../../lib/lib' import { IngestRundown, IngestSegment, IngestPart, IngestPlaylist } from '@sofie-automation/blueprints-integration' import { logger } from '../../logging' import { 
RundownIngestDataCache } from './ingestCache' @@ -363,17 +363,15 @@ async function listIngestRundowns(peripheralDevice: PeripheralDevice): Promise { - if (Meteor.isServer) { - MediaObjects.observe( - {}, - { - added: onMediaObjectChanged, - changed: onMediaObjectChanged, - }, - { fields: { _id: 1, mediaId: 1, mediainfo: 1, studioId: 1 } } - ) - } +MeteorStartupAsync(async () => { + await MediaObjects.observe( + {}, + { + added: onMediaObjectChanged, + changed: onMediaObjectChanged, + }, + { fields: { _id: 1, mediaId: 1, mediainfo: 1, studioId: 1 } } + ) }) interface MediaObjectUpdatedIds { @@ -431,19 +429,19 @@ async function onMediaObjectChanged(newDocument: MediaObject, oldDocument?: Medi for (const mediaObjectUpdatedIds of updateIds) { if (validSegmentIds.has(mediaObjectUpdatedIds.segmentId)) { - try { - lazyIgnore( - `updateSegmentFromMediaObject_${mediaObjectUpdatedIds.segmentId}`, - async () => updateSegmentFromCache(newDocument.studioId, mediaObjectUpdatedIds), - 200 - ) - } catch (exception) { - logger.error( - `Error thrown while updating Segment from cache after MediaObject changed: ${stringifyError( - exception - )}` - ) - } + lazyIgnore( + `updateSegmentFromMediaObject_${mediaObjectUpdatedIds.segmentId}`, + () => { + updateSegmentFromCache(newDocument.studioId, mediaObjectUpdatedIds).catch((e) => { + logger.error( + `Error thrown while updating Segment from cache after MediaObject changed: ${stringifyError( + e + )}` + ) + }) + }, + 200 + ) } } } diff --git a/meteor/server/api/peripheralDevice/executeFunction.ts b/meteor/server/api/peripheralDevice/executeFunction.ts index e89c638ad1b..e3e7d4de8e8 100644 --- a/meteor/server/api/peripheralDevice/executeFunction.ts +++ b/meteor/server/api/peripheralDevice/executeFunction.ts @@ -126,7 +126,7 @@ export async function executePeripheralDeviceFunctionWithCustomTimeout( }) } - observer = PeripheralDeviceCommands.observeChanges( + observer = await PeripheralDeviceCommands.observeChanges( { _id: commandId, }, 
diff --git a/meteor/server/api/rest/v0/index.ts b/meteor/server/api/rest/v0/index.ts index 721a3a803c4..a2a150a51ff 100644 --- a/meteor/server/api/rest/v0/index.ts +++ b/meteor/server/api/rest/v0/index.ts @@ -8,7 +8,7 @@ import * as _ from 'underscore' import { Meteor } from 'meteor/meteor' import { MeteorMethodSignatures } from '../../../methods' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' -import { MeteorPublications, MeteorPublicationSignatures } from '../../../publications/lib' +import { MeteorPublications, MeteorPublicationSignatures } from '../../../publications/lib/lib' import { UserActionAPIMethods } from '@sofie-automation/meteor-lib/dist/api/userActions' import { logger } from '../../../logging' import { ClientAPI } from '@sofie-automation/meteor-lib/dist/api/client' diff --git a/meteor/server/api/studio/api.ts b/meteor/server/api/studio/api.ts index de6b0b93a43..1c085f3c4b8 100644 --- a/meteor/server/api/studio/api.ts +++ b/meteor/server/api/studio/api.ts @@ -4,7 +4,7 @@ import { registerClassToMeteorMethods } from '../../methods' import { NewStudiosAPI, StudiosAPIMethods } from '@sofie-automation/meteor-lib/dist/api/studios' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { literal, getRandomId } from '../../lib/tempLib' -import { lazyIgnore } from '../../lib/lib' +import { lazyIgnore, MeteorStartupAsync } from '../../lib/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { ExpectedPackages, @@ -133,17 +133,20 @@ function triggerUpdateStudioMappingsHash(studioId: StudioId) { 10 ) } -Studios.observeChanges( - {}, - { - added: triggerUpdateStudioMappingsHash, - changed: triggerUpdateStudioMappingsHash, - removed: triggerUpdateStudioMappingsHash, - }, - { - fields: { - mappingsWithOverrides: 1, - routeSetsWithOverrides: 1, + +MeteorStartupAsync(async () => { + await Studios.observeChanges( + {}, + { + added: triggerUpdateStudioMappingsHash, + 
changed: triggerUpdateStudioMappingsHash, + removed: triggerUpdateStudioMappingsHash, }, - } -) + { + fields: { + mappingsWithOverrides: 1, + routeSetsWithOverrides: 1, + }, + } + ) +}) diff --git a/meteor/server/collections/collection.ts b/meteor/server/collections/collection.ts index c889709021d..7f13f1839a6 100644 --- a/meteor/server/collections/collection.ts +++ b/meteor/server/collections/collection.ts @@ -283,7 +283,7 @@ export interface AsyncOnlyReadOnlyMongoCollection | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle + ): Promise /** * Observe changes on this collection @@ -293,7 +293,7 @@ export interface AsyncOnlyReadOnlyMongoCollection | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle + ): Promise /** * Count the number of docuyments in a collection that match the selector. diff --git a/meteor/server/collections/implementations/asyncCollection.ts b/meteor/server/collections/implementations/asyncCollection.ts index 506e48d5f4f..864748a5b78 100644 --- a/meteor/server/collections/implementations/asyncCollection.ts +++ b/meteor/server/collections/implementations/asyncCollection.ts @@ -48,20 +48,20 @@ export class WrappedAsyncMongoCollection | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle { - return this.find(selector as any, options).observeChanges(dePromiseObjectOfFunctions(callbacks)) + ): Promise { + return this.find(selector as any, options).observeChangesAsync(dePromiseObjectOfFunctions(callbacks)) } - observe( + async observe( selector: MongoQuery | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle { - return this.find(selector as any, options).observe(dePromiseObjectOfFunctions(callbacks)) + ): Promise { + return this.find(selector as any, options).observeAsync(dePromiseObjectOfFunctions(callbacks)) } async insertAsync(doc: DBInterface): 
Promise { diff --git a/meteor/server/collections/implementations/mock.ts b/meteor/server/collections/implementations/mock.ts index d208efc59bc..d8f0b6abbef 100644 --- a/meteor/server/collections/implementations/mock.ts +++ b/meteor/server/collections/implementations/mock.ts @@ -63,19 +63,19 @@ export class WrappedMockCollection | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle { + ): Promise { return this.find(selector, options).observeChanges(dePromiseObjectOfFunctions(callbacks)) } - observe( + async observe( selector: MongoQuery | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle { + ): Promise { return this.find(selector, options).observe(dePromiseObjectOfFunctions(callbacks)) } diff --git a/meteor/server/collections/implementations/readonlyWrapper.ts b/meteor/server/collections/implementations/readonlyWrapper.ts index eaf4418fd55..d2e0cd89494 100644 --- a/meteor/server/collections/implementations/readonlyWrapper.ts +++ b/meteor/server/collections/implementations/readonlyWrapper.ts @@ -53,13 +53,15 @@ export class WrappedReadOnlyMongoCollection['observeChanges']> - ): Meteor.LiveQueryHandle { + ): Promise { return this.#mutableCollection.observeChanges(...args) } - observe(...args: Parameters['observe']>): Meteor.LiveQueryHandle { + async observe( + ...args: Parameters['observe']> + ): Promise { return this.#mutableCollection.observe(...args) } diff --git a/meteor/server/collections/lib.ts b/meteor/server/collections/lib.ts index 161c3cfa47b..5d9d64d9fc6 100644 --- a/meteor/server/collections/lib.ts +++ b/meteor/server/collections/lib.ts @@ -60,7 +60,7 @@ export async function ObserveChangesHelper { diff --git a/meteor/server/coreSystem/index.ts b/meteor/server/coreSystem/index.ts index 3d58469effa..fa1bb84d467 100644 --- a/meteor/server/coreSystem/index.ts +++ b/meteor/server/coreSystem/index.ts @@ -78,7 +78,7 @@ async function initializeCoreSystem() { } 
// Monitor database changes: - CoreSystem.observeChanges(SYSTEM_ID, { + await CoreSystem.observeChanges(SYSTEM_ID, { added: onCoreSystemChanged, changed: onCoreSystemChanged, removed: onCoreSystemChanged, @@ -88,7 +88,7 @@ async function initializeCoreSystem() { checkDatabaseVersions() } - Blueprints.observeChanges( + await Blueprints.observeChanges( {}, { added: observeBlueprintChanges, diff --git a/meteor/server/lib/ReactiveStore.ts b/meteor/server/lib/ReactiveStore.ts deleted file mode 100644 index 429da1ab938..00000000000 --- a/meteor/server/lib/ReactiveStore.ts +++ /dev/null @@ -1,128 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { Tracker } from 'meteor/tracker' -import * as _ from 'underscore' -import { getRandomString, ProtectedString } from './tempLib' -import { lazyIgnore } from '../lib/lib' - -/** The ReactiveStore is a Reactive key-value store. - * Keeps track of when the reactive values aren't in use anymore and automatically cleans them up. - */ -export class ReactiveStore | string, Value> { - private _store: Record< - string, - { - dep: Tracker.Dependency - computation?: Tracker.Computation - value: Value - } - > = {} - private _depsToBatchInvalidate: Tracker.Dependency[] = [] - private _name = getRandomString() - - constructor( - private options: { - /** Delays all Reactive updates with this time [ms] */ - delayUpdateTime?: number - } = {} - ) {} - /** - * Retrieves a value from the store. - * @param key Key to fetch the value from - * @param callbackGetValue (Optional) A Reactive function. If the value isn't found in the store, set up a Reactive watch for the value using this callback. - */ - getValue(key: Key, callbackGetValue?: () => Value): Value | undefined { - if (Meteor.isServer) { - // Server-side we won't use the cache at all. 
- return callbackGetValue?.() - } - - const key0 = key as unknown as string - let o = this._store[key0] - - if (!o) { - if (callbackGetValue) { - // Set up a Reactive watch for the value: - - this._store[key0] = o = { - dep: new Tracker.Dependency(), - computation: undefined, - value: undefined as any, - } - o.computation = Tracker.nonreactive(() => { - // Set up a new Reactive context for the callback: - return Tracker.autorun(() => { - // This function is invalidated and re-run whenever the value changes. - const newValue = callbackGetValue() - - const o = this._store[key0] - if (o) { - // Do an additional check whether the returned value actually changed: - if (!_.isEqual(o.value, newValue)) { - o.value = newValue - // Invaludate the dependency: - this.invalidateDependency(o.dep) - } - } - }) - }) - } else { - // No callback provided - return undefined - } - } - - if (Tracker.active && Tracker.currentComputation) { - Tracker.currentComputation.onStop(() => { - // Called when the reactive context of the caller of this.getValue is invalidated. - - if (!o.dep.hasDependents()) { - // If no-one is using it anymore, we should clean it out. - // Wait a bit, to give it a change to be reused. - setTimeout(() => { - const o = this._store[key0] - if (o) { - if (!o.dep.hasDependents()) { - this.removeValue(key) - } - } - }, 2000) - } - }) - // Depend, so that the reactive context will be invalidated whenever the value changes. 
- o.dep.depend() - } - return o.value - } - /** Remove a value from the store */ - private removeValue(key: Key) { - const key0 = key as unknown as string - const o = this._store[key0] - if (o) { - o.computation?.stop() - delete this._store[key0] - } - } - private invalidateDependency(dep: Tracker.Dependency) { - if (this.options.delayUpdateTime) { - // Delay and batch-invalidate all changes that might have come in until then: - this._depsToBatchInvalidate.push(dep) - lazyIgnore( - this._name, - () => { - for (const dep of this._depsToBatchInvalidate) { - dep.changed() - } - this._depsToBatchInvalidate = [] - }, - this.options.delayUpdateTime - ) - } else { - dep.changed() - } - } - clear(): void { - for (const key of Object.keys(this._store)) { - this.removeValue(key as unknown as Key) - } - } -} diff --git a/meteor/server/lib/customPublication/index.ts b/meteor/server/lib/customPublication/index.ts index e06cd0511a9..836e0154544 100644 --- a/meteor/server/lib/customPublication/index.ts +++ b/meteor/server/lib/customPublication/index.ts @@ -1,5 +1,5 @@ export { CustomPublishCollection } from './customPublishCollection' export { setUpOptimizedObserverArray } from './optimizedObserverArray' -export { TriggerUpdate } from './optimizedObserverBase' +export { TriggerUpdate, SetupObserversResult } from './optimizedObserverBase' export { setUpCollectionOptimizedObserver } from './optimizedObserverCollection' export { meteorCustomPublish, CustomPublish, CustomPublishChanges } from './publish' diff --git a/meteor/server/lib/customPublication/optimizedObserverArray.ts b/meteor/server/lib/customPublication/optimizedObserverArray.ts index bd31ce073ba..7a677cc0b70 100644 --- a/meteor/server/lib/customPublication/optimizedObserverArray.ts +++ b/meteor/server/lib/customPublication/optimizedObserverArray.ts @@ -1,10 +1,9 @@ import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' import { clone, ProtectedString } from '../tempLib' -import { TriggerUpdate, 
setUpOptimizedObserverInner } from './optimizedObserverBase' +import { SetupObserversResult, TriggerUpdate, setUpOptimizedObserverInner } from './optimizedObserverBase' import { CustomPublish, CustomPublishChanges } from './publish' import { diffObject } from './lib' -import { LiveQueryHandle } from '../lib' /** * This is an optimization to enable multiple listeners that observes (and manipulates) the same data, to only use one observer and manipulator, @@ -29,7 +28,7 @@ export async function setUpOptimizedObserverArray< args: ReadonlyDeep, /** Trigger an update by mutating the context of manipulateData */ triggerUpdate: TriggerUpdate - ) => Promise, + ) => Promise, manipulateData: ( args: ReadonlyDeep, state: Partial, diff --git a/meteor/server/lib/customPublication/optimizedObserverBase.ts b/meteor/server/lib/customPublication/optimizedObserverBase.ts index e25720d109a..077259ced4d 100644 --- a/meteor/server/lib/customPublication/optimizedObserverBase.ts +++ b/meteor/server/lib/customPublication/optimizedObserverBase.ts @@ -9,6 +9,7 @@ import { logger } from '../../logging' import { ReactiveCacheCollection } from '../../publications/lib/ReactiveCacheCollection' import { LiveQueryHandle, lazyIgnore } from '../lib' import { CustomPublish, CustomPublishChanges } from './publish' +import { waitForAllObserversReady } from '../../publications/lib/lib' const apmNamespace = 'optimizedObserver' @@ -41,6 +42,8 @@ const optimizedObservers: Record> = (updateProps: Partial) => void +export type SetupObserversResult = Array | LiveQueryHandle> + /** * This should not be used directly, and should be used through one of the setUpOptimizedObserverArray or setUpCollectionOptimizedObserver wrappers * @@ -65,7 +68,7 @@ export async function setUpOptimizedObserverInner< args: ReadonlyDeep, /** Trigger an update by mutating the context of manipulateData */ triggerUpdate: TriggerUpdate - ) => Promise, + ) => Promise, manipulateData: ( args: ReadonlyDeep, state: Partial, @@ -186,7 +189,7 
@@ async function createOptimizedObserverWorker< args: ReadonlyDeep, /** Trigger an update by mutating the context of manipulateData */ triggerUpdate: TriggerUpdate - ) => Promise, + ) => Promise, manipulateData: ( args: ReadonlyDeep, state: Partial, @@ -324,7 +327,7 @@ async function createOptimizedObserverWorker< try { // Setup the mongo observers - const observers = await setupObservers(args, triggerUpdate) + const observers = await waitForAllObserversReady(await setupObservers(args, triggerUpdate)) thisObserverWorker = { args: args, diff --git a/meteor/server/lib/customPublication/optimizedObserverCollection.ts b/meteor/server/lib/customPublication/optimizedObserverCollection.ts index aad28131b2b..5cbd25801a9 100644 --- a/meteor/server/lib/customPublication/optimizedObserverCollection.ts +++ b/meteor/server/lib/customPublication/optimizedObserverCollection.ts @@ -1,8 +1,7 @@ import { ReadonlyDeep } from 'type-fest' import { ProtectedString } from '../tempLib' -import { LiveQueryHandle } from '../lib' import { CustomPublishCollection } from './customPublishCollection' -import { TriggerUpdate, setUpOptimizedObserverInner } from './optimizedObserverBase' +import { SetupObserversResult, TriggerUpdate, setUpOptimizedObserverInner } from './optimizedObserverBase' import { CustomPublish } from './publish' /** @@ -28,7 +27,7 @@ export async function setUpCollectionOptimizedObserver< args: ReadonlyDeep, /** Trigger an update by mutating the context of manipulateData */ triggerUpdate: TriggerUpdate - ) => Promise, + ) => Promise, manipulateData: ( args: ReadonlyDeep, state: Partial, diff --git a/meteor/server/lib/customPublication/publish.ts b/meteor/server/lib/customPublication/publish.ts index 0a864a28dca..b9ac5fc402b 100644 --- a/meteor/server/lib/customPublication/publish.ts +++ b/meteor/server/lib/customPublication/publish.ts @@ -2,7 +2,7 @@ import { UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { Meteor } from 'meteor/meteor' import { 
AllPubSubTypes } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { ProtectedString, unprotectString } from '../tempLib' -import { PublishDocType, SubscriptionContext, meteorPublishUnsafe } from '../../publications/lib' +import { PublishDocType, SubscriptionContext, meteorPublishUnsafe } from '../../publications/lib/lib' export interface CustomPublishChanges }> { added: Array diff --git a/meteor/server/lib/lib.ts b/meteor/server/lib/lib.ts index 860b6804960..dee6d8aa8a3 100644 --- a/meteor/server/lib/lib.ts +++ b/meteor/server/lib/lib.ts @@ -57,7 +57,7 @@ export function MeteorWrapAsync(func: Function, context?: Object): any { } const lazyIgnoreCache: { [name: string]: number } = {} -export function lazyIgnore(name: string, f1: () => Promise | void, t: number): void { +export function lazyIgnore(name: string, f1: () => void, t: number): void { // Don't execute the function f1 until the time t has passed. // Subsequent calls will extend the laziness and ignore the previous call @@ -66,12 +66,11 @@ export function lazyIgnore(name: string, f1: () => Promise | void, t: numb } lazyIgnoreCache[name] = Meteor.setTimeout(() => { delete lazyIgnoreCache[name] - if (Meteor.isClient) { - f1()?.catch((e) => { - throw new Error(e) - }) - } else { - waitForPromise(f1()) + + try { + f1() + } catch (e) { + logger.error(`Unhandled error in lazyIgnore "${name}": ${stringifyError(e)}`) } }, t) } diff --git a/meteor/server/publications/_publications.ts b/meteor/server/publications/_publications.ts index 30d002268b5..8bcb30b0b14 100644 --- a/meteor/server/publications/_publications.ts +++ b/meteor/server/publications/_publications.ts @@ -1,5 +1,5 @@ import { Meteor } from 'meteor/meteor' -import './lib' +import './lib/lib' import './buckets' import './blueprintUpgradeStatus/publication' @@ -28,7 +28,7 @@ import './mountedTriggers' import './deviceTriggersPreview' import { AllPubSubNames } from '@sofie-automation/meteor-lib/dist/api/pubsub' -import { MeteorPublications } from 
'./lib' +import { MeteorPublications } from './lib/lib' import { logger } from '../logging' // Ensure all the publications were registered at startup diff --git a/meteor/server/publications/blueprintUpgradeStatus/publication.ts b/meteor/server/publications/blueprintUpgradeStatus/publication.ts index 0218e93e90e..568f9b07567 100644 --- a/meteor/server/publications/blueprintUpgradeStatus/publication.ts +++ b/meteor/server/publications/blueprintUpgradeStatus/publication.ts @@ -7,12 +7,12 @@ import { CustomPublishCollection, meteorCustomPublish, setUpCollectionOptimizedObserver, + SetupObserversResult, TriggerUpdate, } from '../../lib/customPublication' import { logger } from '../../logging' import { resolveCredentials } from '../../security/lib/credentials' import { NoSecurityReadAccess } from '../../security/noSecurity' -import { LiveQueryHandle } from '../../lib/lib' import { ContentCache, createReactiveContentCache, ShowStyleBaseFields, StudioFields } from './reactiveContentCache' import { UpgradesContentObserver } from './upgradesContentObserver' import { BlueprintMapEntry, checkDocUpgradeStatus } from './checkStatus' @@ -41,14 +41,14 @@ interface BlueprintUpgradeStatusUpdateProps { async function setupBlueprintUpgradeStatusPublicationObservers( _args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // TODO - can this be done cheaper? 
const cache = createReactiveContentCache() // Push update triggerUpdate({ newCache: cache }) - const mongoObserver = new UpgradesContentObserver(cache) + const mongoObserver = await UpgradesContentObserver.create(cache) // Set up observers: return [ diff --git a/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts b/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts index c26ea657e56..a88ba8575b9 100644 --- a/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts +++ b/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts @@ -7,16 +7,21 @@ import { studioFieldSpecifier, } from './reactiveContentCache' import { Blueprints, ShowStyleBases, Studios } from '../../collections' +import { waitForAllObserversReady } from '../lib/lib' export class UpgradesContentObserver { - #observers: Meteor.LiveQueryHandle[] = [] - #cache: ContentCache + readonly #cache: ContentCache + readonly #observers: Meteor.LiveQueryHandle[] - constructor(cache: ContentCache) { - logger.silly(`Creating UpgradesContentObserver`) + constructor(cache: ContentCache, observers: Meteor.LiveQueryHandle[]) { this.#cache = cache + this.#observers = observers + } - this.#observers = [ + static async create(cache: ContentCache): Promise { + logger.silly(`Creating UpgradesContentObserver`) + + const observers = await waitForAllObserversReady([ Studios.observeChanges({}, cache.Studios.link(), { projection: studioFieldSpecifier, }), @@ -26,7 +31,9 @@ export class UpgradesContentObserver { Blueprints.observeChanges({}, cache.Blueprints.link(), { projection: blueprintFieldSpecifier, }), - ] + ]) + + return new UpgradesContentObserver(cache, observers) } public get cache(): ContentCache { diff --git a/meteor/server/publications/buckets.ts b/meteor/server/publications/buckets.ts index 8db38ce907a..3801f8b4671 100644 --- a/meteor/server/publications/buckets.ts +++ b/meteor/server/publications/buckets.ts @@ -1,6 +1,6 @@ 
import { FindOptions } from '@sofie-automation/meteor-lib/dist/collections/lib' import { BucketSecurity } from '../security/buckets' -import { meteorPublish } from './lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { Bucket } from '@sofie-automation/meteor-lib/dist/collections/Buckets' import { StudioReadAccess } from '../security/studio' diff --git a/meteor/server/publications/deviceTriggersPreview.ts b/meteor/server/publications/deviceTriggersPreview.ts index c5aeed97a5d..67e9edbf039 100644 --- a/meteor/server/publications/deviceTriggersPreview.ts +++ b/meteor/server/publications/deviceTriggersPreview.ts @@ -7,7 +7,7 @@ import { ReadonlyDeep } from 'type-fest' import { CustomCollectionName, MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { DeviceTriggerArguments, UIDeviceTriggerPreview } from '@sofie-automation/meteor-lib/dist/api/MountedTriggers' import { getCurrentTime } from '../lib/lib' -import { setUpOptimizedObserverArray, TriggerUpdate } from '../lib/customPublication' +import { SetupObserversResult, setUpOptimizedObserverArray, TriggerUpdate } from '../lib/customPublication' import { CustomPublish, meteorCustomPublish } from '../lib/customPublication/publish' import { StudioReadAccess } from '../security/studio' import { PeripheralDevices } from '../collections' @@ -73,7 +73,7 @@ function prepareTriggerBufferForStudio(studioId: string) { async function setupDeviceTriggersPreviewsObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const studioId = unprotectString(args.studioId) const lastTriggersStudio = prepareTriggerBufferForStudio(studioId) diff --git a/meteor/server/publications/lib/__tests__/debounce.test.ts b/meteor/server/publications/lib/__tests__/debounce.test.ts new file mode 100644 index 00000000000..f611c80b805 --- /dev/null +++ b/meteor/server/publications/lib/__tests__/debounce.test.ts @@ -0,0 +1,273 
@@ +import { sleep } from '@sofie-automation/shared-lib/dist/lib/lib' +import { PromiseDebounce } from '../debounce' + +describe('PromiseDebounce', () => { + beforeEach(() => { + jest.useFakeTimers() + }) + + it('trigger', async () => { + const fn = jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + // No promise returned + expect(debounce.trigger()).toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // Wait a bit more + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(1) + + // No more calls + fn.mockClear() + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(0) + }) + + it('call', async () => { + const fn = jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + const ps = debounce.call() + expect(ps).not.toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // Wait a bit more + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(1) + + // Should resolve without any more timer ticking + await expect(ps).resolves.toBe(undefined) + + // No more calls + fn.mockClear() + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(0) + }) + + it('cancelWaiting - trigger', async () => { + const fn = jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + // No promise returned + expect(debounce.trigger()).toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // Cancel waiting + debounce.cancelWaiting() + + // Wait until the timer should have fired + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(0) + }) + + it('cancelWaiting - call', async () => { + const fn = 
jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + const ps = debounce.call() + ps.catch(() => null) // Add an error handler + expect(ps).not.toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // Cancel waiting + debounce.cancelWaiting() + + // Wait until the timer should have fired + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(0) + + // Should have rejected + await expect(ps).rejects.toThrow('Cancelled') + }) + + it('cancelWaiting - call with error', async () => { + const fn = jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + const ps = debounce.call() + ps.catch(() => null) // Add an error handler + expect(ps).not.toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // Cancel waiting + debounce.cancelWaiting(new Error('Custom error')) + + // Wait until the timer should have fired + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(0) + + // Should have rejected + await expect(ps).rejects.toThrow('Custom error') + }) + + it('trigger - multiple', async () => { + const fn = jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + // No promise returned + expect(debounce.trigger(1)).toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // Trigger again + expect(debounce.trigger(3)).toBe(undefined) + expect(debounce.trigger(5)).toBe(undefined) + + // Wait until the timer should have fired + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(5) + }) + + it('trigger - during slow execution', async () => { + const fn = jest.fn(async () => sleep(100)) + const 
debounce = new PromiseDebounce(fn, 10) + + // No promise returned + expect(debounce.trigger(1)).toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for it to start executing + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(1) + + // Trigger again + fn.mockClear() + expect(debounce.trigger(3)).toBe(undefined) + await jest.advanceTimersByTimeAsync(20) + expect(debounce.trigger(5)).toBe(undefined) + + // Wait until the second timer timer should + await jest.advanceTimersByTimeAsync(100) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(5) + }) + + it('call - return value', async () => { + const fn = jest.fn(async (val) => { + await sleep(100) + return val + }) + const debounce = new PromiseDebounce(fn, 10) + + const ps1 = debounce.call(1) + expect(ps1).not.toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for it to start executing + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(1) + + // Trigger again + fn.mockClear() + const ps3 = debounce.call(3) + await jest.advanceTimersByTimeAsync(20) + const ps5 = debounce.call(5) + + // Wait until the second timer timer should + await jest.advanceTimersByTimeAsync(150) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(5) + + await expect(ps1).resolves.toBe(1) + await expect(ps3).resolves.toBe(5) + await expect(ps5).resolves.toBe(5) + }) + + it('call - throw error', async () => { + const fn = jest.fn(async (val) => { + await sleep(100) + throw new Error(`Bad value: ${val}`) + }) + const debounce = new PromiseDebounce(fn, 10) + + const ps1 = debounce.call(1) + ps1.catch(() => null) // Add an error handler + expect(ps1).not.toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for it to start executing + await jest.advanceTimersByTimeAsync(50) + 
expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(1) + + // Trigger again + fn.mockClear() + const ps3 = debounce.call(3) + ps3.catch(() => null) // Add an error handler + await jest.advanceTimersByTimeAsync(20) + const ps5 = debounce.call(5) + ps5.catch(() => null) // Add an error handler + + // Wait until the second timer timer should + await jest.advanceTimersByTimeAsync(150) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(5) + + await expect(ps1).rejects.toThrow('Bad value: 1') + await expect(ps3).rejects.toThrow('Bad value: 5') + await expect(ps5).rejects.toThrow('Bad value: 5') + }) + + it('canelWaiting - during slow execution', async () => { + const fn = jest.fn(async () => sleep(100)) + const debounce = new PromiseDebounce(fn, 10) + + // No promise returned + expect(debounce.trigger(1)).toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for it to start executing + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(1) + + // Trigger again + fn.mockClear() + expect(debounce.trigger(3)).toBe(undefined) + await jest.advanceTimersByTimeAsync(20) + expect(debounce.trigger(5)).toBe(undefined) + + debounce.cancelWaiting() + + // Wait until the second timer timer should + await jest.advanceTimersByTimeAsync(100) + expect(fn).toHaveBeenCalledTimes(0) + }) +}) diff --git a/meteor/server/publications/lib/__tests__/lib.test.ts b/meteor/server/publications/lib/__tests__/lib.test.ts new file mode 100644 index 00000000000..61a8ed52cc4 --- /dev/null +++ b/meteor/server/publications/lib/__tests__/lib.test.ts @@ -0,0 +1,63 @@ +import { Meteor } from 'meteor/meteor' +import { waitForAllObserversReady } from '../lib' +import { sleep } from '@sofie-automation/shared-lib/dist/lib/lib' + +describe('waitForAllObserversReady', () => { + // beforeEach(() => { + // jest.useFakeTimers() + // }) + + it('no observers', async () => { + await 
expect(waitForAllObserversReady([])).resolves.toHaveLength(0) + }) + + async function createFakeObserver(waitTime: number, stopFn: () => void): Promise { + await sleep(waitTime) + + return { + stop: stopFn, + } + } + + async function createBadObserver(waitTime: number): Promise { + await sleep(waitTime) + + throw new Error('Some error') + } + + function stopAll(observers: Meteor.LiveQueryHandle[]) { + observers.forEach((o) => o.stop()) + } + + it('multiple good observers', async () => { + const stopFn = jest.fn() + + const res = waitForAllObserversReady([ + createFakeObserver(10, stopFn), + createFakeObserver(12, stopFn), + createFakeObserver(10, stopFn), + createFakeObserver(8, stopFn), + ]) + await expect(res).resolves.toHaveLength(4) + + expect(stopFn).toHaveBeenCalledTimes(0) + + stopAll(await res) + expect(stopFn).toHaveBeenCalledTimes(4) + }) + + it('multiple good with a bad observer', async () => { + const stopFn = jest.fn() + + const res = waitForAllObserversReady([ + createFakeObserver(10, stopFn), + createFakeObserver(12, stopFn), + createBadObserver(10), + createFakeObserver(8, stopFn), + ]) + await expect(res).rejects.toThrow('Some error') + + // Successful ones should be stopped + expect(stopFn).toHaveBeenCalledTimes(3) + }) +}) diff --git a/meteor/server/publications/lib/__tests__/observerGroup.test.ts b/meteor/server/publications/lib/__tests__/observerGroup.test.ts index 20a880af986..c48722a08fe 100644 --- a/meteor/server/publications/lib/__tests__/observerGroup.test.ts +++ b/meteor/server/publications/lib/__tests__/observerGroup.test.ts @@ -10,7 +10,7 @@ describe('ReactiveMongoObserverGroup', () => { test('cleanup on stop', async () => { const handle: LiveQueryHandle = { stop: jest.fn() } - const generator = jest.fn(async () => [handle]) + const generator = jest.fn(async () => [Promise.resolve(handle)]) const observerGroup = await ReactiveMongoObserverGroup(generator) @@ -39,7 +39,7 @@ describe('ReactiveMongoObserverGroup', () => { 
test('restarting', async () => { const handle: LiveQueryHandle = { stop: jest.fn() } - const generator = jest.fn(async () => [handle]) + const generator = jest.fn(async () => [Promise.resolve(handle)]) const observerGroup = await ReactiveMongoObserverGroup(generator) @@ -80,7 +80,7 @@ describe('ReactiveMongoObserverGroup', () => { test('restart debounce', async () => { const handle: LiveQueryHandle = { stop: jest.fn() } - const generator = jest.fn(async () => [handle]) + const generator = jest.fn(async () => [Promise.resolve(handle)]) const observerGroup = await ReactiveMongoObserverGroup(generator) diff --git a/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts b/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts index 80d90f43e44..d07d69cf515 100644 --- a/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts +++ b/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts @@ -20,12 +20,12 @@ describe('RundownsObserver', () => { const playlistId = protectString('playlist0') const onChangedCleanup = jest.fn() - const onChanged = jest.fn(() => onChangedCleanup) + const onChanged = jest.fn(async () => onChangedCleanup) // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = new RundownsObserver(studioId, playlistId, onChanged) + const observer = await RundownsObserver.create(studioId, playlistId, onChanged) try { // should now be an observer expect(RundownsMock.observers).toHaveLength(1) @@ -73,12 +73,12 @@ describe('RundownsObserver', () => { const playlistId = protectString('playlist0') const onChangedCleanup = jest.fn() - const onChanged = jest.fn<() => void, [RundownId[]]>(() => onChangedCleanup) + const onChanged = jest.fn void>, [RundownId[]]>(async () => onChangedCleanup) // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = new RundownsObserver(studioId, playlistId, onChanged) + const observer = await 
RundownsObserver.create(studioId, playlistId, onChanged) try { // ensure starts correct await waitUntil(async () => { @@ -127,12 +127,12 @@ describe('RundownsObserver', () => { const playlistId = protectString('playlist0') const onChangedCleanup = jest.fn() - const onChanged = jest.fn<() => void, [RundownId[]]>(() => onChangedCleanup) + const onChanged = jest.fn void>, [RundownId[]]>(async () => onChangedCleanup) // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = new RundownsObserver(studioId, playlistId, onChanged) + const observer = await RundownsObserver.create(studioId, playlistId, onChanged) try { // ensure starts correct // ensure starts correct @@ -181,12 +181,12 @@ describe('RundownsObserver', () => { const playlistId = protectString('playlist0') const onChangedCleanup = jest.fn() - const onChanged = jest.fn<() => void, [RundownId[]]>(() => onChangedCleanup) + const onChanged = jest.fn void>, [RundownId[]]>(async () => onChangedCleanup) // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = new RundownsObserver(studioId, playlistId, onChanged) + const observer = await RundownsObserver.create(studioId, playlistId, onChanged) try { // ensure starts correct // ensure starts correct diff --git a/meteor/server/publications/lib/debounce.ts b/meteor/server/publications/lib/debounce.ts new file mode 100644 index 00000000000..5af796d9ab7 --- /dev/null +++ b/meteor/server/publications/lib/debounce.ts @@ -0,0 +1,129 @@ +import { Meteor } from 'meteor/meteor' + +/** + * Based on https://github.com/sindresorhus/p-debounce + * With additional features: + * - `cancelWaiting` method + * - ensures only one execution in progress at a time + */ +export class PromiseDebounce { + readonly #fn: (...args: TArgs) => Promise + readonly #wait: number + + /** If an execution timeout has passed while */ + #pendingArgs: TArgs | null = null + #timeout: number | undefined + + #isExecuting = 
false + #waitingListeners: Listener[] = [] + + constructor(fn: (...args: TArgs) => Promise, wait: number) { + this.#fn = fn + this.#wait = wait + } + + /** + * Trigger an execution, and get the result. + * @returns A promise that resolves with the result of the function + */ + call = async (...args: TArgs): Promise => { + return new Promise((resolve, reject) => { + const listener: Listener = { resolve, reject } + this.#waitingListeners.push(listener) + + // Trigger an execution + this.trigger(...args) + }) + } + + /** + * Trigger an execution, but don't report the result. + */ + trigger = (...args: TArgs): void => { + // If an execution is 'imminent', don't do anything + if (this.#pendingArgs) { + this.#pendingArgs = args + return + } + + // Clear an existing timeout + if (this.#timeout) Meteor.clearTimeout(this.#timeout) + + // Start a new one + this.#timeout = Meteor.setTimeout(() => { + this.#timeout = undefined + + this.executeFn(args) + }, this.#wait) + } + + private executeFn(args: TArgs): void { + // If an execution is still in progress, mark as pending and stop + if (this.#isExecuting) { + this.#pendingArgs = args + return + } + + // We have the clear to begin executing + this.#isExecuting = true + this.#pendingArgs = null + + // Collect up the listeners for this execution + const listeners = this.#waitingListeners + this.#waitingListeners = [] + + Promise.resolve() + .then(async () => { + const result = await this.#fn(...args) + for (const listener of listeners) { + listener.resolve(result) + } + }) + .catch((error) => { + for (const listener of listeners) { + listener.reject(error) + } + }) + .finally(() => { + this.#isExecuting = false + + // If there is a pending execution, run that soon + if (this.#pendingArgs) { + const args = this.#pendingArgs + Meteor.setTimeout(() => this.executeFn(args), 0) + } + }) + } + + /** + * Cancel any waiting execution + */ + cancelWaiting = (error?: Error): void => { + this.#pendingArgs = null + + if (this.#timeout) { + 
Meteor.clearTimeout(this.#timeout) + this.#timeout = undefined + } + + // Inform any listeners + if (this.#waitingListeners.length > 0) { + const listeners = this.#waitingListeners + this.#waitingListeners = [] + + error = error ?? new Error('Cancelled') + + // Inform the listeners in the next tick + Meteor.defer(() => { + for (const listener of listeners) { + listener.reject(error) + } + }) + } + } +} + +interface Listener { + resolve: (value: TResult) => void + reject: (reason?: any) => void +} diff --git a/meteor/server/publications/lib.ts b/meteor/server/publications/lib/lib.ts similarity index 77% rename from meteor/server/publications/lib.ts rename to meteor/server/publications/lib/lib.ts index ee05a9b56ed..993fd4e1996 100644 --- a/meteor/server/publications/lib.ts +++ b/meteor/server/publications/lib/lib.ts @@ -1,9 +1,9 @@ import { Meteor, Subscription } from 'meteor/meteor' import { AllPubSubCollections, AllPubSubTypes } from '@sofie-automation/meteor-lib/dist/api/pubsub' -import { extractFunctionSignature } from '../lib' +import { extractFunctionSignature } from '../../lib' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' -import { ResolvedCredentials, resolveCredentials } from '../security/lib/credentials' -import { Settings } from '../Settings' +import { ResolvedCredentials, resolveCredentials } from '../../security/lib/credentials' +import { Settings } from '../../Settings' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { MongoCursor } from '@sofie-automation/meteor-lib/dist/collections/lib' import { @@ -12,10 +12,9 @@ import { ShowStyleBaseId, UserId, } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { protectStringObject } from '../lib/tempLib' -import { waitForPromise } from '../lib/lib' +import { protectStringObject } from '../../lib/tempLib' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' -import { PeripheralDevices, 
ShowStyleBases } from '../collections' +import { PeripheralDevices, ShowStyleBases } from '../../collections' import { MetricsGauge } from '@sofie-automation/corelib/dist/prometheus' export const MeteorPublicationSignatures: { [key: string]: string[] } = {} @@ -56,7 +55,7 @@ export function meteorPublishUnsafe( publicationGauge.inc() this.onStop(() => publicationGauge.dec()) - return waitForPromise(callback.apply(protectStringObject(this), args)) || [] + return callback.apply(protectStringObject(this), args) || [] }) } @@ -164,3 +163,33 @@ export namespace AutoFillSelector { return { cred, selector } } } + +/** + * Await each observer, and return the handles + * If an observer throws, this will make sure to stop all the ones that were successfully started, to avoid leaking memory + */ +export async function waitForAllObserversReady( + observers: Array | Meteor.LiveQueryHandle> +): Promise { + // Wait for all the promises to complete + // Future: could this fail faster by aborting the rest once the first fails? 
+ const results = await Promise.allSettled(observers) + const allSuccessfull = results.filter( + (r): r is PromiseFulfilledResult => r.status === 'fulfilled' + ) + + const firstFailure = results.find((r): r is PromiseRejectedResult => r.status === 'rejected') + if (firstFailure || allSuccessfull.length !== observers.length) { + // There was a failure, or not enough success so we should stop all the observers + for (const handle of allSuccessfull) { + handle.value.stop() + } + if (firstFailure) { + throw firstFailure.reason + } else { + throw new Meteor.Error(500, 'Not all observers were started') + } + } + + return allSuccessfull.map((r) => r.value) +} diff --git a/meteor/server/publications/lib/observerChain.ts b/meteor/server/publications/lib/observerChain.ts index 4b51df340af..abbbd49467d 100644 --- a/meteor/server/publications/lib/observerChain.ts +++ b/meteor/server/publications/lib/observerChain.ts @@ -3,7 +3,8 @@ import { Meteor } from 'meteor/meteor' import { MongoCursor } from '@sofie-automation/meteor-lib/dist/collections/lib' import { Simplify } from 'type-fest' import { assertNever } from '../../lib/tempLib' -import { waitForPromise } from '../../lib/lib' +import { logger } from '../../logging' +import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' /** * https://stackoverflow.com/a/66011942 @@ -41,18 +42,18 @@ export function observerChain(): Pick, 'next'> { throw new Error('nextChanged: Unfinished observer chain. 
This is a memory leak.') } - function changedLink(collectorObject: Record) { + async function changedLink(collectorObject: Record) { if (previousObserver) { previousObserver.stop() previousObserver = null } - const cursorResult = waitForPromise(chainedCursor(collectorObject)) + const cursorResult = await chainedCursor(collectorObject) if (cursorResult === null) { nextStop() return } - previousObserver = cursorResult.observe({ + previousObserver = await cursorResult.observeAsync({ added: (doc) => { if (!chainedKey) throw new Error('Chained key needs to be defined') const newCollectorObject: Record = { @@ -96,10 +97,10 @@ export function observerChain(): Pick, 'next'> { } return { - changed: (obj: Record) => { + changed: async (obj: Record) => { switch (mode) { case 'next': - changedLink(obj) + await changedLink(obj) break case 'end': changedEnd(obj) @@ -160,7 +161,9 @@ export function observerChain(): Pick, 'next'> { const nextLink = link.next(key, cursorChain) setImmediate( Meteor.bindEnvironment(() => { - changed({}) + changed({}).catch((e) => { + logger.error(`Error in observerChain: ${stringifyError(e)}`) + }) }) ) return nextLink as any diff --git a/meteor/server/publications/lib/observerGroup.ts b/meteor/server/publications/lib/observerGroup.ts index cce4654e432..20ed35b2e15 100644 --- a/meteor/server/publications/lib/observerGroup.ts +++ b/meteor/server/publications/lib/observerGroup.ts @@ -1,6 +1,7 @@ import { ManualPromise, createManualPromise, getRandomString } from '@sofie-automation/corelib/dist/lib' import { Meteor } from 'meteor/meteor' import { LiveQueryHandle, lazyIgnore } from '../../lib/lib' +import { waitForAllObserversReady } from './lib' export interface ReactiveMongoObserverGroupHandle extends LiveQueryHandle { /** @@ -18,7 +19,7 @@ const REACTIVITY_DEBOUNCE = 20 * @returns Handle to stop and restart the observer group */ export async function ReactiveMongoObserverGroup( - generator: () => Promise> + generator: () => Promise>> ): Promise { let 
running = true let pendingStop: ManualPromise | undefined @@ -69,8 +70,7 @@ export async function ReactiveMongoObserverGroup( // Start the child observers if (!handles) { - // handles = await generator() - handles = await generator() + handles = await waitForAllObserversReady(await generator()) // check for another pending operation deferCheck() diff --git a/meteor/server/publications/lib/rundownsObserver.ts b/meteor/server/publications/lib/rundownsObserver.ts index 82779b7c474..0471cd434c6 100644 --- a/meteor/server/publications/lib/rundownsObserver.ts +++ b/meteor/server/publications/lib/rundownsObserver.ts @@ -1,25 +1,53 @@ import { Meteor } from 'meteor/meteor' import { RundownId, RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import _ from 'underscore' import { Rundowns } from '../../collections' +import { PromiseDebounce } from './debounce' const REACTIVITY_DEBOUNCE = 20 -type ChangedHandler = (rundownIds: RundownId[]) => () => void +type ChangedHandler = (rundownIds: RundownId[]) => Promise<() => void> /** * A mongo observer/query for the RundownIds in a playlist. 
* Note: Updates are debounced to avoid rapid updates firing */ export class RundownsObserver implements Meteor.LiveQueryHandle { - #rundownsLiveQuery: Meteor.LiveQueryHandle + #rundownsLiveQuery!: Meteor.LiveQueryHandle #rundownIds: Set = new Set() #changed: ChangedHandler | undefined #cleanup: (() => void) | undefined - constructor(studioId: StudioId, playlistId: RundownPlaylistId, onChanged: ChangedHandler) { + #disposed = false + + readonly #triggerUpdateRundownContent = new PromiseDebounce(async () => { + if (this.#disposed) return + if (!this.#changed) return + this.#cleanup?.() + + const changed = this.#changed + this.#cleanup = await changed(this.rundownIds) + + if (this.#disposed) this.#cleanup?.() + }, REACTIVITY_DEBOUNCE) + + private constructor(onChanged: ChangedHandler) { this.#changed = onChanged - this.#rundownsLiveQuery = Rundowns.observe( + } + + static async create( + studioId: StudioId, + playlistId: RundownPlaylistId, + onChanged: ChangedHandler + ): Promise { + const observer = new RundownsObserver(onChanged) + + await observer.init(studioId, playlistId) + + return observer + } + + private async init(studioId: StudioId, playlistId: RundownPlaylistId) { + this.#rundownsLiveQuery = await Rundowns.observe( { playlistId, studioId, @@ -27,15 +55,15 @@ export class RundownsObserver implements Meteor.LiveQueryHandle { { added: (doc) => { this.#rundownIds.add(doc._id) - this.updateRundownContent() + this.#triggerUpdateRundownContent.trigger() }, changed: (doc) => { this.#rundownIds.add(doc._id) - this.updateRundownContent() + this.#triggerUpdateRundownContent.trigger() }, removed: (doc) => { this.#rundownIds.delete(doc._id) - this.updateRundownContent() + this.#triggerUpdateRundownContent.trigger() }, }, { @@ -44,28 +72,18 @@ export class RundownsObserver implements Meteor.LiveQueryHandle { }, } ) - this.updateRundownContent() + + this.#triggerUpdateRundownContent.trigger() } public get rundownIds(): RundownId[] { return Array.from(this.#rundownIds) } - 
private innerUpdateRundownContent = () => { - if (!this.#changed) return - this.#cleanup?.() - - const changed = this.#changed - this.#cleanup = changed(this.rundownIds) - } - - public updateRundownContent = _.debounce( - Meteor.bindEnvironment(this.innerUpdateRundownContent), - REACTIVITY_DEBOUNCE - ) - public stop = (): void => { - this.updateRundownContent.cancel() + this.#disposed = true + + this.#triggerUpdateRundownContent.cancelWaiting() this.#rundownsLiveQuery.stop() this.#changed = undefined this.#cleanup?.() diff --git a/meteor/server/publications/organization.ts b/meteor/server/publications/organization.ts index 58d13058910..f596d8b3c6f 100644 --- a/meteor/server/publications/organization.ts +++ b/meteor/server/publications/organization.ts @@ -1,4 +1,4 @@ -import { meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { Blueprint } from '@sofie-automation/corelib/dist/dataModel/Blueprint' import { Evaluation } from '@sofie-automation/meteor-lib/dist/collections/Evaluations' diff --git a/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts b/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts index dcb2488a82c..eb15f97ce78 100644 --- a/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts +++ b/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts @@ -7,10 +7,10 @@ import { pieceInstanceFieldsSpecifier, } from './contentCache' import { ExpectedPackages, PieceInstances, RundownPlaylists } from '../../../collections' -import { waitForPromise } from '../../../lib/lib' import { ReactiveMongoObserverGroup, ReactiveMongoObserverGroupHandle } from '../../lib/observerGroup' import _ from 'underscore' import { equivalentArrays } from '@sofie-automation/shared-lib/dist/lib/lib' +import { waitForAllObserversReady } from 
'../../lib/lib' const REACTIVITY_DEBOUNCE = 20 @@ -19,35 +19,44 @@ export class ExpectedPackagesContentObserver implements Meteor.LiveQueryHandle { #cache: ExpectedPackagesContentCache #partInstanceIds: PartInstanceId[] = [] - #partInstanceIdObserver: ReactiveMongoObserverGroupHandle + #partInstanceIdObserver!: ReactiveMongoObserverGroupHandle - constructor(studioId: StudioId, cache: ExpectedPackagesContentCache) { - logger.silly(`Creating ExpectedPackagesContentObserver for "${studioId}"`) + #disposed = false + + private constructor(cache: ExpectedPackagesContentCache) { this.#cache = cache + } + + static async create( + studioId: StudioId, + cache: ExpectedPackagesContentCache + ): Promise { + logger.silly(`Creating ExpectedPackagesContentObserver for "${studioId}"`) + + const observer = new ExpectedPackagesContentObserver(cache) // Run the ShowStyleBase query in a ReactiveMongoObserverGroup, so that it can be restarted whenever - this.#partInstanceIdObserver = waitForPromise( - ReactiveMongoObserverGroup(async () => { - // Clear already cached data - cache.PieceInstances.remove({}) - - return [ - PieceInstances.observeChanges( - { - // We can use the `this.#partInstanceIds` here, as this is restarted every time that property changes - partInstanceId: { $in: this.#partInstanceIds }, - }, - cache.PieceInstances.link(), - { - projection: pieceInstanceFieldsSpecifier, - } - ), - ] - }) - ) + observer.#partInstanceIdObserver = await ReactiveMongoObserverGroup(async () => { + // Clear already cached data + cache.PieceInstances.remove({}) + + return [ + PieceInstances.observeChanges( + { + // We can use the `this.#partInstanceIds` here, as this is restarted every time that property changes + partInstanceId: { $in: observer.#partInstanceIds }, + }, + cache.PieceInstances.link(), + { + projection: pieceInstanceFieldsSpecifier, + } + ), + ] + }) // Subscribe to the database, and pipe any updates into the ReactiveCacheCollections - this.#observers = [ + // This takes 
ownership of the #partInstanceIdObserver, and will stop it if this throws + observer.#observers = await waitForAllObserversReady([ ExpectedPackages.observeChanges( { studioId: studioId, @@ -60,19 +69,23 @@ export class ExpectedPackagesContentObserver implements Meteor.LiveQueryHandle { studioId: studioId, }, cache.RundownPlaylists.link(() => { - this.updatePartInstanceIds() + observer.updatePartInstanceIds() }), { fields: rundownPlaylistFieldSpecifier, } ), - this.#partInstanceIdObserver, - ] + observer.#partInstanceIdObserver, + ]) + + return observer } private updatePartInstanceIds = _.debounce( Meteor.bindEnvironment(() => { + if (this.#disposed) return + const newPartInstanceIdsSet = new Set() this.#cache.RundownPlaylists.find({}).forEach((playlist) => { @@ -102,6 +115,8 @@ export class ExpectedPackagesContentObserver implements Meteor.LiveQueryHandle { } public stop = (): void => { + this.#disposed = true + this.#observers.forEach((observer) => observer.stop()) } } diff --git a/meteor/server/publications/packageManager/expectedPackages/publication.ts b/meteor/server/publications/packageManager/expectedPackages/publication.ts index e315421b765..1952fb7057c 100644 --- a/meteor/server/publications/packageManager/expectedPackages/publication.ts +++ b/meteor/server/publications/packageManager/expectedPackages/publication.ts @@ -6,6 +6,7 @@ import { meteorCustomPublish, setUpCollectionOptimizedObserver, CustomPublishCollection, + SetupObserversResult, } from '../../../lib/customPublication' import { literal, omit, protectString } from '../../../lib/tempLib' import { logger } from '../../../logging' @@ -72,7 +73,7 @@ const studioFieldSpecifier = literal, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const contentCache = createReactiveContentCache() // Push update @@ -80,7 +81,7 @@ async function setupExpectedPackagesPublicationObservers( // Set up observers: return [ - new ExpectedPackagesContentObserver(args.studioId, contentCache), + 
ExpectedPackagesContentObserver.create(args.studioId, contentCache), contentCache.ExpectedPackages.find({}).observeChanges({ added: (id) => triggerUpdate({ invalidateExpectedPackageIds: [protectString(id)] }), diff --git a/meteor/server/publications/packageManager/packageContainers.ts b/meteor/server/publications/packageManager/packageContainers.ts index 8c43c0a6111..0accf66181d 100644 --- a/meteor/server/publications/packageManager/packageContainers.ts +++ b/meteor/server/publications/packageManager/packageContainers.ts @@ -8,7 +8,12 @@ import { check } from 'meteor/check' import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' import { PeripheralDevices, Studios } from '../../collections' -import { meteorCustomPublish, setUpOptimizedObserverArray, TriggerUpdate } from '../../lib/customPublication' +import { + meteorCustomPublish, + SetupObserversResult, + setUpOptimizedObserverArray, + TriggerUpdate, +} from '../../lib/customPublication' import { logger } from '../../logging' import { PeripheralDeviceReadAccess } from '../../security/peripheralDevice' import { @@ -35,7 +40,7 @@ type PackageManagerPackageContainersState = Record async function setupExpectedPackagesPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // Set up observers: return [ Studios.observeChanges( diff --git a/meteor/server/publications/packageManager/playoutContext.ts b/meteor/server/publications/packageManager/playoutContext.ts index f3462daa794..08c881fafe5 100644 --- a/meteor/server/publications/packageManager/playoutContext.ts +++ b/meteor/server/publications/packageManager/playoutContext.ts @@ -8,7 +8,12 @@ import { check } from 'meteor/check' import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' import { PeripheralDevices, RundownPlaylists, Rundowns } from '../../collections' -import { meteorCustomPublish, setUpOptimizedObserverArray, TriggerUpdate } from '../../lib/customPublication' 
+import { + meteorCustomPublish, + SetupObserversResult, + setUpOptimizedObserverArray, + TriggerUpdate, +} from '../../lib/customPublication' import { logger } from '../../logging' import { PeripheralDeviceReadAccess } from '../../security/peripheralDevice' import { @@ -36,7 +41,7 @@ type PackageManagerPlayoutContextState = Record async function setupExpectedPackagesPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // Set up observers: return [ RundownPlaylists.observeChanges( diff --git a/meteor/server/publications/partInstancesUI/publication.ts b/meteor/server/publications/partInstancesUI/publication.ts index 0d1d5a37f49..727d212fecd 100644 --- a/meteor/server/publications/partInstancesUI/publication.ts +++ b/meteor/server/publications/partInstancesUI/publication.ts @@ -7,6 +7,7 @@ import { import { check } from 'meteor/check' import { CustomPublishCollection, + SetupObserversResult, TriggerUpdate, meteorCustomPublish, setUpCollectionOptimizedObserver, @@ -17,7 +18,6 @@ import { resolveCredentials } from '../../security/lib/credentials' import { NoSecurityReadAccess } from '../../security/noSecurity' import { ContentCache, PartInstanceOmitedFields, createReactiveContentCache } from './reactiveContentCache' import { ReadonlyDeep } from 'type-fest' -import { LiveQueryHandle } from '../../lib/lib' import { RundownPlaylists } from '../../collections' import { literal } from '@sofie-automation/corelib/dist/lib' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' @@ -65,7 +65,7 @@ const rundownPlaylistFieldSpecifier = literal< async function setupUIPartInstancesPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const playlist = (await RundownPlaylists.findOneAsync( { activationId: args.playlistActivationId }, { @@ -74,7 +74,7 @@ async function setupUIPartInstancesPublicationObservers( )) as Pick | undefined if (!playlist) throw new 
Error(`RundownPlaylist with activationId="${args.playlistActivationId}" not found!`) - const rundownsObserver = new RundownsObserver(playlist.studioId, playlist._id, (rundownIds) => { + const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { logger.silly(`Creating new RundownContentObserver`) const cache = createReactiveContentCache() @@ -82,7 +82,12 @@ async function setupUIPartInstancesPublicationObservers( // Push update triggerUpdate({ newCache: cache }) - const obs1 = new RundownContentObserver(playlist.studioId, args.playlistActivationId, rundownIds, cache) + const obs1 = await RundownContentObserver.create( + playlist.studioId, + args.playlistActivationId, + rundownIds, + cache + ) const innerQueries = [ cache.Segments.find({}).observeChanges({ diff --git a/meteor/server/publications/partInstancesUI/rundownContentObserver.ts b/meteor/server/publications/partInstancesUI/rundownContentObserver.ts index c8a21b842c2..a2f14e6c447 100644 --- a/meteor/server/publications/partInstancesUI/rundownContentObserver.ts +++ b/meteor/server/publications/partInstancesUI/rundownContentObserver.ts @@ -10,21 +10,27 @@ import { studioFieldSpecifier, } from './reactiveContentCache' import { PartInstances, Parts, RundownPlaylists, Segments, Studios } from '../../collections' +import { waitForAllObserversReady } from '../lib/lib' export class RundownContentObserver { - #observers: Meteor.LiveQueryHandle[] = [] - #cache: ContentCache + readonly #cache: ContentCache + readonly #observers: Meteor.LiveQueryHandle[] - constructor( + private constructor(cache: ContentCache, observers: Meteor.LiveQueryHandle[]) { + this.#cache = cache + + this.#observers = observers + } + + static async create( studioId: StudioId, playlistActivationId: RundownPlaylistActivationId, rundownIds: RundownId[], cache: ContentCache - ) { + ): Promise { logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) - this.#cache = 
cache - this.#observers = [ + const observers = await waitForAllObserversReady([ Studios.observeChanges( { _id: studioId, @@ -78,7 +84,9 @@ export class RundownContentObserver { projection: partInstanceFieldSpecifier, } ), - ] + ]) + + return new RundownContentObserver(cache, observers) } public get cache(): ContentCache { diff --git a/meteor/server/publications/partsUI/publication.ts b/meteor/server/publications/partsUI/publication.ts index 1b36fe86a3a..379e47f31cc 100644 --- a/meteor/server/publications/partsUI/publication.ts +++ b/meteor/server/publications/partsUI/publication.ts @@ -2,6 +2,7 @@ import { PartId, RundownPlaylistId, SegmentId } from '@sofie-automation/corelib/ import { check } from 'meteor/check' import { CustomPublishCollection, + SetupObserversResult, TriggerUpdate, meteorCustomPublish, setUpCollectionOptimizedObserver, @@ -14,7 +15,6 @@ import { NoSecurityReadAccess } from '../../security/noSecurity' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { ContentCache, PartOmitedFields, createReactiveContentCache } from './reactiveContentCache' import { ReadonlyDeep } from 'type-fest' -import { LiveQueryHandle } from '../../lib/lib' import { RundownPlaylists } from '../../collections' import { literal } from '@sofie-automation/corelib/dist/lib' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' @@ -53,13 +53,13 @@ const rundownPlaylistFieldSpecifier = literal< async function setupUIPartsPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const playlist = (await RundownPlaylists.findOneAsync(args.playlistId, { projection: rundownPlaylistFieldSpecifier, })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.playlistId}" not found!`) - const rundownsObserver = new RundownsObserver(playlist.studioId, playlist._id, (rundownIds) => { + const rundownsObserver = await RundownsObserver.create(playlist.studioId, 
playlist._id, async (rundownIds) => { logger.silly(`Creating new RundownContentObserver`) const cache = createReactiveContentCache() @@ -67,7 +67,7 @@ async function setupUIPartsPublicationObservers( // Push update triggerUpdate({ newCache: cache }) - const obs1 = new RundownContentObserver(playlist.studioId, playlist._id, rundownIds, cache) + const obs1 = await RundownContentObserver.create(playlist.studioId, playlist._id, rundownIds, cache) const innerQueries = [ cache.Segments.find({}).observeChanges({ diff --git a/meteor/server/publications/partsUI/rundownContentObserver.ts b/meteor/server/publications/partsUI/rundownContentObserver.ts index e9de9dd780c..ee7e92c7d62 100644 --- a/meteor/server/publications/partsUI/rundownContentObserver.ts +++ b/meteor/server/publications/partsUI/rundownContentObserver.ts @@ -9,16 +9,26 @@ import { studioFieldSpecifier, } from './reactiveContentCache' import { Parts, RundownPlaylists, Segments, Studios } from '../../collections' +import { waitForAllObserversReady } from '../lib/lib' export class RundownContentObserver { - #observers: Meteor.LiveQueryHandle[] = [] - #cache: ContentCache + readonly #cache: ContentCache + readonly #observers: Meteor.LiveQueryHandle[] - constructor(studioId: StudioId, playlistId: RundownPlaylistId, rundownIds: RundownId[], cache: ContentCache) { - logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + private constructor(cache: ContentCache, observers: Meteor.LiveQueryHandle[]) { this.#cache = cache + this.#observers = observers + } - this.#observers = [ + static async create( + studioId: StudioId, + playlistId: RundownPlaylistId, + rundownIds: RundownId[], + cache: ContentCache + ): Promise { + logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + + const observers = await waitForAllObserversReady([ Studios.observeChanges( { _id: studioId, @@ -59,7 +69,9 @@ export class RundownContentObserver { projection: partFieldSpecifier, } ), 
- ] + ]) + + return new RundownContentObserver(cache, observers) } public get cache(): ContentCache { diff --git a/meteor/server/publications/peripheralDevice.ts b/meteor/server/publications/peripheralDevice.ts index 61588d0e908..1ead8e0e6d3 100644 --- a/meteor/server/publications/peripheralDevice.ts +++ b/meteor/server/publications/peripheralDevice.ts @@ -1,6 +1,6 @@ import { Meteor } from 'meteor/meteor' import { check, Match } from '../lib/check' -import { meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' diff --git a/meteor/server/publications/peripheralDeviceForDevice.ts b/meteor/server/publications/peripheralDeviceForDevice.ts index bcead000511..f98b37e6ffd 100644 --- a/meteor/server/publications/peripheralDeviceForDevice.ts +++ b/meteor/server/publications/peripheralDeviceForDevice.ts @@ -3,7 +3,12 @@ import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' import { PeripheralDevice, PeripheralDeviceCategory } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { PeripheralDevices, Studios } from '../collections' -import { TriggerUpdate, meteorCustomPublish, setUpOptimizedObserverArray } from '../lib/customPublication' +import { + SetupObserversResult, + TriggerUpdate, + meteorCustomPublish, + setUpOptimizedObserverArray, +} from '../lib/customPublication' import { PeripheralDeviceForDevice } from '@sofie-automation/shared-lib/dist/core/model/peripheralDevice' import { ReadonlyDeep } from 'type-fest' import { ReactiveMongoObserverGroup } from './lib/observerGroup' @@ -120,7 +125,7 @@ export function convertPeripheralDeviceForGateway( async 
function setupPeripheralDevicePublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const studioObserver = await ReactiveMongoObserverGroup(async () => { const peripheralDeviceCompact = (await PeripheralDevices.findOneAsync(args.deviceId, { fields: { studioId: 1 }, diff --git a/meteor/server/publications/pieceContentStatusUI/bucket/bucketContentObserver.ts b/meteor/server/publications/pieceContentStatusUI/bucket/bucketContentObserver.ts index 28ab518c816..e80ab6076b5 100644 --- a/meteor/server/publications/pieceContentStatusUI/bucket/bucketContentObserver.ts +++ b/meteor/server/publications/pieceContentStatusUI/bucket/bucketContentObserver.ts @@ -11,11 +11,11 @@ import { } from './bucketContentCache' import { BucketAdLibActions, BucketAdLibs, ShowStyleBases } from '../../../collections' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' -import { waitForPromise } from '../../../lib/lib' import { equivalentArrays } from '@sofie-automation/shared-lib/dist/lib/lib' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { ReactiveMongoObserverGroup, ReactiveMongoObserverGroupHandle } from '../../lib/observerGroup' import _ from 'underscore' +import { waitForAllObserversReady } from '../../lib/lib' const REACTIVITY_DEBOUNCE = 20 @@ -31,47 +31,53 @@ export class BucketContentObserver implements Meteor.LiveQueryHandle { #cache: BucketContentCache #showStyleBaseIds: ShowStyleBaseId[] = [] - #showStyleBaseIdObserver: ReactiveMongoObserverGroupHandle + #showStyleBaseIdObserver!: ReactiveMongoObserverGroupHandle - constructor(bucketId: BucketId, cache: BucketContentCache) { - logger.silly(`Creating BucketContentObserver for "${bucketId}"`) + #disposed = false + + private constructor(cache: BucketContentCache) { this.#cache = cache + } + + static async create(bucketId: BucketId, cache: BucketContentCache): Promise { + logger.silly(`Creating 
BucketContentObserver for "${bucketId}"`) + + const observer = new BucketContentObserver(cache) // Run the ShowStyleBase query in a ReactiveMongoObserverGroup, so that it can be restarted whenever - this.#showStyleBaseIdObserver = waitForPromise( - ReactiveMongoObserverGroup(async () => { - // Clear already cached data - cache.ShowStyleSourceLayers.remove({}) - - return [ - ShowStyleBases.observe( - { - // We can use the `this.#showStyleBaseIds` here, as this is restarted every time that property changes - _id: { $in: this.#showStyleBaseIds }, + observer.#showStyleBaseIdObserver = await ReactiveMongoObserverGroup(async () => { + // Clear already cached data + cache.ShowStyleSourceLayers.remove({}) + + return [ + ShowStyleBases.observe( + { + // We can use the `this.#showStyleBaseIds` here, as this is restarted every time that property changes + _id: { $in: observer.#showStyleBaseIds }, + }, + { + added: (doc) => { + const newDoc = convertShowStyleBase(doc) + cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) + }, + changed: (doc) => { + const newDoc = convertShowStyleBase(doc) + cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) }, - { - added: (doc) => { - const newDoc = convertShowStyleBase(doc) - cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) - }, - changed: (doc) => { - const newDoc = convertShowStyleBase(doc) - cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) - }, - removed: (doc) => { - cache.ShowStyleSourceLayers.remove(doc._id) - }, + removed: (doc) => { + cache.ShowStyleSourceLayers.remove(doc._id) }, - { - projection: showStyleBaseFieldSpecifier, - } - ), - ] - }) - ) + }, + { + projection: showStyleBaseFieldSpecifier, + } + ), + ] + }) // Subscribe to the database, and pipe any updates into the ReactiveCacheCollections - this.#observers = [ + // This takes ownership of the #showStyleBaseIdObserver, and will stop it if this throws + observer.#observers = await 
waitForAllObserversReady([ BucketAdLibs.observeChanges( { bucketId: bucketId, @@ -79,7 +85,7 @@ export class BucketContentObserver implements Meteor.LiveQueryHandle { cache.BucketAdLibs.link(() => { // Check if the ShowStyleBaseIds needs updating // TODO - is this over-eager? - this.updateShowStyleBaseIds() + observer.updateShowStyleBaseIds() }), { projection: bucketAdlibFieldSpecifier, @@ -92,19 +98,23 @@ export class BucketContentObserver implements Meteor.LiveQueryHandle { cache.BucketAdLibActions.link(() => { // Check if the ShowStyleBaseIds needs updating // TODO - is this over-eager? - this.updateShowStyleBaseIds() + observer.updateShowStyleBaseIds() }), { projection: bucketActionFieldSpecifier, } ), - this.#showStyleBaseIdObserver, - ] + observer.#showStyleBaseIdObserver, + ]) + + return observer } private updateShowStyleBaseIds = _.debounce( Meteor.bindEnvironment(() => { + if (this.#disposed) return + const newShowStyleBaseIdsSet = new Set() this.#cache.BucketAdLibs.find({}).forEach((adlib) => newShowStyleBaseIdsSet.add(adlib.showStyleBaseId)) this.#cache.BucketAdLibActions.find({}).forEach((action) => @@ -127,6 +137,8 @@ export class BucketContentObserver implements Meteor.LiveQueryHandle { } public stop = (): void => { + this.#disposed = true + this.#observers.forEach((observer) => observer.stop()) } } diff --git a/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts b/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts index d1ecc15ed86..8661244883f 100644 --- a/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts +++ b/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts @@ -18,12 +18,12 @@ import { meteorCustomPublish, setUpCollectionOptimizedObserver, TriggerUpdate, + SetupObserversResult, } from '../../../lib/customPublication' import { logger } from '../../../logging' import { resolveCredentials } from '../../../security/lib/credentials' import { NoSecurityReadAccess } from 
'../../../security/noSecurity' import { BucketContentCache, createReactiveContentCache } from './bucketContentCache' -import { LiveQueryHandle } from '../../../lib/lib' import { StudioReadAccess } from '../../../security/studio' import { Bucket } from '@sofie-automation/meteor-lib/dist/collections/Buckets' import { @@ -72,7 +72,7 @@ const bucketFieldSpecifier = literal, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const trackMediaObjectChange = (mediaId: string): Partial => ({ invalidateMediaObjectMediaId: [mediaId], }) @@ -103,7 +103,7 @@ async function setupUIBucketContentStatusesPublicationObservers( // Set up observers: return [ - new BucketContentObserver(args.bucketId, contentCache), + BucketContentObserver.create(args.bucketId, contentCache), contentCache.BucketAdLibs.find({}).observeChanges({ added: (id) => triggerUpdate(trackAdlibChange(protectString(id))), diff --git a/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts b/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts index 9b4941e0871..1b20d6de6a7 100644 --- a/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts +++ b/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts @@ -28,6 +28,7 @@ import { CustomPublishCollection, meteorCustomPublish, setUpCollectionOptimizedObserver, + SetupObserversResult, TriggerUpdate, } from '../../../lib/customPublication' import { logger } from '../../../logging' @@ -37,7 +38,6 @@ import { RundownPlaylistReadAccess } from '../../../security/rundownPlaylist' import { ContentCache, PartInstanceFields, createReactiveContentCache } from './reactiveContentCache' import { RundownContentObserver } from './rundownContentObserver' import { RundownsObserver } from '../../lib/rundownsObserver' -import { LiveQueryHandle } from '../../../lib/lib' import { addItemsWithDependenciesChangesToChangedSet, fetchStudio, @@ -104,7 +104,7 @@ const rundownPlaylistFieldSpecifier = literal< async function 
setupUIPieceContentStatusesPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const trackMediaObjectChange = (mediaId: string): Partial => ({ invalidateMediaObjectMediaId: [mediaId], }) @@ -122,14 +122,14 @@ async function setupUIPieceContentStatusesPublicationObservers( })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.rundownPlaylistId}" not found!`) - const rundownsObserver = new RundownsObserver(playlist.studioId, playlist._id, (rundownIds) => { + const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { logger.silly(`Creating new RundownContentObserver`) // TODO - can this be done cheaper? const contentCache = createReactiveContentCache() triggerUpdate({ newCache: contentCache }) - const obs1 = new RundownContentObserver(rundownIds, contentCache) + const obs1 = await RundownContentObserver.create(rundownIds, contentCache) const innerQueries = [ contentCache.Segments.find({}).observeChanges({ diff --git a/meteor/server/publications/pieceContentStatusUI/rundown/rundownContentObserver.ts b/meteor/server/publications/pieceContentStatusUI/rundown/rundownContentObserver.ts index 26d836aaa90..6ba425ab3ee 100644 --- a/meteor/server/publications/pieceContentStatusUI/rundown/rundownContentObserver.ts +++ b/meteor/server/publications/pieceContentStatusUI/rundown/rundownContentObserver.ts @@ -33,7 +33,7 @@ import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settin import { ReactiveMongoObserverGroup, ReactiveMongoObserverGroupHandle } from '../../lib/observerGroup' import _ from 'underscore' import { equivalentArrays } from '@sofie-automation/shared-lib/dist/lib/lib' -import { waitForPromise } from '../../../lib/lib' +import { waitForAllObserversReady } from '../../lib/lib' const REACTIVITY_DEBOUNCE = 20 @@ -46,59 +46,73 @@ function convertShowStyleBase(doc: Pick): export class RundownContentObserver { #observers: 
Meteor.LiveQueryHandle[] = [] - #cache: ContentCache + readonly #cache: ContentCache #showStyleBaseIds: ShowStyleBaseId[] = [] - #showStyleBaseIdObserver: ReactiveMongoObserverGroupHandle + #showStyleBaseIdObserver!: ReactiveMongoObserverGroupHandle - constructor(rundownIds: RundownId[], cache: ContentCache) { - logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + private constructor(cache: ContentCache) { this.#cache = cache + } + + static async create(rundownIds: RundownId[], cache: ContentCache): Promise { + logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + const observer = new RundownContentObserver(cache) + + await observer.initShowStyleBaseIdObserver() + + // This takes ownership of the #showStyleBaseIdObserver, and will stop it if this throws + await observer.initContentObservers(rundownIds) + + return observer + } + + private async initShowStyleBaseIdObserver() { // Run the ShowStyleBase query in a ReactiveMongoObserverGroup, so that it can be restarted whenever - this.#showStyleBaseIdObserver = waitForPromise( - ReactiveMongoObserverGroup(async () => { - // Clear already cached data - cache.ShowStyleSourceLayers.remove({}) - - logger.silly(`optimized observer restarting ${this.#showStyleBaseIds}`) - - return [ - ShowStyleBases.observe( - { - // We can use the `this.#showStyleBaseIds` here, as this is restarted every time that property changes - _id: { $in: this.#showStyleBaseIds }, + this.#showStyleBaseIdObserver = await ReactiveMongoObserverGroup(async () => { + // Clear already cached data + this.#cache.ShowStyleSourceLayers.remove({}) + + logger.silly(`optimized observer restarting ${this.#showStyleBaseIds}`) + + return [ + ShowStyleBases.observe( + { + // We can use the `this.#showStyleBaseIds` here, as this is restarted every time that property changes + _id: { $in: this.#showStyleBaseIds }, + }, + { + added: (doc) => { + const newDoc = convertShowStyleBase(doc) + 
this.#cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) }, - { - added: (doc) => { - const newDoc = convertShowStyleBase(doc) - cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) - }, - changed: (doc) => { - const newDoc = convertShowStyleBase(doc) - cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) - }, - removed: (doc) => { - cache.ShowStyleSourceLayers.remove(doc._id) - }, + changed: (doc) => { + const newDoc = convertShowStyleBase(doc) + this.#cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) }, - { - projection: showStyleBaseFieldSpecifier, - } - ), - ] - }) - ) + removed: (doc) => { + this.#cache.ShowStyleSourceLayers.remove(doc._id) + }, + }, + { + projection: showStyleBaseFieldSpecifier, + } + ), + ] + }) + } + private async initContentObservers(rundownIds: RundownId[]) { // Subscribe to the database, and pipe any updates into the ReactiveCacheCollections - this.#observers = [ + this.#observers = await waitForAllObserversReady([ Rundowns.observeChanges( { _id: { $in: rundownIds, }, }, - cache.Rundowns.link(() => { + this.#cache.Rundowns.link(() => { // Check if the ShowStyleBaseIds needs updating this.updateShowStyleBaseIds() }), @@ -114,7 +128,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.Segments.link(), + this.#cache.Segments.link(), { projection: segmentFieldSpecifier, } @@ -125,7 +139,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.Parts.link(), + this.#cache.Parts.link(), { projection: partFieldSpecifier, } @@ -136,7 +150,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.Pieces.link(), + this.#cache.Pieces.link(), { projection: pieceFieldSpecifier, } @@ -148,7 +162,7 @@ export class RundownContentObserver { }, reset: { $ne: true }, }, - cache.PartInstances.link(), + this.#cache.PartInstances.link(), { projection: partInstanceFieldSpecifier, } @@ -160,7 +174,7 @@ export class 
RundownContentObserver { }, reset: { $ne: true }, }, - cache.PieceInstances.link(), + this.#cache.PieceInstances.link(), { projection: pieceInstanceFieldSpecifier, } @@ -171,7 +185,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.AdLibPieces.link(), + this.#cache.AdLibPieces.link(), { projection: adLibPieceFieldSpecifier, } @@ -182,7 +196,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.AdLibActions.link(), + this.#cache.AdLibActions.link(), { projection: adLibActionFieldSpecifier, } @@ -193,7 +207,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.BaselineAdLibPieces.link(), + this.#cache.BaselineAdLibPieces.link(), { projection: adLibPieceFieldSpecifier, } @@ -204,12 +218,12 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.BaselineAdLibActions.link(), + this.#cache.BaselineAdLibActions.link(), { projection: adLibActionFieldSpecifier, } ), - ] + ]) } private updateShowStyleBaseIds = _.debounce( diff --git a/meteor/server/publications/rundown.ts b/meteor/server/publications/rundown.ts index a34c85824c8..f939a9baffa 100644 --- a/meteor/server/publications/rundown.ts +++ b/meteor/server/publications/rundown.ts @@ -1,5 +1,5 @@ import { Meteor } from 'meteor/meteor' -import { meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { MongoFieldSpecifierZeroes, MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' diff --git a/meteor/server/publications/rundownPlaylist.ts b/meteor/server/publications/rundownPlaylist.ts index b8b3cbe69a2..89378b15875 100644 --- a/meteor/server/publications/rundownPlaylist.ts +++ b/meteor/server/publications/rundownPlaylist.ts @@ -1,5 +1,5 @@ import { RundownPlaylistReadAccess } from '../security/rundownPlaylist' -import { 
meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { StudioReadAccess } from '../security/studio' import { OrganizationReadAccess } from '../security/organization' import { NoSecurityReadAccess } from '../security/noSecurity' diff --git a/meteor/server/publications/segmentPartNotesUI/publication.ts b/meteor/server/publications/segmentPartNotesUI/publication.ts index dd71797d5fe..5ab2a86a445 100644 --- a/meteor/server/publications/segmentPartNotesUI/publication.ts +++ b/meteor/server/publications/segmentPartNotesUI/publication.ts @@ -12,13 +12,13 @@ import { CustomPublishCollection, meteorCustomPublish, setUpCollectionOptimizedObserver, + SetupObserversResult, TriggerUpdate, } from '../../lib/customPublication' import { logger } from '../../logging' import { resolveCredentials } from '../../security/lib/credentials' import { NoSecurityReadAccess } from '../../security/noSecurity' import { RundownPlaylistReadAccess } from '../../security/rundownPlaylist' -import { LiveQueryHandle } from '../../lib/lib' import { ContentCache, createReactiveContentCache, @@ -60,13 +60,13 @@ const rundownPlaylistFieldSpecifier = literal< async function setupUISegmentPartNotesPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const playlist = (await RundownPlaylists.findOneAsync(args.playlistId, { projection: rundownPlaylistFieldSpecifier, })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.playlistId}" not found!`) - const rundownsObserver = new RundownsObserver(playlist.studioId, playlist._id, (rundownIds) => { + const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { logger.silly(`Creating new RundownContentObserver`) // TODO - can this be done cheaper? 
@@ -75,7 +75,7 @@ async function setupUISegmentPartNotesPublicationObservers( // Push update triggerUpdate({ newCache: cache }) - const obs1 = new RundownContentObserver(rundownIds, cache) + const obs1 = await RundownContentObserver.create(rundownIds, cache) const innerQueries = [ cache.Segments.find({}).observeChanges({ diff --git a/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts b/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts index 43f9c252700..214a5dac962 100644 --- a/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts +++ b/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts @@ -9,16 +9,21 @@ import { segmentFieldSpecifier, } from './reactiveContentCache' import { PartInstances, Parts, Rundowns, Segments } from '../../collections' +import { waitForAllObserversReady } from '../lib/lib' export class RundownContentObserver { - #observers: Meteor.LiveQueryHandle[] = [] - #cache: ContentCache + readonly #observers: Meteor.LiveQueryHandle[] + readonly #cache: ContentCache - constructor(rundownIds: RundownId[], cache: ContentCache) { - logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + private constructor(cache: ContentCache, observers: Meteor.LiveQueryHandle[]) { this.#cache = cache + this.#observers = observers + } - this.#observers = [ + static async create(rundownIds: RundownId[], cache: ContentCache): Promise { + logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + + const observers = await waitForAllObserversReady([ Rundowns.observeChanges( { _id: { @@ -57,7 +62,9 @@ export class RundownContentObserver { cache.DeletedPartInstances.link(), { fields: partInstanceFieldSpecifier } ), - ] + ]) + + return new RundownContentObserver(cache, observers) } public get cache(): ContentCache { diff --git a/meteor/server/publications/showStyle.ts b/meteor/server/publications/showStyle.ts index 
bd755481517..99b3099e508 100644 --- a/meteor/server/publications/showStyle.ts +++ b/meteor/server/publications/showStyle.ts @@ -1,4 +1,4 @@ -import { meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' diff --git a/meteor/server/publications/showStyleUI.ts b/meteor/server/publications/showStyleUI.ts index 78883578434..68309db7d95 100644 --- a/meteor/server/publications/showStyleUI.ts +++ b/meteor/server/publications/showStyleUI.ts @@ -1,19 +1,23 @@ import { ShowStyleBaseId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mongo' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' import { CustomCollectionName, MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { UIShowStyleBase } from '@sofie-automation/meteor-lib/dist/api/showStyles' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { Complete, literal } from '../lib/tempLib' -import { meteorCustomPublish, setUpOptimizedObserverArray, TriggerUpdate } from '../lib/customPublication' +import { + meteorCustomPublish, + SetupObserversResult, + setUpOptimizedObserverArray, + TriggerUpdate, +} from '../lib/customPublication' import { logger } from '../logging' import { NoSecurityReadAccess } from '../security/noSecurity' import { OrganizationReadAccess } from '../security/organization' import { ShowStyleReadAccess } from '../security/showStyle' import { ShowStyleBases } from '../collections' -import { AutoFillSelector } from 
'./lib' +import { AutoFillSelector } from './lib/lib' import { check } from 'meteor/check' interface UIShowStyleBaseArgs { @@ -38,7 +42,7 @@ const fieldSpecifier = literal, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // Set up observers: return [ ShowStyleBases.observeChanges( diff --git a/meteor/server/publications/studio.ts b/meteor/server/publications/studio.ts index 43854b763a6..08002e6938a 100644 --- a/meteor/server/publications/studio.ts +++ b/meteor/server/publications/studio.ts @@ -1,6 +1,6 @@ import { Meteor } from 'meteor/meteor' import { check, Match } from '../lib/check' -import { meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { getActiveRoutes, getRoutedMappings } from '@sofie-automation/meteor-lib/dist/collections/Studios' import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' @@ -11,6 +11,7 @@ import { NoSecurityReadAccess } from '../security/noSecurity' import { CustomPublish, meteorCustomPublish, + SetupObserversResult, setUpOptimizedObserverArray, TriggerUpdate, } from '../lib/customPublication' @@ -170,7 +171,7 @@ interface RoutedMappingsUpdateProps { async function setupMappingsPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // Set up observers: return [ Studios.observeChanges( diff --git a/meteor/server/publications/studioUI.ts b/meteor/server/publications/studioUI.ts index 6e6ca92b96e..b8de6f1b7d4 100644 --- a/meteor/server/publications/studioUI.ts +++ b/meteor/server/publications/studioUI.ts @@ -1,7 +1,6 @@ import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mongo' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { Meteor } from 'meteor/meteor' import { 
ReadonlyDeep } from 'type-fest' import { CustomCollectionName, MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { UIStudio } from '@sofie-automation/meteor-lib/dist/api/studios' @@ -11,6 +10,7 @@ import { CustomPublishCollection, meteorCustomPublish, setUpCollectionOptimizedObserver, + SetupObserversResult, TriggerUpdate, } from '../lib/customPublication' import { logger } from '../logging' @@ -62,7 +62,7 @@ const fieldSpecifier = literal, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const trackChange = (id: StudioId): Partial => ({ invalidateStudioIds: [id], }) diff --git a/meteor/server/publications/system.ts b/meteor/server/publications/system.ts index ed36f597054..e494807e859 100644 --- a/meteor/server/publications/system.ts +++ b/meteor/server/publications/system.ts @@ -1,5 +1,5 @@ import { Meteor } from 'meteor/meteor' -import { meteorPublish } from './lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { SystemReadAccess } from '../security/system' import { OrganizationReadAccess } from '../security/organization' diff --git a/meteor/server/publications/timeline.ts b/meteor/server/publications/timeline.ts index d4f24d782af..15cf679157e 100644 --- a/meteor/server/publications/timeline.ts +++ b/meteor/server/publications/timeline.ts @@ -8,12 +8,13 @@ import { serializeTimelineBlob, TimelineBlob, } from '@sofie-automation/corelib/dist/dataModel/Timeline' -import { meteorPublish } from './lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { FindOptions } from '@sofie-automation/meteor-lib/dist/collections/lib' import { CustomPublish, meteorCustomPublish, + SetupObserversResult, setUpOptimizedObserverArray, TriggerUpdate, } from '../lib/customPublication' @@ -124,7 +125,7 @@ interface RoutedTimelineUpdateProps { async function setupTimelinePublicationObservers( args: 
ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // Set up observers: return [ Studios.observeChanges( diff --git a/meteor/server/publications/translationsBundles.ts b/meteor/server/publications/translationsBundles.ts index 55e98e9503c..8173fd3ec56 100644 --- a/meteor/server/publications/translationsBundles.ts +++ b/meteor/server/publications/translationsBundles.ts @@ -1,5 +1,5 @@ import { TranslationsBundlesSecurity } from '../security/translationsBundles' -import { meteorPublish } from './lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { TranslationsBundles } from '../collections' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' diff --git a/meteor/server/publications/triggeredActionsUI.ts b/meteor/server/publications/triggeredActionsUI.ts index 854c20450a0..5a431daf10c 100644 --- a/meteor/server/publications/triggeredActionsUI.ts +++ b/meteor/server/publications/triggeredActionsUI.ts @@ -1,6 +1,5 @@ import { ShowStyleBaseId, TriggeredActionId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' import { CustomCollectionName, MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { @@ -12,6 +11,7 @@ import { CustomPublishCollection, meteorCustomPublish, setUpCollectionOptimizedObserver, + SetupObserversResult, TriggerUpdate, } from '../lib/customPublication' import { logger } from '../logging' @@ -64,7 +64,7 @@ function convertDocument(doc: DBTriggeredActions): UITriggeredActionsObj { async function setupUITriggeredActionsPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const trackChange = (id: TriggeredActionId): Partial => ({ invalidateTriggeredActions: [id], }) diff --git 
a/packages/meteor-lib/src/collections/lib.ts b/packages/meteor-lib/src/collections/lib.ts index 636a56578fb..093ba0bd432 100644 --- a/packages/meteor-lib/src/collections/lib.ts +++ b/packages/meteor-lib/src/collections/lib.ts @@ -169,11 +169,25 @@ export interface MongoCursor }> * @param callbacks Functions to call to deliver the result set as it changes */ observe(callbacks: ObserveCallbacks): MongoLiveQueryHandle + /** + * Watch a query. Receive callbacks as the result set changes. + * @param callbacks Functions to call to deliver the result set as it changes + */ + observeAsync(callbacks: ObserveCallbacks): Promise /** * Watch a query. Receive callbacks as the result set changes. Only the differences between the old and new documents are passed to the callbacks. * @param callbacks Functions to call to deliver the result set as it changes */ observeChanges(callbacks: ObserveChangesCallbacks): MongoLiveQueryHandle + /** + * Watch a query. Receive callbacks as the result set changes. Only the differences between the old and new documents are passed to the callbacks. 
+ * @param callbacks Functions to call to deliver the result set as it changes + * @param options { nonMutatingCallbacks: boolean } + */ + observeChangesAsync( + callbacks: ObserveChangesCallbacks, + options?: { nonMutatingCallbacks?: boolean | undefined } + ): Promise } export interface ObserveCallbacks { added?(document: DBInterface): void From 13b3379c8e04228d648d74ec5204a43abaaa7c7b Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Fri, 1 Nov 2024 13:34:03 +0000 Subject: [PATCH 52/81] fix: remove unused `rundownIds` parameter from `MeteorPubSub.uiPartInstances` publication --- .../partInstancesUI/publication.ts | 25 +++++-------------- packages/meteor-lib/src/api/pubsub.ts | 1 - .../ui/ClockView/CameraScreen/index.tsx | 2 +- .../client/ui/ClockView/PresenterScreen.tsx | 2 +- .../src/client/ui/Prompter/PrompterView.tsx | 2 +- packages/webui/src/client/ui/RundownView.tsx | 7 +----- .../TriggeredActionsEditor.tsx | 2 +- 7 files changed, 11 insertions(+), 30 deletions(-) diff --git a/meteor/server/publications/partInstancesUI/publication.ts b/meteor/server/publications/partInstancesUI/publication.ts index 727d212fecd..01a21711f21 100644 --- a/meteor/server/publications/partInstancesUI/publication.ts +++ b/meteor/server/publications/partInstancesUI/publication.ts @@ -1,9 +1,4 @@ -import { - PartInstanceId, - RundownId, - RundownPlaylistActivationId, - SegmentId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PartInstanceId, RundownPlaylistActivationId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { check } from 'meteor/check' import { CustomPublishCollection, @@ -26,7 +21,6 @@ import { RundownsObserver } from '../lib/rundownsObserver' import { RundownContentObserver } from './rundownContentObserver' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { Match } from '../../lib/check' -import { RundownReadAccess } from '../../security/rundown' import { DBPartInstance } from 
'@sofie-automation/corelib/dist/dataModel/PartInstance' import { extractRanks, @@ -37,7 +31,6 @@ import { interface UIPartInstancesArgs { readonly playlistActivationId: RundownPlaylistActivationId - readonly rundownIds: RundownId[] } export interface UIPartInstancesState { @@ -210,32 +203,26 @@ export async function manipulateUIPartInstancesPublicationData( meteorCustomPublish( MeteorPubSub.uiPartInstances, CustomCollectionName.UIPartInstances, - async function (pub, rundownIds: RundownId[], playlistActivationId: RundownPlaylistActivationId | null) { - check(rundownIds, [String]) + async function (pub, playlistActivationId: RundownPlaylistActivationId | null) { check(playlistActivationId, Match.Maybe(String)) const credentials = await resolveCredentials({ userId: this.userId, token: undefined }) - if ( - playlistActivationId && - (!credentials || - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent({ $in: rundownIds }, credentials))) - ) { + if (playlistActivationId && (!credentials || NoSecurityReadAccess.any())) { await setUpCollectionOptimizedObserver< Omit, UIPartInstancesArgs, UIPartInstancesState, UIPartInstancesUpdateProps >( - `pub_${MeteorPubSub.uiPartInstances}_${rundownIds.join(',')}_${playlistActivationId}`, - { rundownIds, playlistActivationId }, + `pub_${MeteorPubSub.uiPartInstances}_${playlistActivationId}`, + { playlistActivationId }, setupUIPartInstancesPublicationObservers, manipulateUIPartInstancesPublicationData, pub ) } else { - logger.warn(`Pub.uiPartInstances: Not allowed: [${rundownIds.join(',')}] "${playlistActivationId}"`) + logger.warn(`Pub.uiPartInstances: Not allowed:"${playlistActivationId}"`) } } ) diff --git a/packages/meteor-lib/src/api/pubsub.ts b/packages/meteor-lib/src/api/pubsub.ts index 42670c8df0e..c0ec1c2e5b0 100644 --- a/packages/meteor-lib/src/api/pubsub.ts +++ b/packages/meteor-lib/src/api/pubsub.ts @@ -263,7 +263,6 @@ export interface MeteorPubSubTypes { [MeteorPubSub.uiBlueprintUpgradeStatuses]: () 
=> CustomCollectionName.UIBlueprintUpgradeStatuses [MeteorPubSub.uiParts]: (playlistId: RundownPlaylistId) => CustomCollectionName.UIParts [MeteorPubSub.uiPartInstances]: ( - rundownIds: RundownId[], playlistActivationId: RundownPlaylistActivationId | null ) => CustomCollectionName.UIPartInstances } diff --git a/packages/webui/src/client/ui/ClockView/CameraScreen/index.tsx b/packages/webui/src/client/ui/ClockView/CameraScreen/index.tsx index b6e9c21903d..8e36680d689 100644 --- a/packages/webui/src/client/ui/ClockView/CameraScreen/index.tsx +++ b/packages/webui/src/client/ui/ClockView/CameraScreen/index.tsx @@ -100,7 +100,7 @@ export function CameraScreen({ playlist, studioId }: Readonly): JSX.Elem useSubscription(CorelibPubSub.segments, rundownIds, {}) const studioReady = useSubscription(MeteorPubSub.uiStudio, studioId) - useSubscription(MeteorPubSub.uiPartInstances, rundownIds, playlist?.activationId ?? null) + useSubscription(MeteorPubSub.uiPartInstances, playlist?.activationId ?? null) useSubscription(CorelibPubSub.parts, rundownIds, null) diff --git a/packages/webui/src/client/ui/ClockView/PresenterScreen.tsx b/packages/webui/src/client/ui/ClockView/PresenterScreen.tsx index 02eaff04863..6f3469d7bc5 100644 --- a/packages/webui/src/client/ui/ClockView/PresenterScreen.tsx +++ b/packages/webui/src/client/ui/ClockView/PresenterScreen.tsx @@ -341,7 +341,7 @@ export function usePresenterScreenSubscriptions(props: PresenterScreenProps): vo useSubscription(CorelibPubSub.segments, rundownIds, {}) useSubscription(CorelibPubSub.parts, rundownIds, null) - useSubscription(MeteorPubSub.uiPartInstances, rundownIds, playlist?.activationId ?? null) + useSubscription(MeteorPubSub.uiPartInstances, playlist?.activationId ?? 
null) useSubscriptions( MeteorPubSub.uiShowStyleBase, showStyleBaseIds.map((id) => [id]) diff --git a/packages/webui/src/client/ui/Prompter/PrompterView.tsx b/packages/webui/src/client/ui/Prompter/PrompterView.tsx index 0e13a5263bb..0a473fa9707 100644 --- a/packages/webui/src/client/ui/Prompter/PrompterView.tsx +++ b/packages/webui/src/client/ui/Prompter/PrompterView.tsx @@ -597,7 +597,7 @@ function Prompter(props: Readonly>): JSX.Eleme const rundownIDs = playlist ? RundownPlaylistCollectionUtil.getRundownUnorderedIDs(playlist) : [] useSubscription(CorelibPubSub.segments, rundownIDs, {}) useSubscription(MeteorPubSub.uiParts, props.rundownPlaylistId) - useSubscription(MeteorPubSub.uiPartInstances, rundownIDs, playlist?.activationId ?? null) + useSubscription(MeteorPubSub.uiPartInstances, playlist?.activationId ?? null) useSubscription(CorelibPubSub.pieces, rundownIDs, null) useSubscription(CorelibPubSub.pieceInstancesSimple, rundownIDs, null) diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index cac48c6d72f..e6253658420 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -1308,12 +1308,7 @@ export function RundownView(props: Readonly): JSX.Element { ) auxSubsReady.push(useSubscriptionIfEnabled(MeteorPubSub.uiParts, rundownIds.length > 0, playlistId)) auxSubsReady.push( - useSubscriptionIfEnabled( - MeteorPubSub.uiPartInstances, - rundownIds.length > 0, - rundownIds, - playlistActivationId ?? null - ) + useSubscriptionIfEnabled(MeteorPubSub.uiPartInstances, !!playlistActivationId, playlistActivationId ?? 
null) ) useTracker(() => { diff --git a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx index 1ff832302c1..9e21ffce8e8 100644 --- a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx +++ b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx @@ -191,7 +191,7 @@ export const TriggeredActionsEditor: React.FC = function TriggeredAction null ) - useSubscription(MeteorPubSub.uiPartInstances, rundown ? [rundown._id] : [], rundownPlaylist?.activationId ?? null) + useSubscription(MeteorPubSub.uiPartInstances, rundownPlaylist?.activationId ?? null) useSubscription(CorelibPubSub.parts, rundown ? [rundown._id] : [], null) const previewContext = useTracker( From 8fe75577e33509444019d41b133362111cb1d505 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Fri, 1 Nov 2024 13:35:02 +0000 Subject: [PATCH 53/81] fix: action triggers editor not showing previews --- meteor/server/publications/partsUI/publication.ts | 4 ++-- packages/meteor-lib/src/api/pubsub.ts | 2 +- .../components/triggeredActions/TriggeredActionEntry.tsx | 2 +- .../components/triggeredActions/TriggeredActionsEditor.tsx | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/meteor/server/publications/partsUI/publication.ts b/meteor/server/publications/partsUI/publication.ts index 379e47f31cc..31af1ed0319 100644 --- a/meteor/server/publications/partsUI/publication.ts +++ b/meteor/server/publications/partsUI/publication.ts @@ -190,7 +190,7 @@ export async function manipulateUIPartsPublicationData( meteorCustomPublish( MeteorPubSub.uiParts, CustomCollectionName.UIParts, - async function (pub, playlistId: RundownPlaylistId) { + async function (pub, playlistId: RundownPlaylistId | null) { check(playlistId, String) const credentials = await resolveCredentials({ userId: this.userId, token: 
undefined }) @@ -198,7 +198,7 @@ meteorCustomPublish( if ( !credentials || NoSecurityReadAccess.any() || - (await RundownPlaylistReadAccess.rundownPlaylistContent(playlistId, credentials)) + (playlistId && (await RundownPlaylistReadAccess.rundownPlaylistContent(playlistId, credentials))) ) { await setUpCollectionOptimizedObserver< Omit, diff --git a/packages/meteor-lib/src/api/pubsub.ts b/packages/meteor-lib/src/api/pubsub.ts index c0ec1c2e5b0..49c18b78165 100644 --- a/packages/meteor-lib/src/api/pubsub.ts +++ b/packages/meteor-lib/src/api/pubsub.ts @@ -261,7 +261,7 @@ export interface MeteorPubSubTypes { bucketId: BucketId ) => CustomCollectionName.UIBucketContentStatuses [MeteorPubSub.uiBlueprintUpgradeStatuses]: () => CustomCollectionName.UIBlueprintUpgradeStatuses - [MeteorPubSub.uiParts]: (playlistId: RundownPlaylistId) => CustomCollectionName.UIParts + [MeteorPubSub.uiParts]: (playlistId: RundownPlaylistId | null) => CustomCollectionName.UIParts [MeteorPubSub.uiPartInstances]: ( playlistActivationId: RundownPlaylistActivationId | null ) => CustomCollectionName.UIPartInstances diff --git a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionEntry.tsx b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionEntry.tsx index a9db70ce754..8b1ec894b8f 100644 --- a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionEntry.tsx +++ b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionEntry.tsx @@ -198,7 +198,7 @@ export const TriggeredActionEntry: React.FC = React.memo(function Trigge } return [] as IWrappedAdLib[] }, - [selected, resolvedActions, sourceLayers], + [selected, resolvedActions, sourceLayers, previewContext], [] as IWrappedAdLib[] ) diff --git a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx index 
9e21ffce8e8..8a623bc7730 100644 --- a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx +++ b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx @@ -192,7 +192,7 @@ export const TriggeredActionsEditor: React.FC = function TriggeredAction ) useSubscription(MeteorPubSub.uiPartInstances, rundownPlaylist?.activationId ?? null) - useSubscription(CorelibPubSub.parts, rundown ? [rundown._id] : [], null) + useSubscription(MeteorPubSub.uiParts, rundownPlaylist?._id ?? null) const previewContext = useTracker( () => { From faf31f6e48f40e8cba0535b2bd818478d16acd8c Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Fri, 1 Nov 2024 15:39:30 +0000 Subject: [PATCH 54/81] feat: convert action-triggers computation to async (#30) --- .../StudioDeviceTriggerManager.ts | 14 +- meteor/server/api/deviceTriggers/observer.ts | 12 +- .../api/deviceTriggers/triggersContext.ts | 195 +++++++----- .../triggersContextCollection.ts | 76 ----- meteor/server/lib/reactiveMap.ts | 38 --- .../meteor-lib/src/triggers/actionFactory.ts | 278 ++++++++++-------- .../triggers/actionFilterChainCompilers.ts | 211 +++++++------ .../meteor-lib/src/triggers/reactive-var.ts | 8 +- .../src/triggers/triggersContext.ts | 73 ++++- .../lib/ReactMeteorData/ReactMeteorData.tsx | 47 ++- .../src/client/lib/memoizedIsolatedAutorun.ts | 85 ++++++ .../client/lib/notifications/notifications.ts | 2 +- .../lib/reactiveData/reactiveDataHelper.ts | 2 +- .../client/lib/triggers/TriggersHandler.tsx | 72 +++-- .../client/lib/triggers/triggersContext.ts | 89 +++++- packages/webui/src/client/ui/RundownView.tsx | 2 +- .../triggeredActions/TriggeredActionEntry.tsx | 47 +-- packages/webui/src/meteor/reactive-var.d.ts | 45 +-- packages/webui/src/meteor/reactive-var.js | 6 +- packages/webui/src/meteor/tracker.d.ts | 252 ++++++++-------- packages/webui/src/meteor/tracker.js | 89 ++++-- 21 files changed, 961 insertions(+), 682 deletions(-) delete mode 100644 
meteor/server/api/deviceTriggers/triggersContextCollection.ts delete mode 100644 meteor/server/lib/reactiveMap.ts diff --git a/meteor/server/api/deviceTriggers/StudioDeviceTriggerManager.ts b/meteor/server/api/deviceTriggers/StudioDeviceTriggerManager.ts index f710070abab..03efd5c753a 100644 --- a/meteor/server/api/deviceTriggers/StudioDeviceTriggerManager.ts +++ b/meteor/server/api/deviceTriggers/StudioDeviceTriggerManager.ts @@ -43,7 +43,7 @@ export class StudioDeviceTriggerManager { StudioActionManagers.set(studioId, new StudioActionManager()) } - updateTriggers(cache: ContentCache, showStyleBaseId: ShowStyleBaseId): void { + async updateTriggers(cache: ContentCache, showStyleBaseId: ShowStyleBaseId): Promise { const studioId = this.studioId this.#lastShowStyleBaseId = showStyleBaseId @@ -88,7 +88,7 @@ export class StudioDeviceTriggerManager { const addedPreviewIds: PreviewWrappedAdLibId[] = [] - Object.entries(triggeredAction.actions).forEach(([key, action]) => { + for (const [key, action] of Object.entries(triggeredAction.actions)) { // Since the compiled action is cached using this actionId as a key, having the action // and the filterChain allows for a quicker invalidation without doing a deepEquals const actionId = protectString( @@ -106,9 +106,9 @@ export class StudioDeviceTriggerManager { } touchedActionIds.push(actionId) - Object.entries(triggeredAction.triggers).forEach(([key, trigger]) => { + for (const [key, trigger] of Object.entries(triggeredAction.triggers)) { if (!isDeviceTrigger(trigger)) { - return + continue } let deviceActionArguments: ShiftRegisterActionArguments | undefined = undefined @@ -141,7 +141,7 @@ export class StudioDeviceTriggerManager { }, }) upsertedDeviceTriggerMountedActionIds.push(deviceTriggerMountedActionId) - }) + } if (!isPreviewableAction(thisAction)) { const adLibPreviewId = protectString(`${actionId}_preview`) @@ -165,7 +165,7 @@ export class StudioDeviceTriggerManager { addedPreviewIds.push(adLibPreviewId) } else { - 
const previewedAdLibs = thisAction.preview(context) + const previewedAdLibs = await thisAction.preview(context, null) previewedAdLibs.forEach((adLib) => { const adLibPreviewId = protectString( @@ -195,7 +195,7 @@ export class StudioDeviceTriggerManager { addedPreviewIds.push(adLibPreviewId) }) } - }) + } DeviceTriggerMountedActionAdlibsPreview.remove({ triggeredActionId: triggeredAction._id, diff --git a/meteor/server/api/deviceTriggers/observer.ts b/meteor/server/api/deviceTriggers/observer.ts index 30e7a0f42f1..fb1448f24e4 100644 --- a/meteor/server/api/deviceTriggers/observer.ts +++ b/meteor/server/api/deviceTriggers/observer.ts @@ -44,7 +44,7 @@ MeteorStartupAsync(async () => { const manager = new StudioDeviceTriggerManager(studioId) const observer = new StudioObserver(studioId, (showStyleBaseId, cache) => { workInQueue(async () => { - manager.updateTriggers(cache, showStyleBaseId) + await manager.updateTriggers(cache, showStyleBaseId) }) return () => { @@ -117,10 +117,12 @@ export async function receiveInputDeviceTrigger( if (!actionManager) throw new Meteor.Error(500, `No Studio Action Manager available to handle trigger in Studio "${studioId}"`) - DeviceTriggerMountedActions.find({ + const mountedActions = DeviceTriggerMountedActions.find({ deviceId, deviceTriggerId: triggerId, - }).forEach((mountedAction) => { + }).fetch() + + for (const mountedAction of mountedActions) { if (values && !_.isMatch(values, mountedAction.values)) return const executableAction = actionManager.getAction(mountedAction.actionId) if (!executableAction) @@ -132,6 +134,6 @@ export async function receiveInputDeviceTrigger( const context = actionManager.getContext() if (!context) throw new Meteor.Error(500, `Undefined Device Trigger context for studio "${studioId}"`) - executableAction.execute((t: ITranslatableMessage) => t.key ?? t, `${deviceId}: ${triggerId}`, context) - }) + await executableAction.execute((t: ITranslatableMessage) => t.key ?? 
t, `${deviceId}: ${triggerId}`, context) + } } diff --git a/meteor/server/api/deviceTriggers/triggersContext.ts b/meteor/server/api/deviceTriggers/triggersContext.ts index c8c95db6b50..a68002a9b99 100644 --- a/meteor/server/api/deviceTriggers/triggersContext.ts +++ b/meteor/server/api/deviceTriggers/triggersContext.ts @@ -1,51 +1,66 @@ -import { TriggersContext } from '@sofie-automation/meteor-lib/dist/triggers/triggersContext' +import { + TriggersAsyncCollection, + TriggersContext, + TriggerTrackerComputation, +} from '@sofie-automation/meteor-lib/dist/triggers/triggersContext' import { SINGLE_USE_TOKEN_SALT } from '@sofie-automation/meteor-lib/dist/api/userActions' -import { assertNever, getHash, Time } from '../../lib/tempLib' +import { assertNever, getHash, ProtectedString, Time } from '../../lib/tempLib' import { getCurrentTime } from '../../lib/lib' import { MeteorCall } from '../methods' import { ClientAPI } from '@sofie-automation/meteor-lib/dist/api/client' import { UserAction } from '@sofie-automation/meteor-lib/dist/userAction' import { TFunction } from 'i18next' -import { Tracker } from 'meteor/tracker' - import { logger } from '../../logging' import { IBaseFilterLink, IRundownPlaylistFilterLink } from '@sofie-automation/blueprints-integration' import { PartId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DummyReactiveVar } from '@sofie-automation/meteor-lib/dist/triggers/reactive-var' import { ReactivePlaylistActionContext } from '@sofie-automation/meteor-lib/dist/triggers/actionFactory' -import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' -import { CollectionName } from '@sofie-automation/corelib/dist/dataModel/Collections' -import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' -import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' -import { PartInstance } from '@sofie-automation/meteor-lib/dist/collections/PartInstances' +import { FindOneOptions, 
FindOptions, MongoQuery } from '@sofie-automation/corelib/dist/mongo' +import { DBRundownPlaylist, SelectedPartInstance } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { + AdLibActions, + AdLibPieces, + PartInstances, + Parts, + RundownBaselineAdLibActions, + RundownBaselineAdLibPieces, + RundownPlaylists, + Rundowns, + Segments, +} from '../../collections' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' -import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction' -import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece' -import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' -import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' -import { createSyncReadOnlyMongoCollection } from './triggersContextCollection' +import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' +import { AsyncOnlyReadOnlyMongoCollection } from '../../collections/collection' export function hashSingleUseToken(token: string): string { return getHash(SINGLE_USE_TOKEN_SALT + token) } -/** - * Some synchronous read-only collections to satisfy the TriggersContext interface - */ -const AdLibActions = createSyncReadOnlyMongoCollection(CollectionName.AdLibActions) -const AdLibPieces = createSyncReadOnlyMongoCollection(CollectionName.AdLibPieces) -const PartInstances = createSyncReadOnlyMongoCollection(CollectionName.PartInstances) -const Parts = createSyncReadOnlyMongoCollection(CollectionName.Parts) -const RundownBaselineAdLibActions = createSyncReadOnlyMongoCollection( - CollectionName.RundownBaselineAdLibActions -) -const RundownBaselineAdLibPieces = createSyncReadOnlyMongoCollection( - CollectionName.RundownBaselineAdLibPieces -) -const RundownPlaylists = 
createSyncReadOnlyMongoCollection(CollectionName.RundownPlaylists) -const Rundowns = createSyncReadOnlyMongoCollection(CollectionName.Rundowns) -const Segments = createSyncReadOnlyMongoCollection(CollectionName.Segments) +class MeteorTriggersCollectionWrapper }> + implements TriggersAsyncCollection +{ + readonly #collection: AsyncOnlyReadOnlyMongoCollection + + constructor(collection: AsyncOnlyReadOnlyMongoCollection) { + this.#collection = collection + } + + async findFetchAsync( + _computation: TriggerTrackerComputation | null, + selector: MongoQuery, + options?: FindOptions + ): Promise> { + return this.#collection.findFetchAsync(selector, options) + } + + async findOneAsync( + _computation: TriggerTrackerComputation | null, + selector: MongoQuery | DBInterface['_id'], + options?: FindOneOptions + ): Promise { + return this.#collection.findOneAsync(selector, options) + } +} export const MeteorTriggersContext: TriggersContext = { MeteorCall, @@ -54,14 +69,14 @@ export const MeteorTriggersContext: TriggersContext = { isClient: false, - AdLibActions, - AdLibPieces, - Parts, - RundownBaselineAdLibActions, - RundownBaselineAdLibPieces, - RundownPlaylists, - Rundowns, - Segments, + AdLibActions: new MeteorTriggersCollectionWrapper(AdLibActions), + AdLibPieces: new MeteorTriggersCollectionWrapper(AdLibPieces), + Parts: new MeteorTriggersCollectionWrapper(Parts), + RundownBaselineAdLibActions: new MeteorTriggersCollectionWrapper(RundownBaselineAdLibActions), + RundownBaselineAdLibPieces: new MeteorTriggersCollectionWrapper(RundownBaselineAdLibPieces), + RundownPlaylists: new MeteorTriggersCollectionWrapper(RundownPlaylists), + Rundowns: new MeteorTriggersCollectionWrapper(Rundowns), + Segments: new MeteorTriggersCollectionWrapper(Segments), hashSingleUseToken, @@ -81,73 +96,91 @@ export const MeteorTriggersContext: TriggersContext = { ) }, - nonreactiveTracker: Tracker.nonreactive, + withComputation: async (_computation, func) => { + return func() + }, - 
memoizedIsolatedAutorun: any>( - fnc: T, + memoizedIsolatedAutorun: async ( + computation: TriggerTrackerComputation | null, + fnc: (computation: TriggerTrackerComputation | null, ...args: TArgs) => Promise, _functionName: string, - ...params: Parameters - ): ReturnType => { - return fnc(...(params as any)) + ...params: TArgs + ): Promise => { + return fnc(computation, ...params) }, createContextForRundownPlaylistChain, } -function createContextForRundownPlaylistChain( +async function createContextForRundownPlaylistChain( studioId: StudioId, filterChain: IBaseFilterLink[] -): ReactivePlaylistActionContext | undefined { - const playlist = rundownPlaylistFilter( +): Promise { + const playlist = await rundownPlaylistFilter( studioId, filterChain.filter((link) => link.object === 'rundownPlaylist') as IRundownPlaylistFilterLink[] ) if (!playlist) return undefined - let currentPartId: PartId | null = null, - nextPartId: PartId | null = null, - currentPartInstance: PartInstance | null = null, - currentSegmentPartIds: PartId[] = [], - nextSegmentPartIds: PartId[] = [] - - if (playlist.currentPartInfo) { - currentPartInstance = PartInstances.findOne(playlist.currentPartInfo.partInstanceId) ?? null - const currentPart = currentPartInstance?.part ?? null - if (currentPart) { - currentPartId = currentPart._id - currentSegmentPartIds = Parts.find({ - segmentId: currentPart.segmentId, - }).map((part) => part._id) - } - } - if (playlist.nextPartInfo) { - const nextPart = PartInstances.findOne(playlist.nextPartInfo.partInstanceId)?.part ?? 
null - if (nextPart) { - nextPartId = nextPart._id - nextSegmentPartIds = Parts.find({ - segmentId: nextPart.segmentId, - }).map((part) => part._id) - } - } + const [currentPartInfo, nextPartInfo] = await Promise.all([ + fetchInfoForSelectedPart(playlist.currentPartInfo), + fetchInfoForSelectedPart(playlist.nextPartInfo), + ]) return { studioId: new DummyReactiveVar(studioId), rundownPlaylistId: new DummyReactiveVar(playlist?._id), rundownPlaylist: new DummyReactiveVar(playlist), - currentRundownId: new DummyReactiveVar(currentPartInstance?.rundownId ?? playlist.rundownIdsInOrder[0] ?? null), - currentPartId: new DummyReactiveVar(currentPartId), - currentSegmentPartIds: new DummyReactiveVar(currentSegmentPartIds), - nextPartId: new DummyReactiveVar(nextPartId), - nextSegmentPartIds: new DummyReactiveVar(nextSegmentPartIds), + currentRundownId: new DummyReactiveVar( + playlist.currentPartInfo?.rundownId ?? playlist.rundownIdsInOrder[0] ?? null + ), + currentPartId: new DummyReactiveVar(currentPartInfo?.partId ?? null), + currentSegmentPartIds: new DummyReactiveVar(currentPartInfo?.segmentPartIds ?? []), + nextPartId: new DummyReactiveVar(nextPartInfo?.partId ?? null), + nextSegmentPartIds: new DummyReactiveVar(nextPartInfo?.segmentPartIds ?? []), currentPartInstanceId: new DummyReactiveVar(playlist.currentPartInfo?.partInstanceId ?? 
null), } } -function rundownPlaylistFilter( +async function fetchInfoForSelectedPart(partInfo: SelectedPartInstance | null): Promise<{ + partId: PartId + segmentPartIds: PartId[] +} | null> { + if (!partInfo) return null + + const partInstance = (await PartInstances.findOneAsync(partInfo.partInstanceId, { + projection: { + // @ts-expect-error deep property + 'part._id': 1, + segmentId: 1, + }, + })) as (Pick & { part: Pick }) | null + + if (!partInstance) return null + + const partId = partInstance.part._id + const segmentPartIds = await Parts.findFetchAsync( + { + segmentId: partInstance.segmentId, + }, + { + projection: { + _id: 1, + }, + } + ).then((parts) => parts.map((part) => part._id)) + + return { + partId, + segmentPartIds, + } +} + +async function rundownPlaylistFilter( studioId: StudioId, filterChain: IRundownPlaylistFilterLink[] -): DBRundownPlaylist | undefined { +): Promise { const selector: MongoQuery = { $and: [ { @@ -181,5 +214,5 @@ function rundownPlaylistFilter( } }) - return RundownPlaylists.findOne(selector) + return RundownPlaylists.findOneAsync(selector) } diff --git a/meteor/server/api/deviceTriggers/triggersContextCollection.ts b/meteor/server/api/deviceTriggers/triggersContextCollection.ts deleted file mode 100644 index 23711d92bbc..00000000000 --- a/meteor/server/api/deviceTriggers/triggersContextCollection.ts +++ /dev/null @@ -1,76 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { Mongo } from 'meteor/mongo' -import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { CollectionName } from '@sofie-automation/corelib/dist/dataModel/Collections' -import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' -import { - MongoReadOnlyCollection, - MongoCursor, - FindOptions, - FindOneOptions, -} from '@sofie-automation/meteor-lib/dist/collections/lib' -import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' -import { getOrCreateMongoCollection } from 
'../../collections/collection' - -/** - * Create a Mongo Collection for use in the client (has sync apis) - * @param name Name of the collection - */ -export function createSyncReadOnlyMongoCollection }>( - name: CollectionName -): MongoReadOnlyCollection { - const collection = getOrCreateMongoCollection(name) - const wrapped = new WrappedMongoReadOnlyCollection(collection, name) - - // registerClientCollection(name, wrapped) - - return wrapped -} - -class WrappedMongoReadOnlyCollection }> - implements MongoReadOnlyCollection -{ - protected readonly _collection: Mongo.Collection - - public readonly name: string | null - - constructor(collection: Mongo.Collection, name: string | null) { - this._collection = collection - this.name = name - } - - protected get _isMock() { - // @ts-expect-error re-export private property - return this._collection._isMock - } - - public get mockCollection() { - return this._collection - } - - protected wrapMongoError(e: any): never { - const str = stringifyError(e) || 'Unknown MongoDB Error' - throw new Meteor.Error((e && e.error) || 500, `Collection "${this.name}": ${str}`) - } - - find( - selector?: MongoQuery | DBInterface['_id'], - options?: FindOptions - ): MongoCursor { - try { - return this._collection.find((selector ?? {}) as any, options as any) as MongoCursor - } catch (e) { - this.wrapMongoError(e) - } - } - findOne( - selector?: MongoQuery | DBInterface['_id'], - options?: FindOneOptions - ): DBInterface | undefined { - try { - return this._collection.findOne((selector ?? 
{}) as any, options as any) - } catch (e) { - this.wrapMongoError(e) - } - } -} diff --git a/meteor/server/lib/reactiveMap.ts b/meteor/server/lib/reactiveMap.ts deleted file mode 100644 index 67ec848e420..00000000000 --- a/meteor/server/lib/reactiveMap.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { Tracker } from 'meteor/tracker' - -export class ReactiveMap { - private baseMap = new Map() - private dependencyMap = new Map() - private globalDependency = new Tracker.Dependency() - - set(key: string, value: T): void { - const prevVal = this.baseMap.get(key) - this.baseMap.set(key, value) - if (this.dependencyMap.has(key) && prevVal !== value) { - this.dependencyMap.get(key)?.changed() - } else { - this.dependencyMap.set(key, new Tracker.Dependency()) - } - if (prevVal !== value) this.globalDependency.changed() - } - - get(key: string): T | undefined { - if (this.dependencyMap.has(key)) { - this.dependencyMap.get(key)?.depend() - } else { - const dependency = new Tracker.Dependency() - dependency?.depend() - this.dependencyMap.set(key, dependency) - } - return this.baseMap.get(key) - } - - getAll(): { [key: string]: T } { - const result: { [key: string]: T } = {} - for (const [key, value] of this.baseMap.entries()) { - result[key] = value - } - this.globalDependency.depend() - return result - } -} diff --git a/packages/meteor-lib/src/triggers/actionFactory.ts b/packages/meteor-lib/src/triggers/actionFactory.ts index c3ba6d97baa..7796716ffc7 100644 --- a/packages/meteor-lib/src/triggers/actionFactory.ts +++ b/packages/meteor-lib/src/triggers/actionFactory.ts @@ -26,8 +26,8 @@ import { import { DeviceActions } from '@sofie-automation/shared-lib/dist/core/model/ShowStyle' import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' import { MountedAdLibTriggerType } from '../api/MountedTriggers' -import { DummyReactiveVar, ReactiveVar } from './reactive-var' -import { TriggersContext } from './triggersContext' +import { DummyReactiveVar, 
TriggerReactiveVar } from './reactive-var' +import { TriggersContext, TriggerTrackerComputation } from './triggersContext' import { assertNever } from '@sofie-automation/corelib/dist/lib' // as described in this issue: https://github.com/Microsoft/TypeScript/issues/14094 @@ -36,18 +36,18 @@ type Without = { [P in Exclude]?: never } type XOR = T | U extends object ? (Without & U) | (Without & T) : T | U export interface ReactivePlaylistActionContext { - studioId: ReactiveVar - rundownPlaylistId: ReactiveVar - rundownPlaylist: ReactiveVar< + studioId: TriggerReactiveVar + rundownPlaylistId: TriggerReactiveVar + rundownPlaylist: TriggerReactiveVar< Pick > - currentRundownId: ReactiveVar - currentSegmentPartIds: ReactiveVar - nextSegmentPartIds: ReactiveVar - currentPartInstanceId: ReactiveVar - currentPartId: ReactiveVar - nextPartId: ReactiveVar + currentRundownId: TriggerReactiveVar + currentSegmentPartIds: TriggerReactiveVar + nextSegmentPartIds: TriggerReactiveVar + currentPartInstanceId: TriggerReactiveVar + currentPartId: TriggerReactiveVar + nextPartId: TriggerReactiveVar } interface PlainPlaylistContext { @@ -64,11 +64,11 @@ interface PlainStudioContext { showStyleBase: DBShowStyleBase } -type PlainActionContext = XOR +export type PlainActionContext = XOR export type ActionContext = XOR -type ActionExecutor = (t: TFunction, e: any, ctx: ActionContext) => void +type ActionExecutor = (t: TFunction, e: any, ctx: ActionContext) => Promise | void /** * An action compiled down to a single function that can be executed @@ -89,7 +89,7 @@ export interface ExecutableAction { * @extends {ExecutableAction} */ interface PreviewableAction extends ExecutableAction { - preview: (ctx: ReactivePlaylistActionContext) => IWrappedAdLib[] + preview: (ctx: ActionContext, computation: TriggerTrackerComputation | null) => Promise } interface ExecutableAdLibAction extends PreviewableAction { @@ -99,31 +99,35 @@ interface ExecutableAdLibAction extends PreviewableAction { export function 
isPreviewableAction(action: ExecutableAction): action is PreviewableAction { return action.action && 'preview' in action && typeof action['preview'] === 'function' } -function createRundownPlaylistContext( +async function createRundownPlaylistContext( + computation: TriggerTrackerComputation | null, triggersContext: TriggersContext, context: ActionContext, filterChain: IBaseFilterLink[] -): ReactivePlaylistActionContext | undefined { +): Promise { if (filterChain.length < 1) { return undefined } else if (filterChain[0].object === 'view' && context.rundownPlaylistId) { return context as ReactivePlaylistActionContext } else if (filterChain[0].object === 'view' && context.rundownPlaylist) { const playlistContext = context as PlainPlaylistContext - return { - studioId: new DummyReactiveVar(playlistContext.rundownPlaylist.studioId), - rundownPlaylistId: new DummyReactiveVar(playlistContext.rundownPlaylist._id), - rundownPlaylist: new DummyReactiveVar(playlistContext.rundownPlaylist), - currentRundownId: new DummyReactiveVar(playlistContext.currentRundownId), - currentPartId: new DummyReactiveVar(playlistContext.currentPartId), - nextPartId: new DummyReactiveVar(playlistContext.nextPartId), - currentSegmentPartIds: new DummyReactiveVar(playlistContext.currentSegmentPartIds), - nextSegmentPartIds: new DummyReactiveVar(playlistContext.nextSegmentPartIds), - currentPartInstanceId: new DummyReactiveVar( - playlistContext.rundownPlaylist.currentPartInfo?.partInstanceId ?? 
null - ), - } + return triggersContext.withComputation(computation, async () => { + return { + studioId: new DummyReactiveVar(playlistContext.rundownPlaylist.studioId), + rundownPlaylistId: new DummyReactiveVar(playlistContext.rundownPlaylist._id), + rundownPlaylist: new DummyReactiveVar(playlistContext.rundownPlaylist), + currentRundownId: new DummyReactiveVar(playlistContext.currentRundownId), + currentPartId: new DummyReactiveVar(playlistContext.currentPartId), + nextPartId: new DummyReactiveVar(playlistContext.nextPartId), + currentSegmentPartIds: new DummyReactiveVar(playlistContext.currentSegmentPartIds), + nextSegmentPartIds: new DummyReactiveVar(playlistContext.nextSegmentPartIds), + currentPartInstanceId: new DummyReactiveVar( + playlistContext.rundownPlaylist.currentPartInfo?.partInstanceId ?? null + ), + } + }) } else if (filterChain[0].object === 'rundownPlaylist' && context.studio) { + // Note: this is only implemented on the server return triggersContext.createContextForRundownPlaylistChain(context.studio._id, filterChain) } else { throw new Error('Invalid filter combination') @@ -148,12 +152,12 @@ function createAdLibAction( return { action: PlayoutActions.adlib, - preview: (ctx) => { - const innerCtx = createRundownPlaylistContext(triggersContext, ctx, filterChain) + preview: async (ctx, computation) => { + const innerCtx = await createRundownPlaylistContext(computation, triggersContext, ctx, filterChain) if (innerCtx) { try { - return compiledAdLibFilter(innerCtx) + return compiledAdLibFilter(innerCtx, computation) } catch (e) { triggersContext.logger.error(e) return [] @@ -162,8 +166,8 @@ function createAdLibAction( return [] } }, - execute: (t, e, ctx) => { - const innerCtx = createRundownPlaylistContext(triggersContext, ctx, filterChain) + execute: async (t, e, ctx) => { + const innerCtx = await createRundownPlaylistContext(null, triggersContext, ctx, filterChain) if (!innerCtx) { triggersContext.logger.warn( @@ -172,93 +176,97 @@ function 
createAdLibAction( ) return } - const currentPartInstanceId = innerCtx.rundownPlaylist.get().currentPartInfo?.partInstanceId + const currentPartInstanceId = innerCtx.rundownPlaylist.get(null).currentPartInfo?.partInstanceId const sourceLayerIdsToClear: string[] = [] - triggersContext - .nonreactiveTracker(() => compiledAdLibFilter(innerCtx)) - .forEach((wrappedAdLib) => { - switch (wrappedAdLib.type) { - case MountedAdLibTriggerType.adLibPiece: - triggersContext.doUserAction(t, e, UserAction.START_ADLIB, async (e, ts) => - currentPartInstanceId - ? triggersContext.MeteorCall.userAction.segmentAdLibPieceStart( - e, - ts, - innerCtx.rundownPlaylistId.get(), - currentPartInstanceId, - wrappedAdLib.item._id, - false - ) - : ClientAPI.responseSuccess(undefined) - ) - break - case MountedAdLibTriggerType.rundownBaselineAdLibItem: - triggersContext.doUserAction(t, e, UserAction.START_GLOBAL_ADLIB, async (e, ts) => - currentPartInstanceId - ? triggersContext.MeteorCall.userAction.baselineAdLibPieceStart( - e, - ts, - innerCtx.rundownPlaylistId.get(), - currentPartInstanceId, - wrappedAdLib.item._id, - false - ) - : ClientAPI.responseSuccess(undefined) - ) - break - case MountedAdLibTriggerType.adLibAction: - triggersContext.doUserAction(t, e, UserAction.START_ADLIB, async (e, ts) => - triggersContext.MeteorCall.userAction.executeAction( - e, - ts, - innerCtx.rundownPlaylistId.get(), - wrappedAdLib._id, - wrappedAdLib.item.actionId, - wrappedAdLib.item.userData, - (actionArguments && actionArguments.triggerMode) || undefined - ) + + // This withComputation is probably not needed, but it ensures there is no accidental reactivity + const wrappedAdLibs = await triggersContext.withComputation(null, async () => + compiledAdLibFilter(innerCtx, null) + ) + + wrappedAdLibs.forEach((wrappedAdLib) => { + switch (wrappedAdLib.type) { + case MountedAdLibTriggerType.adLibPiece: + triggersContext.doUserAction(t, e, UserAction.START_ADLIB, async (e, ts) => + currentPartInstanceId + ? 
triggersContext.MeteorCall.userAction.segmentAdLibPieceStart( + e, + ts, + innerCtx.rundownPlaylistId.get(null), + currentPartInstanceId, + wrappedAdLib.item._id, + false + ) + : ClientAPI.responseSuccess(undefined) + ) + break + case MountedAdLibTriggerType.rundownBaselineAdLibItem: + triggersContext.doUserAction(t, e, UserAction.START_GLOBAL_ADLIB, async (e, ts) => + currentPartInstanceId + ? triggersContext.MeteorCall.userAction.baselineAdLibPieceStart( + e, + ts, + innerCtx.rundownPlaylistId.get(null), + currentPartInstanceId, + wrappedAdLib.item._id, + false + ) + : ClientAPI.responseSuccess(undefined) + ) + break + case MountedAdLibTriggerType.adLibAction: + triggersContext.doUserAction(t, e, UserAction.START_ADLIB, async (e, ts) => + triggersContext.MeteorCall.userAction.executeAction( + e, + ts, + innerCtx.rundownPlaylistId.get(null), + wrappedAdLib._id, + wrappedAdLib.item.actionId, + wrappedAdLib.item.userData, + (actionArguments && actionArguments.triggerMode) || undefined ) - break - case MountedAdLibTriggerType.rundownBaselineAdLibAction: - triggersContext.doUserAction(t, e, UserAction.START_GLOBAL_ADLIB, async (e, ts) => - triggersContext.MeteorCall.userAction.executeAction( - e, - ts, - innerCtx.rundownPlaylistId.get(), - wrappedAdLib._id, - wrappedAdLib.item.actionId, - wrappedAdLib.item.userData, - (actionArguments && actionArguments.triggerMode) || undefined - ) + ) + break + case MountedAdLibTriggerType.rundownBaselineAdLibAction: + triggersContext.doUserAction(t, e, UserAction.START_GLOBAL_ADLIB, async (e, ts) => + triggersContext.MeteorCall.userAction.executeAction( + e, + ts, + innerCtx.rundownPlaylistId.get(null), + wrappedAdLib._id, + wrappedAdLib.item.actionId, + wrappedAdLib.item.userData, + (actionArguments && actionArguments.triggerMode) || undefined ) - break - case MountedAdLibTriggerType.clearSourceLayer: - // defer this action to send a single clear action all at once - sourceLayerIdsToClear.push(wrappedAdLib.sourceLayerId) - break - 
case MountedAdLibTriggerType.sticky: - triggersContext.doUserAction(t, e, UserAction.START_STICKY_PIECE, async (e, ts) => - triggersContext.MeteorCall.userAction.sourceLayerStickyPieceStart( - e, - ts, - innerCtx.rundownPlaylistId.get(), - wrappedAdLib.sourceLayerId // - ) + ) + break + case MountedAdLibTriggerType.clearSourceLayer: + // defer this action to send a single clear action all at once + sourceLayerIdsToClear.push(wrappedAdLib.sourceLayerId) + break + case MountedAdLibTriggerType.sticky: + triggersContext.doUserAction(t, e, UserAction.START_STICKY_PIECE, async (e, ts) => + triggersContext.MeteorCall.userAction.sourceLayerStickyPieceStart( + e, + ts, + innerCtx.rundownPlaylistId.get(null), + wrappedAdLib.sourceLayerId // ) - break - default: - assertNever(wrappedAdLib) - return - } - }) + ) + break + default: + assertNever(wrappedAdLib) + return + } + }) if (currentPartInstanceId && sourceLayerIdsToClear.length > 0) { triggersContext.doUserAction(t, e, UserAction.CLEAR_SOURCELAYER, async (e, ts) => triggersContext.MeteorCall.userAction.sourceLayerOnPartStop( e, ts, - innerCtx.rundownPlaylistId.get(), + innerCtx.rundownPlaylistId.get(null), currentPartInstanceId, sourceLayerIdsToClear ) @@ -409,9 +417,10 @@ function createUserActionWithCtx( ): ExecutableAction { return { action: action.action, - execute: (t, e, ctx) => { - const innerCtx = triggersContext.nonreactiveTracker(() => - createRundownPlaylistContext(triggersContext, ctx, action.filterChain) + execute: async (t, e, ctx) => { + // This outer withComputation is probably not needed, but it ensures there is no accidental reactivity + const innerCtx = await triggersContext.withComputation(null, async () => + createRundownPlaylistContext(null, triggersContext, ctx, action.filterChain) ) if (innerCtx) { triggersContext.doUserAction(t, e, userAction, async (e, ts) => userActionExec(e, ts, innerCtx)) @@ -450,7 +459,7 @@ export function createAction( 
triggersContext.MeteorCall.userAction.forceResetAndActivate( e, ts, - ctx.rundownPlaylistId.get(), + ctx.rundownPlaylistId.get(null), !!action.rehearsal || false ) ) @@ -469,7 +478,7 @@ export function createAction( triggersContext.MeteorCall.userAction.activate( e, ts, - ctx.rundownPlaylistId.get(), + ctx.rundownPlaylistId.get(null), !!action.rehearsal || false ) ) @@ -484,7 +493,7 @@ export function createAction( action, UserAction.DEACTIVATE_RUNDOWN_PLAYLIST, async (e, ts, ctx) => - triggersContext.MeteorCall.userAction.deactivate(e, ts, ctx.rundownPlaylistId.get()) + triggersContext.MeteorCall.userAction.deactivate(e, ts, ctx.rundownPlaylistId.get(null)) ) case PlayoutActions.activateAdlibTestingMode: return createUserActionWithCtx( @@ -492,12 +501,12 @@ export function createAction( action, UserAction.ACTIVATE_ADLIB_TESTING, async (e, ts, ctx) => { - const rundownId = ctx.currentRundownId.get() + const rundownId = ctx.currentRundownId.get(null) if (rundownId) { return triggersContext.MeteorCall.userAction.activateAdlibTestingMode( e, ts, - ctx.rundownPlaylistId.get(), + ctx.rundownPlaylistId.get(null), rundownId ) } else { @@ -513,21 +522,26 @@ export function createAction( triggersContext.MeteorCall.userAction.take( e, ts, - ctx.rundownPlaylistId.get(), - ctx.currentPartInstanceId.get() + ctx.rundownPlaylistId.get(null), + ctx.currentPartInstanceId.get(null) ) ) } case PlayoutActions.hold: return createUserActionWithCtx(triggersContext, action, UserAction.ACTIVATE_HOLD, async (e, ts, ctx) => - triggersContext.MeteorCall.userAction.activateHold(e, ts, ctx.rundownPlaylistId.get(), !!action.undo) + triggersContext.MeteorCall.userAction.activateHold( + e, + ts, + ctx.rundownPlaylistId.get(null), + !!action.undo + ) ) case PlayoutActions.disableNextPiece: return createUserActionWithCtx(triggersContext, action, UserAction.DISABLE_NEXT_PIECE, async (e, ts, ctx) => triggersContext.MeteorCall.userAction.disableNextPiece( e, ts, - ctx.rundownPlaylistId.get(), + 
ctx.rundownPlaylistId.get(null), !!action.undo ) ) @@ -546,7 +560,7 @@ export function createAction( e, ts, triggersContext.hashSingleUseToken(tokenResult.result), - ctx.rundownPlaylistId.get(), + ctx.rundownPlaylistId.get(null), `action`, false ) @@ -558,7 +572,7 @@ export function createAction( triggersContext.MeteorCall.userAction.moveNext( e, ts, - ctx.rundownPlaylistId.get(), + ctx.rundownPlaylistId.get(null), action.parts ?? 0, action.segments ?? 0 ) @@ -574,7 +588,11 @@ export function createAction( async (e, ts, ctx) => // TODO: Needs some handling of the response. Perhaps this should switch to // an event on the RundownViewEventBus, if ran on the client? - triggersContext.MeteorCall.userAction.resyncRundownPlaylist(e, ts, ctx.rundownPlaylistId.get()) + triggersContext.MeteorCall.userAction.resyncRundownPlaylist( + e, + ts, + ctx.rundownPlaylistId.get(null) + ) ) } case PlayoutActions.resetRundownPlaylist: @@ -586,7 +604,11 @@ export function createAction( action, UserAction.RESET_RUNDOWN_PLAYLIST, async (e, ts, ctx) => - triggersContext.MeteorCall.userAction.resetRundownPlaylist(e, ts, ctx.rundownPlaylistId.get()) + triggersContext.MeteorCall.userAction.resetRundownPlaylist( + e, + ts, + ctx.rundownPlaylistId.get(null) + ) ) } case PlayoutActions.resyncRundownPlaylist: @@ -595,14 +617,14 @@ export function createAction( action, UserAction.RESYNC_RUNDOWN_PLAYLIST, async (e, ts, ctx) => - triggersContext.MeteorCall.userAction.resyncRundownPlaylist(e, ts, ctx.rundownPlaylistId.get()) + triggersContext.MeteorCall.userAction.resyncRundownPlaylist(e, ts, ctx.rundownPlaylistId.get(null)) ) case PlayoutActions.switchRouteSet: return createUserActionWithCtx(triggersContext, action, UserAction.SWITCH_ROUTE_SET, async (e, ts, ctx) => triggersContext.MeteorCall.userAction.switchRouteSet( e, ts, - ctx.studioId.get(), + ctx.studioId.get(null), action.routeSetId, action.state ) diff --git a/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts 
b/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts index eb2b9e47cbe..d8afa19d94a 100644 --- a/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts +++ b/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts @@ -25,7 +25,7 @@ import { IWrappedAdLibBase } from '@sofie-automation/shared-lib/dist/input-gatew import { MountedAdLibTriggerType } from '../api/MountedTriggers' import { assertNever, generateTranslation } from '@sofie-automation/corelib/dist/lib' import { FindOptions } from '../collections/lib' -import { TriggersContext } from './triggersContext' +import { TriggersContext, TriggerTrackerComputation } from './triggersContext' export type AdLibFilterChainLink = IRundownPlaylistFilterLink | IGUIContextFilterLink | IAdLibFilterLink @@ -490,7 +490,7 @@ export function compileAdLibFilter( triggersContext: TriggersContext, filterChain: AdLibFilterChainLink[], sourceLayers: SourceLayers -): (context: ReactivePlaylistActionContext) => IWrappedAdLib[] { +): (context: ReactivePlaylistActionContext, computation: TriggerTrackerComputation | null) => Promise { const onlyAdLibLinks = filterChain.filter((link) => link.object === 'adLib') as IAdLibFilterLink[] const adLibPieceTypeFilter = compileAdLibPieceFilter(onlyAdLibLinks, sourceLayers) const adLibActionTypeFilter = compileAdLibActionFilter(onlyAdLibLinks, sourceLayers) @@ -498,23 +498,23 @@ export function compileAdLibFilter( const clearAdLibs = compileAndRunClearFilter(onlyAdLibLinks, sourceLayers) const stickyAdLibs = compileAndRunStickyFilter(onlyAdLibLinks, sourceLayers) - return (context: ReactivePlaylistActionContext) => { + return async (context: ReactivePlaylistActionContext, computation: TriggerTrackerComputation | null) => { let rundownBaselineAdLibItems: IWrappedAdLib[] = [] let adLibPieces: IWrappedAdLib[] = [] let rundownBaselineAdLibActions: IWrappedAdLib[] = [] let adLibActions: IWrappedAdLib[] = [] const segmentPartIds = adLibPieceTypeFilter.segment === 'current' - ? 
context.currentSegmentPartIds.get() + ? context.currentSegmentPartIds.get(computation) : adLibPieceTypeFilter.segment === 'next' - ? context.nextSegmentPartIds.get() + ? context.nextSegmentPartIds.get(computation) : undefined const singlePartId = adLibPieceTypeFilter.part === 'current' - ? context.currentPartId.get() + ? context.currentPartId.get(computation) : adLibPieceTypeFilter.part === 'next' - ? context.nextPartId.get() + ? context.nextPartId.get(computation) : undefined /** Note: undefined means that all parts are to be considered */ @@ -554,25 +554,31 @@ export function compileAdLibFilter( } } - const currentRundownId = context.currentRundownId.get() + const currentRundownId = context.currentRundownId.get(computation) if (!skip && currentRundownId) { if (adLibPieceTypeFilter.global === undefined || adLibPieceTypeFilter.global === true) - rundownBaselineAdLibItems = triggersContext.RundownBaselineAdLibPieces.find( - { - ...adLibPieceTypeFilter.selector, - ...currentNextOverride, - rundownId: currentRundownId, - } as MongoQuery, - adLibPieceTypeFilter.options + rundownBaselineAdLibItems = ( + await triggersContext.RundownBaselineAdLibPieces.findFetchAsync( + computation, + { + ...adLibPieceTypeFilter.selector, + ...currentNextOverride, + rundownId: currentRundownId, + } as MongoQuery, + adLibPieceTypeFilter.options + ) ).map((item) => wrapAdLibPiece(item, MountedAdLibTriggerType.rundownBaselineAdLibItem)) if (adLibPieceTypeFilter.global === undefined || adLibPieceTypeFilter.global === false) - adLibPieces = triggersContext.AdLibPieces.find( - { - ...adLibPieceTypeFilter.selector, - ...currentNextOverride, - rundownId: currentRundownId, - } as MongoQuery, - adLibPieceTypeFilter.options + adLibPieces = ( + await triggersContext.AdLibPieces.findFetchAsync( + computation, + { + ...adLibPieceTypeFilter.selector, + ...currentNextOverride, + rundownId: currentRundownId, + } as MongoQuery, + adLibPieceTypeFilter.options + ) ).map((item) => wrapAdLibPiece(item, 
MountedAdLibTriggerType.adLibPiece)) } } @@ -591,27 +597,33 @@ export function compileAdLibFilter( } } - const currentRundownId = context.currentRundownId.get() + const currentRundownId = context.currentRundownId.get(computation) if (!skip && currentRundownId) { if (adLibActionTypeFilter.global === undefined || adLibActionTypeFilter.global === true) - rundownBaselineAdLibActions = triggersContext.RundownBaselineAdLibActions.find( - { - ...adLibActionTypeFilter.selector, - ...currentNextOverride, - rundownId: currentRundownId, - } as MongoQuery, - adLibActionTypeFilter.options + rundownBaselineAdLibActions = ( + await triggersContext.RundownBaselineAdLibActions.findFetchAsync( + computation, + { + ...adLibActionTypeFilter.selector, + ...currentNextOverride, + rundownId: currentRundownId, + } as MongoQuery, + adLibActionTypeFilter.options + ) ).map((item) => wrapRundownBaselineAdLibAction(item, MountedAdLibTriggerType.rundownBaselineAdLibAction) ) if (adLibActionTypeFilter.global === undefined || adLibActionTypeFilter.global === false) - adLibActions = triggersContext.AdLibActions.find( - { - ...adLibActionTypeFilter.selector, - ...currentNextOverride, - rundownId: currentRundownId, - } as MongoQuery, - adLibActionTypeFilter.options + adLibActions = ( + await triggersContext.AdLibActions.findFetchAsync( + computation, + { + ...adLibActionTypeFilter.selector, + ...currentNextOverride, + rundownId: currentRundownId, + } as MongoQuery, + adLibActionTypeFilter.options + ) ).map((item) => wrapAdLibAction(item, MountedAdLibTriggerType.adLibAction)) } } @@ -624,38 +636,49 @@ export function compileAdLibFilter( // Note: We need to return an array from within memoizedIsolatedAutorun, // because _.isEqual (used in memoizedIsolatedAutorun) doesn't work with Maps.. 
- const rundownPlaylistId = context.rundownPlaylistId.get() - const rundownRanks = triggersContext.memoizedIsolatedAutorun(() => { - const playlist = triggersContext.RundownPlaylists.findOne(rundownPlaylistId, { - projection: { - rundownIdsInOrder: 1, - }, - }) as Pick | undefined - - if (playlist?.rundownIdsInOrder) { - return playlist.rundownIdsInOrder - } else { - const rundowns = triggersContext.Rundowns.find( + const rundownPlaylistId = context.rundownPlaylistId.get(computation) + const rundownRanks = await triggersContext.memoizedIsolatedAutorun( + computation, + async (computation) => { + const playlist = (await triggersContext.RundownPlaylists.findOneAsync( + computation, + rundownPlaylistId, { - playlistId: rundownPlaylistId, - }, - { - fields: { - _id: 1, + projection: { + rundownIdsInOrder: 1, }, } - ).fetch() as Pick[] - - return rundowns.map((r) => r._id) - } - }, `rundownsRanksForPlaylist_${rundownPlaylistId}`) + )) as Pick | undefined + + if (playlist?.rundownIdsInOrder) { + return playlist.rundownIdsInOrder + } else { + const rundowns = (await triggersContext.Rundowns.findFetchAsync( + computation, + { + playlistId: rundownPlaylistId, + }, + { + fields: { + _id: 1, + }, + } + )) as Pick[] + + return rundowns.map((r) => r._id) + } + }, + `rundownsRanksForPlaylist_${rundownPlaylistId}` + ) rundownRanks.forEach((id, index) => { rundownRankMap.set(id, index) }) - const segmentRanks = triggersContext.memoizedIsolatedAutorun( - () => - triggersContext.Segments.find( + const segmentRanks = await triggersContext.memoizedIsolatedAutorun( + computation, + async (computation) => + (await triggersContext.Segments.findFetchAsync( + computation, { rundownId: { $in: Array.from(rundownRankMap.keys()) }, }, @@ -665,42 +688,48 @@ export function compileAdLibFilter( _rank: 1, }, } - ).fetch() as Pick[], + )) as Pick[], `segmentRanksForRundowns_${Array.from(rundownRankMap.keys()).join(',')}` ) segmentRanks.forEach((segment) => { segmentRankMap.set(segment._id, 
segment._rank) }) - const partRanks = triggersContext.memoizedIsolatedAutorun(() => { - if (!partFilter) { - return triggersContext.Parts.find( - { - rundownId: { $in: Array.from(rundownRankMap.keys()) }, - }, - { - fields: { - _id: 1, - segmentId: 1, - rundownId: 1, - _rank: 1, - }, - } - ).fetch() as Pick[] - } else { - return triggersContext.Parts.find( - { _id: { $in: partFilter } }, - { - fields: { - _id: 1, - segmentId: 1, - rundownId: 1, - _rank: 1, + const partRanks = await triggersContext.memoizedIsolatedAutorun( + computation, + async (computation) => { + if (!partFilter) { + return (await triggersContext.Parts.findFetchAsync( + computation, + { + rundownId: { $in: Array.from(rundownRankMap.keys()) }, }, - } - ).fetch() as Pick[] - } - }, `partRanks_${JSON.stringify(partFilter ?? rundownRankMap.keys())}`) + { + fields: { + _id: 1, + segmentId: 1, + rundownId: 1, + _rank: 1, + }, + } + )) as Pick[] + } else { + return (await triggersContext.Parts.findFetchAsync( + computation, + { _id: { $in: partFilter } }, + { + fields: { + _id: 1, + segmentId: 1, + rundownId: 1, + _rank: 1, + }, + } + )) as Pick[] + } + }, + `partRanks_${JSON.stringify(partFilter ?? rundownRankMap.keys())}` + ) partRanks.forEach((part) => { partRankMap.set(part._id, part) diff --git a/packages/meteor-lib/src/triggers/reactive-var.ts b/packages/meteor-lib/src/triggers/reactive-var.ts index f9d7d58758a..765174dd7fa 100644 --- a/packages/meteor-lib/src/triggers/reactive-var.ts +++ b/packages/meteor-lib/src/triggers/reactive-var.ts @@ -1,9 +1,11 @@ +import type { TriggerTrackerComputation } from './triggersContext' + // Copied from Meteor -export interface ReactiveVar { +export interface TriggerReactiveVar { /** * Returns the current value of the ReactiveVar, establishing a reactive dependency. 
*/ - get(): T + get(computation: TriggerTrackerComputation | null): T /** * Sets the current value of the ReactiveVar, invalidating the Computations that called `get` if `newValue` is different from the old value. */ @@ -14,7 +16,7 @@ export interface ReactiveVar { * This just looks like a ReactiveVar, but is not reactive. * It's used to use the same interface/typings, but when code is run on both client and server side. * */ -export class DummyReactiveVar implements ReactiveVar { +export class DummyReactiveVar implements TriggerReactiveVar { constructor(private value: T) {} public get(): T { return this.value diff --git a/packages/meteor-lib/src/triggers/triggersContext.ts b/packages/meteor-lib/src/triggers/triggersContext.ts index 4c94b4ac52f..94b179cb77a 100644 --- a/packages/meteor-lib/src/triggers/triggersContext.ts +++ b/packages/meteor-lib/src/triggers/triggersContext.ts @@ -2,7 +2,7 @@ import { UserAction } from '../userAction' import { IMeteorCall } from '../api/methods' import { Time } from '@sofie-automation/shared-lib/dist/lib/lib' import { ClientAPI } from '../api/client' -import { MongoReadOnlyCollection } from '../collections/lib' +import { FindOneOptions, FindOptions } from '../collections/lib' import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' @@ -16,6 +16,37 @@ import { IBaseFilterLink } from '@sofie-automation/blueprints-integration' import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReactivePlaylistActionContext } from './actionFactory' import { TFunction } from 'i18next' +import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' +import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' + +/** + * A opaque type that is used in the meteor-lib api instead of implementation specific computations. 
+ * This should be treated as equivalent to the Meteor `Tracker.Computation` type. + */ +export type TriggerTrackerComputation = { __internal: true } + +export interface TriggersAsyncCollection }> { + /** + * Find and return multiple documents + * @param selector A query describing the documents to find + * @param options Options for the operation + */ + findFetchAsync( + computation: TriggerTrackerComputation | null, + selector: MongoQuery, + options?: FindOptions + ): Promise> + + /** + * Finds the first document that matches the selector, as ordered by sort and skip options. Returns `undefined` if no matching document is found. + * @param selector A query describing the documents to find + */ + findOneAsync( + computation: TriggerTrackerComputation | null, + selector: MongoQuery | DBInterface['_id'], + options?: FindOneOptions + ): Promise +} export interface TriggersContext { readonly MeteorCall: IMeteorCall @@ -24,14 +55,14 @@ export interface TriggersContext { readonly isClient: boolean - readonly AdLibActions: MongoReadOnlyCollection - readonly AdLibPieces: MongoReadOnlyCollection - readonly Parts: MongoReadOnlyCollection - readonly RundownBaselineAdLibActions: MongoReadOnlyCollection - readonly RundownBaselineAdLibPieces: MongoReadOnlyCollection - readonly RundownPlaylists: MongoReadOnlyCollection - readonly Rundowns: MongoReadOnlyCollection - readonly Segments: MongoReadOnlyCollection + readonly AdLibActions: TriggersAsyncCollection + readonly AdLibPieces: TriggersAsyncCollection + readonly Parts: TriggersAsyncCollection + readonly RundownBaselineAdLibActions: TriggersAsyncCollection + readonly RundownBaselineAdLibPieces: TriggersAsyncCollection + readonly RundownPlaylists: TriggersAsyncCollection + readonly Rundowns: TriggersAsyncCollection + readonly Segments: TriggersAsyncCollection hashSingleUseToken(token: string): string @@ -44,16 +75,28 @@ export interface TriggersContext { _okMessage?: string ): void - nonreactiveTracker(func: () => T): T + /** + * 
Equivalent to the Meteor `Tracker.withComputation` function, but implementation specific. + * Use this to ensure that a function is run as part of the provided computation. + */ + withComputation(computation: TriggerTrackerComputation | null, func: () => Promise): Promise - memoizedIsolatedAutorun any>( - fnc: T, + /** + * Create a reactive computation that will be run independently of the outer one. If the same function (using the same + * name and parameters) will be used again, this computation will only be computed once on invalidation and it's + * result will be memoized and reused on every other call. + * + * This will be run as part of the provided computation, and passes the inner computation to the function. + */ + memoizedIsolatedAutorun( + computation: TriggerTrackerComputation | null, + fnc: (computation: TriggerTrackerComputation | null, ...args: TArgs) => Promise, functionName: string, - ...params: Parameters - ): ReturnType + ...params: TArgs + ): Promise createContextForRundownPlaylistChain( _studioId: StudioId, _filterChain: IBaseFilterLink[] - ): ReactivePlaylistActionContext | undefined + ): Promise } diff --git a/packages/webui/src/client/lib/ReactMeteorData/ReactMeteorData.tsx b/packages/webui/src/client/lib/ReactMeteorData/ReactMeteorData.tsx index b97ab3cb71c..379908a27c0 100644 --- a/packages/webui/src/client/lib/ReactMeteorData/ReactMeteorData.tsx +++ b/packages/webui/src/client/lib/ReactMeteorData/ReactMeteorData.tsx @@ -342,17 +342,56 @@ export type Translated = T & WithTranslation * @param {K} [initial] An optional, initial state of the tracker. If not provided, the tracker may return undefined. 
* @return {*} {(T | K)} */ -export function useTracker(autorun: () => T, deps: React.DependencyList): T | undefined -export function useTracker(autorun: () => T, deps: React.DependencyList, initial: T): T +export function useTracker( + autorun: (computation: Tracker.Computation) => T, + deps: React.DependencyList +): T | undefined +export function useTracker( + autorun: (computation: Tracker.Computation) => T, + deps: React.DependencyList, + initial: T +): T export function useTracker( - autorun: () => T, + autorun: (computation: Tracker.Computation) => T, deps: React.DependencyList, initial?: K ): T | K { const [meteorData, setMeteorData] = useState(initial as K) useEffect(() => { - const computation = Tracker.nonreactive(() => Tracker.autorun(() => setMeteorData(autorun()))) + const computation = Tracker.nonreactive(() => + Tracker.autorun((innerComputation) => setMeteorData(autorun(innerComputation))) + ) + return () => computation.stop() + }, deps) + + return meteorData +} + +/** + * A Meteor Tracker hook that allows using React Functional Components and the Hooks API with Meteor Tracker + * + * This is an alternate implementation which supports promises in the autorun function, and will preserve the previous value until the promise resolves. + * + * @param {() => Promise} autorun The autorun function to be run. + * @param {React.DependencyList} [deps] A required list of dependenices to limit the tracker re-running. Can be left empty, if tracker + * has no external dependencies and should only be rerun when it's invalidated. + * @param {K} [initial] An optional, initial state of the tracker. If not provided, the tracker may return undefined. 
+ * @return {*} {(T | K)} + */ +export function useTrackerAsyncTest( + autorun: (computation: Tracker.Computation) => Promise, + deps: React.DependencyList, + initial?: K +): T | K { + const [meteorData, setMeteorData] = useState(initial as K) + + useEffect(() => { + const computation = Tracker.nonreactive(() => + Tracker.autorun(async (innerComputation) => { + setMeteorData(await autorun(innerComputation)) + }) + ) return () => computation.stop() }, deps) diff --git a/packages/webui/src/client/lib/memoizedIsolatedAutorun.ts b/packages/webui/src/client/lib/memoizedIsolatedAutorun.ts index 992654c5cb1..41c10bc649d 100644 --- a/packages/webui/src/client/lib/memoizedIsolatedAutorun.ts +++ b/packages/webui/src/client/lib/memoizedIsolatedAutorun.ts @@ -1,6 +1,8 @@ +import { isPromise } from '@sofie-automation/shared-lib/dist/lib/lib' import { Meteor } from 'meteor/meteor' import { Tracker } from 'meteor/tracker' import _ from 'underscore' +import { getRandomString } from './tempLib' const isolatedAutorunsMem: { [key: string]: { @@ -78,3 +80,86 @@ export function memoizedIsolatedAutorun any>( // @ts-expect-error it is assigned by the tracker return result } + +interface IsolatedAsyncAutorunState { + computationId: string + dependancy: Tracker.Dependency + value: any +} + +const isolatedAsyncAutorunsMem: { + [key: string]: IsolatedAsyncAutorunState +} = {} + +export async function memoizedIsolatedAutorunAsync( + parentComputation: Tracker.Computation | null, + fnc: (computation: Tracker.Computation, ...args: TArgs) => Promise, + functionName: string, + ...params: TArgs +): Promise { + function hashFncAndParams(fName: string, p: any): string { + return fName + '_' + JSON.stringify(p) + } + + const fId = hashFncAndParams(functionName, params) + // Computation is already running, depend on it + if (isolatedAsyncAutorunsMem[fId]) { + const result = isolatedAsyncAutorunsMem[fId].value + isolatedAsyncAutorunsMem[fId].dependancy.depend(parentComputation) + + return result + } + 
+ // Setup the computation + const computationId = getRandomString() + const dep = new Tracker.Dependency() + dep.depend(parentComputation) + const computation = Tracker.nonreactive(() => { + const computationState: IsolatedAsyncAutorunState = { + computationId, + dependancy: dep, + value: null, // Filled in later + } + + const computation = Tracker.autorun(async (innerComputation) => { + // Start executing the function + const rawValue: Promise = fnc(innerComputation, ...params) + + // Fetch the previous value and the new value + const oldValue = computationState.value + const newValue = await rawValue + + // If the old value is an unresolved promise, we can't compare it + const oldRealValue = isPromise(oldValue) ? null : oldValue + + // If the values are different, invalidate the dependancy + // Do this even for the first run, as other listeners might have joined while the promise was resolving + if (!_.isEqual(oldRealValue, newValue)) { + dep.changed() + } + + return newValue as void // Tracker.autorun isn't generic + }) + computation.onStop(() => { + // Only delete if it is this computation that is stopping + if (isolatedAsyncAutorunsMem[fId]?.computationId === computationId) { + delete isolatedAsyncAutorunsMem[fId] + } + }) + + // Store the first value + computationState.value = computation.firstRunPromise + isolatedAsyncAutorunsMem[fId] = computationState + + return computation + }) + const gc = Meteor.setInterval(() => { + if (!dep.hasDependents()) { + Meteor.clearInterval(gc) + computation.stop() + } + }, 5000) + + // Return the promise of the first value + return computation.firstRunPromise as TRes // Tracker.autorun isn't generic +} diff --git a/packages/webui/src/client/lib/notifications/notifications.ts b/packages/webui/src/client/lib/notifications/notifications.ts index 0d1a0aff330..ef8f63bfb5c 100644 --- a/packages/webui/src/client/lib/notifications/notifications.ts +++ b/packages/webui/src/client/lib/notifications/notifications.ts @@ -116,7 +116,7 @@ 
export class NotifierHandle { this.result = source().get() notificationsDep.changed() }) - }) as any as Tracker.Computation + }) notifiers[notifierId] = this } diff --git a/packages/webui/src/client/lib/reactiveData/reactiveDataHelper.ts b/packages/webui/src/client/lib/reactiveData/reactiveDataHelper.ts index 7384c8dee05..119e70a8c4a 100644 --- a/packages/webui/src/client/lib/reactiveData/reactiveDataHelper.ts +++ b/packages/webui/src/client/lib/reactiveData/reactiveDataHelper.ts @@ -83,7 +83,7 @@ export abstract class WithManagedTracker { const comp = Tracker.autorun(func, options) this._autoruns.push(comp) return comp - }) as any as Tracker.Computation + }) } } diff --git a/packages/webui/src/client/lib/triggers/TriggersHandler.tsx b/packages/webui/src/client/lib/triggers/TriggersHandler.tsx index 5c28ff8f54a..6d3173ba622 100644 --- a/packages/webui/src/client/lib/triggers/TriggersHandler.tsx +++ b/packages/webui/src/client/lib/triggers/TriggersHandler.tsx @@ -45,7 +45,7 @@ import { RundownPlaylistCollectionUtil } from '../../collections/rundownPlaylist import { catchError } from '../lib' import { logger } from '../logging' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' -import { UiTriggersContext } from './triggersContext' +import { toTriggersComputation, toTriggersReactiveVar, UiTriggersContext } from './triggersContext' type HotkeyTriggerListener = (e: KeyboardEvent) => void @@ -94,30 +94,42 @@ function createAction( actions: SomeAction[], showStyleBase: UIShowStyleBase, t: TFunction, - collectContext: () => ReactivePlaylistActionContext | null + collectContext: (computation: Tracker.Computation | null) => ReactivePlaylistActionContext | null ): { listener: HotkeyTriggerListener - preview: () => IWrappedAdLib[] + preview: (computation: Tracker.Computation) => Promise } { const executableActions = actions.map((value) => libCreateAction(UiTriggersContext, value, showStyleBase.sourceLayers) ) return { - preview: () => { - const ctx = 
collectContext() - if (ctx) { - return flatten(executableActions.map((action) => (isPreviewableAction(action) ? action.preview(ctx) : []))) - } else { - return [] - } + preview: async (computation: Tracker.Computation) => { + const trackerComputation = toTriggersComputation(computation) + const ctx = collectContext(computation) + if (!ctx) return [] + + return flatten( + await Promise.all( + executableActions.map( + async (action): Promise => + isPreviewableAction(action) ? action.preview(ctx, trackerComputation) : [] + ) + ) + ) }, listener: (e) => { e.preventDefault() e.stopPropagation() - const ctx = collectContext() + const ctx = collectContext(null) if (ctx) { - executableActions.forEach((action) => action.execute(t, e, ctx)) + executableActions.forEach((action) => + Promise.resolve() + .then(async () => action.execute(t, e, ctx)) + .catch((e) => { + logger.error(`Execution Triggered Action "${_id}" failed: ${e}`) + }) + ) } }, } @@ -128,8 +140,8 @@ const rundownPlaylistContext: ReactiveVar function setRundownPlaylistContext(ctx: ReactivePlaylistActionContext | null) { rundownPlaylistContext.set(ctx) } -function getCurrentContext(): ReactivePlaylistActionContext | null { - return rundownPlaylistContext.get() +function getCurrentContext(computation: Tracker.Computation | null): ReactivePlaylistActionContext | null { + return rundownPlaylistContext.get(computation ?? 
undefined) } export const MountedAdLibTriggers = createInMemorySyncMongoCollection( @@ -145,10 +157,12 @@ export function isMountedAdLibTrigger( return 'targetId' in mountedTrigger && !!mountedTrigger['targetId'] } -function isolatedAutorunWithCleanup(autorun: () => void | (() => void)): Tracker.Computation { +function isolatedAutorunWithCleanup( + autorun: (computation: Tracker.Computation) => Promise void)> +): Tracker.Computation { return Tracker.nonreactive(() => - Tracker.autorun((computation) => { - const cleanUp = autorun() + Tracker.autorun(async (computation) => { + const cleanUp = await autorun(computation) if (typeof cleanUp === 'function') { computation.onInvalidate(cleanUp) @@ -322,15 +336,17 @@ export const TriggersHandler: React.FC = function TriggersHandler( let context = rundownPlaylistContext.get() if (context === null) { context = { - studioId: new ReactiveVar(props.studioId), - rundownPlaylistId: new ReactiveVar(playlist._id), - rundownPlaylist: new ReactiveVar(playlist), - currentRundownId: new ReactiveVar(props.currentRundownId), - currentPartId: new ReactiveVar(props.currentPartId), - nextPartId: new ReactiveVar(props.nextPartId), - currentSegmentPartIds: new ReactiveVar(props.currentSegmentPartIds), - nextSegmentPartIds: new ReactiveVar(props.nextSegmentPartIds), - currentPartInstanceId: new ReactiveVar(playlist.currentPartInfo?.partInstanceId ?? 
null), + studioId: toTriggersReactiveVar(new ReactiveVar(props.studioId)), + rundownPlaylistId: toTriggersReactiveVar(new ReactiveVar(playlist._id)), + rundownPlaylist: toTriggersReactiveVar(new ReactiveVar(playlist)), + currentRundownId: toTriggersReactiveVar(new ReactiveVar(props.currentRundownId)), + currentPartId: toTriggersReactiveVar(new ReactiveVar(props.currentPartId)), + nextPartId: toTriggersReactiveVar(new ReactiveVar(props.nextPartId)), + currentSegmentPartIds: toTriggersReactiveVar(new ReactiveVar(props.currentSegmentPartIds)), + nextSegmentPartIds: toTriggersReactiveVar(new ReactiveVar(props.nextSegmentPartIds)), + currentPartInstanceId: toTriggersReactiveVar( + new ReactiveVar(playlist.currentPartInfo?.partInstanceId ?? null) + ), } rundownPlaylistContext.set(context) } else { @@ -444,10 +460,10 @@ export const TriggersHandler: React.FC = function TriggersHandler( const hotkeyFinalKeys = hotkeyTriggers.map((key) => getFinalKey(key)) previewAutoruns.push( - isolatedAutorunWithCleanup(() => { + isolatedAutorunWithCleanup(async (computation) => { let previewAdLibs: IWrappedAdLib[] = [] try { - previewAdLibs = action.preview() + previewAdLibs = await action.preview(computation) } catch (e) { logger.error(e) } diff --git a/packages/webui/src/client/lib/triggers/triggersContext.ts b/packages/webui/src/client/lib/triggers/triggersContext.ts index 1fe46fc9f36..fc922c45e6f 100644 --- a/packages/webui/src/client/lib/triggers/triggersContext.ts +++ b/packages/webui/src/client/lib/triggers/triggersContext.ts @@ -1,9 +1,12 @@ -import { TriggersContext } from '@sofie-automation/meteor-lib/dist/triggers/triggersContext' +import { + TriggersAsyncCollection, + TriggersContext, + TriggerTrackerComputation, +} from '@sofie-automation/meteor-lib/dist/triggers/triggersContext' import { hashSingleUseToken } from '../lib' import { MeteorCall } from '../meteorApi' import { IBaseFilterLink } from '@sofie-automation/blueprints-integration' import { doUserAction } from 
'../clientUserAction' -import { memoizedIsolatedAutorun } from '../memoizedIsolatedAutorun' import { Tracker } from 'meteor/tracker' import { AdLibActions, @@ -18,6 +21,42 @@ import { import { logger } from '../logging' import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReactivePlaylistActionContext } from '@sofie-automation/meteor-lib/dist/triggers/actionFactory' +import { FindOneOptions, MongoReadOnlyCollection } from '../../collections/lib' +import { ProtectedString } from '../tempLib' +import { ReactiveVar as MeteorReactiveVar } from 'meteor/reactive-var' +import { TriggerReactiveVar } from '@sofie-automation/meteor-lib/dist/triggers/reactive-var' +import { FindOptions, MongoQuery } from '@sofie-automation/corelib/dist/mongo' +import { memoizedIsolatedAutorunAsync } from '../memoizedIsolatedAutorun' + +class UiTriggersCollectionWrapper }> + implements TriggersAsyncCollection +{ + readonly #collection: MongoReadOnlyCollection + + constructor(collection: MongoReadOnlyCollection) { + this.#collection = collection + } + + async findFetchAsync( + computation: TriggerTrackerComputation | null, + selector: MongoQuery, + options?: FindOptions + ): Promise> { + return Tracker.withComputation(computation as Tracker.Computation | null, async () => { + return this.#collection.find(selector, options).fetch() + }) + } + + async findOneAsync( + computation: TriggerTrackerComputation | null, + selector: MongoQuery | DBInterface['_id'], + options?: FindOneOptions + ): Promise { + return Tracker.withComputation(computation as Tracker.Computation | null, async () => { + return this.#collection.findOne(selector, options) + }) + } +} export const UiTriggersContext: TriggersContext = { MeteorCall, @@ -26,29 +65,51 @@ export const UiTriggersContext: TriggersContext = { isClient: true, - AdLibActions, - AdLibPieces, - Parts, - RundownBaselineAdLibActions, - RundownBaselineAdLibPieces, - RundownPlaylists, - Rundowns, - Segments, + AdLibActions: new 
UiTriggersCollectionWrapper(AdLibActions), + AdLibPieces: new UiTriggersCollectionWrapper(AdLibPieces), + Parts: new UiTriggersCollectionWrapper(Parts), + RundownBaselineAdLibActions: new UiTriggersCollectionWrapper(RundownBaselineAdLibActions), + RundownBaselineAdLibPieces: new UiTriggersCollectionWrapper(RundownBaselineAdLibPieces), + RundownPlaylists: new UiTriggersCollectionWrapper(RundownPlaylists), + Rundowns: new UiTriggersCollectionWrapper(Rundowns), + Segments: new UiTriggersCollectionWrapper(Segments), hashSingleUseToken, doUserAction, - nonreactiveTracker: Tracker.nonreactive, + withComputation: async (computation, func) => { + return Tracker.withComputation(computation as Tracker.Computation | null, func) + }, - memoizedIsolatedAutorun, + memoizedIsolatedAutorun: async ( + computation: TriggerTrackerComputation | null, + fnc: (computation: TriggerTrackerComputation | null, ...args: TArgs) => Promise, + functionName: string, + ...params: TArgs + ): Promise => { + return memoizedIsolatedAutorunAsync( + computation as Tracker.Computation | null, + async (innerComputation, ...params2) => fnc(toTriggersComputation(innerComputation), ...params2), + functionName, + ...params + ) + }, - createContextForRundownPlaylistChain( + async createContextForRundownPlaylistChain( _studioId: StudioId, _filterChain: IBaseFilterLink[] - ): ReactivePlaylistActionContext | undefined { + ): Promise { // Server only throw new Error('Invalid filter combination') }, } + +export function toTriggersReactiveVar(reactiveVar: MeteorReactiveVar): TriggerReactiveVar { + return reactiveVar as any +} + +export function toTriggersComputation(computation: Tracker.Computation): TriggerTrackerComputation { + return computation as any +} diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index e6253658420..021c09e9f69 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -1388,7 
+1388,7 @@ const RundownViewContent = translateWithTracker RundownPlaylistCollectionUtil.getRundownsOrdered(playlist), + (_playlistId: RundownPlaylistId) => RundownPlaylistCollectionUtil.getRundownsOrdered(playlist), 'playlist.getRundowns', playlistId ) diff --git a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionEntry.tsx b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionEntry.tsx index 8b1ec894b8f..0eacc998b14 100644 --- a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionEntry.tsx +++ b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionEntry.tsx @@ -10,11 +10,15 @@ import { } from '@sofie-automation/blueprints-integration' import classNames from 'classnames' import { DBBlueprintTrigger } from '@sofie-automation/meteor-lib/dist/collections/TriggeredActions' -import { useTracker } from '../../../../lib/ReactMeteorData/ReactMeteorData' +import { useTracker, useTrackerAsyncTest } from '../../../../lib/ReactMeteorData/ReactMeteorData' import { ActionEditor } from './actionEditors/ActionEditor' import { OutputLayers, SourceLayers } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { flatten, getRandomString } from '../../../../lib/tempLib' -import { createAction, isPreviewableAction } from '@sofie-automation/meteor-lib/dist/triggers/actionFactory' +import { + createAction, + isPreviewableAction, + PlainActionContext, +} from '@sofie-automation/meteor-lib/dist/triggers/actionFactory' import { PreviewContext } from './TriggeredActionsEditor' import { IWrappedAdLib } from '@sofie-automation/meteor-lib/dist/triggers/actionFilterChainCompilers' import { RundownUtils } from '../../../../lib/rundown' @@ -35,7 +39,7 @@ import { isHotkeyTrigger } from '@sofie-automation/meteor-lib/dist/triggers/trig import { getAllCurrentAndDeletedItemsFromOverrides, useOverrideOpHelper } from '../../util/OverrideOpHelper' import { TriggeredActions 
} from '../../../../collections' import { catchError } from '../../../../lib/lib' -import { UiTriggersContext } from '../../../../lib/triggers/triggersContext' +import { toTriggersComputation, UiTriggersContext } from '../../../../lib/triggers/triggersContext' import { last, literal } from '@sofie-automation/shared-lib/dist/lib/lib' interface IProps { @@ -179,27 +183,36 @@ export const TriggeredActionEntry: React.FC = React.memo(function Trigge [triggeredAction?.actionsWithOverrides] ) - const previewItems = useTracker( - () => { + const previewItems = useTrackerAsyncTest( + async (computation) => { try { - if (resolvedActions && selected && sourceLayers) { - const executableActions = Object.values(resolvedActions).map((value) => - createAction(UiTriggersContext, value, sourceLayers) - ) - const ctx = previewContext - if (ctx && ctx.rundownPlaylist) { - return flatten( - executableActions.map((action) => (isPreviewableAction(action) ? action.preview(ctx as any) : [])) + if (!resolvedActions || !selected || !sourceLayers) return [] + + const triggerComputation = toTriggersComputation(computation) + + const executableActions = Object.values(resolvedActions).map((value) => + createAction(UiTriggersContext, value, sourceLayers) + ) + const ctx = previewContext + if (!ctx || !ctx.rundownPlaylist) return [] + + const actionCtx = ctx as PlainActionContext + + return flatten( + await Promise.all( + executableActions.map( + async (action): Promise => + isPreviewableAction(action) ? 
action.preview(actionCtx, triggerComputation) : [] ) - } - } + ) + ) } catch (e) { catchError('TriggeredActionEntry previewItems')(e) } - return [] as IWrappedAdLib[] + return [] }, [selected, resolvedActions, sourceLayers, previewContext], - [] as IWrappedAdLib[] + [] ) function getType(sourceLayerId: string | undefined): SourceLayerType { diff --git a/packages/webui/src/meteor/reactive-var.d.ts b/packages/webui/src/meteor/reactive-var.d.ts index 122ea35c0dd..933f6fd32bc 100644 --- a/packages/webui/src/meteor/reactive-var.d.ts +++ b/packages/webui/src/meteor/reactive-var.d.ts @@ -1,22 +1,25 @@ - var ReactiveVar: ReactiveVarStatic; - interface ReactiveVarStatic { - /** - * Constructor for a ReactiveVar, which represents a single reactive variable. - * @param initialValue The initial value to set. `equalsFunc` is ignored when setting the initial value. - * @param equalsFunc A function of two arguments, called on the old value and the new value whenever the ReactiveVar is set. If it returns true, no set is performed. If omitted, the default - * `equalsFunc` returns true if its arguments are `===` and are of type number, boolean, string, undefined, or null. - */ - new (initialValue: T, equalsFunc?: (oldValue: T, newValue: T) => boolean): ReactiveVar; - } - interface ReactiveVar { - /** - * Returns the current value of the ReactiveVar, establishing a reactive dependency. - */ - get(): T; - /** - * Sets the current value of the ReactiveVar, invalidating the Computations that called `get` if `newValue` is different from the old value. - */ - set(newValue: T): void; - } +import type { Tracker } from './tracker' - export { ReactiveVar } \ No newline at end of file +var ReactiveVar: ReactiveVarStatic +interface ReactiveVarStatic { + /** + * Constructor for a ReactiveVar, which represents a single reactive variable. + * @param initialValue The initial value to set. `equalsFunc` is ignored when setting the initial value. 
+ * @param equalsFunc A function of two arguments, called on the old value and the new value whenever the ReactiveVar is set. If it returns true, no set is performed. If omitted, the default + * `equalsFunc` returns true if its arguments are `===` and are of type number, boolean, string, undefined, or null. + */ + new (initialValue: T, equalsFunc?: (oldValue: T, newValue: T) => boolean): ReactiveVar +} +interface ReactiveVar { + /** + * Returns the current value of the ReactiveVar, establishing a reactive dependency. + * @param fromComputation An optional computation declared to depend on `dependency` instead of the current computation. + */ + get(fromComputation?: Tracker.Computation): T + /** + * Sets the current value of the ReactiveVar, invalidating the Computations that called `get` if `newValue` is different from the old value. + */ + set(newValue: T): void +} + +export { ReactiveVar } diff --git a/packages/webui/src/meteor/reactive-var.js b/packages/webui/src/meteor/reactive-var.js index 0817f62e55a..7552f363649 100644 --- a/packages/webui/src/meteor/reactive-var.js +++ b/packages/webui/src/meteor/reactive-var.js @@ -61,9 +61,9 @@ export const ReactiveVar = function (initialValue, equalsFunc) { * @summary Returns the current value of the ReactiveVar, establishing a reactive dependency. * @locus Client */ - ReactiveVar.prototype.get = function () { - if (Tracker.active) - this.dep.depend(); + ReactiveVar.prototype.get = function (computation) { + if (Tracker.active || computation) + this.dep.depend(computation); return this.curValue; }; diff --git a/packages/webui/src/meteor/tracker.d.ts b/packages/webui/src/meteor/tracker.d.ts index d210c0e408d..da9f0229ff7 100644 --- a/packages/webui/src/meteor/tracker.d.ts +++ b/packages/webui/src/meteor/tracker.d.ts @@ -1,129 +1,139 @@ - /** - * The namespace for Tracker-related methods. 
- */ - export namespace Tracker { - function Computation(): void; - /** - * A Computation object represents code that is repeatedly rerun - * in response to - * reactive data changes. Computations don't have return values; they just - * perform actions, such as rerendering a template on the screen. Computations - * are created using Tracker.autorun. Use stop to prevent further rerunning of a - * computation. - */ - interface Computation { - /** - * True during the initial run of the computation at the time `Tracker.autorun` is called, and false on subsequent reruns and at other times. - */ - firstRun: boolean; - /** - * Invalidates this computation so that it will be rerun. - */ - invalidate(): void; - /** - * True if this computation has been invalidated (and not yet rerun), or if it has been stopped. - */ - invalidated: boolean; - /** - * Registers `callback` to run when this computation is next invalidated, or runs it immediately if the computation is already invalidated. The callback is run exactly once and not upon - * future invalidations unless `onInvalidate` is called again after the computation becomes valid again. - * @param callback Function to be called on invalidation. Receives one argument, the computation that was invalidated. - */ - onInvalidate(callback: Function): void; - /** - * Registers `callback` to run when this computation is stopped, or runs it immediately if the computation is already stopped. The callback is run after any `onInvalidate` callbacks. - * @param callback Function to be called on stop. Receives one argument, the computation that was stopped. - */ - onStop(callback: Function): void; - /** - * Prevents this computation from rerunning. - */ - stop(): void; - /** - * True if this computation has been stopped. - */ - stopped: boolean; - } - /** - * The current computation, or `null` if there isn't one. 
The current computation is the `Tracker.Computation` object created by the innermost active call to - * `Tracker.autorun`, and it's the computation that gains dependencies when reactive data sources are accessed. - */ - var currentComputation: Computation; +/** + * The namespace for Tracker-related methods. + */ +export namespace Tracker { + function Computation(): void + /** + * A Computation object represents code that is repeatedly rerun + * in response to + * reactive data changes. Computations don't have return values; they just + * perform actions, such as rerendering a template on the screen. Computations + * are created using Tracker.autorun. Use stop to prevent further rerunning of a + * computation. + */ + interface Computation { + /** + * True during the initial run of the computation at the time `Tracker.autorun` is called, and false on subsequent reruns and at other times. + */ + firstRun: boolean + /** + * Forces autorun blocks to be executed in synchronous-looking order by storing the value autorun promise thus making it awaitable. + */ + firstRunPromise: Promise + /** + * Invalidates this computation so that it will be rerun. + */ + invalidate(): void + /** + * True if this computation has been invalidated (and not yet rerun), or if it has been stopped. + */ + invalidated: boolean + /** + * Registers `callback` to run when this computation is next invalidated, or runs it immediately if the computation is already invalidated. The callback is run exactly once and not upon + * future invalidations unless `onInvalidate` is called again after the computation becomes valid again. + * @param callback Function to be called on invalidation. Receives one argument, the computation that was invalidated. + */ + onInvalidate(callback: Function): void + /** + * Registers `callback` to run when this computation is stopped, or runs it immediately if the computation is already stopped. The callback is run after any `onInvalidate` callbacks. 
+ * @param callback Function to be called on stop. Receives one argument, the computation that was stopped. + */ + onStop(callback: Function): void + /** + * Prevents this computation from rerunning. + */ + stop(): void + /** + * True if this computation has been stopped. + */ + stopped: boolean + } + /** + * The current computation, or `null` if there isn't one. The current computation is the `Tracker.Computation` object created by the innermost active call to + * `Tracker.autorun`, and it's the computation that gains dependencies when reactive data sources are accessed. + */ + var currentComputation: Computation | null - var Dependency: DependencyStatic; - /** - * A Dependency represents an atomic unit of reactive data that a - * computation might depend on. Reactive data sources such as Session or - * Minimongo internally create different Dependency objects for different - * pieces of data, each of which may be depended on by multiple computations. - * When the data changes, the computations are invalidated. - */ - interface DependencyStatic { - new (): Dependency; - } - interface Dependency { - /** - * Invalidate all dependent computations immediately and remove them as dependents. - */ - changed(): void; - /** - * Declares that the current computation (or `fromComputation` if given) depends on `dependency`. The computation will be invalidated the next time `dependency` changes. - * If there is no current computation and `depend()` is called with no arguments, it does nothing and returns false. - * Returns true if the computation is a new dependent of `dependency` rather than an existing one. - * @param fromComputation An optional computation declared to depend on `dependency` instead of the current computation. - */ - depend(fromComputation?: Computation): boolean; - /** - * True if this Dependency has one or more dependent Computations, which would be invalidated if this Dependency were to change. 
- */ - hasDependents(): boolean; - } + var Dependency: DependencyStatic + /** + * A Dependency represents an atomic unit of reactive data that a + * computation might depend on. Reactive data sources such as Session or + * Minimongo internally create different Dependency objects for different + * pieces of data, each of which may be depended on by multiple computations. + * When the data changes, the computations are invalidated. + */ + interface DependencyStatic { + new (): Dependency + } + interface Dependency { + /** + * Invalidate all dependent computations immediately and remove them as dependents. + */ + changed(): void + /** + * Declares that the current computation (or `fromComputation` if given) depends on `dependency`. The computation will be invalidated the next time `dependency` changes. + * If there is no current computation and `depend()` is called with no arguments, it does nothing and returns false. + * Returns true if the computation is a new dependent of `dependency` rather than an existing one. + * @param fromComputation An optional computation declared to depend on `dependency` instead of the current computation. + */ + depend(fromComputation?: Computation | null): boolean + /** + * True if this Dependency has one or more dependent Computations, which would be invalidated if this Dependency were to change. + */ + hasDependents(): boolean + } - /** - * True if there is a current computation, meaning that dependencies on reactive data sources will be tracked and potentially cause the current computation to be rerun. - */ - var active: boolean; + /** + * True if there is a current computation, meaning that dependencies on reactive data sources will be tracked and potentially cause the current computation to be rerun. + */ + var active: boolean - /** - * Schedules a function to be called during the next flush, or later in the current flush if one is in progress, after all invalidated computations have been rerun. 
The function will be run - * once and not on subsequent flushes unless `afterFlush` is called again. - * @param callback A function to call at flush time. - */ - function afterFlush(callback: Function): void; + /** + * Schedules a function to be called during the next flush, or later in the current flush if one is in progress, after all invalidated computations have been rerun. The function will be run + * once and not on subsequent flushes unless `afterFlush` is called again. + * @param callback A function to call at flush time. + */ + function afterFlush(callback: Function): void - /** - * Run a function now and rerun it later whenever its dependencies - * change. Returns a Computation object that can be used to stop or observe the - * rerunning. - * @param runFunc The function to run. It receives one argument: the Computation object that will be returned. - */ - function autorun( - runFunc: (computation: Computation) => void, - options?: { - /** - * The function to run when an error - * happens in the Computation. The only argument it receives is the Error - * thrown. Defaults to the error being logged to the console. - */ - onError?: Function | undefined; - }, - ): Computation; + /** + * Run a function now and rerun it later whenever its dependencies + * change. Returns a Computation object that can be used to stop or observe the + * rerunning. + * @param runFunc The function to run. It receives one argument: the Computation object that will be returned. + */ + function autorun( + runFunc: (computation: Computation) => void | Promise, + options?: { + /** + * The function to run when an error + * happens in the Computation. The only argument it receives is the Error + * thrown. Defaults to the error being logged to the console. + */ + onError?: Function | undefined + } + ): Computation - /** - * Process all reactive updates immediately and ensure that all invalidated computations are rerun. 
- */ - function flush(): void; + /** + * @summary Helper function to make the tracker work with promises. + * @param computation Computation that tracked + * @param func async function that needs to be called and be reactive + */ + function withComputation(computation: Computation | null, func: () => Promise): Promise - /** - * Run a function without tracking dependencies. - * @param func A function to call immediately. - */ - function nonreactive(func: () => T): T; + /** + * Process all reactive updates immediately and ensure that all invalidated computations are rerun. + */ + function flush(): void - /** - * Registers a new `onInvalidate` callback on the current computation (which must exist), to be called immediately when the current computation is invalidated or stopped. - * @param callback A callback function that will be invoked as `func(c)`, where `c` is the computation on which the callback is registered. - */ - function onInvalidate(callback: Function): void; - } + /** + * Run a function without tracking dependencies. + * @param func A function to call immediately. + */ + function nonreactive(func: () => T): T + /** + * Registers a new `onInvalidate` callback on the current computation (which must exist), to be called immediately when the current computation is invalidated or stopped. + * @param callback A callback function that will be invoked as `func(c)`, where `c` is the computation on which the callback is registered. 
+ */ + function onInvalidate(callback: Function): void +} diff --git a/packages/webui/src/meteor/tracker.js b/packages/webui/src/meteor/tracker.js index d860e190874..632352b1a50 100644 --- a/packages/webui/src/meteor/tracker.js +++ b/packages/webui/src/meteor/tracker.js @@ -1,4 +1,4 @@ -// https://github.com/meteor/meteor/blob/73fd519de6eef8e116d813fb457c8442db9d1cdd/packages/tracker/tracker.js +// https://github.com/meteor/meteor/blob/0afa7df1fa4146f1f5dd26d867b32c19b7e8d4ad/packages/tracker/tracker.js ///////////////////////////////////////////////////// // Package docs at http://docs.meteor.com/#tracker // @@ -6,8 +6,6 @@ import { Meteor } from './meteor' -export const a = 'a' - /** * @namespace Tracker * @summary The namespace for Tracker-related methods. @@ -38,11 +36,6 @@ Tracker.active = false */ Tracker.currentComputation = null -function setCurrentComputation(c) { - Tracker.currentComputation = c - Tracker.active = !!c -} - function _debugFunc() { // We want this code to work without Meteor, and also without // "console" (which is technically non-standard and may be missing @@ -189,6 +182,16 @@ Tracker.Computation = class Computation { this._onError = onError this._recomputing = false + /** + * @summary Forces autorun blocks to be executed in synchronous-looking order by storing the value autorun promise thus making it awaitable. + * @locus Client + * @memberOf Tracker.Computation + * @instance + * @name firstRunPromise + * @returns {Promise} + */ + this.firstRunPromise = undefined + var errored = true try { this._compute() @@ -199,6 +202,20 @@ Tracker.Computation = class Computation { } } + /** + * Resolves the firstRunPromise with the result of the autorun function. + * @param {*} onResolved + * @param {*} onRejected + * @returns{Promise { + return this._func(this) + }) + // We'll store the firstRunPromise on the computation so it can be awaited by the callers, but only + // during the first run. We don't want things to get mixed up. 
+ if (this.firstRun) { + this.firstRunPromise = Promise.resolve(firstRunPromise) + } } finally { - setCurrentComputation(previous) inCompute = previousInCompute } } @@ -368,15 +392,15 @@ Tracker.Dependency = class Dependency { // if there is no currentComputation. /** - * @summary Declares that the current computation (or `fromComputation` if given) depends on `dependency`. The computation will be invalidated the next time `dependency` changes. - - If there is no current computation and `depend()` is called with no arguments, it does nothing and returns false. - - Returns true if the computation is a new dependent of `dependency` rather than an existing one. - * @locus Client - * @param {Tracker.Computation} [fromComputation] An optional computation declared to depend on `dependency` instead of the current computation. - * @returns {Boolean} - */ + * @summary Declares that the current computation (or `fromComputation` if given) depends on `dependency`. The computation will be invalidated the next time `dependency` changes. + + If there is no current computation and `depend()` is called with no arguments, it does nothing and returns false. + + Returns true if the computation is a new dependent of `dependency` rather than an existing one. + * @locus Client + * @param {Tracker.Computation} [fromComputation] An optional computation declared to depend on `dependency` instead of the current computation. + * @returns {Boolean} + */ depend(computation) { if (!computation) { if (!Tracker.active) return false @@ -542,11 +566,9 @@ Tracker._runFlush = function (options) { * thrown. Defaults to the error being logged to the console. 
* @returns {Tracker.Computation} */ -Tracker.autorun = function (f, options) { +Tracker.autorun = function (f, options = {}) { if (typeof f !== 'function') throw new Error('Tracker.autorun requires a function argument') - options = options || {} - constructingComputation = true var c = new Tracker.Computation(f, Tracker.currentComputation, options.onError) @@ -571,12 +593,25 @@ Tracker.autorun = function (f, options) { * @param {Function} func A function to call immediately. */ Tracker.nonreactive = function (f) { - var previous = Tracker.currentComputation - setCurrentComputation(null) + return Tracker.withComputation(null, f) +} + +/** + * @summary Helper function to make the tracker work with promises. + * @param computation Computation that tracked + * @param func async function that needs to be called and be reactive + */ +Tracker.withComputation = function (computation, f) { + var previousComputation = Tracker.currentComputation + + Tracker.currentComputation = computation + Tracker.active = !!computation + try { return f() } finally { - setCurrentComputation(previous) + Tracker.currentComputation = previousComputation + Tracker.active = !!previousComputation } } From a7c91e6e32ead441babfc306aa09b06666903bad Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Mon, 4 Nov 2024 11:01:08 +0000 Subject: [PATCH 55/81] fix: Unable to clear infinites --- packages/job-worker/src/playout/timeline/rundown.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/job-worker/src/playout/timeline/rundown.ts b/packages/job-worker/src/playout/timeline/rundown.ts index 0e7a1079865..941c0a80d97 100644 --- a/packages/job-worker/src/playout/timeline/rundown.ts +++ b/packages/job-worker/src/playout/timeline/rundown.ts @@ -334,6 +334,7 @@ function generateCurrentInfinitePieceObjects( // If the cap is a number, it is relative to the part, not the parent group so needs to be handled here if (typeof pieceInstance.resolvedEndCap === 'number') { infiniteGroup.enable.end = 
`#${timingContext.currentPartGroup.id}.start + ${pieceInstance.resolvedEndCap}` + delete infiniteGroup.enable.duration delete pieceInstanceWithUpdatedEndCap.resolvedEndCap } } else if ( From 00ec691b24776942a915585a17e6518b48b7632b Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 5 Nov 2024 12:49:55 +0000 Subject: [PATCH 56/81] feat: meteor 3.0.4 (#27) --- .github/actions/setup-meteor/action.yaml | 4 +- .github/workflows/audit.yaml | 4 +- .github/workflows/node.yaml | 32 +- .github/workflows/prerelease-libs.yml | 2 +- .node-version | 2 +- DEVELOPER.md | 6 +- meteor/.eslintignore | 1 - meteor/.eslintrc.js | 3 +- meteor/.meteor/packages | 20 +- meteor/.meteor/release | 2 +- meteor/.meteor/versions | 124 ++- meteor/Dockerfile | 34 +- meteor/Dockerfile.circle | 24 +- meteor/__mocks__/Fibers.ts | 34 - meteor/__mocks__/_setupMocks.ts | 6 - meteor/__mocks__/helpers/jest.ts | 46 -- meteor/__mocks__/helpers/lib.ts | 2 +- meteor/__mocks__/meteor.ts | 164 +--- meteor/__mocks__/mongo.ts | 181 ++++- .../__mocks__/plugins/meteor-async-await.js | 74 -- meteor/__mocks__/reactive-var.ts | 18 - meteor/__mocks__/suppressLogging.ts | 1 + meteor/eslint-rules/README.md | 3 - meteor/eslint-rules/index.js | 15 - meteor/eslint-rules/noFocusedTestRule.js | 99 --- meteor/eslint-rules/noFocusedTestRule.ts | 112 --- meteor/eslint-rules/package.json | 6 - meteor/eslint-rules/utils.js | 484 ------------ meteor/eslint-rules/utils.ts | 724 ------------------ meteor/jest.config.js | 18 +- meteor/package.json | 17 +- .../server/__tests__/_testEnvironment.test.ts | 79 +- meteor/server/__tests__/coreSystem.test.ts | 3 +- meteor/server/__tests__/cronjobs.test.ts | 64 +- meteor/server/__tests__/logging.test.ts | 5 +- meteor/server/__tests__/systemTime.test.ts | 4 +- meteor/server/api/ExternalMessageQueue.ts | 4 +- meteor/server/api/__tests__/cleanup.test.ts | 11 +- meteor/server/api/__tests__/client.test.ts | 144 ++-- .../__tests__/externalMessageQueue.test.ts | 11 +- 
meteor/server/api/__tests__/methods.test.ts | 27 - .../api/__tests__/peripheralDevice.test.ts | 60 +- .../api/__tests__/rundownLayouts.test.ts | 5 +- .../__tests__/userActions/buckets.test.ts2 | 13 +- .../api/__tests__/userActions/general.test.ts | 9 +- .../userActions/mediaManager.test.ts | 12 +- .../api/__tests__/userActions/system.test.ts | 11 +- meteor/server/api/blueprintConfigPresets.ts | 8 +- .../api/blueprints/__tests__/api.test.ts | 49 +- .../__tests__/migrationContext.test.ts | 326 ++++---- .../server/api/blueprints/migrationContext.ts | 128 ++-- .../api/deviceTriggers/StudioObserver.ts | 8 +- meteor/server/api/deviceTriggers/observer.ts | 3 +- .../mosDevice/__tests__/actions.test.ts | 7 +- meteor/server/api/ingest/rundownInput.ts | 4 +- meteor/server/api/methods.ts | 22 +- meteor/server/api/profiler/apm.ts | 52 ++ .../api/{profiler.ts => profiler/index.ts} | 6 +- meteor/server/api/rest/koa.ts | 25 + .../server/api/rest/v0/__tests__/rest.test.ts | 5 +- meteor/server/api/rest/v0/index.ts | 6 +- meteor/server/api/snapshot.ts | 1 - meteor/server/api/studio/api.ts | 4 +- meteor/server/api/system.ts | 6 +- meteor/server/api/user.ts | 17 +- meteor/server/collections/collection.ts | 66 +- .../implementations/asyncCollection.ts | 297 +++++-- .../collections/implementations/base.ts | 130 ---- .../collections/implementations/mock.ts | 122 +-- .../implementations/readonlyWrapper.ts | 12 +- meteor/server/collections/index.ts | 3 +- meteor/server/coreSystem/index.ts | 12 +- meteor/server/lib.ts | 5 +- meteor/server/lib/__tests__/lib.test.ts | 41 +- meteor/server/lib/lib.ts | 87 --- meteor/server/methods.ts | 3 +- .../migration/__tests__/migrations.test.ts | 5 +- meteor/server/migration/databaseMigration.ts | 18 +- .../upgrades/__tests__/showStyleBase.test.ts | 23 +- meteor/server/performanceMonitor.ts | 3 +- .../lib/ReactiveCacheCollection.ts | 4 + .../lib/__tests__/rundownsObserver.test.ts | 10 +- meteor/server/publications/lib/lib.ts | 4 +- 
.../server/publications/lib/observerChain.ts | 6 +- .../__tests__/checkPieceContentStatus.test.ts | 7 +- .../__tests__/publication.test.ts | 15 +- .../security/__tests__/security.test.ts | 12 +- meteor/server/security/lib/security.ts | 2 +- meteor/server/security/lib/securityVerify.ts | 6 +- meteor/server/security/system.ts | 2 +- .../server/systemStatus/__tests__/api.test.ts | 4 +- .../__tests__/systemStatus.test.ts | 15 +- .../typings/meteor-kschingiz-elastic-apm.d.ts | 355 --------- meteor/server/worker/worker.ts | 4 +- meteor/yarn.lock | 371 +++++---- package.json | 14 +- packages/blueprints-integration/package.json | 2 +- .../blueprints-integration/src/migrations.ts | 18 +- packages/corelib/package.json | 2 +- packages/documentation/package.json | 2 +- packages/is_node_14.js | 5 - packages/job-worker/package.json | 4 +- .../__tests__/externalMessageQueue.test.ts | 24 +- .../lookahead/__tests__/lookahead.test.ts | 2 +- .../job-worker/src/playout/timings/events.ts | 2 +- packages/live-status-gateway/Dockerfile | 4 +- .../live-status-gateway/Dockerfile.circle | 2 +- packages/live-status-gateway/package.json | 2 +- packages/meteor-lib/package.json | 2 +- packages/mos-gateway/Dockerfile | 4 +- packages/mos-gateway/Dockerfile.circle | 2 +- packages/mos-gateway/package.json | 2 +- packages/openapi/package.json | 2 +- packages/package.json | 4 +- packages/playout-gateway/Dockerfile | 4 +- packages/playout-gateway/Dockerfile.circle | 2 +- packages/playout-gateway/package.json | 2 +- packages/playout-gateway/src/tsrHandler.ts | 2 +- packages/server-core-integration/package.json | 2 +- .../src/lib/methods.ts | 2 +- .../server-core-integration/src/lib/ping.ts | 2 +- .../src/lib/watchDog.ts | 4 +- packages/shared-lib/package.json | 2 +- packages/webui/.eslintrc.cjs | 1 - packages/webui/package.json | 2 +- packages/webui/src/__mocks__/mongo.ts | 2 +- .../data/mos/__tests__/plugin-support.test.ts | 2 + packages/webui/src/client/lib/viewPort.ts | 2 +- 
.../src/client/ui/Prompter/PrompterView.tsx | 2 +- .../Parts/SegmentTimelinePart.tsx | 2 +- .../ui/SegmentTimeline/SegmentTimeline.tsx | 2 +- .../ui/SegmentTimeline/SourceLayerItem.tsx | 2 +- .../ui/Shelf/TimelineDashboardPanel.tsx | 2 +- packages/webui/src/meteor/meteor.js | 7 - packages/webui/src/meteor/tracker.js | 13 - packages/yarn.lock | 324 ++++---- scripts/fixTestFibers.js | 21 - scripts/run.mjs | 4 +- sonar-project.properties | 2 +- 139 files changed, 1590 insertions(+), 3966 deletions(-) delete mode 100644 meteor/__mocks__/Fibers.ts delete mode 100644 meteor/__mocks__/plugins/meteor-async-await.js delete mode 100644 meteor/__mocks__/reactive-var.ts delete mode 100644 meteor/eslint-rules/README.md delete mode 100644 meteor/eslint-rules/index.js delete mode 100644 meteor/eslint-rules/noFocusedTestRule.js delete mode 100644 meteor/eslint-rules/noFocusedTestRule.ts delete mode 100644 meteor/eslint-rules/package.json delete mode 100644 meteor/eslint-rules/utils.js delete mode 100644 meteor/eslint-rules/utils.ts delete mode 100644 meteor/server/api/__tests__/methods.test.ts create mode 100644 meteor/server/api/profiler/apm.ts rename meteor/server/api/{profiler.ts => profiler/index.ts} (68%) delete mode 100644 meteor/server/collections/implementations/base.ts delete mode 100644 meteor/server/typings/meteor-kschingiz-elastic-apm.d.ts delete mode 100644 packages/is_node_14.js delete mode 100644 scripts/fixTestFibers.js diff --git a/.github/actions/setup-meteor/action.yaml b/.github/actions/setup-meteor/action.yaml index 52d26fcaa13..b96960585d7 100644 --- a/.github/actions/setup-meteor/action.yaml +++ b/.github/actions/setup-meteor/action.yaml @@ -3,7 +3,5 @@ description: "Setup Meteor" runs: using: "composite" steps: - - run: curl "https://install.meteor.com/?release=2.13.3" | sh - shell: bash - - run: meteor npm install -g yarn + - run: curl "https://install.meteor.com/?release=3.0.4" | sh shell: bash diff --git a/.github/workflows/audit.yaml 
b/.github/workflows/audit.yaml index 223cb181554..c236b7dfb16 100644 --- a/.github/workflows/audit.yaml +++ b/.github/workflows/audit.yaml @@ -29,7 +29,7 @@ jobs: run: | yarn cd meteor - meteor yarn validate:prod-dependencies + yarn validate:prod-dependencies env: CI: true @@ -57,7 +57,7 @@ jobs: run: | yarn cd meteor - meteor yarn run validate:all-dependencies + yarn run validate:all-dependencies env: CI: true diff --git a/.github/workflows/node.yaml b/.github/workflows/node.yaml index bed7511205b..2573f57bc15 100644 --- a/.github/workflows/node.yaml +++ b/.github/workflows/node.yaml @@ -46,7 +46,7 @@ jobs: # setup zodern:types. No linters are setup, so this simply installs the packages meteor lint - meteor yarn ci:lint + yarn ci:lint env: CI: true @@ -85,7 +85,7 @@ jobs: # setup zodern:types. No linters are setup, so this simply installs the packages meteor lint - NODE_OPTIONS="--max-old-space-size=6144" meteor yarn unitci --force-exit + NODE_OPTIONS="--max-old-space-size=6144" yarn unitci --force-exit env: CI: true - name: Send coverage @@ -480,38 +480,30 @@ jobs: - blueprints-integration - server-core-integration - shared-lib - node-version: [14.x, 18.x, 20.x, 22.x] + - openapi + node-version: [20.x, 22.x] include: # include additional configs, to run certain packages only for a certain version of node - - node-version: 14.x + - node-version: 20.x package-name: corelib send-coverage: true - - node-version: 14.x + - node-version: 20.x package-name: job-worker send-coverage: true - # manual openapi to avoid testing for 14.x - - node-version: 18.x - package-name: openapi - - node-version: 20.x - package-name: openapi - - node-version: 22.x - package-name: openapi # No tests for the gateways yet - # - node-version: 18.x + # - node-version: 20.x # package-name: playout-gateway - # - node-version: 18.x + # - node-version: 20.x # package-name: mos-gateway - - node-version: 18.x + - node-version: 20.x package-name: live-status-gateway send-coverage: true - - 
node-version: 18.x + - node-version: 20.x package-name: webui # manual meteor-lib as it only needs a couple of versions - - node-version: 18.x + - node-version: 20.x package-name: meteor-lib send-coverage: true - - node-version: 14.x - package-name: meteor-lib steps: - uses: actions/checkout@v4 @@ -531,7 +523,7 @@ jobs: run: | cd packages yarn config set cacheFolder /home/runner/test-packages-cache - node is_node_14.js && yarn lerna run --ignore openapi install || yarn install + yarn install yarn lerna run --scope \*\*/${{ matrix.package-name }} --include-dependencies --stream build env: CI: true diff --git a/.github/workflows/prerelease-libs.yml b/.github/workflows/prerelease-libs.yml index e9750028c49..7ca1a31f2a0 100644 --- a/.github/workflows/prerelease-libs.yml +++ b/.github/workflows/prerelease-libs.yml @@ -53,7 +53,7 @@ jobs: - blueprints-integration - server-core-integration - shared-lib - node-version: [14.x, 18.x, 20.x, 22.x] + node-version: [20.x, 22.x] steps: - uses: actions/checkout@v4 diff --git a/.node-version b/.node-version index b492b086355..10fef252a9f 100644 --- a/.node-version +++ b/.node-version @@ -1 +1 @@ -18.16 +20.18 diff --git a/DEVELOPER.md b/DEVELOPER.md index 43b1749c7fd..c12785b412d 100644 --- a/DEVELOPER.md +++ b/DEVELOPER.md @@ -18,10 +18,8 @@ Follow these instructions to start up Sofie Core in development mode. 
(For produ ### Prerequisites -- Install [Node.js](https://nodejs.org) 14 (using [nvm](https://github.com/nvm-sh/nvm) or [nvm-windows](https://github.com/coreybutler/nvm-windows) is the recommended way to install Node.js) -- Install [Meteor](https://www.meteor.com/install) (`npm install --global meteor@2`) -- Install [Node.js](https://nodejs.org) 18 (using the same method you used above, you can uninstall node 14 if needed) -- Install an older version of corepack (`npm install --global corepack@0.15.3`) +- Install [Node.js](https://nodejs.org) 20 (using [nvm](https://github.com/nvm-sh/nvm) or [nvm-windows](https://github.com/coreybutler/nvm-windows) is the recommended way to install Node.js) +- Install [Meteor](https://www.meteor.com/install) (`npm install --global meteor`) - Enable [corepack](https://nodejs.org/api/corepack.html#corepack) (`corepack enable`) as administrator/root. If `corepack` is not found, you may need to install it first with `npm install --global corepack` - If on Windows, you may need to `npm install --global windows-build-tools` but this is not always necessary diff --git a/meteor/.eslintignore b/meteor/.eslintignore index 8b3e1e2f6b0..e2a1ee2fca3 100644 --- a/meteor/.eslintignore +++ b/meteor/.eslintignore @@ -1,6 +1,5 @@ .meteor public -eslint-rules scripts server/_force_restart.js /packages/ diff --git a/meteor/.eslintrc.js b/meteor/.eslintrc.js index dedc9026767..a6d6491a5a4 100644 --- a/meteor/.eslintrc.js +++ b/meteor/.eslintrc.js @@ -20,7 +20,7 @@ const tmpRules = { } const tsBase = { - extends: [...tsExtends, 'plugin:custom-rules/all'], + extends: [...tsExtends], plugins: tsPlugins, ...tsParser, settings: { @@ -50,7 +50,6 @@ const tsBase = { allowModules: ['meteor', 'mongodb'], }, ], - 'jest/no-standalone-expect': 'off', // testInFiber confuses the rule ...tmpRules, }, } diff --git a/meteor/.meteor/packages b/meteor/.meteor/packages index 4e5355b0700..8d1724b1db7 100644 --- a/meteor/.meteor/packages +++ b/meteor/.meteor/packages @@ 
-8,19 +8,17 @@ # but you can also edit it by hand. -meteor-base@1.5.1 # Packages every Meteor app needs to have -mongo@1.16.10 # The database Meteor supports right now -reactive-var@1.0.12 # Reactive variable for tracker +meteor@2.0.1 +webapp@2.0.3 +ddp@1.4.2 -ecmascript@0.16.8 # Enable ECMAScript2015+ syntax in app code -typescript@4.9.5 # Enable TypeScript syntax in .ts and .tsx modules -shell-server@0.5.0 # Server-side component of the `meteor shell` command +mongo@2.0.2 # The database Meteor supports right now -tracker@1.3.3 # Meteor's client-side reactive programming library +ecmascript@0.16.9 # Enable ECMAScript2015+ syntax in app code +typescript@5.4.3 # Enable TypeScript syntax in .ts and .tsx modules -dynamic-import@0.7.3 -ostrio:meteor-root -accounts-password@2.4.0 +tracker@1.3.4 # Meteor's client-side reactive programming library + +accounts-password@3.0.2 -julusian:meteor-elastic-apm@2.5.2 zodern:types diff --git a/meteor/.meteor/release b/meteor/.meteor/release index 5152abe9d58..b1e86a359f7 100644 --- a/meteor/.meteor/release +++ b/meteor/.meteor/release @@ -1 +1 @@ -METEOR@2.16 +METEOR@3.0.4 diff --git a/meteor/.meteor/versions b/meteor/.meteor/versions index 23b868e06f6..6048cd78971 100644 --- a/meteor/.meteor/versions +++ b/meteor/.meteor/versions @@ -1,65 +1,59 @@ -accounts-base@2.2.11 -accounts-password@2.4.0 -allow-deny@1.1.1 -autoupdate@1.8.0 -babel-compiler@7.10.5 -babel-runtime@1.5.1 -base64@1.0.12 -binary-heap@1.0.11 -boilerplate-generator@1.7.2 -callback-hook@1.5.1 -check@1.4.1 -ddp@1.4.1 -ddp-client@2.6.2 -ddp-common@1.4.1 -ddp-rate-limiter@1.2.1 -ddp-server@2.7.1 -diff-sequence@1.1.2 -dynamic-import@0.7.3 -ecmascript@0.16.8 -ecmascript-runtime@0.8.1 -ecmascript-runtime-client@0.12.1 -ecmascript-runtime-server@0.11.0 -ejson@1.1.3 -email@2.2.6 -es5-shim@4.8.0 -fetch@0.1.4 -geojson-utils@1.0.11 -hot-code-push@1.0.4 -id-map@1.1.1 -inter-process-messaging@0.1.1 -julusian:meteor-elastic-apm@2.5.2 -kschingiz:meteor-measured@1.0.3 
-localstorage@1.2.0 -logging@1.3.4 -meteor@1.11.5 -meteor-base@1.5.1 -minimongo@1.9.4 -modern-browsers@0.1.10 -modules@0.20.0 -modules-runtime@0.13.1 -mongo@1.16.10 -mongo-decimal@0.1.3 -mongo-dev-server@1.1.0 -mongo-id@1.0.8 -npm-mongo@4.17.2 -ordered-dict@1.1.0 -ostrio:meteor-root@1.1.1 -promise@0.12.2 -random@1.2.1 -rate-limit@1.1.1 -react-fast-refresh@0.2.8 -reactive-var@1.0.12 -reload@1.3.1 -retry@1.1.0 -routepolicy@1.1.1 -sha@1.0.9 -shell-server@0.5.0 -socket-stream-client@0.5.2 -tracker@1.3.3 -typescript@4.9.5 -underscore@1.6.1 -url@1.3.2 -webapp@1.13.8 -webapp-hashing@1.1.1 -zodern:types@1.0.9 +accounts-base@3.0.3 +accounts-password@3.0.2 +allow-deny@2.0.0 +babel-compiler@7.11.1 +babel-runtime@1.5.2 +base64@1.0.13 +binary-heap@1.0.12 +boilerplate-generator@2.0.0 +callback-hook@1.6.0 +check@1.4.4 +core-runtime@1.0.0 +ddp@1.4.2 +ddp-client@3.0.2 +ddp-common@1.4.4 +ddp-rate-limiter@1.2.2 +ddp-server@3.0.2 +diff-sequence@1.1.3 +dynamic-import@0.7.4 +ecmascript@0.16.9 +ecmascript-runtime@0.8.3 +ecmascript-runtime-client@0.12.2 +ecmascript-runtime-server@0.11.1 +ejson@1.1.4 +email@3.1.0 +facts-base@1.0.2 +fetch@0.1.5 +geojson-utils@1.0.12 +id-map@1.2.0 +inter-process-messaging@0.1.2 +localstorage@1.2.1 +logging@1.3.5 +meteor@2.0.1 +minimongo@2.0.1 +modern-browsers@0.1.11 +modules@0.20.2 +modules-runtime@0.13.2 +mongo@2.0.2 +mongo-decimal@0.1.4 +mongo-dev-server@1.1.1 +mongo-id@1.0.9 +npm-mongo@4.17.4 +ordered-dict@1.2.0 +promise@1.0.0 +random@1.2.2 +rate-limit@1.1.2 +react-fast-refresh@0.2.9 +reactive-var@1.0.13 +reload@1.3.2 +retry@1.1.1 +routepolicy@1.1.2 +sha@1.0.10 +socket-stream-client@0.5.3 +tracker@1.3.4 +typescript@5.4.3 +underscore@1.6.4 +url@1.3.4 +webapp@2.0.3 +webapp-hashing@1.1.2 +zodern:types@1.0.13 diff --git a/meteor/Dockerfile b/meteor/Dockerfile index 10b06912e1a..cee205aede8 100644 --- a/meteor/Dockerfile +++ b/meteor/Dockerfile @@ -1,7 +1,7 @@ # syntax=docker/dockerfile:experimental # BUILD WEBUI -FROM node:18 +FROM node:20 COPY packages 
/opt/core/packages WORKDIR /opt/core/packages @@ -14,8 +14,8 @@ RUN yarn install && yarn build # RUN yarn workspaces focus --production @sofie-automation/job-worker @sofie-automation/corelib # BUILD IMAGE -FROM meteor/node:14.21.4 -RUN curl "https://install.meteor.com/?release=2.13.3" | sh +FROM node:20 +RUN curl "https://install.meteor.com/?release=3.0.4" | sh # Temporary change the NODE_ENV env variable, so that all libraries are installed: ENV NODE_ENV_TMP $NODE_ENV @@ -37,8 +37,8 @@ RUN rm -R /opt/core/packages/webui # Force meteor to setup the runtime RUN meteor --version --allow-superuser -RUN meteor corepack enable -RUN meteor yarn install +RUN corepack enable +RUN yarn install # Restore the NODE_ENV variable: ENV NODE_ENV $NODE_ENV_TMP @@ -50,29 +50,9 @@ RUN npm install RUN mv /opt/bundle/programs/web.browser/assets /opt/bundle/programs/web.browser/app/assets || true # DEPLOY IMAGE -FROM alpine:3.19 - -ENV NODE_VERSION=14.21.4 -ENV NODE_URL="https://static.meteor.com/dev-bundle-node-os/unofficial-builds/v${NODE_VERSION}/node-v${NODE_VERSION}-linux-x64.tar.gz" -ENV DIR_NODE=/usr/local - -RUN apk add --no-cache \ - libstdc++ \ - && apk add --no-cache --virtual .build-deps-full \ - binutils-gold \ - curl \ - gnupg \ - xz - -RUN echo $NODE_URL \ - && curl -sSL "$NODE_URL" | tar -xz -C /usr/local/ && mv $DIR_NODE/node-v${NODE_VERSION}-linux-x64 $DIR_NODE/v$NODE_VERSION - -# add node and npm to path so the commands are available -ENV NODE_PATH $DIR_NODE/v$NODE_VERSION/lib/node_modules -ENV PATH $DIR_NODE/v$NODE_VERSION/bin:$PATH +FROM node:20-alpine -# confirm installation -RUN node -v && npm -v +RUN apk add --no-cache tzdata COPY --from=1 /opt/bundle /opt/core COPY meteor/docker-entrypoint.sh /opt diff --git a/meteor/Dockerfile.circle b/meteor/Dockerfile.circle index 9456265025b..1e39e80f817 100644 --- a/meteor/Dockerfile.circle +++ b/meteor/Dockerfile.circle @@ -1,27 +1,7 @@ # DEPLOY IMAGE -FROM alpine:3.19 +FROM node:20-alpine -ENV NODE_VERSION=14.21.4 -ENV 
NODE_URL="https://static.meteor.com/dev-bundle-node-os/unofficial-builds/v${NODE_VERSION}/node-v${NODE_VERSION}-linux-x64.tar.gz" -ENV DIR_NODE=/usr/local - -RUN apk add --no-cache \ - libstdc++ \ - && apk add --no-cache --virtual .build-deps-full \ - binutils-gold \ - curl \ - gnupg \ - xz - -RUN echo $NODE_URL \ - && curl -sSL "$NODE_URL" | tar -xz -C /usr/local/ && mv $DIR_NODE/node-v${NODE_VERSION}-linux-x64 $DIR_NODE/v$NODE_VERSION - -# add node and npm to path so the commands are available -ENV NODE_PATH $DIR_NODE/v$NODE_VERSION/lib/node_modules -ENV PATH $DIR_NODE/v$NODE_VERSION/bin:$PATH - -# confirm installation -RUN node -v && npm -v +RUN apk add --no-cache tzdata COPY meteor/bundle /opt/core COPY meteor/docker-entrypoint.sh /opt diff --git a/meteor/__mocks__/Fibers.ts b/meteor/__mocks__/Fibers.ts deleted file mode 100644 index d17290fce69..00000000000 --- a/meteor/__mocks__/Fibers.ts +++ /dev/null @@ -1,34 +0,0 @@ -let Fiber: any -try { - Fiber = require('fibers-npm') -} catch (e: any) { - if (e.toString().match(/Missing binary/)) { - // Temporary workaround: - throw Error(` -Note: When you get the "Missing binary"-error when running in Jest -be sure you have run npm install (so that the postInstall script has run) -and that you ran npm install with the correct Node version - -Original error: -${e.toString()}`) - // Head over to - // meteor/node_modules/fibers/fibers.js - // and add this line to line 13: - // if (process.env.JEST_WORKER_ID !== undefined ) modPath += '.node' - } else throw e -} -/** - * Run function in a Fiber - * Example Jest test: - * test('tempTestAsync', async () => { - * await runInFiber(() => { - * // This code runs in a fiber - * const val = tempTestAsync(1,2,3) - * expect(val).toEqual(1 + 2 + 3) - * }) - * }) - */ -export function isInFiber(): boolean { - return !!Fiber.current -} -export { Fiber } diff --git a/meteor/__mocks__/_setupMocks.ts b/meteor/__mocks__/_setupMocks.ts index c869b5d3e4e..b4508a82bb1 100644 --- 
a/meteor/__mocks__/_setupMocks.ts +++ b/meteor/__mocks__/_setupMocks.ts @@ -1,15 +1,10 @@ import { setLogLevel } from '../server/logging' -import { Fiber } from './Fibers' import { resetRandomId } from './random' -import { makeCompatible } from 'meteor-promise' import { LogLevel } from '../server/lib/tempLib' import { SupressLogMessages } from './suppressLogging' // This file is run before all tests start. -// Set up how Meteor handles Promises & Fibers: -makeCompatible(Promise, Fiber) - // 'Mock' the random string generator jest.mock('nanoid', (...args) => require('./random').setup(args), { virtual: true }) @@ -21,7 +16,6 @@ jest.mock('meteor/check', (...args) => require('./check').setup(args), { virtual jest.mock('meteor/tracker', (...args) => require('./tracker').setup(args), { virtual: true }) jest.mock('meteor/accounts-base', (...args) => require('./accounts-base').setup(args), { virtual: true }) jest.mock('meteor/ejson', (...args) => require('./ejson').setup(args), { virtual: true }) -jest.mock('meteor/reactive-var', (...args) => require('./reactive-var').setup(args), { virtual: true }) jest.mock('meteor/mdg:validated-method', (...args) => require('./validated-method').setup(args), { virtual: true }) jest.mock('meteor/julusian:meteor-elastic-apm', (...args) => require('./meteor-elastic-apm').setup(args), { diff --git a/meteor/__mocks__/helpers/jest.ts b/meteor/__mocks__/helpers/jest.ts index 192fd073e95..1ed979896d8 100644 --- a/meteor/__mocks__/helpers/jest.ts +++ b/meteor/__mocks__/helpers/jest.ts @@ -1,47 +1,3 @@ -/* eslint-disable jest/no-export, jest/valid-title, jest/expect-expect, jest/no-focused-tests */ -import { runInFiber } from '../meteor' - -export function beforeAllInFiber(fcn: () => void | Promise, timeout?: number): void { - beforeAll(async () => { - await runInFiber(fcn) - }, timeout) -} -export function afterAllInFiber(fcn: () => void | Promise, timeout?: number): void { - afterAll(async () => { - await runInFiber(fcn) - }, timeout) -} 
-export function beforeEachInFiber(fcn: () => void | Promise, timeout?: number): void { - beforeEach(async () => { - await runInFiber(fcn) - }, timeout) -} -export function afterEachInFiber(fcn: () => void | Promise, timeout?: number): void { - afterEach(async () => { - await runInFiber(fcn) - }, timeout) -} - -export function testInFiber(testName: string, fcn: () => void | Promise, timeout?: number): void { - test( - testName, - async () => { - await runInFiber(fcn) - }, - timeout - ) -} - -export function testInFiberOnly(testName: string, fcn: () => void | Promise, timeout?: number): void { - // eslint-disable-next-line custom-rules/no-focused-test - test.only( - testName, - async () => { - await runInFiber(fcn) - }, - timeout - ) -} const orgSetTimeout = setTimeout const DateOrg = Date export async function runAllTimers(): Promise { @@ -98,5 +54,3 @@ export async function waitUntil(expectFcn: () => void | Promise, maxWaitTi } } } - -// testInFiber.only = testInFiberOnly diff --git a/meteor/__mocks__/helpers/lib.ts b/meteor/__mocks__/helpers/lib.ts index 5424c55a433..06b0adc2417 100644 --- a/meteor/__mocks__/helpers/lib.ts +++ b/meteor/__mocks__/helpers/lib.ts @@ -24,7 +24,7 @@ const METHOD_NAMES = [ 'remove', 'update', 'upsert', - '_ensureIndex', + 'createIndex', 'findFetchAsync', 'findOneAsync', 'insertAsync', diff --git a/meteor/__mocks__/meteor.ts b/meteor/__mocks__/meteor.ts index 693aab07e3f..1b2ec694180 100644 --- a/meteor/__mocks__/meteor.ts +++ b/meteor/__mocks__/meteor.ts @@ -1,6 +1,3 @@ -import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import * as _ from 'underscore' -import { Fiber } from './Fibers' import { MongoMock } from './mongo' let controllableDefer = false @@ -167,7 +164,10 @@ export namespace MeteorMock { export function methods(addMethods: { [name: string]: Function }): void { Object.assign(mockMethods, addMethods) } - export function call(methodName: string, ...args: any[]): any { + export function 
call(_methodName: string, ..._args: any[]): any { + throw new Error(500, `Meteor.call should not be used, use Meteor.callAsync instead`) + } + export async function callAsync(methodName: string, ...args: any[]): Promise { const fcn: Function = mockMethods[methodName] if (!fcn) { console.log(methodName) @@ -176,26 +176,10 @@ export namespace MeteorMock { throw new Error(404, `Method '${methodName}' not found`) } - const lastArg = args.length > 0 && args[args.length - 1] - if (lastArg && typeof lastArg === 'function') { - const callback = args.pop() + // Defer + await sleepNoFakeTimers(0) - defer(() => { - try { - Promise.resolve(fcn.call(getMethodContext(), ...args)) - .then((result) => { - callback(undefined, result) - }) - .catch((e) => { - callback(e) - }) - } catch (e) { - callback(e) - } - }) - } else { - return waitForPromiseLocal(Promise.resolve(fcn.call(getMethodContext(), ...args))) - } + return fcn.call(getMethodContext(), ...args) } export function apply( methodName: string, @@ -213,12 +197,29 @@ export namespace MeteorMock { // but it'll do for now: call(methodName, ...args, asyncCallback) } + export async function applyAsync( + methodName: string, + args: any[], + _options?: { + wait?: boolean + onResultReceived?: Function + returnStubValue?: boolean + throwStubExceptions?: boolean + } + ): Promise { + // ? + // This is a bad mock, since it doesn't support any of the options.. 
+ // but it'll do for now: + return callAsync(methodName, ...args) + } export function absoluteUrl(path?: string): string { return path + '' // todo } export function setTimeout(fcn: () => void | Promise, time: number): number { return $.setTimeout(() => { - runInFiber(fcn).catch(console.error) + Promise.resolve() + .then(async () => fcn()) + .catch(console.error) }, time) as number } export function clearTimeout(timer: number): void { @@ -226,7 +227,9 @@ export namespace MeteorMock { } export function setInterval(fcn: () => void | Promise, time: number): number { return $.setInterval(() => { - runInFiber(fcn).catch(console.error) + Promise.resolve() + .then(async () => fcn()) + .catch(console.error) }, time) as number } export function clearInterval(timer: number): void { @@ -234,7 +237,9 @@ export namespace MeteorMock { } export function defer(fcn: () => void | Promise): void { return (controllableDefer ? $.setTimeout : $.orgSetTimeout)(() => { - runInFiber(fcn).catch(console.error) + Promise.resolve() + .then(async () => fcn()) + .catch(console.error) }, 0) } @@ -242,43 +247,13 @@ export namespace MeteorMock { mockStartupFunctions.push(fcn) } - export function wrapAsync(fcn: Function, context?: Object): any { - return (...args: any[]) => { - const fiber = Fiber.current - if (!fiber) throw new Error(500, `It appears that wrapAsync isn't running in a fiber`) - - const callback = (err: any, value: any) => { - if (err) { - fiber.throwInto(err) - } else { - fiber.run(value) - } - } - fcn.apply(context, [...args, callback]) - - const returnValue = Fiber.yield() - return returnValue - } - } - export function publish(publicationName: string, handler: Function): any { publications[publicationName] = handler } export function bindEnvironment(fcn: Function): any { - { - // the outer bindEnvironment must be called from a fiber - const fiber = Fiber.current - if (!fiber) throw new Error(500, `It appears that bindEnvironment isn't running in a fiber`) - } - return (...args: 
any[]) => { - const fiber = Fiber.current - if (fiber) { - return fcn(...args) - } else { - return runInFiber(() => fcn(...args)).catch(console.error) - } + return fcn(...args) } } export let users: MongoMock.Collection | undefined = undefined @@ -287,12 +262,12 @@ export namespace MeteorMock { /** * Run the Meteor.startup() functions */ - export function mockRunMeteorStartup(): void { - _.each(mockStartupFunctions, (fcn) => { - fcn() - }) + export async function mockRunMeteorStartup(): Promise { + for (const fcn of mockStartupFunctions) { + await fcn() + } - waitTimeNoFakeTimers(10) // So that any observers or defers has had time to run. + await waitTimeNoFakeTimers(10) // So that any observers or defers has had time to run. } export function mockLoginUser(newUser: Meteor.User): void { mockUser = newUser @@ -310,25 +285,6 @@ export namespace MeteorMock { return publications } - // locally defined function here, so there are no import to the rest of the code - const waitForPromiseLocal: (p: Promise) => T = wrapAsync(function waitForPromises( - p: Promise, - cb: (err: any | null, result?: any) => T - ) { - if (cb === undefined && typeof p === 'function') { - cb = p as any - p = undefined as any - } - - Promise.resolve(p) - .then((result) => { - cb(null, result) - }) - .catch((e) => { - cb(e) - }) - }) - /** Wait for time to pass ( unaffected by jest.useFakeTimers() ) */ export async function sleepNoFakeTimers(time: number): Promise { return new Promise((resolve) => $.orgSetTimeout(resolve, time)) @@ -341,48 +297,6 @@ export function setup(): any { } /** Wait for time to pass ( unaffected by jest.useFakeTimers() ) */ -export function waitTimeNoFakeTimers(time: number): void { - waitForPromise(MeteorMock.sleepNoFakeTimers(time)) -} -export const waitForPromise: (p: Promise) => T = MeteorMock.wrapAsync(function waitForPromises( - p: Promise, - cb: (err: any | null, result?: any) => T -) { - if (MeteorMock.isClient) throw new MeteorMock.Error(500, `waitForPromise can't 
be used client-side`) - if (cb === undefined && typeof p === 'function') { - cb = p as any - p = undefined as any - } - - Promise.resolve(p) - .then((result) => { - cb(null, result) - }) - .catch((e) => { - cb(e) - }) -}) - -export async function runInFiber(fcn: () => T | Promise): Promise { - return new Promise((resolve, reject) => { - Fiber(() => { - try { - // Run the function - const out = fcn() - if (out instanceof Promise) { - out.then(resolve).catch((e) => { - console.log('Error: ' + e) - reject(e) - }) - } else { - // the function has finished - resolve(out) - } - } catch (e: any) { - // Note: we cannot use - console.log('Error: ' + stringifyError(e)) - reject(e) - } - }).run() - }) +export async function waitTimeNoFakeTimers(time: number): Promise { + return MeteorMock.sleepNoFakeTimers(time) } diff --git a/meteor/__mocks__/mongo.ts b/meteor/__mocks__/mongo.ts index cf17069139f..d39e071ef09 100644 --- a/meteor/__mocks__/mongo.ts +++ b/meteor/__mocks__/mongo.ts @@ -1,7 +1,6 @@ /* eslint-disable @typescript-eslint/explicit-module-boundary-types */ import * as _ from 'underscore' import { literal, ProtectedString, unprotectString, protectString, getRandomString } from '../server/lib/tempLib' -import { sleep } from '../server/lib/lib' import { RandomMock } from './random' import { MeteorMock } from './meteor' import { Random } from 'meteor/random' @@ -10,21 +9,19 @@ import type { AnyBulkWriteOperation } from 'mongodb' import { FindOneOptions, FindOptions, + MongoCursor, MongoReadOnlyCollection, ObserveCallbacks, ObserveChangesCallbacks, UpdateOptions, UpsertOptions, } from '@sofie-automation/meteor-lib/dist/collections/lib' -import { - mongoWhere, - mongoFindOptions, - mongoModify, - MongoQuery, - MongoModifier, -} from '@sofie-automation/corelib/dist/mongo' -import { Mongo } from 'meteor/mongo' +import { mongoWhere, mongoFindOptions, mongoModify, MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { AsyncOnlyMongoCollection, 
AsyncOnlyReadOnlyMongoCollection } from '../server/collections/collection' +import type { + MinimalMeteorMongoCollection, + MinimalMongoCursor, +} from '../server/collections/implementations/asyncCollection' const clone = require('fast-clone') export namespace MongoMock { @@ -46,9 +43,9 @@ export namespace MongoMock { } const mockCollections: MockCollections = {} - export type MongoCollection = {} - export class Collection implements MongoCollection { + export class Collection implements Omit, 'find'> { public _name: string + private _isTemporaryCollection: boolean private _options: any = {} // @ts-expect-error used in test to check that it's a mock private _isMock = true as const @@ -59,11 +56,15 @@ export namespace MongoMock { constructor(name: string | null, options?: { transform?: never }) { this._options = options || {} this._name = name || getRandomString() // If `null`, then its an in memory unique collection + this._isTemporaryCollection = name === null if (this._options.transform) throw new Error('document transform is no longer supported') } - find(query: any, options?: FindOptions) { + find( + query: any, + options?: FindOptions + ): MinimalMongoCursor & { _fetchRaw: () => T[] } & Pick, 'fetch' | 'forEach'> { if (_.isString(query)) query = { _id: query } query = query || {} @@ -96,13 +97,28 @@ export namespace MongoMock { _fetchRaw: () => { return docs }, + fetchAsync: async () => { + // Force this to be performed async + await MeteorMock.sleepNoFakeTimers(0) + + return clone(docs) + }, fetch: () => { + if (!this._isTemporaryCollection) + throw new Meteor.Error(500, 'sync methods can only be used for unnamed collections') + return clone(docs) }, - count: () => { + countAsync: async () => { + // Force this to be performed async + await MeteorMock.sleepNoFakeTimers(0) + return docs.length }, - observe(clbs: ObserveCallbacks): Meteor.LiveQueryHandle { + async observeAsync(clbs: ObserveCallbacks): Promise { + // Force this to be performed async + await 
MeteorMock.sleepNoFakeTimers(0) + const id = Random.id(5) observers.push( literal>({ @@ -117,7 +133,10 @@ export namespace MongoMock { }, } }, - observeChanges(clbs: ObserveChangesCallbacks): Meteor.LiveQueryHandle { + async observeChangesAsync(clbs: ObserveChangesCallbacks): Promise { + // Force this to be performed async + await MeteorMock.sleepNoFakeTimers(0) + // todo - finish implementing uses of callbacks const id = Random.id(5) observers.push( @@ -133,18 +152,43 @@ export namespace MongoMock { }, } }, - forEach(f: any) { + forEach: (f: any) => { + if (!this._isTemporaryCollection) + throw new Meteor.Error(500, 'sync methods can only be used for unnamed collections') + docs.forEach(f) }, - map(f: any) { - return docs.map(f) - }, + // async mapAsync(f: any) { + // return docs.map(f) + // }, } } + async findOneAsync(query: MongoQuery, options?: FindOneOptions) { + const docs = await this.find(query, options).fetchAsync() + return docs[0] + } findOne(query: MongoQuery, options?: FindOneOptions) { - return this.find(query, options).fetch()[0] + if (!this._isTemporaryCollection) + throw new Meteor.Error(500, 'sync methods can only be used for unnamed collections') + + const docs = this.find(query, options).fetch() + return docs[0] + } + + async updateAsync(query: any, modifier: any, options?: UpdateOptions): Promise { + // Force this to be performed async + await MeteorMock.sleepNoFakeTimers(0) + + return this.updateRaw(query, modifier, options) + } + update(query: any, modifier: any, options?: UpdateOptions): number { + if (!this._isTemporaryCollection) + throw new Meteor.Error(500, 'sync methods can only be used for unnamed collections') + + return this.updateRaw(query, modifier, options) } - update(query: MongoQuery, modifier: MongoModifier, options?: UpdateOptions): number { + + private updateRaw(query: any, modifier: any, options?: UpdateOptions): number { const unimplementedUsedOptions = _.without(_.keys(options), 'multi') if (unimplementedUsedOptions.length 
> 0) { throw new Error(`update being performed using unimplemented options: ${unimplementedUsedOptions}`) @@ -178,7 +222,20 @@ export namespace MongoMock { return docs.length } - insert(doc: T): T['_id'] { + + async insertAsync(doc: any): Promise { + // Force this to be performed async + await MeteorMock.sleepNoFakeTimers(0) + + return this.insertRaw(doc) + } + insert(doc: any): string { + if (!this._isTemporaryCollection) + throw new Meteor.Error(500, 'sync methods can only be used for unnamed collections') + + return this.insertRaw(doc) + } + private insertRaw(doc: any): string { const d = _.clone(doc) if (!d._id) d._id = protectString(RandomMock.id()) @@ -207,25 +264,59 @@ export namespace MongoMock { return d._id } + + async upsertAsync( + query: any, + modifier: any, + options?: UpsertOptions + ): Promise<{ numberAffected: number | undefined; insertedId: string | undefined }> { + // Force this to be performed async + await MeteorMock.sleepNoFakeTimers(0) + + return this.upsertRaw(query, modifier, options) + } upsert( query: any, - modifier: MongoModifier, + modifier: any, + options?: UpsertOptions + ): { numberAffected: number | undefined; insertedId: string | undefined } { + if (!this._isTemporaryCollection) + throw new Meteor.Error(500, 'sync methods can only be used for unnamed collections') + + return this.upsertRaw(query, modifier, options) + } + private upsertRaw( + query: any, + modifier: any, options?: UpsertOptions - ): { numberAffected: number | undefined; insertedId: T['_id'] | undefined } { + ): { numberAffected: number | undefined; insertedId: string | undefined } { const id = _.isString(query) ? 
query : query._id const docs = this.find(id)._fetchRaw() if (docs.length) { - const count = this.update(docs[0]._id, modifier, options) + const count = this.updateRaw(docs[0]._id, modifier, options) return { insertedId: undefined, numberAffected: count } } else { const doc = mongoModify(query, { _id: id } as any, modifier) - const insertedId = this.insert(doc) + const insertedId = this.insertRaw(doc) return { insertedId: insertedId, numberAffected: undefined } } } + + async removeAsync(query: any): Promise { + // Force this to be performed async + await MeteorMock.sleepNoFakeTimers(0) + + return this.removeRaw(query) + } remove(query: any): number { + if (!this._isTemporaryCollection) + throw new Meteor.Error(500, 'sync methods can only be used for unnamed collections') + + return this.removeRaw(query) + } + private removeRaw(query: any): number { const docs = this.find(query)._fetchRaw() _.each(docs, (doc) => { @@ -247,41 +338,53 @@ export namespace MongoMock { return docs.length } - _ensureIndex(_obj: any) { + createIndex(_obj: any) { // todo } allow() { // todo } - rawCollection() { + + rawDatabase(): any { + throw new Error('Not implemented') + } + rawCollection(): any { return { bulkWrite: async (updates: AnyBulkWriteOperation[], _options: unknown) => { - await sleep(this.asyncBulkWriteDelay) + await MeteorMock.sleepNoFakeTimers(this.asyncBulkWriteDelay) for (const update of updates) { if ('insertOne' in update) { - this.insert(update.insertOne.document) + await this.insertAsync(update.insertOne.document) } else if ('updateOne' in update) { if (update.updateOne.upsert) { - this.upsert(update.updateOne.filter, update.updateOne.update as any, { multi: false }) + await this.upsertAsync(update.updateOne.filter, update.updateOne.update as any, { + multi: false, + }) } else { - this.update(update.updateOne.filter, update.updateOne.update as any, { multi: false }) + await this.updateAsync(update.updateOne.filter, update.updateOne.update as any, { + multi: false, + }) 
} } else if ('updateMany' in update) { if (update.updateMany.upsert) { - this.upsert(update.updateMany.filter, update.updateMany.update as any, { multi: true }) + await this.upsertAsync(update.updateMany.filter, update.updateMany.update as any, { + multi: true, + }) } else { - this.update(update.updateMany.filter, update.updateMany.update as any, { multi: true }) + await this.updateAsync(update.updateMany.filter, update.updateMany.update as any, { + multi: true, + }) } } else if ('deleteOne' in update) { - const docs = this.find(update.deleteOne.filter).fetch() + const docs = await this.find(update.deleteOne.filter).fetchAsync() if (docs.length) { - this.remove(docs[0]._id) + await this.removeAsync(docs[0]._id) } } else if ('deleteMany' in update) { - this.remove(update.deleteMany.filter) + await this.removeAsync(update.deleteMany.filter) } else if (update['replaceOne']) { - this.upsert(update.replaceOne.filter, update.replaceOne.replacement) + await this.upsertAsync(update.replaceOne.filter, update.replaceOne.replacement) } } }, @@ -341,7 +444,7 @@ export namespace MongoMock { export function getInnerMockCollection }>( collection: MongoReadOnlyCollection | AsyncOnlyReadOnlyMongoCollection - ): Mongo.Collection { + ): MinimalMeteorMongoCollection { return (collection as any).mockCollection } } diff --git a/meteor/__mocks__/plugins/meteor-async-await.js b/meteor/__mocks__/plugins/meteor-async-await.js deleted file mode 100644 index 2b633255eb9..00000000000 --- a/meteor/__mocks__/plugins/meteor-async-await.js +++ /dev/null @@ -1,74 +0,0 @@ -/* eslint-disable */ -// Copied from: https://github.com/meteor/meteor/blob/7a168776b444a48f18c9ba5ce72363360e59e678/npm-packages/meteor-babel/plugins/async-await.js - -'use strict' - -module.exports = function (babel) { - const t = babel.types - - return { - name: 'transform-meteor-async-await', - visitor: { - Function: { - exit: function (path) { - const node = path.node - if (!node.async) { - return - } - - // The original 
function becomes a non-async function that - // returns a Promise. - node.async = false - - // The inner function should inherit lexical environment items - // like `this`, `super`, and `arguments` from the outer - // function, and arrow functions provide exactly that behavior. - const innerFn = t.arrowFunctionExpression( - // The inner function has no parameters of its own, but can - // refer to the outer parameters of the original function. - [], - node.body, - // The inner function called by Promise.asyncApply should be - // async if we have native async/await support. - !!this.opts.useNativeAsyncAwait - ) - - const promiseResultExpression = t.callExpression( - t.memberExpression(t.identifier('Promise'), t.identifier('asyncApply'), false), - [innerFn] - ) - - // Calling the async function with Promise.asyncApply is - // important to ensure that the part before the first await - // expression runs synchronously in its own Fiber, even when - // there is native support for async/await. - if (node.type === 'ArrowFunctionExpression') { - node.body = promiseResultExpression - } else { - node.body = t.blockStatement([t.returnStatement(promiseResultExpression)]) - } - }, - }, - - AwaitExpression: function (path) { - if (this.opts.useNativeAsyncAwait) { - // No need to transform await expressions if we have native - // support for them. - return - } - - const node = path.node - path.replaceWith( - t.callExpression( - t.memberExpression( - t.identifier('Promise'), - t.identifier(node.all ? 
'awaitAll' : 'await'), - false - ), - [node.argument] - ) - ) - }, - }, - } -} diff --git a/meteor/__mocks__/reactive-var.ts b/meteor/__mocks__/reactive-var.ts deleted file mode 100644 index 3edc490a63c..00000000000 --- a/meteor/__mocks__/reactive-var.ts +++ /dev/null @@ -1,18 +0,0 @@ -class ReactiveVar { - val: T - constructor(initVal: T) { - this.val = initVal - } - get = () => { - return this.val - } - set = (newVal: T) => { - this.val = newVal - } -} - -export function setup(): any { - return { - ReactiveVar, - } -} diff --git a/meteor/__mocks__/suppressLogging.ts b/meteor/__mocks__/suppressLogging.ts index ef64368edeb..a50865afe78 100644 --- a/meteor/__mocks__/suppressLogging.ts +++ b/meteor/__mocks__/suppressLogging.ts @@ -40,6 +40,7 @@ export class SupressLogMessages { static expectAllMessagesToHaveBeenHandled(): void { const unhandledSuppressMessages = [...SupressLogMessages.suppressMessages] SupressLogMessages.suppressMessages.length = 0 + // eslint-disable-next-line jest/no-standalone-expect expect(unhandledSuppressMessages).toHaveLength(0) } } diff --git a/meteor/eslint-rules/README.md b/meteor/eslint-rules/README.md deleted file mode 100644 index a69061ec9cf..00000000000 --- a/meteor/eslint-rules/README.md +++ /dev/null @@ -1,3 +0,0 @@ -The typescript in this folder needs compiling before use, so it is easiest to commit the compiled js too. - -It can be recompiled with `meteor npx --no-install tsc eslint-rules/*.ts --module commonjs --skipLibCheck`, then the rules can be referenced as if they are npm installed. 
diff --git a/meteor/eslint-rules/index.js b/meteor/eslint-rules/index.js deleted file mode 100644 index b982cd1c4b3..00000000000 --- a/meteor/eslint-rules/index.js +++ /dev/null @@ -1,15 +0,0 @@ -const noFocusedTestRule = require('./noFocusedTestRule') - -module.exports = { - rules: { - 'no-focused-test': noFocusedTestRule.default, - }, - configs: { - all: { - plugins: ['custom-rules'], - rules: { - 'custom-rules/no-focused-test': 'error', - } - } - } -} diff --git a/meteor/eslint-rules/noFocusedTestRule.js b/meteor/eslint-rules/noFocusedTestRule.js deleted file mode 100644 index 53b1e5fffe1..00000000000 --- a/meteor/eslint-rules/noFocusedTestRule.js +++ /dev/null @@ -1,99 +0,0 @@ -"use strict"; -exports.__esModule = true; -/** Based on https://github.com/jest-community/eslint-plugin-jest/blob/7cba106d0ade884a231b61098fa0bf33af2a1ad7/src/rules/no-focused-tests.ts */ -var experimental_utils_1 = require("@typescript-eslint/utils"); -var utils_1 = require("./utils"); -var findOnlyNode = function (node) { - var callee = node.callee.type === experimental_utils_1.AST_NODE_TYPES.TaggedTemplateExpression - ? node.callee.tag - : node.callee.type === experimental_utils_1.AST_NODE_TYPES.CallExpression - ? node.callee.callee - : node.callee; - if (callee.type === experimental_utils_1.AST_NODE_TYPES.MemberExpression) { - if (callee.object.type === experimental_utils_1.AST_NODE_TYPES.MemberExpression) { - if (utils_1.isSupportedAccessor(callee.object.property, 'only')) { - return callee.object.property; - } - } - if (utils_1.isSupportedAccessor(callee.property, 'only')) { - return callee.property; - } - } - return null; -}; -exports["default"] = utils_1.createRule({ - name: __filename, - meta: { - docs: { - // category: 'Best Practices', - description: 'Disallow focused tests', - recommended: 'error', - suggestion: true - }, - messages: { - focusedTest: 'Unexpected focused test.', - suggestRemoveFocus: 'Remove focus from test.' 
- }, - schema: [], - type: 'suggestion', - hasSuggestions: true - }, - defaultOptions: [], - create: function (context) { return ({ - CallExpression: function (node) { - if (node.callee.type === experimental_utils_1.AST_NODE_TYPES.Identifier && node.callee.name === 'testInFiberOnly') { - context.report({ - messageId: 'focusedTest', - node: node, - suggest: [ - { - messageId: 'suggestRemoveFocus', - fix: function (fixer) { - return fixer.removeRange([node.range[0], node.range[0] + 1]); - } - }, - ] - }); - return; - } - if (!utils_1.isDescribeCall(node) && !utils_1.isTestCaseCall(node)) { - return; - } - if (utils_1.getNodeName(node).startsWith('f')) { - context.report({ - messageId: 'focusedTest', - node: node, - suggest: [ - { - messageId: 'suggestRemoveFocus', - fix: function (fixer) { - return fixer.removeRange([node.range[0], node.range[0] + 1]); - } - }, - ] - }); - return; - } - var onlyNode = findOnlyNode(node); - if (!onlyNode) { - return; - } - context.report({ - messageId: 'focusedTest', - node: onlyNode, - suggest: [ - { - messageId: 'suggestRemoveFocus', - fix: function (fixer) { - return fixer.removeRange([ - onlyNode.range[0] - 1, - onlyNode.range[1] + - Number(onlyNode.type !== experimental_utils_1.AST_NODE_TYPES.Identifier), - ]); - } - }, - ] - }); - } - }); } -}); diff --git a/meteor/eslint-rules/noFocusedTestRule.ts b/meteor/eslint-rules/noFocusedTestRule.ts deleted file mode 100644 index 11f3aabc5ec..00000000000 --- a/meteor/eslint-rules/noFocusedTestRule.ts +++ /dev/null @@ -1,112 +0,0 @@ -/** Based on https://github.com/jest-community/eslint-plugin-jest/blob/7cba106d0ade884a231b61098fa0bf33af2a1ad7/src/rules/no-focused-tests.ts */ -import { AST_NODE_TYPES } from '@typescript-eslint/utils' -import { - AccessorNode, - JestFunctionCallExpression, - createRule, - getNodeName, - isDescribeCall, - isSupportedAccessor, - isTestCaseCall, -} from './utils' - -const findOnlyNode = (node: JestFunctionCallExpression): AccessorNode<'only'> | null => { - 
const callee = - node.callee.type === AST_NODE_TYPES.TaggedTemplateExpression - ? node.callee.tag - : node.callee.type === AST_NODE_TYPES.CallExpression - ? node.callee.callee - : node.callee - - if (callee.type === AST_NODE_TYPES.MemberExpression) { - if (callee.object.type === AST_NODE_TYPES.MemberExpression) { - if (isSupportedAccessor(callee.object.property, 'only')) { - return callee.object.property - } - } - - if (isSupportedAccessor(callee.property, 'only')) { - return callee.property - } - } - - return null -} - -export default createRule({ - name: __filename, - meta: { - docs: { - // category: 'Best Practices', - description: 'Disallow focused tests', - recommended: 'error', - suggestion: true, - }, - messages: { - focusedTest: 'Unexpected focused test.', - suggestRemoveFocus: 'Remove focus from test.', - }, - schema: [], - type: 'suggestion', - hasSuggestions: true, - }, - defaultOptions: [], - create: (context) => ({ - CallExpression(node) { - if (node.callee.type === AST_NODE_TYPES.Identifier && node.callee.name === 'testInFiberOnly') { - context.report({ - messageId: 'focusedTest', - node, - suggest: [ - { - messageId: 'suggestRemoveFocus', - fix: (fixer) => fixer.removeRange([node.range[0], node.range[0] + 1]), - }, - ], - }) - - return - } - - if (!isDescribeCall(node) && !isTestCaseCall(node)) { - return - } - - if (getNodeName(node).startsWith('f')) { - context.report({ - messageId: 'focusedTest', - node, - suggest: [ - { - messageId: 'suggestRemoveFocus', - fix: (fixer) => fixer.removeRange([node.range[0], node.range[0] + 1]), - }, - ], - }) - - return - } - - const onlyNode = findOnlyNode(node) - - if (!onlyNode) { - return - } - - context.report({ - messageId: 'focusedTest', - node: onlyNode, - suggest: [ - { - messageId: 'suggestRemoveFocus', - fix: (fixer) => - fixer.removeRange([ - onlyNode.range[0] - 1, - onlyNode.range[1] + Number(onlyNode.type !== AST_NODE_TYPES.Identifier), - ]), - }, - ], - }) - }, - }), -}) diff --git 
a/meteor/eslint-rules/package.json b/meteor/eslint-rules/package.json deleted file mode 100644 index 724013b78bd..00000000000 --- a/meteor/eslint-rules/package.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "eslint-plugin-custom-rules", - "version": "0.0.1", - "license": "MIT", - "main": "index.js" -} diff --git a/meteor/eslint-rules/utils.js b/meteor/eslint-rules/utils.js deleted file mode 100644 index 88293c70e89..00000000000 --- a/meteor/eslint-rules/utils.js +++ /dev/null @@ -1,484 +0,0 @@ -"use strict"; -var __assign = (this && this.__assign) || function () { - __assign = Object.assign || function(t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) - t[p] = s[p]; - } - return t; - }; - return __assign.apply(this, arguments); -}; -exports.__esModule = true; -exports.scopeHasLocalReference = exports.isDescribeCall = exports.isTestCaseCall = exports.getTestCallExpressionsFromDeclaredVariables = exports.isHook = exports.isFunction = exports.getNodeName = exports.TestCaseProperty = exports.DescribeProperty = exports.HookName = exports.TestCaseName = exports.DescribeAlias = exports.parseExpectCall = exports.isParsedEqualityMatcherCall = exports.EqualityMatcher = exports.ModifierName = exports.isExpectMember = exports.isExpectCall = exports.getAccessorValue = exports.isSupportedAccessor = exports.isIdentifier = exports.hasOnlyOneArgument = exports.getStringValue = exports.isStringNode = exports.followTypeAssertionChain = exports.createRule = void 0; -/** https://github.com/jest-community/eslint-plugin-jest/blob/540326879df242daa3d96f43903178e36ba6b546/src/rules/utils.ts */ -// import { parse as parsePath } from 'path'; -var experimental_utils_1 = require("@typescript-eslint/utils"); -// import { version } from '../../package.json'; -// const REPO_URL = 'https://github.com/jest-community/eslint-plugin-jest'; -exports.createRule = 
experimental_utils_1.ESLintUtils.RuleCreator(function (name) { - return "local:" + name; - // const ruleName = parsePath(name).name; - // return `${REPO_URL}/blob/v${version}/docs/rules/${ruleName}.md`; -}); -var isTypeCastExpression = function (node) { - return node.type === experimental_utils_1.AST_NODE_TYPES.TSAsExpression || - node.type === experimental_utils_1.AST_NODE_TYPES.TSTypeAssertion; -}; -var followTypeAssertionChain = function (expression) { - return isTypeCastExpression(expression) - ? exports.followTypeAssertionChain(expression.expression) - : expression; -}; -exports.followTypeAssertionChain = followTypeAssertionChain; -/** - * Checks if the given `node` is a `StringLiteral`. - * - * If a `value` is provided & the `node` is a `StringLiteral`, - * the `value` will be compared to that of the `StringLiteral`. - * - * @param {Node} node - * @param {V} [value] - * - * @return {node is StringLiteral} - * - * @template V - */ -var isStringLiteral = function (node, value) { - return node.type === experimental_utils_1.AST_NODE_TYPES.Literal && - typeof node.value === 'string' && - (value === undefined || node.value === value); -}; -/** - * Checks if the given `node` is a `TemplateLiteral`. - * - * Complex `TemplateLiteral`s are not considered specific, and so will return `false`. - * - * If a `value` is provided & the `node` is a `TemplateLiteral`, - * the `value` will be compared to that of the `TemplateLiteral`. - * - * @param {Node} node - * @param {V} [value] - * - * @return {node is TemplateLiteral} - * - * @template V - */ -var isTemplateLiteral = function (node, value) { - return node.type === experimental_utils_1.AST_NODE_TYPES.TemplateLiteral && - node.quasis.length === 1 && // bail out if not simple - (value === undefined || node.quasis[0].value.raw === value); -}; -/** - * Checks if the given `node` is a {@link StringNode}. 
- * - * @param {Node} node - * @param {V} [specifics] - * - * @return {node is StringNode} - * - * @template V - */ -var isStringNode = function (node, specifics) { - return isStringLiteral(node, specifics) || isTemplateLiteral(node, specifics); -}; -exports.isStringNode = isStringNode; -/** - * Gets the value of the given `StringNode`. - * - * If the `node` is a `TemplateLiteral`, the `raw` value is used; - * otherwise, `value` is returned instead. - * - * @param {StringNode} node - * - * @return {S} - * - * @template S - */ -var getStringValue = function (node) { - return isTemplateLiteral(node) ? node.quasis[0].value.raw : node.value; -}; -exports.getStringValue = getStringValue; -/** - * Guards that the given `call` has only one `argument`. - * - * @param {CallExpression} call - * - * @return {call is CallExpressionWithSingleArgument} - */ -var hasOnlyOneArgument = function (call) { return call.arguments.length === 1; }; -exports.hasOnlyOneArgument = hasOnlyOneArgument; -/** - * Checks if the given `node` is an `Identifier`. - * - * If a `name` is provided, & the `node` is an `Identifier`, - * the `name` will be compared to that of the `identifier`. - * - * @param {Node} node - * @param {V} [name] - * - * @return {node is KnownIdentifier} - * - * @template V - */ -var isIdentifier = function (node, name) { - return node.type === experimental_utils_1.AST_NODE_TYPES.Identifier && - (name === undefined || node.name === name); -}; -exports.isIdentifier = isIdentifier; -/** - * Checks if the given `node` is a "supported accessor". - * - * This means that it's a node can be used to access properties, - * and who's "value" can be statically determined. - * - * `MemberExpression` nodes most commonly contain accessors, - * but it's possible for other nodes to contain them. - * - * If a `value` is provided & the `node` is an `AccessorNode`, - * the `value` will be compared to that of the `AccessorNode`. - * - * Note that `value` here refers to the normalised value. 
- * The property that holds the value is not always called `name`. - * - * @param {Node} node - * @param {V} [value] - * - * @return {node is AccessorNode} - * - * @template V - */ -var isSupportedAccessor = function (node, value) { - return exports.isIdentifier(node, value) || exports.isStringNode(node, value); -}; -exports.isSupportedAccessor = isSupportedAccessor; -/** - * Gets the value of the given `AccessorNode`, - * account for the different node types. - * - * @param {AccessorNode} accessor - * - * @return {S} - * - * @template S - */ -var getAccessorValue = function (accessor) { - return accessor.type === experimental_utils_1.AST_NODE_TYPES.Identifier - ? accessor.name - : exports.getStringValue(accessor); -}; -exports.getAccessorValue = getAccessorValue; -/** - * Checks if the given `node` is a valid `ExpectCall`. - * - * In order to be an `ExpectCall`, the `node` must: - * * be a `CallExpression`, - * * have an accessor named 'expect', - * * have a `parent`. - * - * @param {Node} node - * - * @return {node is ExpectCall} - */ -var isExpectCall = function (node) { - return node.type === experimental_utils_1.AST_NODE_TYPES.CallExpression && - exports.isSupportedAccessor(node.callee, 'expect') && - node.parent !== undefined; -}; -exports.isExpectCall = isExpectCall; -var isExpectMember = function (node, name) { - return node.type === experimental_utils_1.AST_NODE_TYPES.MemberExpression && - exports.isSupportedAccessor(node.property, name); -}; -exports.isExpectMember = isExpectMember; -var ModifierName; -(function (ModifierName) { - ModifierName["not"] = "not"; - ModifierName["rejects"] = "rejects"; - ModifierName["resolves"] = "resolves"; -})(ModifierName = exports.ModifierName || (exports.ModifierName = {})); -var EqualityMatcher; -(function (EqualityMatcher) { - EqualityMatcher["toBe"] = "toBe"; - EqualityMatcher["toEqual"] = "toEqual"; - EqualityMatcher["toStrictEqual"] = "toStrictEqual"; -})(EqualityMatcher = exports.EqualityMatcher || 
(exports.EqualityMatcher = {})); -var isParsedEqualityMatcherCall = function (matcher, name) { - return (name - ? matcher.name === name - : EqualityMatcher.hasOwnProperty(matcher.name)) && - matcher.arguments !== null && - matcher.arguments.length === 1; -}; -exports.isParsedEqualityMatcherCall = isParsedEqualityMatcherCall; -var parseExpectMember = function (expectMember) { return ({ - name: exports.getAccessorValue(expectMember.property), - node: expectMember -}); }; -var reparseAsMatcher = function (parsedMember) { return (__assign(__assign({}, parsedMember), { - /** - * The arguments being passed to this `Matcher`, if any. - * - * If this matcher isn't called, this will be `null`. - */ - arguments: parsedMember.node.parent.type === experimental_utils_1.AST_NODE_TYPES.CallExpression - ? parsedMember.node.parent.arguments - : null })); }; -/** - * Re-parses the given `parsedMember` as a `ParsedExpectModifier`. - * - * If the given `parsedMember` does not have a `name` of a valid `Modifier`, - * an exception will be thrown. - * - * @param {ParsedExpectMember} parsedMember - * - * @return {ParsedExpectModifier} - */ -var reparseMemberAsModifier = function (parsedMember) { - if (isSpecificMember(parsedMember, ModifierName.not)) { - return parsedMember; - } - /* istanbul ignore if */ - if (!isSpecificMember(parsedMember, ModifierName.resolves) && - !isSpecificMember(parsedMember, ModifierName.rejects)) { - // ts doesn't think that the ModifierName.not check is the direct inverse as the above two checks - // todo: impossible at runtime, but can't be typed w/o negation support - throw new Error("modifier name must be either \"" + ModifierName.resolves + "\" or \"" + ModifierName.rejects + "\" (got \"" + parsedMember.name + "\")"); - } - var negation = exports.isExpectMember(parsedMember.node.parent, ModifierName.not) - ? 
parsedMember.node.parent - : undefined; - return __assign(__assign({}, parsedMember), { negation: negation }); -}; -var isSpecificMember = function (member, specific) { return member.name === specific; }; -/** - * Checks if the given `ParsedExpectMember` should be re-parsed as an `ParsedExpectModifier`. - * - * @param {ParsedExpectMember} member - * - * @return {member is ParsedExpectMember} - */ -var shouldBeParsedExpectModifier = function (member) { - return ModifierName.hasOwnProperty(member.name); -}; -var parseExpectCall = function (expect) { - var expectation = { - expect: expect - }; - if (!exports.isExpectMember(expect.parent)) { - return expectation; - } - var parsedMember = parseExpectMember(expect.parent); - if (!shouldBeParsedExpectModifier(parsedMember)) { - expectation.matcher = reparseAsMatcher(parsedMember); - return expectation; - } - var modifier = (expectation.modifier = - reparseMemberAsModifier(parsedMember)); - var memberNode = modifier.negation || modifier.node; - if (!exports.isExpectMember(memberNode.parent)) { - return expectation; - } - expectation.matcher = reparseAsMatcher(parseExpectMember(memberNode.parent)); - return expectation; -}; -exports.parseExpectCall = parseExpectCall; -var DescribeAlias; -(function (DescribeAlias) { - DescribeAlias["describe"] = "describe"; - DescribeAlias["fdescribe"] = "fdescribe"; - DescribeAlias["xdescribe"] = "xdescribe"; -})(DescribeAlias = exports.DescribeAlias || (exports.DescribeAlias = {})); -var TestCaseName; -(function (TestCaseName) { - TestCaseName["fit"] = "fit"; - TestCaseName["it"] = "it"; - TestCaseName["test"] = "test"; - TestCaseName["xit"] = "xit"; - TestCaseName["xtest"] = "xtest"; -})(TestCaseName = exports.TestCaseName || (exports.TestCaseName = {})); -var HookName; -(function (HookName) { - HookName["beforeAll"] = "beforeAll"; - HookName["beforeEach"] = "beforeEach"; - HookName["afterAll"] = "afterAll"; - HookName["afterEach"] = "afterEach"; -})(HookName = exports.HookName || 
(exports.HookName = {})); -var DescribeProperty; -(function (DescribeProperty) { - DescribeProperty["each"] = "each"; - DescribeProperty["only"] = "only"; - DescribeProperty["skip"] = "skip"; -})(DescribeProperty = exports.DescribeProperty || (exports.DescribeProperty = {})); -var TestCaseProperty; -(function (TestCaseProperty) { - TestCaseProperty["each"] = "each"; - TestCaseProperty["concurrent"] = "concurrent"; - TestCaseProperty["only"] = "only"; - TestCaseProperty["skip"] = "skip"; - TestCaseProperty["todo"] = "todo"; -})(TestCaseProperty = exports.TestCaseProperty || (exports.TestCaseProperty = {})); -var joinNames = function (a, b) { - return a && b ? a + "." + b : null; -}; -function getNodeName(node) { - if (exports.isSupportedAccessor(node)) { - return exports.getAccessorValue(node); - } - switch (node.type) { - case experimental_utils_1.AST_NODE_TYPES.TaggedTemplateExpression: - return getNodeName(node.tag); - case experimental_utils_1.AST_NODE_TYPES.MemberExpression: - return joinNames(getNodeName(node.object), getNodeName(node.property)); - case experimental_utils_1.AST_NODE_TYPES.NewExpression: - case experimental_utils_1.AST_NODE_TYPES.CallExpression: - return getNodeName(node.callee); - } - return null; -} -exports.getNodeName = getNodeName; -var isFunction = function (node) { - return node.type === experimental_utils_1.AST_NODE_TYPES.FunctionExpression || - node.type === experimental_utils_1.AST_NODE_TYPES.ArrowFunctionExpression; -}; -exports.isFunction = isFunction; -var isHook = function (node) { - return node.callee.type === experimental_utils_1.AST_NODE_TYPES.Identifier && - HookName.hasOwnProperty(node.callee.name); -}; -exports.isHook = isHook; -var getTestCallExpressionsFromDeclaredVariables = function (declaredVariables) { - return declaredVariables.reduce(function (acc, _a) { - var references = _a.references; - return acc.concat(references - .map(function (_a) { - var identifier = _a.identifier; - return identifier.parent; - }) - 
.filter(function (node) { - return !!node && - node.type === experimental_utils_1.AST_NODE_TYPES.CallExpression && - exports.isTestCaseCall(node); - })); - }, []); -}; -exports.getTestCallExpressionsFromDeclaredVariables = getTestCallExpressionsFromDeclaredVariables; -var isTestCaseName = function (node) { - return node.type === experimental_utils_1.AST_NODE_TYPES.Identifier && - TestCaseName.hasOwnProperty(node.name); -}; -var isTestCaseProperty = function (node) { - return exports.isSupportedAccessor(node) && - TestCaseProperty.hasOwnProperty(exports.getAccessorValue(node)); -}; -/** - * Checks if the given `node` is a *call* to a test case function that would - * result in tests being run by `jest`. - * - * Note that `.each()` does not count as a call in this context, as it will not - * result in `jest` running any tests. - * - * @param {TSESTree.CallExpression} node - * - * @return {node is JestFunctionCallExpression} - */ -var isTestCaseCall = function (node) { - if (isTestCaseName(node.callee)) { - return true; - } - var callee = node.callee.type === experimental_utils_1.AST_NODE_TYPES.TaggedTemplateExpression - ? node.callee.tag - : node.callee.type === experimental_utils_1.AST_NODE_TYPES.CallExpression - ? node.callee.callee - : node.callee; - if (callee.type === experimental_utils_1.AST_NODE_TYPES.MemberExpression && - isTestCaseProperty(callee.property)) { - // if we're an `each()`, ensure we're the outer CallExpression (i.e `.each()()`) - if (exports.getAccessorValue(callee.property) === 'each' && - node.callee.type !== experimental_utils_1.AST_NODE_TYPES.TaggedTemplateExpression && - node.callee.type !== experimental_utils_1.AST_NODE_TYPES.CallExpression) { - return false; - } - return callee.object.type === experimental_utils_1.AST_NODE_TYPES.MemberExpression - ? 
isTestCaseName(callee.object.object) - : isTestCaseName(callee.object); - } - return false; -}; -exports.isTestCaseCall = isTestCaseCall; -var isDescribeAlias = function (node) { - return node.type === experimental_utils_1.AST_NODE_TYPES.Identifier && - DescribeAlias.hasOwnProperty(node.name); -}; -var isDescribeProperty = function (node) { - return exports.isSupportedAccessor(node) && - DescribeProperty.hasOwnProperty(exports.getAccessorValue(node)); -}; -/** - * Checks if the given `node` is a *call* to a `describe` function that would - * result in a `describe` block being created by `jest`. - * - * Note that `.each()` does not count as a call in this context, as it will not - * result in `jest` creating any `describe` blocks. - * - * @param {TSESTree.CallExpression} node - * - * @return {node is JestFunctionCallExpression} - */ -var isDescribeCall = function (node) { - if (isDescribeAlias(node.callee)) { - return true; - } - var callee = node.callee.type === experimental_utils_1.AST_NODE_TYPES.TaggedTemplateExpression - ? node.callee.tag - : node.callee.type === experimental_utils_1.AST_NODE_TYPES.CallExpression - ? node.callee.callee - : node.callee; - if (callee.type === experimental_utils_1.AST_NODE_TYPES.MemberExpression && - isDescribeProperty(callee.property)) { - // if we're an `each()`, ensure we're the outer CallExpression (i.e `.each()()`) - if (exports.getAccessorValue(callee.property) === 'each' && - node.callee.type !== experimental_utils_1.AST_NODE_TYPES.TaggedTemplateExpression && - node.callee.type !== experimental_utils_1.AST_NODE_TYPES.CallExpression) { - return false; - } - return callee.object.type === experimental_utils_1.AST_NODE_TYPES.MemberExpression - ? 
isDescribeAlias(callee.object.object) - : isDescribeAlias(callee.object); - } - return false; -}; -exports.isDescribeCall = isDescribeCall; -var collectReferences = function (scope) { - var locals = new Set(); - var unresolved = new Set(); - var currentScope = scope; - while (currentScope !== null) { - for (var _i = 0, _a = currentScope.variables; _i < _a.length; _i++) { - var ref = _a[_i]; - var isReferenceDefined = ref.defs.some(function (def) { - return def.type !== 'ImplicitGlobalVariable'; - }); - if (isReferenceDefined) { - locals.add(ref.name); - } - } - for (var _b = 0, _c = currentScope.through; _b < _c.length; _b++) { - var ref = _c[_b]; - unresolved.add(ref.identifier.name); - } - currentScope = currentScope.upper; - } - return { locals: locals, unresolved: unresolved }; -}; -var scopeHasLocalReference = function (scope, referenceName) { - var references = collectReferences(scope); - return ( - // referenceName was found as a local variable or function declaration. - references.locals.has(referenceName) || - // referenceName was not found as an unresolved reference, - // meaning it is likely not an implicit global reference. 
- !references.unresolved.has(referenceName)); -}; -exports.scopeHasLocalReference = scopeHasLocalReference; diff --git a/meteor/eslint-rules/utils.ts b/meteor/eslint-rules/utils.ts deleted file mode 100644 index 854447c335f..00000000000 --- a/meteor/eslint-rules/utils.ts +++ /dev/null @@ -1,724 +0,0 @@ -/** https://github.com/jest-community/eslint-plugin-jest/blob/7cba106d0ade884a231b61098fa0bf33af2a1ad7/src/rules/utils.ts */ -// import { parse as parsePath } from 'path'; -import { AST_NODE_TYPES, ESLintUtils, TSESLint, TSESTree } from '@typescript-eslint/utils' -// import { version } from '../../package.json'; - -// const REPO_URL = 'https://github.com/jest-community/eslint-plugin-jest'; - -export const createRule = ESLintUtils.RuleCreator((name) => { - return `local:${name}` - // const ruleName = parsePath(name).name; - - // return `${REPO_URL}/blob/v${version}/docs/rules/${ruleName}.md`; -}) - -export type MaybeTypeCast = TSTypeCastExpression | Expression - -type TSTypeCastExpression = - | AsExpressionChain - | TypeAssertionChain - -interface AsExpressionChain - extends TSESTree.TSAsExpression { - expression: AsExpressionChain | Expression -} - -interface TypeAssertionChain - extends TSESTree.TSTypeAssertion { - expression: TypeAssertionChain | Expression -} - -const isTypeCastExpression = ( - node: MaybeTypeCast -): node is TSTypeCastExpression => - node.type === AST_NODE_TYPES.TSAsExpression || node.type === AST_NODE_TYPES.TSTypeAssertion - -export const followTypeAssertionChain = ( - expression: MaybeTypeCast -): Expression => (isTypeCastExpression(expression) ? followTypeAssertionChain(expression.expression) : expression) - -/** - * A `Literal` with a `value` of type `string`. - */ -interface StringLiteral extends TSESTree.StringLiteral { - value: Value -} - -/** - * Checks if the given `node` is a `StringLiteral`. - * - * If a `value` is provided & the `node` is a `StringLiteral`, - * the `value` will be compared to that of the `StringLiteral`. 
- * - * @param {Node} node - * @param {V} [value] - * - * @return {node is StringLiteral} - * - * @template V - */ -const isStringLiteral = (node: TSESTree.Node, value?: V): node is StringLiteral => - node.type === AST_NODE_TYPES.Literal && - typeof node.value === 'string' && - (value === undefined || node.value === value) - -interface TemplateLiteral extends TSESTree.TemplateLiteral { - quasis: [TSESTree.TemplateElement & { value: { raw: Value; cooked: Value } }] -} - -/** - * Checks if the given `node` is a `TemplateLiteral`. - * - * Complex `TemplateLiteral`s are not considered specific, and so will return `false`. - * - * If a `value` is provided & the `node` is a `TemplateLiteral`, - * the `value` will be compared to that of the `TemplateLiteral`. - * - * @param {Node} node - * @param {V} [value] - * - * @return {node is TemplateLiteral} - * - * @template V - */ -const isTemplateLiteral = (node: TSESTree.Node, value?: V): node is TemplateLiteral => - node.type === AST_NODE_TYPES.TemplateLiteral && - node.quasis.length === 1 && // bail out if not simple - (value === undefined || node.quasis[0].value.raw === value) - -export type StringNode = StringLiteral | TemplateLiteral - -/** - * Checks if the given `node` is a {@link StringNode}. - * - * @param {Node} node - * @param {V} [specifics] - * - * @return {node is StringNode} - * - * @template V - */ -export const isStringNode = (node: TSESTree.Node, specifics?: V): node is StringNode => - isStringLiteral(node, specifics) || isTemplateLiteral(node, specifics) - -/** - * Gets the value of the given `StringNode`. - * - * If the `node` is a `TemplateLiteral`, the `raw` value is used; - * otherwise, `value` is returned instead. - * - * @param {StringNode} node - * - * @return {S} - * - * @template S - */ -export const getStringValue = (node: StringNode): S => - isTemplateLiteral(node) ? node.quasis[0].value.raw : node.value - -/** - * Represents a `MemberExpression` with a "known" `property`. 
- */ -interface KnownMemberExpression extends TSESTree.MemberExpressionComputedName { - property: AccessorNode -} - -/** - * Represents a `CallExpression` with a "known" `property` accessor. - * - * i.e `KnownCallExpression<'includes'>` represents `.includes()`. - */ -export interface KnownCallExpression extends TSESTree.CallExpression { - callee: CalledKnownMemberExpression -} - -/** - * Represents a `MemberExpression` with a "known" `property`, that is called. - * - * This is `KnownCallExpression` from the perspective of the `MemberExpression` node. - */ -export interface CalledKnownMemberExpression extends KnownMemberExpression { - parent: KnownCallExpression -} - -/** - * Represents a `CallExpression` with a single argument. - */ -export interface CallExpressionWithSingleArgument - extends TSESTree.CallExpression { - arguments: [Argument] -} - -/** - * Guards that the given `call` has only one `argument`. - * - * @param {CallExpression} call - * - * @return {call is CallExpressionWithSingleArgument} - */ -export const hasOnlyOneArgument = (call: TSESTree.CallExpression): call is CallExpressionWithSingleArgument => - call.arguments.length === 1 - -/** - * An `Identifier` with a known `name` value - i.e `expect`. - */ -interface KnownIdentifier extends TSESTree.Identifier { - name: Name -} - -/** - * Checks if the given `node` is an `Identifier`. - * - * If a `name` is provided, & the `node` is an `Identifier`, - * the `name` will be compared to that of the `identifier`. - * - * @param {Node} node - * @param {V} [name] - * - * @return {node is KnownIdentifier} - * - * @template V - */ -export const isIdentifier = (node: TSESTree.Node, name?: V): node is KnownIdentifier => - node.type === AST_NODE_TYPES.Identifier && (name === undefined || node.name === name) - -/** - * Checks if the given `node` is a "supported accessor". - * - * This means that it's a node can be used to access properties, - * and who's "value" can be statically determined. 
- * - * `MemberExpression` nodes most commonly contain accessors, - * but it's possible for other nodes to contain them. - * - * If a `value` is provided & the `node` is an `AccessorNode`, - * the `value` will be compared to that of the `AccessorNode`. - * - * Note that `value` here refers to the normalised value. - * The property that holds the value is not always called `name`. - * - * @param {Node} node - * @param {V} [value] - * - * @return {node is AccessorNode} - * - * @template V - */ -export const isSupportedAccessor = (node: TSESTree.Node, value?: V): node is AccessorNode => - isIdentifier(node, value) || isStringNode(node, value) - -/** - * Gets the value of the given `AccessorNode`, - * account for the different node types. - * - * @param {AccessorNode} accessor - * - * @return {S} - * - * @template S - */ -export const getAccessorValue = (accessor: AccessorNode): S => - accessor.type === AST_NODE_TYPES.Identifier ? accessor.name : getStringValue(accessor) - -export type AccessorNode = StringNode | KnownIdentifier - -interface ExpectCall extends TSESTree.CallExpression { - callee: AccessorNode<'expect'> - parent: TSESTree.Node -} - -/** - * Checks if the given `node` is a valid `ExpectCall`. - * - * In order to be an `ExpectCall`, the `node` must: - * * be a `CallExpression`, - * * have an accessor named 'expect', - * * have a `parent`. - * - * @param {Node} node - * - * @return {node is ExpectCall} - */ -export const isExpectCall = (node: TSESTree.Node): node is ExpectCall => - node.type === AST_NODE_TYPES.CallExpression && - isSupportedAccessor(node.callee, 'expect') && - node.parent !== undefined - -interface ParsedExpectMember< - Name extends ExpectPropertyName = ExpectPropertyName, - Node extends ExpectMember = ExpectMember -> { - name: Name - node: Node -} - -/** - * Represents a `MemberExpression` that comes after an `ExpectCall`. 
- */ -interface ExpectMember - extends KnownMemberExpression { - object: ExpectCall | ExpectMember - parent: TSESTree.Node -} - -export const isExpectMember = ( - node: TSESTree.Node, - name?: Name -): node is ExpectMember => - node.type === AST_NODE_TYPES.MemberExpression && isSupportedAccessor(node.property, name) - -/** - * Represents all the jest matchers. - */ -type MatcherName = string /* & not ModifierName */ -type ExpectPropertyName = ModifierName | MatcherName - -export type ParsedEqualityMatcherCall< - Argument extends TSESTree.Expression = TSESTree.Expression, - Matcher extends EqualityMatcher = EqualityMatcher -> = Omit, 'arguments'> & { - parent: TSESTree.CallExpression - arguments: [Argument] -} - -export enum ModifierName { - not = 'not', - rejects = 'rejects', - resolves = 'resolves', -} - -export enum EqualityMatcher { - toBe = 'toBe', - toEqual = 'toEqual', - toStrictEqual = 'toStrictEqual', -} - -export const isParsedEqualityMatcherCall = ( - matcher: ParsedExpectMatcher, - name?: MatcherName -): matcher is ParsedEqualityMatcherCall => - (name ? matcher.name === name : EqualityMatcher.hasOwnProperty(matcher.name)) && - matcher.arguments !== null && - matcher.arguments.length === 1 - -/** - * Represents a parsed expect matcher, such as `toBe`, `toContain`, and so on. - */ -export interface ParsedExpectMatcher< - Matcher extends MatcherName = MatcherName, - Node extends ExpectMember = ExpectMember -> extends ParsedExpectMember { - /** - * The arguments being passed to the matcher. - * A value of `null` means the matcher isn't being called. - */ - arguments: TSESTree.CallExpression['arguments'] | null -} - -type BaseParsedModifier = ParsedExpectMember - -type NegatableModifierName = ModifierName.rejects | ModifierName.resolves -type NotNegatableModifierName = ModifierName.not - -/** - * Represents a parsed modifier that can be followed by a `not` negation modifier. 
- */ -interface NegatableParsedModifier - extends BaseParsedModifier { - negation?: ExpectMember -} - -/** - * Represents a parsed modifier that cannot be followed by a `not` negation modifier. - */ -export interface NotNegatableParsedModifier - extends BaseParsedModifier { - negation?: never -} - -export type ParsedExpectModifier = NotNegatableParsedModifier | NegatableParsedModifier - -interface Expectation { - expect: ExpectNode - modifier?: ParsedExpectModifier - matcher?: ParsedExpectMatcher -} - -const parseExpectMember = (expectMember: ExpectMember): ParsedExpectMember => ({ - name: getAccessorValue(expectMember.property), - node: expectMember, -}) - -const reparseAsMatcher = (parsedMember: ParsedExpectMember): ParsedExpectMatcher => ({ - ...parsedMember, - /** - * The arguments being passed to this `Matcher`, if any. - * - * If this matcher isn't called, this will be `null`. - */ - arguments: - parsedMember.node.parent.type === AST_NODE_TYPES.CallExpression ? parsedMember.node.parent.arguments : null, -}) - -/** - * Re-parses the given `parsedMember` as a `ParsedExpectModifier`. - * - * If the given `parsedMember` does not have a `name` of a valid `Modifier`, - * an exception will be thrown. 
- * - * @param {ParsedExpectMember} parsedMember - * - * @return {ParsedExpectModifier} - */ -const reparseMemberAsModifier = (parsedMember: ParsedExpectMember): ParsedExpectModifier => { - if (isSpecificMember(parsedMember, ModifierName.not)) { - return parsedMember - } - - /* istanbul ignore if */ - if ( - !isSpecificMember(parsedMember, ModifierName.resolves) && - !isSpecificMember(parsedMember, ModifierName.rejects) - ) { - // ts doesn't think that the ModifierName.not check is the direct inverse as the above two checks - // todo: impossible at runtime, but can't be typed w/o negation support - throw new Error( - `modifier name must be either "${ModifierName.resolves}" or "${ModifierName.rejects}" (got "${parsedMember.name}")` - ) - } - - const negation = isExpectMember(parsedMember.node.parent, ModifierName.not) ? parsedMember.node.parent : undefined - - return { - ...parsedMember, - negation, - } -} - -const isSpecificMember = ( - member: ParsedExpectMember, - specific: Name -): member is ParsedExpectMember => member.name === specific - -/** - * Checks if the given `ParsedExpectMember` should be re-parsed as an `ParsedExpectModifier`. 
- * - * @param {ParsedExpectMember} member - * - * @return {member is ParsedExpectMember} - */ -const shouldBeParsedExpectModifier = (member: ParsedExpectMember): member is ParsedExpectMember => - ModifierName.hasOwnProperty(member.name) - -export const parseExpectCall = (expect: ExpectNode): Expectation => { - const expectation: Expectation = { - expect, - } - - if (!isExpectMember(expect.parent)) { - return expectation - } - - const parsedMember = parseExpectMember(expect.parent) - - if (!shouldBeParsedExpectModifier(parsedMember)) { - expectation.matcher = reparseAsMatcher(parsedMember) - - return expectation - } - - const modifier = (expectation.modifier = reparseMemberAsModifier(parsedMember)) - - const memberNode = modifier.negation || modifier.node - - if (!isExpectMember(memberNode.parent)) { - return expectation - } - - expectation.matcher = reparseAsMatcher(parseExpectMember(memberNode.parent)) - - return expectation -} - -export enum DescribeAlias { - 'describe' = 'describe', - 'fdescribe' = 'fdescribe', - 'xdescribe' = 'xdescribe', -} - -export enum TestCaseName { - 'fit' = 'fit', - 'it' = 'it', - 'test' = 'test', - 'xit' = 'xit', - 'xtest' = 'xtest', -} - -export enum HookName { - 'beforeAll' = 'beforeAll', - 'beforeEach' = 'beforeEach', - 'afterAll' = 'afterAll', - 'afterEach' = 'afterEach', -} - -export enum DescribeProperty { - 'each' = 'each', - 'only' = 'only', - 'skip' = 'skip', -} - -export enum TestCaseProperty { - 'each' = 'each', - 'concurrent' = 'concurrent', - 'only' = 'only', - 'skip' = 'skip', - 'todo' = 'todo', -} - -type JestFunctionName = DescribeAlias | TestCaseName | HookName -type JestPropertyName = DescribeProperty | TestCaseProperty - -interface JestFunctionIdentifier extends TSESTree.Identifier { - name: FunctionName -} - -interface JestFunctionMemberExpression< - FunctionName extends JestFunctionName, - PropertyName extends JestPropertyName = JestPropertyName -> extends KnownMemberExpression { - object: JestFunctionIdentifier -} 
- -interface JestFunctionCallExpressionWithMemberExpressionCallee< - FunctionName extends JestFunctionName, - PropertyName extends JestPropertyName = JestPropertyName -> extends TSESTree.CallExpression { - callee: JestFunctionMemberExpression -} - -export interface JestFunctionCallExpressionWithIdentifierCallee - extends TSESTree.CallExpression { - callee: JestFunctionIdentifier -} - -interface JestEachMemberExpression> - extends KnownMemberExpression<'each'> { - object: KnownIdentifier | (KnownMemberExpression & { object: KnownIdentifier }) -} - -export interface JestCalledEachCallExpression> - extends TSESTree.CallExpression { - callee: TSESTree.CallExpression & { - callee: JestEachMemberExpression - } -} - -export interface JestTaggedEachCallExpression> - extends TSESTree.CallExpression { - callee: TSESTree.TaggedTemplateExpression & { - tag: JestEachMemberExpression - } -} - -type JestEachCallExpression> = - | JestCalledEachCallExpression - | JestTaggedEachCallExpression - -export type JestFunctionCallExpression< - FunctionName extends Exclude = Exclude -> = - | JestEachCallExpression - | JestFunctionCallExpressionWithMemberExpressionCallee - | JestFunctionCallExpressionWithIdentifierCallee - -const joinNames = (a: string | null, b: string | null): string | null => (a && b ? 
`${a}.${b}` : null) - -export function getNodeName( - node: - | JestFunctionCallExpression - | JestFunctionMemberExpression - | JestFunctionIdentifier - | TSESTree.TaggedTemplateExpression -): string -export function getNodeName(node: TSESTree.Node): string | null -export function getNodeName(node: TSESTree.Node): string | null { - if (isSupportedAccessor(node)) { - return getAccessorValue(node) - } - - switch (node.type) { - case AST_NODE_TYPES.TaggedTemplateExpression: - return getNodeName(node.tag) - case AST_NODE_TYPES.MemberExpression: - return joinNames(getNodeName(node.object), getNodeName(node.property)) - case AST_NODE_TYPES.NewExpression: - case AST_NODE_TYPES.CallExpression: - return getNodeName(node.callee) - } - - return null -} - -export type FunctionExpression = TSESTree.ArrowFunctionExpression | TSESTree.FunctionExpression - -export const isFunction = (node: TSESTree.Node): node is FunctionExpression => - node.type === AST_NODE_TYPES.FunctionExpression || node.type === AST_NODE_TYPES.ArrowFunctionExpression - -export const isHook = ( - node: TSESTree.CallExpression -): node is JestFunctionCallExpressionWithIdentifierCallee => - node.callee.type === AST_NODE_TYPES.Identifier && HookName.hasOwnProperty(node.callee.name) - -export const getTestCallExpressionsFromDeclaredVariables = ( - declaredVariables: readonly TSESLint.Scope.Variable[] -): Array> => { - return declaredVariables.reduce>>( - (acc, { references }) => - acc.concat( - references - .map(({ identifier }) => identifier.parent) - .filter( - (node): node is JestFunctionCallExpression => - !!node && node.type === AST_NODE_TYPES.CallExpression && isTestCaseCall(node) - ) - ), - [] - ) -} - -const isTestCaseName = (node: TSESTree.LeftHandSideExpression) => - node.type === AST_NODE_TYPES.Identifier && TestCaseName.hasOwnProperty(node.name) - -const isTestCaseProperty = ( - node: TSESTree.Expression | TSESTree.PrivateIdentifier -): node is AccessorNode => - isSupportedAccessor(node) && 
TestCaseProperty.hasOwnProperty(getAccessorValue(node)) - -/** - * Checks if the given `node` is a *call* to a test case function that would - * result in tests being run by `jest`. - * - * Note that `.each()` does not count as a call in this context, as it will not - * result in `jest` running any tests. - * - * @param {TSESTree.CallExpression} node - * - * @return {node is JestFunctionCallExpression} - */ -export const isTestCaseCall = (node: TSESTree.CallExpression): node is JestFunctionCallExpression => { - if (isTestCaseName(node.callee)) { - return true - } - - const callee = - node.callee.type === AST_NODE_TYPES.TaggedTemplateExpression - ? node.callee.tag - : node.callee.type === AST_NODE_TYPES.CallExpression - ? node.callee.callee - : node.callee - - if (callee.type === AST_NODE_TYPES.MemberExpression && isTestCaseProperty(callee.property)) { - // if we're an `each()`, ensure we're the outer CallExpression (i.e `.each()()`) - if ( - getAccessorValue(callee.property) === 'each' && - node.callee.type !== AST_NODE_TYPES.TaggedTemplateExpression && - node.callee.type !== AST_NODE_TYPES.CallExpression - ) { - return false - } - - return callee.object.type === AST_NODE_TYPES.MemberExpression - ? isTestCaseName(callee.object.object) - : isTestCaseName(callee.object) - } - - return false -} - -const isDescribeAlias = (node: TSESTree.LeftHandSideExpression) => - node.type === AST_NODE_TYPES.Identifier && DescribeAlias.hasOwnProperty(node.name) - -const isDescribeProperty = ( - node: TSESTree.Expression | TSESTree.PrivateIdentifier -): node is AccessorNode => - isSupportedAccessor(node) && DescribeProperty.hasOwnProperty(getAccessorValue(node)) - -/** - * Checks if the given `node` is a *call* to a `describe` function that would - * result in a `describe` block being created by `jest`. - * - * Note that `.each()` does not count as a call in this context, as it will not - * result in `jest` creating any `describe` blocks. 
- * - * @param {TSESTree.CallExpression} node - * - * @return {node is JestFunctionCallExpression} - */ -export const isDescribeCall = (node: TSESTree.CallExpression): node is JestFunctionCallExpression => { - if (isDescribeAlias(node.callee)) { - return true - } - - const callee = - node.callee.type === AST_NODE_TYPES.TaggedTemplateExpression - ? node.callee.tag - : node.callee.type === AST_NODE_TYPES.CallExpression - ? node.callee.callee - : node.callee - - if (callee.type === AST_NODE_TYPES.MemberExpression && isDescribeProperty(callee.property)) { - // if we're an `each()`, ensure we're the outer CallExpression (i.e `.each()()`) - if ( - getAccessorValue(callee.property) === 'each' && - node.callee.type !== AST_NODE_TYPES.TaggedTemplateExpression && - node.callee.type !== AST_NODE_TYPES.CallExpression - ) { - return false - } - - return callee.object.type === AST_NODE_TYPES.MemberExpression - ? isDescribeAlias(callee.object.object) - : isDescribeAlias(callee.object) - } - - return false -} - -const collectReferences = (scope: TSESLint.Scope.Scope) => { - const locals = new Set() - const unresolved = new Set() - - let currentScope: TSESLint.Scope.Scope | null = scope - - while (currentScope !== null) { - for (const ref of currentScope.variables) { - const isReferenceDefined = ref.defs.some((def) => { - return def.type !== 'ImplicitGlobalVariable' - }) - - if (isReferenceDefined) { - locals.add(ref.name) - } - } - - for (const ref of currentScope.through) { - unresolved.add(ref.identifier.name) - } - - currentScope = currentScope.upper - } - - return { locals, unresolved } -} - -export const scopeHasLocalReference = (scope: TSESLint.Scope.Scope, referenceName: string) => { - const references = collectReferences(scope) - - return ( - // referenceName was found as a local variable or function declaration. - references.locals.has(referenceName) || - // referenceName was not found as an unresolved reference, - // meaning it is likely not an implicit global reference. 
- !references.unresolved.has(referenceName) - ) -} diff --git a/meteor/jest.config.js b/meteor/jest.config.js index 2c93ee5633b..b39804ae239 100644 --- a/meteor/jest.config.js +++ b/meteor/jest.config.js @@ -5,19 +5,13 @@ const commonConfig = { moduleNameMapper: {}, unmockedModulePathPatterns: ['/^imports\\/.*\\.jsx?$/', '/^node_modules/'], globals: {}, - moduleFileExtensions: ['ts', 'js'], + moduleFileExtensions: ['ts', 'js', 'json'], transform: { '^.+\\.(ts|tsx)$': [ 'ts-jest', { isolatedModules: true, // Skip type check to reduce memory impact, as we are already do a yarn check-types tsconfig: 'tsconfig.json', - babelConfig: { - plugins: [ - // Fibers and await do not work well together. This transpiles await calls to something that works - './__mocks__/plugins/meteor-async-await.js', - ], - }, diagnostics: { ignoreCodes: ['TS151001'], }, @@ -34,16 +28,6 @@ const commonConfig = { module.exports = { projects: [ Object.assign({}, commonConfig, { - displayName: 'lib', - testMatch: [ - '/lib/__tests__/**/*.(spec|test).(ts|js)', - '/lib/**/__tests__/**/*.(spec|test).(ts|js)', - '!.meteor/*.*', - ], - testEnvironment: 'node', - }), - Object.assign({}, commonConfig, { - displayName: 'server', testMatch: [ '/server/__tests__/**/*.(spec|test).(ts|js)', '/server/**/__tests__/**/*.(spec|test).(ts|js)', diff --git a/meteor/package.json b/meteor/package.json index f347b2c0961..c8ed1b51280 100644 --- a/meteor/package.json +++ b/meteor/package.json @@ -3,25 +3,23 @@ "version": "1.52.0-in-development", "private": true, "engines": { - "node": ">=14.19.1" + "node": ">=20.18" }, "scripts": { "preinstall": "node -v", "debug": "meteor run", "libs:syncVersions": "node scripts/libs-sync-version.js", "libs:syncVersionsAndChangelogs": "node scripts/libs-sync-version-and-changelog.js", - "postinstall": "meteor yarn run prepareForTest", - "prepareForTest": "node ../scripts/fixTestFibers.js", "inject-git-hash": "node ./scripts/generate-version-file.js", "unit": "jest", "unitci": "jest 
--maxWorkers 2 --coverage", "unitcov": "jest --coverage", - "test": "meteor yarn check-types && meteor yarn unit", + "test": "yarn check-types && yarn unit", "watch": "jest --watch", "update-snapshots": "jest --updateSnapshot", - "ci:lint": "meteor yarn check-types && meteor yarn lint", + "ci:lint": "yarn check-types && yarn lint", "cov-open": "open-cli coverage/lcov-report/index.html", - "cov": "meteor yarn unitcov && meteor yarn cov-open", + "cov": "yarn unitcov && yarn cov-open", "license-validate": "node ../scripts/checkLicenses.js --allowed=\"MIT,BSD,ISC,Apache,Unlicense,CC0,LGPL,CC BY 3.0,CC BY 4.0,MPL 2.0,Python 2.0\" --excludePackages=timecode,rxjs/ajax,rxjs/fetch,rxjs/internal-compatibility,nw-pre-gyp-module-test,rxjs/operators,rxjs/testing,rxjs/webSocket,undefined,i18next-conv,@fortawesome/fontawesome-common-types,argv,indexof,custom-license,private,public-domain-module,@sofie-automation/corelib,@sofie-automation/shared-lib,@sofie-automation/job-worker", "lint": "run lint:raw .", "lint:raw": "eslint --ext .ts --ext .js --ext .tsx --ext .jsx", @@ -54,6 +52,7 @@ "body-parser": "^1.20.2", "deep-extend": "0.6.0", "deepmerge": "^4.3.1", + "elastic-apm-node": "^4.8.0", "i18next": "^21.10.0", "indexof": "0.0.1", "koa": "^2.15.0", @@ -83,14 +82,13 @@ "@types/app-root-path": "^1.2.8", "@types/body-parser": "^1.19.5", "@types/deep-extend": "^0.6.2", - "@types/fibers": "^3.1.4", "@types/jest": "^29.5.11", "@types/koa": "^2.14.0", "@types/koa-bodyparser": "^4.3.12", "@types/koa-static": "^4.0.4", "@types/koa__cors": "^5.0.0", "@types/koa__router": "^12.0.4", - "@types/node": "^14.18.63", + "@types/node": "^20.17.6", "@types/request": "^2.48.12", "@types/semver": "^7.5.6", "@types/underscore": "^1.11.15", @@ -101,18 +99,15 @@ "ejson": "^2.2.3", "eslint": "^8.56.0", "eslint-config-prettier": "^8.10.0", - "eslint-plugin-custom-rules": "link:eslint-rules", "eslint-plugin-jest": "^27.6.3", "eslint-plugin-node": "^11.1.0", "eslint-plugin-prettier": "^4.2.1", "fast-clone": 
"^1.5.13", - "fibers-npm": "npm:fibers@5.0.3", "glob": "^8.1.0", "i18next-conv": "^10.2.0", "i18next-scanner": "^4.4.0", "jest": "^29.7.0", "legally": "^3.5.10", - "meteor-promise": "0.9.0", "open-cli": "^7.2.0", "prettier": "^2.8.8", "standard-version": "^9.5.0", diff --git a/meteor/server/__tests__/_testEnvironment.test.ts b/meteor/server/__tests__/_testEnvironment.test.ts index 549cba2c9b7..385c946810e 100644 --- a/meteor/server/__tests__/_testEnvironment.test.ts +++ b/meteor/server/__tests__/_testEnvironment.test.ts @@ -1,9 +1,7 @@ -import { Meteor } from 'meteor/meteor' import { RandomMock } from '../../__mocks__/random' import { MongoMock } from '../../__mocks__/mongo' import { protectString, getRandomString } from '../lib/tempLib' -import { waitForPromise, sleep } from '../lib/lib' -import { testInFiber } from '../../__mocks__/helpers/jest' +import { sleep } from '../lib/lib' import { AdLibPieces, Blueprints, @@ -31,31 +29,11 @@ import { UserActionsLog, } from '../collections' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { isInFiber } from '../../__mocks__/Fibers' import { Mongo } from 'meteor/mongo' import { defaultStudio } from '../../__mocks__/defaultCollectionObjects' +import { MinimalMeteorMongoCollection } from '../collections/implementations/asyncCollection' describe('Basic test of test environment', () => { - testInFiber('Check that tests will run in fibers correctly', () => { - // This code runs in a fiber - expect(isInFiber()).toBeTruthy() - - const val = asynchronousFibersFunction(1, 2, 3) - expect(val).toEqual(1 + 2 + 3) - - const p = Promise.resolve() - .then(() => { - expect(isInFiber()).toBeTruthy() - return 'a' - }) - .then(async (innerVal) => { - return new Promise((resolve) => { - expect(isInFiber()).toBeTruthy() - resolve(innerVal) - }) - }) - expect(waitForPromise(p)).toEqual('a') - }) test('Meteor Random mock', () => { RandomMock.mockIds = ['superRandom'] 
expect(tempTestRandom()).toEqual('superRandom') @@ -168,64 +146,53 @@ describe('Basic test of test environment', () => { MongoMock.mockSetData(Studios, null) expect(await Studios.findFetchAsync({})).toHaveLength(0) }) - testInFiber('Promises in fibers', () => { - const p = new Promise((resolve) => { - setTimeout(() => { - resolve('yup') - }, 10) - }) - - const result = waitForPromise(p) - - expect(result).toEqual('yup') - }) - testInFiber('Mongo mock', async () => { + test('Mongo mock', async () => { const mockAdded = jest.fn() const mockChanged = jest.fn() const mockRemoved = jest.fn() - const collection = new Mongo.Collection('testmock') + const collection = new Mongo.Collection('testmock') as any as MinimalMeteorMongoCollection - collection + await collection .find({ prop: 'b', }) - .observeChanges({ + .observeChangesAsync({ added: mockAdded, changed: mockChanged, removed: mockRemoved, }) - expect(collection.find({}).fetch()).toHaveLength(0) + expect(await collection.find({}).fetchAsync()).toHaveLength(0) - const id = collection.insert({ prop: 'a' }) + const id = await collection.insertAsync({ prop: 'a' }) expect(id).toBeTruthy() - expect(collection.find({}).fetch()).toHaveLength(1) - expect(collection.findOne(id)).toMatchObject({ - prop: 'a', - }) - expect(collection.remove(id)).toEqual(1) - expect(collection.find({}).fetch()).toHaveLength(0) + expect(await collection.find({}).fetchAsync()).toHaveLength(1) + // expect(collection.findOne(id)).toMatchObject({ + // prop: 'a', + // }) + expect(await collection.removeAsync(id)).toEqual(1) + expect(await collection.find({}).fetchAsync()).toHaveLength(0) expect(mockAdded).toHaveBeenCalledTimes(0) expect(mockChanged).toHaveBeenCalledTimes(0) expect(mockRemoved).toHaveBeenCalledTimes(0) - const id2 = collection.insert({ prop: 'b' }) + const id2 = await collection.insertAsync({ prop: 'b' }) await sleep(10) expect(mockAdded).toHaveBeenCalledTimes(1) expect(mockChanged).toHaveBeenCalledTimes(0) 
expect(mockRemoved).toHaveBeenCalledTimes(0) mockAdded.mockClear() - collection.update(id2, { $set: { name: 'test' } }) + await collection.updateAsync(id2, { $set: { name: 'test' } }) await sleep(10) expect(mockAdded).toHaveBeenCalledTimes(0) expect(mockChanged).toHaveBeenCalledTimes(1) expect(mockRemoved).toHaveBeenCalledTimes(0) mockChanged.mockClear() - collection.remove(id2) + await collection.removeAsync(id2) await sleep(10) expect(mockAdded).toHaveBeenCalledTimes(0) expect(mockChanged).toHaveBeenCalledTimes(0) @@ -233,18 +200,6 @@ describe('Basic test of test environment', () => { }) }) -function asynchronousFibersFunction(a: number, b: number, c: number): number { - return innerAsynchronousFiberFunction(a, b) + c -} - -const innerAsynchronousFiberFunction = Meteor.wrapAsync( - (val0: number, val1: number, cb: (err: any, result: number) => void) => { - setTimeout(() => { - cb(undefined, val0 + val1) - }, 10) - } -) - function tempTestRandom() { return getRandomString() } diff --git a/meteor/server/__tests__/coreSystem.test.ts b/meteor/server/__tests__/coreSystem.test.ts index 3d200e3f8c9..147f26199a5 100644 --- a/meteor/server/__tests__/coreSystem.test.ts +++ b/meteor/server/__tests__/coreSystem.test.ts @@ -1,8 +1,7 @@ -import { testInFiber } from '../../__mocks__/helpers/jest' import { RelevantSystemVersions } from '../coreSystem' describe('coreSystem', () => { - testInFiber('RelevantSystemVersions', async () => { + test('RelevantSystemVersions', async () => { const versions = await RelevantSystemVersions expect(versions).toEqual({ diff --git a/meteor/server/__tests__/cronjobs.test.ts b/meteor/server/__tests__/cronjobs.test.ts index 2c189d38afd..65bd80d24cd 100644 --- a/meteor/server/__tests__/cronjobs.test.ts +++ b/meteor/server/__tests__/cronjobs.test.ts @@ -1,5 +1,5 @@ import '../../__mocks__/_extendJest' -import { testInFiber, runAllTimers, beforeAllInFiber, waitUntil } from '../../__mocks__/helpers/jest' +import { runAllTimers, waitUntil } from 
'../../__mocks__/helpers/jest' import { MeteorMock } from '../../__mocks__/meteor' import { logger } from '../logging' import { getRandomId, getRandomString, protectString } from '../lib/tempLib' @@ -69,7 +69,7 @@ describe('cronjobs', () => { let env: DefaultEnvironment let rundownId: RundownId - beforeAllInFiber(async () => { + beforeAll(async () => { env = await setupDefaultStudioEnvironment() const o = await setupDefaultRundownPlaylist(env) @@ -88,7 +88,7 @@ describe('cronjobs', () => { jest.useFakeTimers() // set time to 2020/07/19 00:00 Local Time mockCurrentTime = new Date(2020, 6, 19, 0, 0, 0).getTime() - MeteorMock.mockRunMeteorStartup() + await MeteorMock.mockRunMeteorStartup() origGetCurrentTime = lib.getCurrentTime //@ts-ignore Mock getCurrentTime for tests lib.getCurrentTime = jest.fn(() => { @@ -101,22 +101,22 @@ describe('cronjobs', () => { await CoreSystem.removeAsync(SYSTEM_ID) }) describe('Runs at the appropriate time', () => { - testInFiber("Doesn't run during the day", async () => { + test("Doesn't run during the day", async () => { // set time to 2020/07/19 12:00 Local Time mockCurrentTime = new Date(2020, 6, 19, 12, 0, 0).getTime() // cronjob is checked every 5 minutes, so advance 6 minutes - jest.advanceTimersByTime(6 * 60 * 1000) + await jest.advanceTimersByTimeAsync(6 * 60 * 1000) expect(lib.getCurrentTime).toHaveBeenCalled() await runAllTimers() expect(logger.info).toHaveBeenCalledTimes(0) }) - testInFiber("Runs at 4 o'clock", async () => { + test("Runs at 4 o'clock", async () => { // set time to 2020/07/20 04:05 Local Time, should be more than 24 hours after 2020/07/19 00:00 UTC mockCurrentTime = new Date(2020, 6, 20, 4, 5, 0).getTime() // cronjob is checked every 5 minutes, so advance 6 minutes - jest.advanceTimersByTime(6 * 60 * 1000) + await jest.advanceTimersByTimeAsync(6 * 60 * 1000) expect(lib.getCurrentTime).toHaveBeenCalled() expect(logger.info).not.toHaveBeenLastCalledWith('Nightly cronjob: done') @@ -127,11 +127,11 @@ 
describe('cronjobs', () => { expect(logger.info).toHaveBeenLastCalledWith('Nightly cronjob: done') }, MAX_WAIT_TIME) }) - testInFiber("Doesn't run if less than 20 hours have passed since last run", async () => { + test("Doesn't run if less than 20 hours have passed since last run", async () => { // set time to 2020/07/21 04:05 Local Time, should be more than 24 hours after 2020/07/19 00:00 UTC mockCurrentTime = new Date(2020, 6, 21, 4, 5, 0).getTime() // cronjob is checked every 5 minutes, so advance 6 minutes - jest.advanceTimersByTime(6 * 60 * 1000) + await jest.advanceTimersByTimeAsync(6 * 60 * 1000) expect(lib.getCurrentTime).toHaveBeenCalled() expect(logger.info).not.toHaveBeenLastCalledWith('Nightly cronjob: done') @@ -145,7 +145,7 @@ describe('cronjobs', () => { ;(logger.info as jest.Mock).mockClear() mockCurrentTime = new Date(2020, 6, 20, 4, 50, 0).getTime() - jest.advanceTimersByTime(6 * 60 * 1000) + await jest.advanceTimersByTimeAsync(6 * 60 * 1000) await runAllTimers() // less than 24 hours have passed so we do not expect the cronjob to run @@ -159,7 +159,7 @@ describe('cronjobs', () => { // set time to 2020/07/{date} 04:05 Local Time, should be more than 24 hours after 2020/07/19 00:00 UTC mockCurrentTime = new Date(2020, 6, date++, 4, 5, 0).getTime() // cronjob is checked every 5 minutes, so advance 6 minutes - jest.advanceTimersByTime(6 * 60 * 1000) + await jest.advanceTimersByTimeAsync(6 * 60 * 1000) expect(logger.info).not.toHaveBeenLastCalledWith('Nightly cronjob: done') await waitUntil(async () => { @@ -173,7 +173,7 @@ describe('cronjobs', () => { await PeripheralDevices.removeAsync({}) }) - testInFiber('Remove NrcsIngestDataCache objects that are not connected to any Rundown', async () => { + test('Remove NrcsIngestDataCache objects that are not connected to any Rundown', async () => { // Set up a mock rundown, a detached NrcsIngestDataCache object and an object attached to the mock rundown // Detached NrcsIngestDataCache object 0 const 
dataCache0Id = protectString(getRandomString()) @@ -217,7 +217,7 @@ describe('cronjobs', () => { }) expect(await NrcsIngestDataCache.findOneAsync(dataCache0Id)).toBeUndefined() }) - testInFiber('Remove SofieIngestDataCache objects that are not connected to any Rundown', async () => { + test('Remove SofieIngestDataCache objects that are not connected to any Rundown', async () => { // Set up a mock rundown, a detached SofieIngestDataCache object and an object attached to the mock rundown // Detached SofieIngestDataCache object 0 const dataCache0Id = protectString(getRandomString()) @@ -263,7 +263,7 @@ describe('cronjobs', () => { }) expect(await SofieIngestDataCache.findOneAsync(dataCache0Id)).toBeUndefined() }) - testInFiber('Removes old PartInstances and PieceInstances', async () => { + test('Removes old PartInstances and PieceInstances', async () => { // nightlyCronjobInner() const segment0: DBSegment = { @@ -396,7 +396,7 @@ describe('cronjobs', () => { expect(await PieceInstances.findOneAsync(pieceInstance0._id)).toBeDefined() expect(await PieceInstances.findOneAsync(pieceInstance1._id)).toBeUndefined() // Removed, since owned by non-existent partInstance2 }) - testInFiber('Removes old entries in UserActionsLog', async () => { + test('Removes old entries in UserActionsLog', async () => { // reasonably fresh entry const userAction0 = protectString(getRandomString()) await UserActionsLog.insertAsync({ @@ -430,7 +430,7 @@ describe('cronjobs', () => { }) expect(await UserActionsLog.findOneAsync(userAction1)).toBeUndefined() }) - testInFiber('Removes old entries in Snapshots', async () => { + test('Removes old entries in Snapshots', async () => { // reasonably fresh entry const snapshot0 = protectString(getRandomString()) await Snapshots.insertAsync({ @@ -524,14 +524,14 @@ describe('cronjobs', () => { } } - testInFiber('Attempts to restart CasparCG when job is enabled', async () => { + test('Attempts to restart CasparCG when job is enabled', async () => { const { 
mockCasparCg } = await createMockPlayoutGatewayAndDevices(Date.now()) // Some time after the threshold ;(logger.info as jest.Mock).mockClear() // set time to 2020/07/{date} 04:05 Local Time, should be more than 24 hours after 2020/07/19 00:00 UTC mockCurrentTime = new Date(2020, 6, date++, 4, 5, 0).getTime() // cronjob is checked every 5 minutes, so advance 6 minutes - jest.advanceTimersByTime(6 * 60 * 1000) + await jest.advanceTimersByTimeAsync(6 * 60 * 1000) await runAllTimers() // check if the correct PeripheralDevice command has been issued, and only for CasparCG devices @@ -543,16 +543,18 @@ describe('cronjobs', () => { }) // Emulate that the restart was successful: - pendingCommands.forEach((cmd) => { - Meteor.call( - 'peripheralDevice.functionReply', - cmd.deviceId, // deviceId - '', // deviceToken - cmd._id, // commandId - null, // err - null // result + await Promise.all( + pendingCommands.map(async (cmd) => + Meteor.callAsync( + 'peripheralDevice.functionReply', + cmd.deviceId, // deviceId + '', // deviceToken + cmd._id, // commandId + null, // err + null // result + ) ) - }) + ) expect(logger.info).not.toHaveBeenLastCalledWith('Nightly cronjob: done') await waitUntil(async () => { @@ -561,7 +563,7 @@ describe('cronjobs', () => { expect(logger.info).toHaveBeenLastCalledWith('Nightly cronjob: done') }, MAX_WAIT_TIME) }) - testInFiber('Skips offline CasparCG when job is enabled', async () => { + test('Skips offline CasparCG when job is enabled', async () => { const { mockCasparCg } = await createMockPlayoutGatewayAndDevices(Date.now()) // Some time after the threshold await PeripheralDevices.updateAsync(mockCasparCg, { $set: { @@ -572,7 +574,7 @@ describe('cronjobs', () => { // set time to 2020/07/{date} 04:05 Local Time, should be more than 24 hours after 2020/07/19 00:00 UTC mockCurrentTime = new Date(2020, 6, date++, 4, 5, 0).getTime() // cronjob is checked every 5 minutes, so advance 6 minutes - jest.advanceTimersByTime(6 * 60 * 1000) + await 
jest.advanceTimersByTimeAsync(6 * 60 * 1000) await waitUntil(async () => { // Run timers, so that all promises in the cronjob has a chance to resolve: @@ -587,7 +589,7 @@ describe('cronjobs', () => { expect(logger.info).toHaveBeenLastCalledWith('Nightly cronjob: done') }, MAX_WAIT_TIME) }) - testInFiber('Does not attempt to restart CasparCG when job is disabled', async () => { + test('Does not attempt to restart CasparCG when job is disabled', async () => { await createMockPlayoutGatewayAndDevices(Date.now()) // Some time after the threshold await CoreSystem.updateAsync( {}, @@ -602,7 +604,7 @@ describe('cronjobs', () => { // set time to 2020/07/{date} 04:05 Local Time, should be more than 24 hours after 2020/07/19 00:00 UTC mockCurrentTime = new Date(2020, 6, date++, 4, 5, 0).getTime() // cronjob is checked every 5 minutes, so advance 6 minutes - jest.advanceTimersByTime(6 * 60 * 1000) + await jest.advanceTimersByTimeAsync(6 * 60 * 1000) jest.runOnlyPendingTimers() // check if the no PeripheralDevice command have been issued diff --git a/meteor/server/__tests__/logging.test.ts b/meteor/server/__tests__/logging.test.ts index 0485711eff6..d1dcc59cd1c 100644 --- a/meteor/server/__tests__/logging.test.ts +++ b/meteor/server/__tests__/logging.test.ts @@ -1,10 +1,9 @@ -import { testInFiber } from '../../__mocks__/helpers/jest' import { supressLogging } from '../../__mocks__/helpers/lib' import { SupressLogMessages } from '../../__mocks__/suppressLogging' import { logger } from '../logging' describe('server/logger', () => { - testInFiber('supress errors', async () => { + test('supress errors', async () => { const logMessages = () => { logger.debug('This is a debug message') logger.info('This is an info message') @@ -20,7 +19,7 @@ describe('server/logger', () => { expect(1).toBe(1) }) - testInFiber('logger', () => { + test('logger', () => { expect(typeof logger.error).toEqual('function') expect(typeof logger.warn).toEqual('function') // expect(typeof 
logger.help).toEqual('function') diff --git a/meteor/server/__tests__/systemTime.test.ts b/meteor/server/__tests__/systemTime.test.ts index 4c9a1a2c1b3..5f341fc817a 100644 --- a/meteor/server/__tests__/systemTime.test.ts +++ b/meteor/server/__tests__/systemTime.test.ts @@ -1,8 +1,8 @@ -import { runTimersUntilNow, testInFiber } from '../../__mocks__/helpers/jest' +import { runTimersUntilNow } from '../../__mocks__/helpers/jest' import { TimeJumpDetector } from '../systemTime' describe('lib/systemTime', () => { - testInFiber('TimeJumpDetector', async () => { + test('TimeJumpDetector', async () => { jest.useFakeTimers() const mockCallback = jest.fn() let now = Date.now() diff --git a/meteor/server/api/ExternalMessageQueue.ts b/meteor/server/api/ExternalMessageQueue.ts index 51e08009856..5d90abb7e3a 100644 --- a/meteor/server/api/ExternalMessageQueue.ts +++ b/meteor/server/api/ExternalMessageQueue.ts @@ -1,7 +1,7 @@ import { Meteor } from 'meteor/meteor' import { check } from '../lib/check' import { StatusCode } from '@sofie-automation/blueprints-integration' -import { deferAsync, getCurrentTime, MeteorStartupAsync } from '../lib/lib' +import { deferAsync, getCurrentTime } from '../lib/lib' import { registerClassToMeteorMethods } from '../methods' import { NewExternalMessageQueueAPI, @@ -50,7 +50,7 @@ function updateExternalMessageQueueStatus(): void { } } -MeteorStartupAsync(async () => { +Meteor.startup(async () => { await ExternalMessageQueue.observeChanges( { sent: { $not: { $gt: 0 } }, diff --git a/meteor/server/api/__tests__/cleanup.test.ts b/meteor/server/api/__tests__/cleanup.test.ts index ae7dd14fcb7..462511ca2c6 100644 --- a/meteor/server/api/__tests__/cleanup.test.ts +++ b/meteor/server/api/__tests__/cleanup.test.ts @@ -1,5 +1,4 @@ import { getRandomId } from '../../lib/tempLib' -import { beforeEachInFiber, testInFiber } from '../../../__mocks__/helpers/jest' import '../../collections' // include this in order to get all of the collection set up import { 
cleanupOldDataInner } from '../cleanup' @@ -56,12 +55,12 @@ import { DBNotificationTargetType } from '@sofie-automation/corelib/dist/dataMod describe('Cleanup', () => { let env: DefaultEnvironment - beforeEachInFiber(async () => { + beforeEach(async () => { await clearAllDBCollections() env = await setupDefaultStudioEnvironment() }) - testInFiber('Check that all collections are covered', async () => { + test('Check that all collections are covered', async () => { expect(Collections.size).toBeGreaterThan(10) const result = await cleanupOldDataInner(false) @@ -73,7 +72,7 @@ describe('Cleanup', () => { } }) - testInFiber('No bad removals', async () => { + test('No bad removals', async () => { // Check that cleanupOldDataInner() doesn't remove any data when the default data set is in the DB. await setDefaultDatatoDB(env, Date.now()) @@ -94,7 +93,7 @@ describe('Cleanup', () => { expect(await RundownPlaylists.countDocuments()).toBe(1) expect(await Rundowns.countDocuments()).toBe(1) }) - testInFiber('All dependants should be removed', async () => { + test('All dependants should be removed', async () => { // Check that cleanupOldDataInner() cleans up all data from the database. await setDefaultDatatoDB(env, 0) @@ -137,7 +136,7 @@ describe('Cleanup', () => { } } }) - testInFiber('PieceInstance should be removed when PartInstance is removed', async () => { + test('PieceInstance should be removed when PartInstance is removed', async () => { // Check that cleanupOldDataInner() cleans up all data from the database. 
await setDefaultDatatoDB(env, 0) diff --git a/meteor/server/api/__tests__/client.test.ts b/meteor/server/api/__tests__/client.test.ts index 018b2bc936f..78fcfcc4dcf 100644 --- a/meteor/server/api/__tests__/client.test.ts +++ b/meteor/server/api/__tests__/client.test.ts @@ -3,10 +3,8 @@ import { MeteorMock } from '../../../__mocks__/meteor' import { UserActionsLogItem } from '@sofie-automation/meteor-lib/dist/collections/UserActionsLog' import { ClientAPIMethods } from '@sofie-automation/meteor-lib/dist/api/client' import { protectString, LogLevel } from '../../lib/tempLib' -import { makePromise } from '../../lib/lib' import { PeripheralDeviceCommand } from '@sofie-automation/corelib/dist/dataModel/PeripheralDeviceCommand' import { setLogLevel } from '../../logging' -import { testInFiber, beforeAllInFiber } from '../../../__mocks__/helpers/jest' import { PeripheralDeviceCategory, PeripheralDeviceType, @@ -26,7 +24,7 @@ const orgSetTimeout = setTimeout describe('ClientAPI', () => { let mockDeviceId: PeripheralDeviceId = protectString('not set yet') - beforeAllInFiber(async () => { + beforeAll(async () => { const studio = await setupMockStudio() const mockDevice = await setupMockPeripheralDevice( PeripheralDeviceCategory.PLAYOUT, @@ -37,10 +35,10 @@ describe('ClientAPI', () => { mockDeviceId = mockDevice._id }) describe('clientErrorReport', () => { - testInFiber('Exports a Meteor method to the client', () => { + test('Exports a Meteor method to the client', () => { expect(MeteorMock.mockMethods[ClientAPIMethods.clientErrorReport]).toBeTruthy() }) - testInFiber('Returns a success response to the client', async () => { + test('Returns a success response to the client', async () => { SupressLogMessages.suppressLogMessage(/Uncaught error happened in GUI/i) // should not throw: await MeteorCall.client.clientErrorReport(1000, 'MockString', 'MockLocation') @@ -53,14 +51,14 @@ describe('ClientAPI', () => { const mockContext = 'Context description' const mockArgs = 
['mockArg1', 'mockArg2'] - testInFiber('Exports a Meteor method to the client', () => { + test('Exports a Meteor method to the client', () => { expect(MeteorMock.mockMethods[ClientAPIMethods.callPeripheralDeviceFunction]).toBeTruthy() }) describe('Call a method on the peripheralDevice', () => { let logMethodName = `not set yet` let promise: Promise - beforeAllInFiber(async () => { + beforeAll(async () => { logMethodName = `${mockDeviceId}: ${mockFunctionName}` promise = MeteorCall.client.callPeripheralDeviceFunction( mockContext, @@ -72,7 +70,7 @@ describe('ClientAPI', () => { promise.catch(() => null) // Dismiss uncaught promise warning await new Promise((resolve) => orgSetTimeout(resolve, 100)) }) - testInFiber('Logs the call in UserActionsLog', async () => { + test('Logs the call in UserActionsLog', async () => { const log = (await UserActionsLog.findOneAsync({ method: logMethodName, })) as UserActionsLogItem @@ -82,7 +80,7 @@ describe('ClientAPI', () => { expect(log.userId).toBeDefined() }) - testInFiber('Sends a call to the peripheralDevice', async () => { + test('Sends a call to the peripheralDevice', async () => { const pdc = (await PeripheralDeviceCommands.findOneAsync({ deviceId: mockDeviceId, functionName: mockFunctionName, @@ -93,57 +91,52 @@ describe('ClientAPI', () => { expect(pdc.functionName).toBe(mockFunctionName) expect(pdc.args).toMatchObject(mockArgs) }) - testInFiber( - 'Resolves the returned promise once a response from the peripheralDevice is received', - async () => { - await PeripheralDeviceCommands.updateAsync( - { - deviceId: mockDeviceId, - functionName: mockFunctionName, + test('Resolves the returned promise once a response from the peripheralDevice is received', async () => { + await PeripheralDeviceCommands.updateAsync( + { + deviceId: mockDeviceId, + functionName: mockFunctionName, + }, + { + $set: { + hasReply: true, + reply: 'OK', }, - { - $set: { - hasReply: true, - reply: 'OK', - }, - }, - { multi: true } - ) - return 
promise.then(async (value) => { - const log = (await UserActionsLog.findOneAsync({ - method: logMethodName, - })) as UserActionsLogItem - expect(log).toBeTruthy() - - expect(log.success).toBe(true) - expect(log.doneTime).toBeDefined() - expect(value).toBe('OK') - }) - } - ) + }, + { multi: true } + ) + return promise.then(async (value) => { + const log = (await UserActionsLog.findOneAsync({ + method: logMethodName, + })) as UserActionsLogItem + expect(log).toBeTruthy() + + expect(log.success).toBe(true) + expect(log.doneTime).toBeDefined() + expect(value).toBe('OK') + }) + }) }) describe('Call a failing method on the peripheralDevice', () => { let logMethodName = `not set yet` let promise: Promise - beforeAllInFiber(async () => { + beforeAll(async () => { logMethodName = `${mockDeviceId}: ${mockFailingFunctionName}` - promise = makePromise(() => { - return Meteor.call( - ClientAPIMethods.callPeripheralDeviceFunction, - mockContext, - mockDeviceId, - undefined, - mockFailingFunctionName, - ...mockArgs - ) - }) + promise = Meteor.callAsync( + ClientAPIMethods.callPeripheralDeviceFunction, + mockContext, + mockDeviceId, + undefined, + mockFailingFunctionName, + ...mockArgs + ) promise.catch(() => null) // Dismiss uncaught promise warning await new Promise((resolve) => orgSetTimeout(resolve, 100)) }) - testInFiber('Logs the call in UserActionsLog', async () => { + test('Logs the call in UserActionsLog', async () => { const log = (await UserActionsLog.findOneAsync({ method: logMethodName, })) as UserActionsLogItem @@ -152,7 +145,7 @@ describe('ClientAPI', () => { expect(log.method).toBe(logMethodName) expect(log.userId).toBeDefined() }) - testInFiber('Sends a call to the peripheralDevice', async () => { + test('Sends a call to the peripheralDevice', async () => { const pdc = (await PeripheralDeviceCommands.findOneAsync({ deviceId: mockDeviceId, functionName: mockFailingFunctionName, @@ -163,38 +156,35 @@ describe('ClientAPI', () => { 
expect(pdc.functionName).toBe(mockFailingFunctionName) expect(pdc.args).toMatchObject(mockArgs) }) - testInFiber( - 'Resolves the returned promise once a response from the peripheralDevice is received', - async () => { - SupressLogMessages.suppressLogMessage(/Failed/i) - SupressLogMessages.suppressLogMessage(/Failed/i) - await PeripheralDeviceCommands.updateAsync( - { - deviceId: mockDeviceId, - functionName: mockFailingFunctionName, + test('Resolves the returned promise once a response from the peripheralDevice is received', async () => { + SupressLogMessages.suppressLogMessage(/Failed/i) + SupressLogMessages.suppressLogMessage(/Failed/i) + await PeripheralDeviceCommands.updateAsync( + { + deviceId: mockDeviceId, + functionName: mockFailingFunctionName, + }, + { + $set: { + hasReply: true, + replyError: 'Failed', }, - { - $set: { - hasReply: true, - replyError: 'Failed', - }, - }, - { multi: true } - ) + }, + { multi: true } + ) - // This will probably resolve after around 3s, since that is the timeout time - // of checkReply and the observeChanges is not implemented in the mock - await expect(promise).rejects.toBe('Failed') + // This will probably resolve after around 3s, since that is the timeout time + // of checkReply and the observeChanges is not implemented in the mock + await expect(promise).rejects.toBe('Failed') - const log = (await UserActionsLog.findOneAsync({ - method: logMethodName, - })) as UserActionsLogItem - expect(log).toBeTruthy() + const log = (await UserActionsLog.findOneAsync({ + method: logMethodName, + })) as UserActionsLogItem + expect(log).toBeTruthy() - expect(log.success).toBe(false) - expect(log.doneTime).toBeDefined() - } - ) + expect(log.success).toBe(false) + expect(log.doneTime).toBeDefined() + }) }) }) }) diff --git a/meteor/server/api/__tests__/externalMessageQueue.test.ts b/meteor/server/api/__tests__/externalMessageQueue.test.ts index 65fca90ee35..402efa0958d 100644 --- a/meteor/server/api/__tests__/externalMessageQueue.test.ts 
+++ b/meteor/server/api/__tests__/externalMessageQueue.test.ts @@ -2,7 +2,6 @@ import '../../../__mocks__/_extendJest' import { ExternalMessageQueueObj } from '@sofie-automation/corelib/dist/dataModel/ExternalMessageQueue' import { ExternalMessageQueue, RundownPlaylists, Rundowns } from '../../collections' import { IBlueprintExternalMessageQueueType, PlaylistTimingType } from '@sofie-automation/blueprints-integration' -import { testInFiber } from '../../../__mocks__/helpers/jest' import { DefaultEnvironment, setupDefaultStudioEnvironment } from '../../../__mocks__/helpers/database' import { getRandomId, protectString } from '../../lib/tempLib' import { getCurrentTime } from '../../lib/lib' @@ -84,7 +83,7 @@ describe('Test external message queue static methods', () => { }) }) - testInFiber('toggleHold', async () => { + test('toggleHold', async () => { let message = (await ExternalMessageQueue.findOneAsync({})) as ExternalMessageQueueObj expect(message).toBeTruthy() expect(message.hold).toBeUndefined() @@ -100,7 +99,7 @@ describe('Test external message queue static methods', () => { expect(message.hold).toBe(false) }) - testInFiber('toggleHold unknown id', async () => { + test('toggleHold unknown id', async () => { SupressLogMessages.suppressLogMessage(/ExternalMessage/i) await expect(MeteorCall.externalMessages.toggleHold(protectString('cake'))).rejects.toThrowMeteor( 404, @@ -108,7 +107,7 @@ describe('Test external message queue static methods', () => { ) }) - testInFiber('retry', async () => { + test('retry', async () => { let message = (await ExternalMessageQueue.findOneAsync({})) as ExternalMessageQueueObj expect(message).toBeTruthy() @@ -123,7 +122,7 @@ describe('Test external message queue static methods', () => { }) }) - testInFiber('retry unknown id', async () => { + test('retry unknown id', async () => { SupressLogMessages.suppressLogMessage(/ExternalMessage/i) await expect(MeteorCall.externalMessages.retry(protectString('is_a_lie'))).rejects.toThrowMeteor( 
404, @@ -131,7 +130,7 @@ describe('Test external message queue static methods', () => { ) }) - testInFiber('remove', async () => { + test('remove', async () => { const message = (await ExternalMessageQueue.findOneAsync({})) as ExternalMessageQueueObj expect(message).toBeTruthy() diff --git a/meteor/server/api/__tests__/methods.test.ts b/meteor/server/api/__tests__/methods.test.ts deleted file mode 100644 index 5b0e3be3e67..00000000000 --- a/meteor/server/api/__tests__/methods.test.ts +++ /dev/null @@ -1,27 +0,0 @@ -import '../../../__mocks__/_extendJest' -import { MeteorDebugMethods } from '../../methods' -import { Settings } from '../../Settings' -import { MeteorPromiseApply } from '../methods' -import { testInFiber } from '../../../__mocks__/helpers/jest' - -testInFiber('MeteorPromiseApply', async () => { - // set up method: - Settings.enableUserAccounts = false - MeteorDebugMethods({ - myMethod: async (value1: string, value2: string) => { - // Do an async operation, to ensure that asynchronous operations work: - const v = await new Promise((resolve) => { - setTimeout(() => { - resolve(value1 + value2) - }, 10) - }) - return v - }, - }) - const pValue: any = MeteorPromiseApply('myMethod', ['myValue', 'AAA']).catch((e) => { - throw e - }) - expect(pValue).toHaveProperty('then') // be a promise - const value = await pValue - expect(value).toEqual('myValueAAA') -}) diff --git a/meteor/server/api/__tests__/peripheralDevice.test.ts b/meteor/server/api/__tests__/peripheralDevice.test.ts index 7299084e523..6efe7a15969 100644 --- a/meteor/server/api/__tests__/peripheralDevice.test.ts +++ b/meteor/server/api/__tests__/peripheralDevice.test.ts @@ -8,7 +8,7 @@ import { import { EmptyPieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { literal, protectString, ProtectedString, getRandomId, LogLevel, getRandomString } from '../../lib/tempLib' import { getCurrentTime } from '../../lib/lib' -import { testInFiber, waitUntil } from 
'../../../__mocks__/helpers/jest' +import { waitUntil } from '../../../__mocks__/helpers/jest' import { setupDefaultStudioEnvironment, DefaultEnvironment } from '../../../__mocks__/helpers/database' import { setLogLevel } from '../../logging' import { @@ -177,7 +177,7 @@ describe('test peripheralDevice general API methods', () => { QueueStudioJobSpy.mockClear() }) - testInFiber('initialize', async () => { + test('initialize', async () => { if (DEBUG) setLogLevel(LogLevel.DEBUG) expect(await PeripheralDevices.findOneAsync(device._id)).toBeTruthy() @@ -202,7 +202,7 @@ describe('test peripheralDevice general API methods', () => { expect(initDevice.subType).toBe(options.subType) }) - testInFiber('setStatus', async () => { + test('setStatus', async () => { expect(await PeripheralDevices.findOneAsync(device._id)).toBeTruthy() expect(((await PeripheralDevices.findOneAsync(device._id)) as PeripheralDevice).status).toMatchObject({ statusCode: StatusCode.GOOD, @@ -217,7 +217,7 @@ describe('test peripheralDevice general API methods', () => { }) }) - testInFiber('getPeripheralDevice', async () => { + test('getPeripheralDevice', async () => { const gotDevice: PeripheralDeviceForDevice = await MeteorCall.peripheralDevice.getPeripheralDevice( device._id, device.token @@ -226,7 +226,7 @@ describe('test peripheralDevice general API methods', () => { expect(gotDevice._id).toBe(device._id) }) - testInFiber('ping', async () => { + test('ping', async () => { jest.useFakeTimers() const EPOCH = 10000 jest.setSystemTime(EPOCH) @@ -253,7 +253,7 @@ describe('test peripheralDevice general API methods', () => { jest.useRealTimers() }) - testInFiber('determineDiffTime', async () => { + test('determineDiffTime', async () => { const response = await MeteorCall.peripheralDevice.determineDiffTime() expect(response).toBeTruthy() expect(Math.abs(response.mean - 400)).toBeLessThan(10) // be about 400 @@ -261,7 +261,7 @@ describe('test peripheralDevice general API methods', () => { 
expect(response.stdDev).toBeGreaterThan(0.1) }) - testInFiber('getTimeDiff', async () => { + test('getTimeDiff', async () => { const response = await MeteorCall.peripheralDevice.getTimeDiff() const now = getCurrentTime() expect(response).toBeTruthy() @@ -273,14 +273,14 @@ describe('test peripheralDevice general API methods', () => { expect(response.good).toBeDefined() }) - testInFiber('getTime', async () => { + test('getTime', async () => { const response = await MeteorCall.peripheralDevice.getTime() const now = getCurrentTime() expect(response).toBeGreaterThan(now - 30) expect(response).toBeLessThan(now + 30) }) - testInFiber('pingWithCommand and functionReply', async () => { + test('pingWithCommand and functionReply', async () => { jest.useFakeTimers() const EPOCH = 10000 jest.setSystemTime(EPOCH) @@ -329,7 +329,7 @@ describe('test peripheralDevice general API methods', () => { expect(resultMessage).toBeUndefined() const replyMessage = 'Waving back!' - Meteor.call( + await Meteor.callAsync( PeripheralDeviceAPIMethods.functionReply, device._id, device.token, @@ -357,7 +357,7 @@ describe('test peripheralDevice general API methods', () => { jest.useRealTimers() }) - testInFiber('playoutPlaybackChanged', async () => { + test('playoutPlaybackChanged', async () => { if (DEBUG) setLogLevel(LogLevel.DEBUG) QueueStudioJobSpy.mockImplementation(async () => CreateFakeResult(Promise.resolve(null))) @@ -455,7 +455,7 @@ describe('test peripheralDevice general API methods', () => { ) }) - testInFiber('timelineTriggerTime', async () => { + test('timelineTriggerTime', async () => { if (DEBUG) setLogLevel(LogLevel.DEBUG) QueueStudioJobSpy.mockImplementation(async () => CreateFakeResult(Promise.resolve(null))) @@ -481,7 +481,7 @@ describe('test peripheralDevice general API methods', () => { ) }) - testInFiber('killProcess with a rundown present', async () => { + test('killProcess with a rundown present', async () => { // test this does not shutdown because Rundown stored if (DEBUG) 
setLogLevel(LogLevel.DEBUG) SupressLogMessages.suppressLogMessage(/Unable to run killProcess/i) @@ -491,7 +491,7 @@ describe('test peripheralDevice general API methods', () => { ) }) - testInFiber('testMethod', async () => { + test('testMethod', async () => { if (DEBUG) setLogLevel(LogLevel.DEBUG) const result = await MeteorCall.peripheralDevice.testMethod(device._id, device.token, 'european') expect(result).toBe('european') @@ -502,7 +502,7 @@ describe('test peripheralDevice general API methods', () => { }) /* - testInFiber('timelineTriggerTime', () => { + test('timelineTriggerTime', () => { if (DEBUG) setLogLevel(LogLevel.DEBUG) let timelineTriggerTimeResult: PeripheralDeviceAPI.TimelineTriggerTimeResult = [ { id: 'wibble', time: getCurrentTime() }, { id: 'wobble', time: getCurrentTime() - 100 }] @@ -510,7 +510,7 @@ describe('test peripheralDevice general API methods', () => { }) */ - testInFiber('requestUserAuthToken', async () => { + test('requestUserAuthToken', async () => { if (DEBUG) setLogLevel(LogLevel.DEBUG) SupressLogMessages.suppressLogMessage(/can only request user auth token/i) @@ -536,7 +536,7 @@ describe('test peripheralDevice general API methods', () => { }) // Should only really work for SpreadsheetDevice - testInFiber('storeAccessToken', async () => { + test('storeAccessToken', async () => { if (DEBUG) setLogLevel(LogLevel.DEBUG) SupressLogMessages.suppressLogMessage(/can only store access token/i) await expect( @@ -559,7 +559,7 @@ describe('test peripheralDevice general API methods', () => { expect((deviceWithSecretToken.settings as IngestDeviceSettings).secretAccessToken).toBe(true) }) - testInFiber('uninitialize', async () => { + test('uninitialize', async () => { if (DEBUG) setLogLevel(LogLevel.DEBUG) await MeteorCall.peripheralDevice.unInitialize(device._id, device.token) expect(await PeripheralDevices.findOneAsync({})).toBeFalsy() @@ -569,7 +569,7 @@ describe('test peripheralDevice general API methods', () => { }) // Note: this test fails, 
due to a backwards-compatibility hack in #c579c8f0 - // testInFiber('initialize with bad arguments', () => { + // test('initialize with bad arguments', () => { // let options: PeripheralDeviceInitOptions = { // category: PeripheralDeviceCategory.INGEST, // type: PeripheralDeviceType.MOS, @@ -590,7 +590,7 @@ describe('test peripheralDevice general API methods', () => { // } // }) - // testInFiber('setStatus with bad arguments', () => { + // test('setStatus with bad arguments', () => { // try { // Meteor.call(PeripheralDeviceAPIMethods.setStatus, 'wibbly', device.token, { statusCode: 0 }) // fail('expected to throw') @@ -613,7 +613,7 @@ describe('test peripheralDevice general API methods', () => { // } // }) - testInFiber('removePeripheralDevice', async () => { + test('removePeripheralDevice', async () => { { const deviceObj = await PeripheralDevices.findOneAsync(device?._id) expect(deviceObj).toBeDefined() @@ -697,7 +697,7 @@ describe('test peripheralDevice general API methods', () => { workFlowId: workFlowId, }) }) - testInFiber('getMediaWorkFlowRevisions', async () => { + test('getMediaWorkFlowRevisions', async () => { const workFlows = ( await MediaWorkFlows.findFetchAsync({ studioId: device.studioId, @@ -711,7 +711,7 @@ describe('test peripheralDevice general API methods', () => { expect(res).toHaveLength(workFlows.length) expect(res).toMatchObject(workFlows) }) - testInFiber('getMediaWorkFlowStepRevisions', async () => { + test('getMediaWorkFlowStepRevisions', async () => { const workFlowSteps = ( await MediaWorkFlowSteps.findFetchAsync({ studioId: device.studioId, @@ -726,7 +726,7 @@ describe('test peripheralDevice general API methods', () => { expect(res).toMatchObject(workFlowSteps) }) describe('updateMediaWorkFlow', () => { - testInFiber('update', async () => { + test('update', async () => { const workFlow = await MediaWorkFlows.findOneAsync(workFlowId) expect(workFlow).toBeTruthy() @@ -744,7 +744,7 @@ describe('test peripheralDevice general API methods', 
() => { const updatedWorkFlow = await MediaWorkFlows.findOneAsync(workFlowId) expect(updatedWorkFlow).toMatchObject(newWorkFlow) }) - testInFiber('remove', async () => { + test('remove', async () => { const workFlow = (await MediaWorkFlows.findOneAsync(workFlowId)) as MediaWorkFlow expect(workFlow).toBeTruthy() @@ -755,7 +755,7 @@ describe('test peripheralDevice general API methods', () => { }) }) describe('updateMediaWorkFlowStep', () => { - testInFiber('update', async () => { + test('update', async () => { const workStep = await MediaWorkFlowSteps.findOneAsync(workStepIds[0]) expect(workStep).toBeTruthy() @@ -773,7 +773,7 @@ describe('test peripheralDevice general API methods', () => { const updatedWorkFlow = await MediaWorkFlowSteps.findOneAsync(workStepIds[0]) expect(updatedWorkFlow).toMatchObject(newWorkStep) }) - testInFiber('remove', async () => { + test('remove', async () => { const workStep = (await MediaWorkFlowSteps.findOneAsync(workStepIds[0])) as MediaWorkFlowStep expect(workStep).toBeTruthy() @@ -840,7 +840,7 @@ describe('test peripheralDevice general API methods', () => { tinf: '', }) }) - testInFiber('getMediaObjectRevisions', async () => { + test('getMediaObjectRevisions', async () => { const mobjects = ( await MediaObjects.findFetchAsync({ studioId: device.studioId, @@ -861,7 +861,7 @@ describe('test peripheralDevice general API methods', () => { expect(mobjects).toMatchObject(mobjects) }) describe('updateMediaObject', () => { - testInFiber('update', async () => { + test('update', async () => { const mo = (await MediaObjects.findOneAsync({ collectionId: MOCK_COLLECTION, studioId: device.studioId!, @@ -886,7 +886,7 @@ describe('test peripheralDevice general API methods', () => { }) expect(updateMo).toMatchObject(newMo) }) - testInFiber('remove', async () => { + test('remove', async () => { const mo = (await MediaObjects.findOneAsync({ collectionId: MOCK_COLLECTION, studioId: device.studioId!, diff --git 
a/meteor/server/api/__tests__/rundownLayouts.test.ts b/meteor/server/api/__tests__/rundownLayouts.test.ts index 7661c10bcd1..fd1e2c982a4 100644 --- a/meteor/server/api/__tests__/rundownLayouts.test.ts +++ b/meteor/server/api/__tests__/rundownLayouts.test.ts @@ -1,5 +1,4 @@ import '../../../__mocks__/_extendJest' -import { testInFiber } from '../../../__mocks__/helpers/jest' import { setupDefaultStudioEnvironment, DefaultEnvironment } from '../../../__mocks__/helpers/database' import { protectString, literal, getRandomString } from '../../lib/tempLib' import { @@ -20,7 +19,7 @@ describe('Rundown Layouts', () => { env = await setupDefaultStudioEnvironment() }) let rundownLayoutId: RundownLayoutId - testInFiber('Create rundown layout', async () => { + test('Create rundown layout', async () => { const res = await MeteorCall.rundownLayout.createRundownLayout( 'Test', RundownLayoutType.RUNDOWN_LAYOUT, @@ -35,7 +34,7 @@ describe('Rundown Layouts', () => { _id: rundownLayoutId, }) }) - testInFiber('Remove rundown layout', async () => { + test('Remove rundown layout', async () => { const item0 = await RundownLayouts.findOneAsync(rundownLayoutId) expect(item0).toMatchObject({ _id: rundownLayoutId, diff --git a/meteor/server/api/__tests__/userActions/buckets.test.ts2 b/meteor/server/api/__tests__/userActions/buckets.test.ts2 index fd4beed3dff..cc01b68930c 100644 --- a/meteor/server/api/__tests__/userActions/buckets.test.ts2 +++ b/meteor/server/api/__tests__/userActions/buckets.test.ts2 @@ -1,5 +1,4 @@ import '../../../../__mocks__/_extendJest' -import { testInFiber } from '../../../../__mocks__/helpers/jest' import { setupDefaultStudioEnvironment, DefaultEnvironment } from '../../../../__mocks__/helpers/database' import { getRandomId, protectString } from '../../../../lib/lib' import { ClientAPI } from '@sofie-automation/meteor-lib/dist/api/client' @@ -69,7 +68,7 @@ describe('User Actions - Buckets', () => { throw new Error('Not implemented') }) }) - 
testInFiber('createBucket', async () => { + test('createBucket', async () => { const NAME = 'Test bucket' // should fail if the studio doesn't exist @@ -96,7 +95,7 @@ describe('User Actions - Buckets', () => { }) } }) - testInFiber('removeBucket', async () => { + test('removeBucket', async () => { const { bucketId } = setUpMockBucket() expect( @@ -123,7 +122,7 @@ describe('User Actions - Buckets', () => { ).toHaveLength(0) } }) - testInFiber('modifyBucket', async () => { + test('modifyBucket', async () => { const { bucketId } = setUpMockBucket() // should throw if the bucket doesn't exist @@ -149,7 +148,7 @@ describe('User Actions - Buckets', () => { }) } }) - testInFiber('emptyBucket', async () => { + test('emptyBucket', async () => { const { bucketId } = setUpMockBucket() // should throw if the bucket doesn't exist @@ -170,7 +169,7 @@ describe('User Actions - Buckets', () => { ).toHaveLength(0) } }) - testInFiber('removeBucketAdLib', async () => { + test('removeBucketAdLib', async () => { const { bucketAdlibs } = setUpMockBucket() // should throw if the adlib doesn't exits @@ -187,7 +186,7 @@ describe('User Actions - Buckets', () => { expect(BucketAdLibs.findOne(bucketAdlibs[0]._id)).toBeUndefined() } }) - testInFiber('modifyBucketAdLib', async () => { + test('modifyBucketAdLib', async () => { const { bucketAdlibs } = setUpMockBucket() // check that the adlib exists diff --git a/meteor/server/api/__tests__/userActions/general.test.ts b/meteor/server/api/__tests__/userActions/general.test.ts index 823cd6c80c7..22eaadac687 100644 --- a/meteor/server/api/__tests__/userActions/general.test.ts +++ b/meteor/server/api/__tests__/userActions/general.test.ts @@ -1,8 +1,7 @@ import '../../../../__mocks__/_extendJest' -import { testInFiber } from '../../../../__mocks__/helpers/jest' import { setupDefaultStudioEnvironment } from '../../../../__mocks__/helpers/database' import { hashSingleUseToken } from '../../deviceTriggers/triggersContext' -import { getCurrentTime } from 
'../../../lib/lib' +import { getCurrentTime, sleep } from '../../../lib/lib' import { MeteorCall } from '../../methods' import { ClientAPI } from '@sofie-automation/meteor-lib/dist/api/client' import { UserActionsLog } from '../../../collections' @@ -18,7 +17,7 @@ describe('User Actions - General', () => { await setupDefaultStudioEnvironment() }) - testInFiber('Restart Core', async () => { + test('Restart Core', async () => { jest.useFakeTimers() // Generate restart token @@ -47,10 +46,11 @@ describe('User Actions - General', () => { jest.useRealTimers() }) - testInFiber('GUI Status', async () => { + test('GUI Status', async () => { await expect(MeteorCall.userAction.guiFocused('click', getCurrentTime())).resolves.toMatchObject({ success: 200, }) + await sleep(0) const logs0 = await UserActionsLog.findFetchAsync({ method: 'guiFocused', }) @@ -62,6 +62,7 @@ describe('User Actions - General', () => { await expect(MeteorCall.userAction.guiBlurred('click', getCurrentTime())).resolves.toMatchObject({ success: 200, }) + await sleep(0) const logs1 = await UserActionsLog.findFetchAsync({ method: 'guiBlurred', }) diff --git a/meteor/server/api/__tests__/userActions/mediaManager.test.ts b/meteor/server/api/__tests__/userActions/mediaManager.test.ts index b4806df71fb..3680cffde2b 100644 --- a/meteor/server/api/__tests__/userActions/mediaManager.test.ts +++ b/meteor/server/api/__tests__/userActions/mediaManager.test.ts @@ -1,5 +1,5 @@ import '../../../../__mocks__/_extendJest' -import { testInFiber, waitUntil } from '../../../../__mocks__/helpers/jest' +import { waitUntil } from '../../../../__mocks__/helpers/jest' import { getRandomId, protectString } from '../../../lib/tempLib' import { getCurrentTime } from '../../../lib/lib' import { setupDefaultStudioEnvironment, DefaultEnvironment } from '../../../../__mocks__/helpers/database' @@ -46,7 +46,7 @@ describe('User Actions - Media Manager', () => { env = await setupDefaultStudioEnvironment() jest.resetAllMocks() }) - 
testInFiber('Restart workflow', async () => { + test('Restart workflow', async () => { const { workFlowId } = await setupMockWorkFlow() // should fail if the workflow doesn't exist @@ -76,7 +76,7 @@ describe('User Actions - Media Manager', () => { await p } }) - testInFiber('Abort worfklow', async () => { + test('Abort worfklow', async () => { const { workFlowId } = await setupMockWorkFlow() // should fail if the workflow doesn't exist @@ -107,7 +107,7 @@ describe('User Actions - Media Manager', () => { await p } }) - testInFiber('Prioritize workflow', async () => { + test('Prioritize workflow', async () => { const { workFlowId } = await setupMockWorkFlow() // should fail if the workflow doesn't exist @@ -138,7 +138,7 @@ describe('User Actions - Media Manager', () => { await p } }) - testInFiber('Restart all workflows', async () => { + test('Restart all workflows', async () => { await setupMockWorkFlow() { @@ -160,7 +160,7 @@ describe('User Actions - Media Manager', () => { await p } }) - testInFiber('Abort all workflows', async () => { + test('Abort all workflows', async () => { await setupMockWorkFlow() { diff --git a/meteor/server/api/__tests__/userActions/system.test.ts b/meteor/server/api/__tests__/userActions/system.test.ts index a011ef0a4c8..29bf0161c9d 100644 --- a/meteor/server/api/__tests__/userActions/system.test.ts +++ b/meteor/server/api/__tests__/userActions/system.test.ts @@ -13,7 +13,6 @@ import { setupMockPeripheralDevice, } from '../../../../__mocks__/helpers/database' import '../../../../__mocks__/_extendJest' -import { testInFiber } from '../../../../__mocks__/helpers/jest' import { Studios } from '../../../collections' import { JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' import { @@ -85,7 +84,7 @@ describe('User Actions - Disable Peripheral SubDevice', () => { jest.resetAllMocks() }) - testInFiber('disable existing subDevice', async () => { + test('disable existing subDevice', async () => { await expect( 
MeteorCall.userAction.disablePeripheralSubDevice('e', getCurrentTime(), pDevice._id, mockSubDeviceId, true) ).resolves.toMatchObject({ @@ -97,7 +96,7 @@ describe('User Actions - Disable Peripheral SubDevice', () => { const playoutDevices = applyAndValidateOverrides(studio.peripheralDeviceSettings.playoutDevices).obj expect(playoutDevices[mockSubDeviceId].options.disable).toBe(true) }) - testInFiber('enable existing subDevice', async () => { + test('enable existing subDevice', async () => { { await expect( MeteorCall.userAction.disablePeripheralSubDevice( @@ -136,7 +135,7 @@ describe('User Actions - Disable Peripheral SubDevice', () => { expect(playoutDevices[mockSubDeviceId].options.disable).toBe(false) } }) - testInFiber('edit missing subDevice throws an error', async () => { + test('edit missing subDevice throws an error', async () => { await expect( MeteorCall.userAction.disablePeripheralSubDevice( 'e', @@ -147,7 +146,7 @@ describe('User Actions - Disable Peripheral SubDevice', () => { ) ).resolves.toMatchUserRawError(/is not configured/) }) - testInFiber('edit missing device throws an error', async () => { + test('edit missing device throws an error', async () => { await expect( MeteorCall.userAction.disablePeripheralSubDevice( 'e', @@ -158,7 +157,7 @@ describe('User Actions - Disable Peripheral SubDevice', () => { ) ).resolves.toMatchUserRawError(/not found/) }) - testInFiber("edit device that doesn't support the disable property throws an error", async () => { + test("edit device that doesn't support the disable property throws an error", async () => { const pDeviceUnsupported = await setupMockPeripheralDevice( PeripheralDeviceCategory.PLAYOUT, PeripheralDeviceType.PLAYOUT, diff --git a/meteor/server/api/blueprintConfigPresets.ts b/meteor/server/api/blueprintConfigPresets.ts index c3fe8e70ef7..d705c63702e 100644 --- a/meteor/server/api/blueprintConfigPresets.ts +++ b/meteor/server/api/blueprintConfigPresets.ts @@ -5,7 +5,7 @@ import { DBShowStyleBase } from 
'@sofie-automation/corelib/dist/dataModel/ShowSt import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { ObserveChangesHelper } from '../collections/lib' -import { MeteorStartupAsync } from '../lib/lib' +import { Meteor } from 'meteor/meteor' const ObserveChangeBufferTimeout = 100 @@ -18,7 +18,7 @@ const ObserveChangeBufferTimeout = 100 * Whenever the Studio changes the blueprint or config preset, ensure the config is synced across * We want it synced across, so that if the config-preset is removed, then there is some config that can be used */ -MeteorStartupAsync(async () => { +Meteor.startup(async () => { const doUpdate = async (doc: DBStudio): Promise => { const markUnlinked = async () => { await Studios.updateAsync(doc._id, { @@ -69,7 +69,7 @@ MeteorStartupAsync(async () => { * Whenever the ShowStyleBase changes the blueprint or config preset, ensure the config is synced across * We want it synced across, so that if the config-preset is removed, then there is some config that can be used */ -MeteorStartupAsync(async () => { +Meteor.startup(async () => { const doUpdate = async (doc: DBShowStyleBase): Promise => { const markUnlinked = async () => { await Promise.all([ @@ -168,7 +168,7 @@ MeteorStartupAsync(async () => { * Whenever the ShowStyleVariant changes the config preset, ensure the config is synced across * We want it synced across, so that if the config-preset is removed, then there is some config that can be used */ -MeteorStartupAsync(async () => { +Meteor.startup(async () => { const doUpdate = async (doc: DBShowStyleVariant): Promise => { const markUnlinked = async () => { await ShowStyleVariants.updateAsync(doc._id, { diff --git a/meteor/server/api/blueprints/__tests__/api.test.ts b/meteor/server/api/blueprints/__tests__/api.test.ts index 9d22018e265..b92bf0a0ac2 100644 --- a/meteor/server/api/blueprints/__tests__/api.test.ts +++ 
b/meteor/server/api/blueprints/__tests__/api.test.ts @@ -1,6 +1,5 @@ import * as _ from 'underscore' import { setupDefaultStudioEnvironment, packageBlueprint } from '../../../../__mocks__/helpers/database' -import { testInFiber } from '../../../../__mocks__/helpers/jest' import { literal, getRandomId, protectString } from '../../../lib/tempLib' import { Blueprint } from '@sofie-automation/corelib/dist/dataModel/Blueprint' import { BlueprintManifestType } from '@sofie-automation/blueprints-integration' @@ -84,7 +83,7 @@ describe('Test blueprint management api', () => { return core.blueprintId } - testInFiber('empty id', async () => { + test('empty id', async () => { const initialBlueprintId = await getActiveSystemBlueprintId() SupressLogMessages.suppressLogMessage(/Blueprint not found/i) @@ -95,7 +94,7 @@ describe('Test blueprint management api', () => { expect(await getActiveSystemBlueprintId()).toEqual(initialBlueprintId) }) - testInFiber('unknown id', async () => { + test('unknown id', async () => { const blueprint = await ensureSystemBlueprint() const initialBlueprintId = await getActiveSystemBlueprintId() @@ -106,7 +105,7 @@ describe('Test blueprint management api', () => { expect(await getActiveSystemBlueprintId()).toEqual(initialBlueprintId) }) - testInFiber('good', async () => { + test('good', async () => { const blueprint = await ensureSystemBlueprint() // Ensure starts off 'wrong' @@ -117,7 +116,7 @@ describe('Test blueprint management api', () => { // Ensure ends up good expect(await getActiveSystemBlueprintId()).toEqual(blueprint._id) }) - testInFiber('unassign', async () => { + test('unassign', async () => { // Ensure starts off 'wrong' expect(await getActiveSystemBlueprintId()).toBeTruthy() @@ -126,7 +125,7 @@ describe('Test blueprint management api', () => { // Ensure ends up good expect(await getActiveSystemBlueprintId()).toBeFalsy() }) - testInFiber('wrong type', async () => { + test('wrong type', async () => { const blueprint = (await 
Blueprints.findOneAsync({ blueprintType: BlueprintManifestType.SHOWSTYLE, })) as Blueprint @@ -148,25 +147,25 @@ describe('Test blueprint management api', () => { }) describe('removeBlueprint', () => { - testInFiber('undefined id', async () => { + test('undefined id', async () => { SupressLogMessages.suppressLogMessage(/Match error/i) await expect(MeteorCall.blueprint.removeBlueprint(undefined as any)).rejects.toThrow( 'Match error: Expected string, got undefined' ) }) - testInFiber('empty id', async () => { + test('empty id', async () => { SupressLogMessages.suppressLogMessage(/Blueprint id/i) await expect(MeteorCall.blueprint.removeBlueprint(protectString(''))).rejects.toThrowMeteor( 404, 'Blueprint id "" was not found' ) }) - testInFiber('missing id', async () => { + test('missing id', async () => { // Should not error await MeteorCall.blueprint.removeBlueprint(protectString('not_a_real_blueprint')) }) - testInFiber('good', async () => { + test('good', async () => { const blueprint = await ensureSystemBlueprint() expect(await Blueprints.findOneAsync(blueprint._id)).toBeTruthy() @@ -177,7 +176,7 @@ describe('Test blueprint management api', () => { }) describe('insertBlueprint', () => { - testInFiber('no params', async () => { + test('no params', async () => { const initialBlueprints = await getCurrentBlueprintIds() const newId = await MeteorCall.blueprint.insertBlueprint() @@ -194,7 +193,7 @@ describe('Test blueprint management api', () => { expect(blueprint.name).toBeTruthy() expect(blueprint.blueprintType).toBeFalsy() }) - testInFiber('with name', async () => { + test('with name', async () => { const rawName = 'some_fake_name' const newId = await insertBlueprint(DEFAULT_CONTEXT, undefined, rawName) expect(newId).toBeTruthy() @@ -205,7 +204,7 @@ describe('Test blueprint management api', () => { expect(blueprint.name).toEqual(rawName) expect(blueprint.blueprintType).toBeFalsy() }) - testInFiber('with type', async () => { + test('with type', async () => { const 
type = BlueprintManifestType.STUDIO const newId = await insertBlueprint(DEFAULT_CONTEXT, type) expect(newId).toBeTruthy() @@ -219,24 +218,24 @@ describe('Test blueprint management api', () => { }) describe('uploadBlueprint', () => { - testInFiber('empty id', async () => { + test('empty id', async () => { await expect(uploadBlueprint(DEFAULT_CONTEXT, protectString(''), '0')).rejects.toThrowMeteor( 400, 'Blueprint id "" is not valid' ) }) - testInFiber('empty body', async () => { + test('empty body', async () => { await expect(uploadBlueprint(DEFAULT_CONTEXT, protectString('blueprint99'), '')).rejects.toThrowMeteor( 400, 'Blueprint blueprint99 failed to parse' ) }) - testInFiber('body not a manifest', async () => { + test('body not a manifest', async () => { await expect( uploadBlueprint(DEFAULT_CONTEXT, protectString('blueprint99'), `({default: (() => 5)()})`) ).rejects.toThrowMeteor(400, 'Blueprint blueprint99 returned a manifest of type number') }) - testInFiber('manifest missing blueprintType', async () => { + test('manifest missing blueprintType', async () => { const blueprintStr = packageBlueprint({}, () => { return { blueprintType: undefined as any, @@ -261,7 +260,7 @@ describe('Test blueprint management api', () => { `Blueprint blueprint99 returned a manifest of unknown blueprintType "undefined"` ) }) - testInFiber('replace existing with different type', async () => { + test('replace existing with different type', async () => { const BLUEPRINT_TYPE = BlueprintManifestType.STUDIO const blueprintStr = packageBlueprint( { @@ -287,7 +286,7 @@ describe('Test blueprint management api', () => { `Cannot replace old blueprint (of type "showstyle") with new blueprint of type "studio"` ) }) - testInFiber('success - showstyle', async () => { + test('success - showstyle', async () => { const BLUEPRINT_TYPE = BlueprintManifestType.SHOWSTYLE const blueprintStr = packageBlueprint( { @@ -326,7 +325,7 @@ describe('Test blueprint management api', () => { ) 
expect(blueprint.studioConfigSchema).toBeUndefined() }) - testInFiber('success - studio', async () => { + test('success - studio', async () => { const BLUEPRINT_TYPE = BlueprintManifestType.STUDIO const blueprintStr = packageBlueprint( { @@ -369,7 +368,7 @@ describe('Test blueprint management api', () => { ) expect(blueprint.showStyleConfigSchema).toBeUndefined() }) - testInFiber('success - system', async () => { + test('success - system', async () => { const BLUEPRINT_TYPE = BlueprintManifestType.SYSTEM const blueprintStr = packageBlueprint( { @@ -413,7 +412,7 @@ describe('Test blueprint management api', () => { expect(blueprint.showStyleConfigSchema).toBeUndefined() expect(blueprint.studioConfigSchema).toBeUndefined() }) - testInFiber('update - studio', async () => { + test('update - studio', async () => { const BLUEPRINT_TYPE = BlueprintManifestType.STUDIO const blueprintStr = packageBlueprint( { @@ -457,7 +456,7 @@ describe('Test blueprint management api', () => { ) expect(blueprint.showStyleConfigSchema).toBeUndefined() }) - testInFiber('update - matching blueprintId', async () => { + test('update - matching blueprintId', async () => { const BLUEPRINT_TYPE = BlueprintManifestType.SHOWSTYLE const blueprintStr = packageBlueprint( { @@ -503,7 +502,7 @@ describe('Test blueprint management api', () => { ) expect(blueprint.studioConfigSchema).toBeUndefined() }) - testInFiber('update - change blueprintId', async () => { + test('update - change blueprintId', async () => { const BLUEPRINT_TYPE = BlueprintManifestType.SHOWSTYLE const blueprintStr = packageBlueprint( { @@ -534,7 +533,7 @@ describe('Test blueprint management api', () => { `Cannot replace old blueprint "${existingBlueprint._id}" ("ss1") with new blueprint "show2"` ) }) - testInFiber('update - drop blueprintId', async () => { + test('update - drop blueprintId', async () => { const BLUEPRINT_TYPE = BlueprintManifestType.SHOWSTYLE const blueprintStr = packageBlueprint( { diff --git 
a/meteor/server/api/blueprints/__tests__/migrationContext.test.ts b/meteor/server/api/blueprints/__tests__/migrationContext.test.ts index 47fce54f942..419a13b9a73 100644 --- a/meteor/server/api/blueprints/__tests__/migrationContext.test.ts +++ b/meteor/server/api/blueprints/__tests__/migrationContext.test.ts @@ -1,50 +1,23 @@ -import * as _ from 'underscore' +import '../../../../__mocks__/_extendJest' import { setupDefaultStudioEnvironment } from '../../../../__mocks__/helpers/database' -import { testInFiber } from '../../../../__mocks__/helpers/jest' +import { literal, unprotectString } from '../../../lib/tempLib' import { - PeripheralDevice, - PeripheralDeviceCategory, - PeripheralDeviceType, - PERIPHERAL_SUBTYPE_PROCESS, -} from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { literal, getRandomId, protectString, unprotectString } from '../../../lib/tempLib' -import { - LookaheadMode, - BlueprintMapping, - ISourceLayer, - SourceLayerType, - IOutputLayer, - TSR, - IBlueprintShowStyleVariant, - IBlueprintConfig, TriggerType, ClientActions, PlayoutActions, IBlueprintTriggeredActions, } from '@sofie-automation/blueprints-integration' -import { DBStudio, MappingExt } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { MigrationContextStudio, MigrationContextShowStyle, MigrationContextSystem } from '../migrationContext' -import { DBShowStyleBase, SourceLayers } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' -import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' -import { - applyAndValidateOverrides, - wrapDefaultObject, -} from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { - CoreSystem, - PeripheralDevices, - ShowStyleBases, - ShowStyleVariants, - Studios, - TriggeredActions, -} from '../../../collections' -import { JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' +import { MigrationContextSystem } from '../migrationContext' 
+import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { CoreSystem, TriggeredActions } from '../../../collections' describe('Test blueprint migrationContext', () => { beforeAll(async () => { await setupDefaultStudioEnvironment() }) + // eslint-disable-next-line jest/no-commented-out-tests + /* describe('MigrationContextStudio', () => { async function getContext() { const studio = (await Studios.findOneAsync({})) as DBStudio @@ -63,17 +36,17 @@ describe('Test blueprint migrationContext', () => { return studio2.mappingsWithOverrides.defaults[mappingId] } - testInFiber('getMapping: no id', async () => { + test('getMapping: no id', async () => { const ctx = await getContext() const mapping = ctx.getMapping('') expect(mapping).toBeFalsy() }) - testInFiber('getMapping: missing', async () => { + test('getMapping: missing', async () => { const ctx = await getContext() const mapping = ctx.getMapping('fake_mapping') expect(mapping).toBeFalsy() }) - testInFiber('getMapping: good', async () => { + test('getMapping: good', async () => { const ctx = await getContext() const studio = getStudio(ctx) const rawMapping: MappingExt = { @@ -92,7 +65,7 @@ describe('Test blueprint migrationContext', () => { expect(mapping).not.toEqual(studio.mappingsWithOverrides.defaults['mapping1']) }) - testInFiber('insertMapping: good', async () => { + test('insertMapping: good', async () => { const ctx = await getContext() const rawMapping: BlueprintMapping = { @@ -113,7 +86,7 @@ describe('Test blueprint migrationContext', () => { const dbMapping = await getMappingFromDb(getStudio(ctx), 'mapping2') expect(dbMapping).toEqual(rawMapping) }) - testInFiber('insertMapping: no id', async () => { + test('insertMapping: no id', async () => { const ctx = await getContext() const rawMapping: BlueprintMapping = { @@ -133,7 +106,7 @@ describe('Test blueprint migrationContext', () => { const dbMapping = await getMappingFromDb(getStudio(ctx), '') 
expect(dbMapping).toBeFalsy() }) - testInFiber('insertMapping: existing', async () => { + test('insertMapping: existing', async () => { const ctx = await getContext() const existingMapping = ctx.getMapping('mapping2') expect(existingMapping).toBeTruthy() @@ -159,7 +132,7 @@ describe('Test blueprint migrationContext', () => { expect(dbMapping).toEqual(existingMapping) }) - testInFiber('updateMapping: good', async () => { + test('updateMapping: good', async () => { const ctx = await getContext() const existingMapping = ctx.getMapping('mapping2') as BlueprintMapping expect(existingMapping).toBeTruthy() @@ -183,7 +156,7 @@ describe('Test blueprint migrationContext', () => { const dbMapping = await getMappingFromDb(getStudio(ctx), 'mapping2') expect(dbMapping).toEqual(expectedMapping) }) - testInFiber('updateMapping: no props', async () => { + test('updateMapping: no props', async () => { const ctx = await getContext() const existingMapping = ctx.getMapping('mapping2') as BlueprintMapping expect(existingMapping).toBeTruthy() @@ -191,7 +164,7 @@ describe('Test blueprint migrationContext', () => { // Should not error ctx.updateMapping('mapping2', {}) }) - testInFiber('updateMapping: no id', async () => { + test('updateMapping: no id', async () => { const ctx = await getContext() const existingMapping = ctx.getMapping('') as BlueprintMapping expect(existingMapping).toBeFalsy() @@ -200,7 +173,7 @@ describe('Test blueprint migrationContext', () => { `[404] Mapping "" cannot be updated as it does not exist` ) }) - testInFiber('updateMapping: missing', async () => { + test('updateMapping: missing', async () => { const ctx = await getContext() expect(ctx.getMapping('mapping1')).toBeFalsy() @@ -222,14 +195,14 @@ describe('Test blueprint migrationContext', () => { expect(dbMapping).toBeFalsy() }) - testInFiber('removeMapping: missing', async () => { + test('removeMapping: missing', async () => { const ctx = await getContext() expect(ctx.getMapping('mapping1')).toBeFalsy() // 
Should not error ctx.removeMapping('mapping1') }) - testInFiber('removeMapping: no id', async () => { + test('removeMapping: no id', async () => { const ctx = await getContext() expect(ctx.getMapping('')).toBeFalsy() expect(ctx.getMapping('mapping2')).toBeTruthy() @@ -240,7 +213,7 @@ describe('Test blueprint migrationContext', () => { // ensure other mappings still exist expect(await getMappingFromDb(getStudio(ctx), 'mapping2')).toBeTruthy() }) - testInFiber('removeMapping: good', async () => { + test('removeMapping: good', async () => { const ctx = await getContext() expect(ctx.getMapping('mapping2')).toBeTruthy() @@ -259,17 +232,17 @@ describe('Test blueprint migrationContext', () => { return studio2.blueprintConfigWithOverrides.defaults } - testInFiber('getConfig: no id', async () => { + test('getConfig: no id', async () => { const ctx = await getContext() expect(ctx.getConfig('')).toBeFalsy() }) - testInFiber('getConfig: missing', async () => { + test('getConfig: missing', async () => { const ctx = await getContext() expect(ctx.getConfig('conf1')).toBeFalsy() }) - testInFiber('getConfig: good', async () => { + test('getConfig: good', async () => { const ctx = await getContext() const studio = getStudio(ctx) @@ -280,7 +253,7 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getConfig('conf2')).toEqual('af') }) - testInFiber('setConfig: no id', async () => { + test('setConfig: no id', async () => { const ctx = await getContext() const studio = getStudio(ctx) const initialConfig = _.clone(studio.blueprintConfigWithOverrides.defaults) @@ -291,7 +264,7 @@ describe('Test blueprint migrationContext', () => { expect(studio.blueprintConfigWithOverrides.defaults).toEqual(initialConfig) expect(await getAllConfigFromDb(studio)).toEqual(initialConfig) }) - testInFiber('setConfig: insert', async () => { + test('setConfig: insert', async () => { const ctx = await getContext() const studio = getStudio(ctx) const initialConfig = 
_.clone(studio.blueprintConfigWithOverrides.defaults) @@ -310,7 +283,7 @@ describe('Test blueprint migrationContext', () => { expect(studio.blueprintConfigWithOverrides.defaults).toEqual(initialConfig) expect(await getAllConfigFromDb(studio)).toEqual(initialConfig) }) - testInFiber('setConfig: insert undefined', async () => { + test('setConfig: insert undefined', async () => { const ctx = await getContext() const studio = getStudio(ctx) const initialConfig = _.clone(studio.blueprintConfigWithOverrides.defaults) @@ -330,7 +303,7 @@ describe('Test blueprint migrationContext', () => { expect(await getAllConfigFromDb(studio)).toEqual(initialConfig) }) - testInFiber('setConfig: update', async () => { + test('setConfig: update', async () => { const ctx = await getContext() const studio = getStudio(ctx) const initialConfig = _.clone(studio.blueprintConfigWithOverrides.defaults) @@ -349,7 +322,7 @@ describe('Test blueprint migrationContext', () => { expect(studio.blueprintConfigWithOverrides.defaults).toEqual(initialConfig) expect(await getAllConfigFromDb(studio)).toEqual(initialConfig) }) - testInFiber('setConfig: update undefined', async () => { + test('setConfig: update undefined', async () => { const ctx = await getContext() const studio = getStudio(ctx) const initialConfig = _.clone(studio.blueprintConfigWithOverrides.defaults) @@ -369,7 +342,7 @@ describe('Test blueprint migrationContext', () => { expect(await getAllConfigFromDb(studio)).toEqual(initialConfig) }) - testInFiber('removeConfig: no id', async () => { + test('removeConfig: no id', async () => { const ctx = await getContext() const studio = getStudio(ctx) ctx.setConfig('conf1', true) @@ -383,7 +356,7 @@ describe('Test blueprint migrationContext', () => { expect(studio.blueprintConfigWithOverrides.defaults).toEqual(initialConfig) expect(await getAllConfigFromDb(studio)).toEqual(initialConfig) }) - testInFiber('removeConfig: missing', async () => { + test('removeConfig: missing', async () => { const ctx = 
await getContext() const studio = getStudio(ctx) const initialConfig = _.clone(studio.blueprintConfigWithOverrides.defaults) @@ -397,7 +370,7 @@ describe('Test blueprint migrationContext', () => { expect(studio.blueprintConfigWithOverrides.defaults).toEqual(initialConfig) expect(await getAllConfigFromDb(studio)).toEqual(initialConfig) }) - testInFiber('removeConfig: good', async () => { + test('removeConfig: good', async () => { const ctx = await getContext() const studio = getStudio(ctx) const initialConfig = _.clone(studio.blueprintConfigWithOverrides.defaults) @@ -469,17 +442,17 @@ describe('Test blueprint migrationContext', () => { return device as PeripheralDevice } - testInFiber('getDevice: no id', async () => { + test('getDevice: no id', async () => { const ctx = await getContext() const device = ctx.getDevice('') expect(device).toBeFalsy() }) - testInFiber('getDevice: missing', async () => { + test('getDevice: missing', async () => { const ctx = await getContext() const device = ctx.getDevice('fake_device') expect(device).toBeFalsy() }) - testInFiber('getDevice: missing with parent', async () => { + test('getDevice: missing with parent', async () => { const ctx = await getContext() const studio = await getStudio(ctx) const playoutId = await createPlayoutDevice(studio) @@ -488,7 +461,7 @@ describe('Test blueprint migrationContext', () => { const device = ctx.getDevice('fake_device') expect(device).toBeFalsy() }) - testInFiber('getDevice: good', async () => { + test('getDevice: good', async () => { const ctx = await getContext() const peripheral = getPlayoutDevice(await getStudio(ctx)) expect(peripheral).toBeTruthy() @@ -501,7 +474,7 @@ describe('Test blueprint migrationContext', () => { expect(device2).toBeFalsy() }) - testInFiber('insertDevice: no id', async () => { + test('insertDevice: no id', async () => { const ctx = await getContext() const studio = await getStudio(ctx) const initialSettings = studio.peripheralDeviceSettings.playoutDevices @@ -514,7 
+487,7 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getDevice('')).toBeFalsy() expect((await getStudio(ctx)).peripheralDeviceSettings.playoutDevices).toEqual(initialSettings) }) - testInFiber('insertDevice: already exists', async () => { + test('insertDevice: already exists', async () => { const ctx = await getContext() const studio = await getStudio(ctx) const initialSettings = studio.peripheralDeviceSettings.playoutDevices @@ -526,7 +499,7 @@ describe('Test blueprint migrationContext', () => { expect((await getStudio(ctx)).peripheralDeviceSettings.playoutDevices).toEqual(initialSettings) }) - testInFiber('insertDevice: ok', async () => { + test('insertDevice: ok', async () => { const ctx = await getContext() const studio = await getStudio(ctx) const initialSettings = studio.peripheralDeviceSettings.playoutDevices @@ -546,7 +519,7 @@ describe('Test blueprint migrationContext', () => { expect(device).toEqual(rawDevice) }) - testInFiber('updateDevice: no id', async () => { + test('updateDevice: no id', async () => { const ctx = await getContext() const studio = await getStudio(ctx) const initialSettings = studio.peripheralDeviceSettings.playoutDevices @@ -559,7 +532,7 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getDevice('')).toBeFalsy() expect((await getStudio(ctx)).peripheralDeviceSettings.playoutDevices).toEqual(initialSettings) }) - testInFiber('updateDevice: missing', async () => { + test('updateDevice: missing', async () => { const ctx = await getContext() const studio = await getStudio(ctx) const initialSettings = studio.peripheralDeviceSettings.playoutDevices @@ -571,7 +544,7 @@ describe('Test blueprint migrationContext', () => { expect((await getStudio(ctx)).peripheralDeviceSettings.playoutDevices).toEqual(initialSettings) }) - testInFiber('Device: good', async () => { + test('Device: good', async () => { const ctx = await getContext() const studio = await getStudio(ctx) const initialSettings = 
studio.peripheralDeviceSettings.playoutDevices @@ -593,7 +566,7 @@ describe('Test blueprint migrationContext', () => { expect((await getStudio(ctx)).peripheralDeviceSettings.playoutDevices).toEqual(initialSettings) }) - testInFiber('removeDevice: no id', async () => { + test('removeDevice: no id', async () => { const ctx = await getContext() const studio = await getStudio(ctx) const initialSettings = studio.peripheralDeviceSettings.playoutDevices @@ -604,7 +577,7 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getDevice('')).toBeFalsy() expect((await getStudio(ctx)).peripheralDeviceSettings.playoutDevices).toEqual(initialSettings) }) - testInFiber('removeDevice: missing', async () => { + test('removeDevice: missing', async () => { const ctx = await getContext() const studio = await getStudio(ctx) const initialSettings = studio.peripheralDeviceSettings.playoutDevices @@ -615,7 +588,7 @@ describe('Test blueprint migrationContext', () => { expect((await getStudio(ctx)).peripheralDeviceSettings.playoutDevices).toEqual(initialSettings) }) - testInFiber('removeDevice: good', async () => { + test('removeDevice: good', async () => { const ctx = await getContext() const studio = await getStudio(ctx) const initialSettings = studio.peripheralDeviceSettings.playoutDevices @@ -659,18 +632,18 @@ describe('Test blueprint migrationContext', () => { } describe('variants', () => { - testInFiber('getAllVariants: good', async () => { + test('getAllVariants: good', async () => { const ctx = await getContext() const variants = ctx.getAllVariants() expect(variants).toHaveLength(1) }) - testInFiber('getAllVariants: missing base', () => { + test('getAllVariants: missing base', () => { const ctx = new MigrationContextShowStyle({ _id: 'fakeStyle' } as any) const variants = ctx.getAllVariants() expect(variants).toHaveLength(0) }) - testInFiber('getVariantId: consistent', async () => { + test('getVariantId: consistent', async () => { const ctx = await getContext() const id1 = 
ctx.getVariantId('variant1') @@ -680,7 +653,7 @@ describe('Test blueprint migrationContext', () => { const id3 = ctx.getVariantId('variant2') expect(id3).not.toEqual(id1) }) - testInFiber('getVariantId: different base', async () => { + test('getVariantId: different base', async () => { const ctx = await getContext() const ctx2 = new MigrationContextShowStyle({ _id: 'fakeStyle' } as any) @@ -689,7 +662,7 @@ describe('Test blueprint migrationContext', () => { expect(id2).not.toEqual(id1) }) - testInFiber('getVariant: good', async () => { + test('getVariant: good', async () => { const ctx = await getContext() const rawVariant = await createVariant(ctx, 'variant1') @@ -697,19 +670,19 @@ describe('Test blueprint migrationContext', () => { expect(variant).toBeTruthy() expect(variant).toEqual(rawVariant) }) - testInFiber('getVariant: no id', async () => { + test('getVariant: no id', async () => { const ctx = await getContext() expect(() => ctx.getVariant('')).toThrow(`[500] Variant id "" is invalid`) }) - testInFiber('getVariant: missing', async () => { + test('getVariant: missing', async () => { const ctx = await getContext() const variant = ctx.getVariant('fake_variant') expect(variant).toBeFalsy() }) - testInFiber('insertVariant: no id', async () => { + test('insertVariant: no id', async () => { const ctx = await getContext() const initialVariants = _.clone(ctx.getAllVariants()) @@ -721,7 +694,7 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getAllVariants()).toEqual(initialVariants) }) - testInFiber('insertVariant: already exists', async () => { + test('insertVariant: already exists', async () => { const ctx = await getContext() const initialVariants = _.clone(ctx.getAllVariants()) expect(ctx.getVariant('variant1')).toBeTruthy() @@ -730,11 +703,11 @@ describe('Test blueprint migrationContext', () => { ctx.insertVariant('variant1', { name: 'test2', }) - ).toThrow(/*`[500] Variant id "variant1" already exists`*/) + ).toThrow(/*`[500] Variant id 
"variant1" already exists`* /) expect(ctx.getAllVariants()).toEqual(initialVariants) }) - testInFiber('insertVariant: good', async () => { + test('insertVariant: good', async () => { const ctx = await getContext() const initialVariants = _.clone(ctx.getAllVariants()) expect(ctx.getVariant('variant2')).toBeFalsy() @@ -758,7 +731,7 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getAllVariants()).toEqual(initialVariants) }) - testInFiber('updateVariant: no id', async () => { + test('updateVariant: no id', async () => { const ctx = await getContext() const initialVariants = _.clone(ctx.getAllVariants()) @@ -770,7 +743,7 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getAllVariants()).toEqual(initialVariants) }) - testInFiber('updateVariant: missing', async () => { + test('updateVariant: missing', async () => { const ctx = await getContext() const initialVariants = _.clone(ctx.getAllVariants()) expect(ctx.getVariant('variant11')).toBeFalsy() @@ -779,12 +752,12 @@ describe('Test blueprint migrationContext', () => { ctx.updateVariant('variant11', { name: 'test2', }) - ).toThrow(/*`[404] Variant id "variant1" does not exist`*/) + ).toThrow(/*`[404] Variant id "variant1" does not exist`* /) // TODO - tidy up the error type expect(ctx.getAllVariants()).toEqual(initialVariants) }) - testInFiber('updateVariant: good', async () => { + test('updateVariant: good', async () => { const ctx = await getContext() const initialVariants = _.clone(ctx.getAllVariants()) expect(ctx.getVariant('variant1')).toBeTruthy() @@ -801,7 +774,7 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getAllVariants()).toEqual(initialVariants) }) - testInFiber('removeVariant: no id', async () => { + test('removeVariant: no id', async () => { const ctx = await getContext() const initialVariants = _.clone(ctx.getAllVariants()) @@ -809,7 +782,7 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getAllVariants()).toEqual(initialVariants) }) - 
testInFiber('removeVariant: missing', async () => { + test('removeVariant: missing', async () => { const ctx = await getContext() const initialVariants = _.clone(ctx.getAllVariants()) expect(ctx.getVariant('variant11')).toBeFalsy() @@ -819,7 +792,7 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getAllVariants()).toEqual(initialVariants) }) - testInFiber('removeVariant: good', async () => { + test('removeVariant: good', async () => { const ctx = await getContext() const initialVariants = _.clone(ctx.getAllVariants()) expect(ctx.getVariant('variant1')).toBeTruthy() @@ -842,18 +815,18 @@ describe('Test blueprint migrationContext', () => { return showStyle2.sourceLayersWithOverrides.defaults } - testInFiber('getSourceLayer: no id', async () => { + test('getSourceLayer: no id', async () => { const ctx = await getContext() expect(() => ctx.getSourceLayer('')).toThrow(`[500] SourceLayer id "" is invalid`) }) - testInFiber('getSourceLayer: missing', async () => { + test('getSourceLayer: missing', async () => { const ctx = await getContext() const layer = ctx.getSourceLayer('fake_source_layer') expect(layer).toBeFalsy() }) - testInFiber('getSourceLayer: good', async () => { + test('getSourceLayer: good', async () => { const ctx = await getContext() const layer = ctx.getSourceLayer('cam0') as ISourceLayer @@ -865,7 +838,7 @@ describe('Test blueprint migrationContext', () => { expect(layer2._id).toEqual('vt0') }) - testInFiber('insertSourceLayer: no id', async () => { + test('insertSourceLayer: no id', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialSourceLayers = _.clone(showStyle.sourceLayersWithOverrides.defaults) @@ -881,7 +854,7 @@ describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).sourceLayersWithOverrides.defaults).toEqual(initialSourceLayers) expect(await getAllSourceLayersFromDb(showStyle)).toEqual(initialSourceLayers) }) - testInFiber('insertSourceLayer: existing', async () => 
{ + test('insertSourceLayer: existing', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialSourceLayers = _.clone(showStyle.sourceLayersWithOverrides.defaults) @@ -897,7 +870,7 @@ describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).sourceLayersWithOverrides.defaults).toEqual(initialSourceLayers) expect(await getAllSourceLayersFromDb(showStyle)).toEqual(initialSourceLayers) }) - testInFiber('insertSourceLayer: good', async () => { + test('insertSourceLayer: good', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialSourceLayers = _.clone(showStyle.sourceLayersWithOverrides.defaults) @@ -918,7 +891,7 @@ describe('Test blueprint migrationContext', () => { expect(await getAllSourceLayersFromDb(showStyle)).toEqual(initialSourceLayers) }) - testInFiber('updateSourceLayer: no id', async () => { + test('updateSourceLayer: no id', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialSourceLayers = _.clone(showStyle.sourceLayersWithOverrides.defaults) @@ -934,7 +907,7 @@ describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).sourceLayersWithOverrides.defaults).toEqual(initialSourceLayers) expect(await getAllSourceLayersFromDb(showStyle)).toEqual(initialSourceLayers) }) - testInFiber('updateSourceLayer: missing', async () => { + test('updateSourceLayer: missing', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialSourceLayers = _.clone(showStyle.sourceLayersWithOverrides.defaults) @@ -950,7 +923,7 @@ describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).sourceLayersWithOverrides.defaults).toEqual(initialSourceLayers) expect(await getAllSourceLayersFromDb(showStyle)).toEqual(initialSourceLayers) }) - testInFiber('updateSourceLayer: good', async () => { + test('updateSourceLayer: good', async () => { const ctx = await getContext() const 
showStyle = getShowStyle(ctx) const initialSourceLayers = _.clone(showStyle.sourceLayersWithOverrides.defaults) @@ -971,7 +944,7 @@ describe('Test blueprint migrationContext', () => { expect(await getAllSourceLayersFromDb(showStyle)).toEqual(initialSourceLayers) }) - testInFiber('removeSourceLayer: no id', async () => { + test('removeSourceLayer: no id', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialSourceLayers = _.clone(showStyle.sourceLayersWithOverrides.defaults) @@ -981,7 +954,7 @@ describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).sourceLayersWithOverrides.defaults).toEqual(initialSourceLayers) expect(await getAllSourceLayersFromDb(showStyle)).toEqual(initialSourceLayers) }) - testInFiber('removeSourceLayer: missing', async () => { + test('removeSourceLayer: missing', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialSourceLayers = _.clone(showStyle.sourceLayersWithOverrides.defaults) @@ -993,7 +966,7 @@ describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).sourceLayersWithOverrides.defaults).toEqual(initialSourceLayers) expect(await getAllSourceLayersFromDb(showStyle)).toEqual(initialSourceLayers) }) - testInFiber('removeSourceLayer: good', async () => { + test('removeSourceLayer: good', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialSourceLayers = _.clone(showStyle.sourceLayersWithOverrides.defaults) @@ -1017,18 +990,18 @@ describe('Test blueprint migrationContext', () => { return showStyle2.outputLayersWithOverrides.defaults } - testInFiber('getOutputLayer: no id', async () => { + test('getOutputLayer: no id', async () => { const ctx = await getContext() expect(() => ctx.getOutputLayer('')).toThrow(`[500] OutputLayer id "" is invalid`) }) - testInFiber('getOutputLayer: missing', async () => { + test('getOutputLayer: missing', async () => { const ctx = await getContext() 
const layer = ctx.getOutputLayer('fake_source_layer') expect(layer).toBeFalsy() }) - testInFiber('getOutputLayer: good', async () => { + test('getOutputLayer: good', async () => { const ctx = await getContext() const layer = ctx.getOutputLayer('pgm') as IOutputLayer @@ -1036,7 +1009,7 @@ describe('Test blueprint migrationContext', () => { expect(layer._id).toEqual('pgm') }) - testInFiber('insertOutputLayer: no id', async () => { + test('insertOutputLayer: no id', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialOutputLayers = _.clone(showStyle.outputLayersWithOverrides.defaults) @@ -1052,7 +1025,7 @@ describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).outputLayersWithOverrides.defaults).toEqual(initialOutputLayers) expect(await getAllOutputLayersFromDb(showStyle)).toEqual(initialOutputLayers) }) - testInFiber('insertOutputLayer: existing', async () => { + test('insertOutputLayer: existing', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialOutputLayers = _.clone(showStyle.outputLayersWithOverrides.defaults) @@ -1068,7 +1041,7 @@ describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).outputLayersWithOverrides.defaults).toEqual(initialOutputLayers) expect(await getAllOutputLayersFromDb(showStyle)).toEqual(initialOutputLayers) }) - testInFiber('insertOutputLayer: good', async () => { + test('insertOutputLayer: good', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialOutputLayers = _.clone(showStyle.outputLayersWithOverrides.defaults) @@ -1089,7 +1062,7 @@ describe('Test blueprint migrationContext', () => { expect(await getAllOutputLayersFromDb(showStyle)).toEqual(initialOutputLayers) }) - testInFiber('updateOutputLayer: no id', async () => { + test('updateOutputLayer: no id', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialOutputLayers = 
_.clone(showStyle.outputLayersWithOverrides.defaults) @@ -1105,7 +1078,7 @@ describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).outputLayersWithOverrides.defaults).toEqual(initialOutputLayers) expect(await getAllOutputLayersFromDb(showStyle)).toEqual(initialOutputLayers) }) - testInFiber('updateOutputLayer: missing', async () => { + test('updateOutputLayer: missing', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialOutputLayers = _.clone(showStyle.outputLayersWithOverrides.defaults) @@ -1121,7 +1094,7 @@ describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).outputLayersWithOverrides.defaults).toEqual(initialOutputLayers) expect(await getAllOutputLayersFromDb(showStyle)).toEqual(initialOutputLayers) }) - testInFiber('updateOutputLayer: good', async () => { + test('updateOutputLayer: good', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialOutputLayers = _.clone(showStyle.outputLayersWithOverrides.defaults) @@ -1142,7 +1115,7 @@ describe('Test blueprint migrationContext', () => { expect(await getAllOutputLayersFromDb(showStyle)).toEqual(initialOutputLayers) }) - testInFiber('removeOutputLayer: no id', async () => { + test('removeOutputLayer: no id', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialOutputLayers = _.clone(showStyle.outputLayersWithOverrides.defaults) @@ -1152,7 +1125,7 @@ describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).outputLayersWithOverrides.defaults).toEqual(initialOutputLayers) expect(await getAllOutputLayersFromDb(showStyle)).toEqual(initialOutputLayers) }) - testInFiber('removeOutputLayer: missing', async () => { + test('removeOutputLayer: missing', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialOutputLayers = _.clone(showStyle.outputLayersWithOverrides.defaults) @@ -1164,7 +1137,7 @@ 
describe('Test blueprint migrationContext', () => { expect(getShowStyle(ctx).outputLayersWithOverrides.defaults).toEqual(initialOutputLayers) expect(await getAllOutputLayersFromDb(showStyle)).toEqual(initialOutputLayers) }) - testInFiber('removeOutputLayer: good', async () => { + test('removeOutputLayer: good', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialOutputLayers = _.clone(showStyle.outputLayersWithOverrides.defaults) @@ -1186,17 +1159,17 @@ describe('Test blueprint migrationContext', () => { return showStyle2.blueprintConfigWithOverrides.defaults } - testInFiber('getBaseConfig: no id', async () => { + test('getBaseConfig: no id', async () => { const ctx = await getContext() expect(ctx.getBaseConfig('')).toBeFalsy() }) - testInFiber('getBaseConfig: missing', async () => { + test('getBaseConfig: missing', async () => { const ctx = await getContext() expect(ctx.getBaseConfig('conf1')).toBeFalsy() }) - testInFiber('getBaseConfig: good', async () => { + test('getBaseConfig: good', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) @@ -1207,7 +1180,7 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getBaseConfig('conf2')).toEqual('af') }) - testInFiber('setBaseConfig: no id', async () => { + test('setBaseConfig: no id', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialBaseConfig = _.clone(showStyle.blueprintConfigWithOverrides.defaults) @@ -1218,7 +1191,7 @@ describe('Test blueprint migrationContext', () => { expect(showStyle.blueprintConfigWithOverrides.defaults).toEqual(initialBaseConfig) expect(await getAllBaseConfigFromDb(showStyle)).toEqual(initialBaseConfig) }) - testInFiber('setBaseConfig: insert', async () => { + test('setBaseConfig: insert', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialBaseConfig = _.clone(showStyle.blueprintConfigWithOverrides.defaults) @@ -1237,7 
+1210,7 @@ describe('Test blueprint migrationContext', () => { expect(showStyle.blueprintConfigWithOverrides.defaults).toEqual(initialBaseConfig) expect(await getAllBaseConfigFromDb(showStyle)).toEqual(initialBaseConfig) }) - testInFiber('setBaseConfig: insert undefined', async () => { + test('setBaseConfig: insert undefined', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialBaseConfig = _.clone(showStyle.blueprintConfigWithOverrides.defaults) @@ -1252,7 +1225,7 @@ describe('Test blueprint migrationContext', () => { expect(await getAllBaseConfigFromDb(showStyle)).toEqual(initialBaseConfig) }) - testInFiber('setBaseConfig: update', async () => { + test('setBaseConfig: update', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialBaseConfig = _.clone(showStyle.blueprintConfigWithOverrides.defaults) @@ -1271,7 +1244,7 @@ describe('Test blueprint migrationContext', () => { expect(showStyle.blueprintConfigWithOverrides.defaults).toEqual(initialBaseConfig) expect(await getAllBaseConfigFromDb(showStyle)).toEqual(initialBaseConfig) }) - testInFiber('setBaseConfig: update undefined', async () => { + test('setBaseConfig: update undefined', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialBaseConfig = _.clone(showStyle.blueprintConfigWithOverrides.defaults) @@ -1286,7 +1259,7 @@ describe('Test blueprint migrationContext', () => { expect(await getAllBaseConfigFromDb(showStyle)).toEqual(initialBaseConfig) }) - testInFiber('removeBaseConfig: no id', async () => { + test('removeBaseConfig: no id', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) ctx.setBaseConfig('conf1', true) @@ -1300,7 +1273,7 @@ describe('Test blueprint migrationContext', () => { expect(showStyle.blueprintConfigWithOverrides.defaults).toEqual(initialBaseConfig) expect(await getAllBaseConfigFromDb(showStyle)).toEqual(initialBaseConfig) }) - 
testInFiber('removeBaseConfig: missing', async () => { + test('removeBaseConfig: missing', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialBaseConfig = _.clone(showStyle.blueprintConfigWithOverrides.defaults) @@ -1314,7 +1287,7 @@ describe('Test blueprint migrationContext', () => { expect(showStyle.blueprintConfigWithOverrides.defaults).toEqual(initialBaseConfig) expect(await getAllBaseConfigFromDb(showStyle)).toEqual(initialBaseConfig) }) - testInFiber('removeBaseConfig: good', async () => { + test('removeBaseConfig: good', async () => { const ctx = await getContext() const showStyle = getShowStyle(ctx) const initialBaseConfig = _.clone(showStyle.blueprintConfigWithOverrides.defaults) @@ -1341,25 +1314,25 @@ describe('Test blueprint migrationContext', () => { return variant.blueprintConfigWithOverrides.defaults } - testInFiber('getVariantConfig: no variant id', async () => { + test('getVariantConfig: no variant id', async () => { const ctx = await getContext() expect(() => ctx.getVariantConfig('', 'conf1')).toThrow(`[404] ShowStyleVariant "" not found`) }) - testInFiber('getVariantConfig: missing variant', async () => { + test('getVariantConfig: missing variant', async () => { const ctx = await getContext() expect(() => ctx.getVariantConfig('fake_variant', 'conf1')).toThrow( `[404] ShowStyleVariant "fake_variant" not found` ) }) - testInFiber('getVariantConfig: missing', async () => { + test('getVariantConfig: missing', async () => { const ctx = await getContext() await createVariant(ctx, 'configVariant', { conf1: 5, conf2: ' af ' }) expect(ctx.getVariantConfig('configVariant', 'conf11')).toBeFalsy() }) - testInFiber('getVariantConfig: good', async () => { + test('getVariantConfig: good', async () => { const ctx = await getContext() expect(ctx.getVariant('configVariant')).toBeTruthy() @@ -1367,19 +1340,19 @@ describe('Test blueprint migrationContext', () => { expect(ctx.getVariantConfig('configVariant', 
'conf2')).toEqual('af') }) - testInFiber('setVariantConfig: no variant id', async () => { + test('setVariantConfig: no variant id', async () => { const ctx = await getContext() expect(() => ctx.setVariantConfig('', 'conf1', 5)).toThrow(`[404] ShowStyleVariant "" not found`) }) - testInFiber('setVariantConfig: missing variant', async () => { + test('setVariantConfig: missing variant', async () => { const ctx = await getContext() expect(() => ctx.setVariantConfig('fake_variant', 'conf1', 5)).toThrow( `[404] ShowStyleVariant "fake_variant" not found` ) }) - testInFiber('setVariantConfig: no id', async () => { + test('setVariantConfig: no id', async () => { const ctx = await getContext() const initialVariantConfig = _.clone(await getAllVariantConfigFromDb(ctx, 'configVariant')) expect(ctx.getVariant('configVariant')).toBeTruthy() @@ -1389,7 +1362,7 @@ describe('Test blueprint migrationContext', () => { // VariantConfig should not have changed expect(await getAllVariantConfigFromDb(ctx, 'configVariant')).toEqual(initialVariantConfig) }) - testInFiber('setVariantConfig: insert', async () => { + test('setVariantConfig: insert', async () => { const ctx = await getContext() const initialVariantConfig = _.clone(await getAllVariantConfigFromDb(ctx, 'configVariant')) expect(ctx.getVariantConfig('configVariant', 'conf19')).toBeFalsy() @@ -1406,7 +1379,7 @@ describe('Test blueprint migrationContext', () => { initialVariantConfig[expectedItem._id] = expectedItem.value expect(await getAllVariantConfigFromDb(ctx, 'configVariant')).toEqual(initialVariantConfig) }) - testInFiber('setVariantConfig: insert undefined', async () => { + test('setVariantConfig: insert undefined', async () => { const ctx = await getContext() const initialVariantConfig = _.clone(await getAllVariantConfigFromDb(ctx, 'configVariant')) expect(ctx.getVariantConfig('configVariant', 'confUndef')).toBeFalsy() @@ -1419,7 +1392,7 @@ describe('Test blueprint migrationContext', () => { expect(await 
getAllVariantConfigFromDb(ctx, 'configVariant')).toEqual(initialVariantConfig) }) - testInFiber('setVariantConfig: update', async () => { + test('setVariantConfig: update', async () => { const ctx = await getContext() const initialVariantConfig = _.clone(await getAllVariantConfigFromDb(ctx, 'configVariant')) expect(ctx.getVariantConfig('configVariant', 'conf1')).toBeTruthy() @@ -1436,7 +1409,7 @@ describe('Test blueprint migrationContext', () => { initialVariantConfig[expectedItem._id] = expectedItem.value expect(await getAllVariantConfigFromDb(ctx, 'configVariant')).toEqual(initialVariantConfig) }) - testInFiber('setVariantConfig: update undefined', async () => { + test('setVariantConfig: update undefined', async () => { const ctx = await getContext() const initialVariantConfig = _.clone(await getAllVariantConfigFromDb(ctx, 'configVariant')) expect(ctx.getVariantConfig('configVariant', 'conf1')).toBeTruthy() @@ -1449,19 +1422,19 @@ describe('Test blueprint migrationContext', () => { expect(await getAllVariantConfigFromDb(ctx, 'configVariant')).toEqual(initialVariantConfig) }) - testInFiber('removeVariantConfig: no variant id', async () => { + test('removeVariantConfig: no variant id', async () => { const ctx = await getContext() expect(() => ctx.removeVariantConfig('', 'conf1')).toThrow(`[404] ShowStyleVariant "" not found`) }) - testInFiber('removeVariantConfig: missing variant', async () => { + test('removeVariantConfig: missing variant', async () => { const ctx = await getContext() expect(() => ctx.removeVariantConfig('fake_variant', 'conf1')).toThrow( `[404] ShowStyleVariant "fake_variant" not found` ) }) - testInFiber('removeVariantConfig: no id', async () => { + test('removeVariantConfig: no id', async () => { const ctx = await getContext() ctx.setVariantConfig('configVariant', 'conf1', true) const initialVariantConfig = _.clone(await getAllVariantConfigFromDb(ctx, 'configVariant')) @@ -1473,7 +1446,7 @@ describe('Test blueprint migrationContext', () => { // 
VariantConfig should not have changed expect(await getAllVariantConfigFromDb(ctx, 'configVariant')).toEqual(initialVariantConfig) }) - testInFiber('removeVariantConfig: missing', async () => { + test('removeVariantConfig: missing', async () => { const ctx = await getContext() const initialVariantConfig = _.clone(await getAllVariantConfigFromDb(ctx, 'configVariant')) expect(ctx.getVariantConfig('configVariant', 'conf1')).toBeTruthy() @@ -1485,7 +1458,7 @@ describe('Test blueprint migrationContext', () => { // VariantConfig should not have changed expect(await getAllVariantConfigFromDb(ctx, 'configVariant')).toEqual(initialVariantConfig) }) - testInFiber('removeVariantConfig: good', async () => { + test('removeVariantConfig: good', async () => { const ctx = await getContext() const initialVariantConfig = _.clone(await getAllVariantConfigFromDb(ctx, 'configVariant')) expect(ctx.getVariantConfig('configVariant', 'conf1')).toBeTruthy() @@ -1499,6 +1472,7 @@ describe('Test blueprint migrationContext', () => { }) }) }) + */ describe('MigrationContextSystem', () => { async function getContext() { @@ -1522,60 +1496,65 @@ describe('Test blueprint migrationContext', () => { ) } describe('triggeredActions', () => { - testInFiber('getAllTriggeredActions: return all triggeredActions', async () => { + test('getAllTriggeredActions: return all triggeredActions', async () => { const ctx = await getContext() // default studio environment should have 3 core-level actions - expect(ctx.getAllTriggeredActions()).toHaveLength(3) + expect(await ctx.getAllTriggeredActions()).toHaveLength(3) }) - testInFiber('getTriggeredAction: no id', async () => { + test('getTriggeredAction: no id', async () => { const ctx = await getContext() - expect(() => ctx.getTriggeredAction('')).toThrow('[500] Triggered actions Id "" is invalid') + await expect(ctx.getTriggeredAction('')).rejects.toThrowMeteor( + 500, + 'Triggered actions Id "" is invalid' + ) }) - testInFiber('getTriggeredAction: missing id', 
async () => { + test('getTriggeredAction: missing id', async () => { const ctx = await getContext() - expect(ctx.getTriggeredAction('abc')).toBeFalsy() + expect(await ctx.getTriggeredAction('abc')).toBeFalsy() }) - testInFiber('getTriggeredAction: existing id', async () => { + test('getTriggeredAction: existing id', async () => { const ctx = await getContext() const existingTriggeredActions = (await getSystemTriggeredActions())[0] expect(existingTriggeredActions).toBeTruthy() - expect(ctx.getTriggeredAction(existingTriggeredActions._id)).toMatchObject(existingTriggeredActions) + expect(await ctx.getTriggeredAction(existingTriggeredActions._id)).toMatchObject( + existingTriggeredActions + ) }) - testInFiber('setTriggeredAction: set undefined', async () => { + test('setTriggeredAction: set undefined', async () => { const ctx = await getContext() - expect(() => ctx.setTriggeredAction(undefined as any)).toThrow(/Match error/) + await expect(ctx.setTriggeredAction(undefined as any)).rejects.toThrow(/Match error/) }) - testInFiber('setTriggeredAction: set without id', async () => { + test('setTriggeredAction: set without id', async () => { const ctx = await getContext() - expect(() => + await expect( ctx.setTriggeredAction({ _rank: 0, actions: [], triggers: [], } as any) - ).toThrow(/Match error/) + ).rejects.toThrow(/Match error/) }) - testInFiber('setTriggeredAction: set without actions', async () => { + test('setTriggeredAction: set without actions', async () => { const ctx = await getContext() - expect(() => + await expect( ctx.setTriggeredAction({ _id: 'test1', _rank: 0, triggers: [], } as any) - ).toThrow(/Match error/) + ).rejects.toThrow(/Match error/) }) - testInFiber('setTriggeredAction: set with null as name', async () => { + test('setTriggeredAction: set with null as name', async () => { const ctx = await getContext() - expect(() => + await expect( ctx.setTriggeredAction({ _id: 'test1', _rank: 0, @@ -1583,14 +1562,14 @@ describe('Test blueprint 
migrationContext', () => { triggers: [], name: null, } as any) - ).toThrow(/Match error/) + ).rejects.toThrow(/Match error/) }) - testInFiber('setTriggeredAction: set non-existing id', async () => { + test('setTriggeredAction: set non-existing id', async () => { const ctx = await getContext() const blueprintLocalId = 'test0' - ctx.setTriggeredAction({ + await ctx.setTriggeredAction({ _id: blueprintLocalId, _rank: 1001, actions: { @@ -1611,20 +1590,20 @@ describe('Test blueprint migrationContext', () => { }, }, }) - const insertedTriggeredAction = ctx.getTriggeredAction(blueprintLocalId) + const insertedTriggeredAction = await ctx.getTriggeredAction(blueprintLocalId) expect(insertedTriggeredAction).toBeTruthy() // the actual id in the database should not be the same as the one provided // in the setTriggeredAction method expect(insertedTriggeredAction?._id !== blueprintLocalId).toBe(true) }) - testInFiber('setTriggeredAction: set existing id', async () => { + test('setTriggeredAction: set existing id', async () => { const ctx = await getContext() - const oldCoreAction = ctx.getTriggeredAction('mockTriggeredAction_core0') + const oldCoreAction = await ctx.getTriggeredAction('mockTriggeredAction_core0') expect(oldCoreAction).toBeTruthy() expect(oldCoreAction?.actions[0].action).toBe(PlayoutActions.adlib) - ctx.setTriggeredAction({ + await ctx.setTriggeredAction({ _id: 'mockTriggeredAction_core0', _rank: 0, actions: { @@ -1646,23 +1625,26 @@ describe('Test blueprint migrationContext', () => { }, }) - const newCoreAction = ctx.getTriggeredAction('mockTriggeredAction_core0') + const newCoreAction = await ctx.getTriggeredAction('mockTriggeredAction_core0') expect(newCoreAction).toBeTruthy() expect(newCoreAction?.actions[0].action).toBe(PlayoutActions.activateRundownPlaylist) }) - testInFiber('removeTriggeredAction: remove empty id', async () => { + test('removeTriggeredAction: remove empty id', async () => { const ctx = await getContext() - expect(() => 
ctx.removeTriggeredAction('')).toThrow('[500] Triggered actions Id "" is invalid') + await expect(ctx.removeTriggeredAction('')).rejects.toThrowMeteor( + 500, + 'Triggered actions Id "" is invalid' + ) }) - testInFiber('removeTriggeredAction: remove existing id', async () => { + test('removeTriggeredAction: remove existing id', async () => { const ctx = await getContext() - const oldCoreAction = ctx.getTriggeredAction('mockTriggeredAction_core0') + const oldCoreAction = await ctx.getTriggeredAction('mockTriggeredAction_core0') expect(oldCoreAction).toBeTruthy() - ctx.removeTriggeredAction('mockTriggeredAction_core0') - expect(ctx.getTriggeredAction('mockTriggeredAction_core0')).toBeFalsy() + await ctx.removeTriggeredAction('mockTriggeredAction_core0') + expect(await ctx.getTriggeredAction('mockTriggeredAction_core0')).toBeFalsy() }) }) }) diff --git a/meteor/server/api/blueprints/migrationContext.ts b/meteor/server/api/blueprints/migrationContext.ts index fe742fdc9d7..a273f24bd1a 100644 --- a/meteor/server/api/blueprints/migrationContext.ts +++ b/meteor/server/api/blueprints/migrationContext.ts @@ -1,52 +1,20 @@ -import * as _ from 'underscore' -import { - getHash, - unprotectObject, - protectString, - unprotectString, - objectPathGet, - objectPathSet, - clone, - Complete, - objectPathDelete, -} from '../../lib/tempLib' -import { waitForPromise } from '../../lib/lib' -import { DBStudio, StudioPlayoutDevice } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' +import { getHash, protectString, unprotectString, clone, Complete } from '../../lib/tempLib' import { Meteor } from 'meteor/meteor' import { - ConfigItemValue, - MigrationContextStudio as IMigrationContextStudio, - MigrationContextShowStyle as IMigrationContextShowStyle, MigrationContextSystem as IMigrationContextSystem, - BlueprintMapping, - IOutputLayer, - ISourceLayer, - ShowStyleVariantPart, - 
IBlueprintShowStyleVariant, - TSR, - OmitId, IBlueprintTriggeredActions, } from '@sofie-automation/blueprints-integration' - -import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' import { check } from '../../lib/check' -import { - PERIPHERAL_SUBTYPE_PROCESS, - PeripheralDeviceType, -} from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { TriggeredActionsObj } from '@sofie-automation/meteor-lib/dist/collections/TriggeredActions' import { Match } from 'meteor/check' -import { MongoModifier } from '@sofie-automation/corelib/dist/mongo' import { wrapDefaultObject } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { ShowStyleBaseId, ShowStyleVariantId, TriggeredActionId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PeripheralDevices, ShowStyleBases, ShowStyleVariants, Studios, TriggeredActions } from '../../collections' -import { literal } from '@sofie-automation/shared-lib/dist/lib/lib' +import { ShowStyleBaseId, TriggeredActionId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { TriggeredActions } from '../../collections' -function trimIfString(value: T): T | string { - if (_.isString(value)) return value.trim() - return value -} +// function trimIfString(value: T): T | string { +// if (_.isString(value)) return value.trim() +// return value +// } function convertTriggeredActionToBlueprints(triggeredAction: TriggeredActionsObj): IBlueprintTriggeredActions { const obj: Complete = { @@ -69,40 +37,36 @@ class AbstractMigrationContextWithTriggeredActions { private getProtectedTriggeredActionId(triggeredActionId: string): TriggeredActionId { return protectString(this.getTriggeredActionId(triggeredActionId)) } - getAllTriggeredActions(): IBlueprintTriggeredActions[] { - return waitForPromise( - TriggeredActions.findFetchAsync({ + async getAllTriggeredActions(): Promise { + return ( + await TriggeredActions.findFetchAsync({ showStyleBaseId: 
this.showStyleBaseId, }) ).map(convertTriggeredActionToBlueprints) } - private getTriggeredActionFromDb(triggeredActionId: string): TriggeredActionsObj | undefined { - const triggeredAction = waitForPromise( - TriggeredActions.findOneAsync({ - showStyleBaseId: this.showStyleBaseId, - _id: this.getProtectedTriggeredActionId(triggeredActionId), - }) - ) + private async getTriggeredActionFromDb(triggeredActionId: string): Promise { + const triggeredAction = await TriggeredActions.findOneAsync({ + showStyleBaseId: this.showStyleBaseId, + _id: this.getProtectedTriggeredActionId(triggeredActionId), + }) if (triggeredAction) return triggeredAction // Assume we were given the full id - return waitForPromise( - TriggeredActions.findOneAsync({ - showStyleBaseId: this.showStyleBaseId, - _id: protectString(triggeredActionId), - }) - ) + return TriggeredActions.findOneAsync({ + showStyleBaseId: this.showStyleBaseId, + _id: protectString(triggeredActionId), + }) } - getTriggeredAction(triggeredActionId: string): IBlueprintTriggeredActions | undefined { + async getTriggeredAction(triggeredActionId: string): Promise { check(triggeredActionId, String) if (!triggeredActionId) { throw new Meteor.Error(500, `Triggered actions Id "${triggeredActionId}" is invalid`) } - const obj = this.getTriggeredActionFromDb(triggeredActionId) + const obj = await this.getTriggeredActionFromDb(triggeredActionId) return obj ? 
convertTriggeredActionToBlueprints(obj) : undefined } - setTriggeredAction(triggeredActions: IBlueprintTriggeredActions) { + async setTriggeredAction(triggeredActions: IBlueprintTriggeredActions): Promise { check(triggeredActions, Object) check(triggeredActions._id, String) check(triggeredActions._rank, Number) @@ -123,43 +87,37 @@ class AbstractMigrationContextWithTriggeredActions { blueprintUniqueId: triggeredActions._id, } - const currentTriggeredAction = this.getTriggeredActionFromDb(triggeredActions._id) + const currentTriggeredAction = await this.getTriggeredActionFromDb(triggeredActions._id) if (!currentTriggeredAction) { - waitForPromise( - TriggeredActions.insertAsync({ - ...newObj, - showStyleBaseId: this.showStyleBaseId, - _id: this.getProtectedTriggeredActionId(triggeredActions._id), - }) - ) + await TriggeredActions.insertAsync({ + ...newObj, + showStyleBaseId: this.showStyleBaseId, + _id: this.getProtectedTriggeredActionId(triggeredActions._id), + }) } else { - waitForPromise( - TriggeredActions.updateAsync( - { - _id: currentTriggeredAction._id, - }, - { - $set: newObj, - }, - { multi: true } - ) + await TriggeredActions.updateAsync( + { + _id: currentTriggeredAction._id, + }, + { + $set: newObj, + }, + { multi: true } ) } } - removeTriggeredAction(triggeredActionId: string) { + async removeTriggeredAction(triggeredActionId: string): Promise { check(triggeredActionId, String) if (!triggeredActionId) { throw new Meteor.Error(500, `Triggered actions Id "${triggeredActionId}" is invalid`) } - const currentTriggeredAction = this.getTriggeredActionFromDb(triggeredActionId) + const currentTriggeredAction = await this.getTriggeredActionFromDb(triggeredActionId) if (currentTriggeredAction) { - waitForPromise( - TriggeredActions.removeAsync({ - _id: currentTriggeredAction._id, - showStyleBaseId: this.showStyleBaseId, - }) - ) + await TriggeredActions.removeAsync({ + _id: currentTriggeredAction._id, + showStyleBaseId: this.showStyleBaseId, + }) } } } @@ -168,6 
+126,7 @@ export class MigrationContextSystem extends AbstractMigrationContextWithTriggeredActions implements IMigrationContextSystem {} +/* export class MigrationContextStudio implements IMigrationContextStudio { private studio: DBStudio @@ -774,3 +733,4 @@ export class MigrationContextShowStyle } } } +*/ diff --git a/meteor/server/api/deviceTriggers/StudioObserver.ts b/meteor/server/api/deviceTriggers/StudioObserver.ts index 0305c68a88e..28502c443fe 100644 --- a/meteor/server/api/deviceTriggers/StudioObserver.ts +++ b/meteor/server/api/deviceTriggers/StudioObserver.ts @@ -10,7 +10,6 @@ import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mo import EventEmitter from 'events' import { Meteor } from 'meteor/meteor' import _ from 'underscore' -import { MongoCursor } from '@sofie-automation/meteor-lib/dist/collections/lib' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' @@ -21,6 +20,7 @@ import { RundownContentObserver } from './RundownContentObserver' import { RundownsObserver } from './RundownsObserver' import { RundownPlaylists, Rundowns, ShowStyleBases } from '../../collections' import { PromiseDebounce } from '../../publications/lib/debounce' +import { MinimalMongoCursor } from '../../collections/implementations/asyncCollection' type ChangedHandler = (showStyleBaseId: ShowStyleBaseId, cache: ContentCache) => () => void @@ -84,7 +84,7 @@ export class StudioObserver extends EventEmitter { { projection: rundownPlaylistFieldSpecifier, } - ) as Promise>> + ) as Promise>> ) .end(this.updatePlaylistInStudio) } @@ -137,7 +137,7 @@ export class StudioObserver extends EventEmitter { 'currentRundown', async () => Rundowns.findWithCursor({ _id: rundownId }, { fields: rundownFieldSpecifier, limit: 1 }) as Promise< - MongoCursor> + 
MinimalMongoCursor> > ) .next('showStyleBase', async (chain) => @@ -148,7 +148,7 @@ export class StudioObserver extends EventEmitter { fields: showStyleBaseFieldSpecifier, limit: 1, } - ) as Promise>>) + ) as Promise>>) : null ) .end(this.updateShowStyle.call) diff --git a/meteor/server/api/deviceTriggers/observer.ts b/meteor/server/api/deviceTriggers/observer.ts index fb1448f24e4..aa6bc4bc861 100644 --- a/meteor/server/api/deviceTriggers/observer.ts +++ b/meteor/server/api/deviceTriggers/observer.ts @@ -18,14 +18,13 @@ import { StudioObserver } from './StudioObserver' import { Studios } from '../../collections' import { ReactiveCacheCollection } from '../../publications/lib/ReactiveCacheCollection' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { MeteorStartupAsync } from '../../lib/lib' type ObserverAndManager = { observer: StudioObserver manager: StudioDeviceTriggerManager } -MeteorStartupAsync(async () => { +Meteor.startup(async () => { const studioObserversAndManagers = new Map() const jobQueue = new JobQueueWithClasses({ autoStart: true, diff --git a/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts b/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts index 5de794580a8..f5fd0c2022e 100644 --- a/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts +++ b/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts @@ -3,7 +3,6 @@ import { Meteor } from 'meteor/meteor' import { MOS } from '@sofie-automation/meteor-lib/dist/mos' import { setupDefaultStudioEnvironment } from '../../../../../__mocks__/helpers/database' -import { testInFiber } from '../../../../../__mocks__/helpers/jest' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { MOSDeviceActions } from '../actions' import { PeripheralDeviceCommand } from '@sofie-automation/corelib/dist/dataModel/PeripheralDeviceCommand' @@ -47,7 +46,7 @@ describe('Test sending mos actions', () => { 
} }) - testInFiber('reloadRundown: expect error', async () => { + test('reloadRundown: expect error', async () => { // setLogLevel(LogLevel.DEBUG) const rundownId: RundownId = getRandomId() @@ -82,7 +81,7 @@ describe('Test sending mos actions', () => { await expect(MOSDeviceActions.reloadRundown(device, fakeRundown)).rejects.toMatch(`unknown annoying error`) }) - testInFiber('reloadRundown: valid payload', async () => { + test('reloadRundown: valid payload', async () => { // setLogLevel(LogLevel.DEBUG) const roData = fakeMinimalRo() @@ -140,7 +139,7 @@ describe('Test sending mos actions', () => { ) }) - testInFiber('reloadRundown: receive incorrect response rundown id', async () => { + test('reloadRundown: receive incorrect response rundown id', async () => { // setLogLevel(LogLevel.DEBUG) const roData = fakeMinimalRo() diff --git a/meteor/server/api/ingest/rundownInput.ts b/meteor/server/api/ingest/rundownInput.ts index 6534daaf347..8b05552eba0 100644 --- a/meteor/server/api/ingest/rundownInput.ts +++ b/meteor/server/api/ingest/rundownInput.ts @@ -3,7 +3,7 @@ import { check } from '../../lib/check' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { NrcsIngestDataCache, MediaObjects, Parts, Rundowns, Segments } from '../../collections' import { literal } from '../../lib/tempLib' -import { lazyIgnore, MeteorStartupAsync } from '../../lib/lib' +import { lazyIgnore } from '../../lib/lib' import { IngestRundown, IngestSegment, IngestPart, IngestPlaylist } from '@sofie-automation/blueprints-integration' import { logger } from '../../logging' import { RundownIngestDataCache } from './ingestCache' @@ -363,7 +363,7 @@ async function listIngestRundowns(peripheralDevice: PeripheralDevice): Promise { +Meteor.startup(async () => { await MediaObjects.observe( {}, { diff --git a/meteor/server/api/methods.ts b/meteor/server/api/methods.ts index 10de2b2be3c..b8566a5db2f 100644 --- a/meteor/server/api/methods.ts +++ 
b/meteor/server/api/methods.ts @@ -1,24 +1,4 @@ import { Meteor } from 'meteor/meteor' import { MakeMeteorCall } from '@sofie-automation/meteor-lib/dist/api/methods' -export const MeteorCall = MakeMeteorCall(MeteorPromiseApply) - -/** - * Convenience method to convert a Meteor.apply() into a Promise - * @param callName {string} Method name - * @param args {Array} An array of arguments for the method call - * @param options (Optional) An object with options for the call. See Meteor documentation. - * @returns {Promise} A promise containing the result of the called method. - */ -export async function MeteorPromiseApply( - callName: Parameters[0], - args: Parameters[1], - options?: Parameters[2] -): Promise { - return new Promise((resolve, reject) => { - Meteor.apply(callName, args, options, (err, res) => { - if (err) reject(err) - else resolve(res) - }) - }) -} +export const MeteorCall = MakeMeteorCall(Meteor.applyAsync) diff --git a/meteor/server/api/profiler/apm.ts b/meteor/server/api/profiler/apm.ts new file mode 100644 index 00000000000..6e2eb99d702 --- /dev/null +++ b/meteor/server/api/profiler/apm.ts @@ -0,0 +1,52 @@ +import { Meteor } from 'meteor/meteor' +// const shimmer = require('shimmer') +import Agent, { AgentConfigOptions } from 'elastic-apm-node' + +// const { Session, Subscription, MongoCursor } = require('./meteorx') + +// Only the ones of these we use have been copied across. 
+// The others can be found at https://github.com/Meteor-Community-Packages/meteor-elastic-apm/tree/master/instrumenting +// const instrumentMethods = require('./instrumenting/methods') +// const instrumentHttpOut = require('./instrumenting/http-out') +// const instrumentSession = require('./instrumenting/session') +// const instrumentSubscription = require('./instrumenting/subscription') +// const instrumentDB = require('./instrumenting/db') +// const startMetrics = require('./metrics') + +// const hackDB = require('./hacks') + +const [framework, version] = Meteor.release.split('@') + +Agent.setFramework({ + name: framework, + version, + overwrite: true, +}) + +export const RawAgent = Agent + +export function startAgent(config: AgentConfigOptions): void { + if (config.active !== false) { + try { + // Must be called before any other route is registered on WebApp. + // http-in has been moved to be part of where the koa router is mounted + // instrumentHttpOut(Agent) + + Agent.start(config) + + // instrumentMethods(Agent, Meteor), + // instrumentSession(Agent, Session), + // instrumentSubscription(Agent, Subscription), + // hackDB() // TODO: what is this doing? 
https://github.com/Meteor-Community-Packages/meteor-elastic-apm/blob/master/hacks.js + // instrumentDB replaced by manual wrapping in WrappedAsyncMongoCollection + // startMetrics(Agent), + + Agent.logger.info('meteor-elastic-apm completed instrumenting') + } catch (e) { + Agent.logger.error('Could not start meteor-elastic-apm') + throw e + } + } else { + Agent.logger.warn('meteor-elastic-apm is not active') + } +} diff --git a/meteor/server/api/profiler.ts b/meteor/server/api/profiler/index.ts similarity index 68% rename from meteor/server/api/profiler.ts rename to meteor/server/api/profiler/index.ts index ef570049900..6faa03270c3 100644 --- a/meteor/server/api/profiler.ts +++ b/meteor/server/api/profiler/index.ts @@ -1,16 +1,16 @@ -import Agent from 'meteor/julusian:meteor-elastic-apm' +import { RawAgent } from './apm' class Profiler { private active = false startSpan(_name: string) { if (!this.active) return - return Agent.startSpan(_name) + return RawAgent.startSpan(_name) } startTransaction(description: string, name: string) { if (!this.active) return - return Agent.startTransaction(description, name) + return RawAgent.startTransaction(description, name) } setActive(active: boolean) { diff --git a/meteor/server/api/rest/koa.ts b/meteor/server/api/rest/koa.ts index 3a8c54dc0a0..6fc91e87061 100644 --- a/meteor/server/api/rest/koa.ts +++ b/meteor/server/api/rest/koa.ts @@ -9,6 +9,7 @@ import { public_dir } from '../../lib' import staticServe from 'koa-static' import { logger } from '../../logging' import { PackageInfo } from '../../coreSystem' +import { profiler } from '../profiler' declare module 'http' { interface IncomingMessage { @@ -46,6 +47,30 @@ Meteor.startup(() => { // Expose the API at the url WebApp.rawConnectHandlers.use((req, res) => { + const transaction = profiler.startTransaction(`${req.method}:${req.url}`, 'http.incoming') + if (transaction) { + transaction.setLabel('url', `${req.url}`) + transaction.setLabel('method', `${req.method}`) + + 
res.on('finish', () => { + let route = req.originalUrl + if (req.originalUrl && req.url && req.originalUrl.endsWith(req.url.slice(1)) && req.url.length > 1) { + route = req.originalUrl.slice(0, -1 * (req.url.length - 1)) + } + + if (route && route.endsWith('/')) { + route = route.slice(0, -1) + } + + if (route) { + transaction.name = `${req.method}:${route}` + transaction.setLabel('route', `${route}`) + } + + transaction.end() + }) + } + const callback = Meteor.bindEnvironment(koaApp.callback()) callback(req, res).catch(() => res.end()) }) diff --git a/meteor/server/api/rest/v0/__tests__/rest.test.ts b/meteor/server/api/rest/v0/__tests__/rest.test.ts index e40b92664eb..41d9e876c7a 100644 --- a/meteor/server/api/rest/v0/__tests__/rest.test.ts +++ b/meteor/server/api/rest/v0/__tests__/rest.test.ts @@ -1,4 +1,3 @@ -import { beforeEachInFiber } from '../../../../../__mocks__/helpers/jest' import { MeteorMock } from '../../../../../__mocks__/meteor' import { Meteor } from 'meteor/meteor' import { UserActionAPIMethods } from '@sofie-automation/meteor-lib/dist/api/userActions' @@ -15,8 +14,8 @@ import '../index.ts' describe('REST API', () => { describe('UNSTABLE v0', () => { - beforeEachInFiber(() => { - MeteorMock.mockRunMeteorStartup() + beforeEach(async () => { + await MeteorMock.mockRunMeteorStartup() }) const legacyApiRouter = createLegacyApiRouter() diff --git a/meteor/server/api/rest/v0/index.ts b/meteor/server/api/rest/v0/index.ts index a2a150a51ff..b1b007dbbed 100644 --- a/meteor/server/api/rest/v0/index.ts +++ b/meteor/server/api/rest/v0/index.ts @@ -78,9 +78,9 @@ export function createLegacyApiRouter(): KoaRouter { index.POST.push(docString) - assignRoute(router, 'POST', resource, signature.length, (args) => { + assignRoute(router, 'POST', resource, signature.length, async (args) => { const convArgs = typeConvertUrlParameters(args) - return Meteor.call(methodValue, ...convArgs) + return Meteor.callAsync(methodValue, ...convArgs) }) } @@ -159,7 +159,7 @@ 
function assignRoute( routeType: 'POST' | 'GET', resource: string, paramCount: number, - fcn: (p: any[]) => any + fcn: (p: any[]) => Promise ) { const route = routeType === 'POST' ? router.post.bind(router) : router.get.bind(router) diff --git a/meteor/server/api/snapshot.ts b/meteor/server/api/snapshot.ts index d346e5c3a22..a07dbdb7d98 100644 --- a/meteor/server/api/snapshot.ts +++ b/meteor/server/api/snapshot.ts @@ -279,7 +279,6 @@ async function createDebugSnapshot(studioId: StudioId, organizationId: Organizat if (device.connected && device.subType === PERIPHERAL_SUBTYPE_PROCESS) { const startTime = getCurrentTime() - // defer to another fiber const deviceSnapshot = await executePeripheralDeviceFunction(device._id, 'getSnapshot') logger.info('Got snapshot from device "' + device._id + '"') diff --git a/meteor/server/api/studio/api.ts b/meteor/server/api/studio/api.ts index 1c085f3c4b8..94ac811d40d 100644 --- a/meteor/server/api/studio/api.ts +++ b/meteor/server/api/studio/api.ts @@ -4,7 +4,7 @@ import { registerClassToMeteorMethods } from '../../methods' import { NewStudiosAPI, StudiosAPIMethods } from '@sofie-automation/meteor-lib/dist/api/studios' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { literal, getRandomId } from '../../lib/tempLib' -import { lazyIgnore, MeteorStartupAsync } from '../../lib/lib' +import { lazyIgnore } from '../../lib/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { ExpectedPackages, @@ -134,7 +134,7 @@ function triggerUpdateStudioMappingsHash(studioId: StudioId) { ) } -MeteorStartupAsync(async () => { +Meteor.startup(async () => { await Studios.observeChanges( {}, { diff --git a/meteor/server/api/system.ts b/meteor/server/api/system.ts index a2c7aa98064..1668f76720f 100644 --- a/meteor/server/api/system.ts +++ b/meteor/server/api/system.ts @@ -67,7 +67,7 @@ async function setupIndexes(removeOldIndexes = false): Promise { _.each(i.indexes, (index) => 
{ - i.collection._ensureIndex(index) + i.collection.createIndex(index) }) }) } @@ -119,7 +119,7 @@ let mongoTest: AsyncOnlyMongoCollection | undefined = undefined async function doSystemBenchmarkInner() { if (!mongoTest) { mongoTest = createAsyncOnlyMongoCollection('benchmark-test' as any, false) - mongoTest._ensureIndex({ + mongoTest.createIndex({ indexedProp: 1, }) } diff --git a/meteor/server/api/user.ts b/meteor/server/api/user.ts index 50dd833241e..9b3649abdd7 100644 --- a/meteor/server/api/user.ts +++ b/meteor/server/api/user.ts @@ -1,5 +1,4 @@ import { Meteor } from 'meteor/meteor' -import * as _ from 'underscore' import { Accounts } from 'meteor/accounts-base' import { unprotectString, protectString } from '../lib/tempLib' import { sleep, deferAsync } from '../lib/lib' @@ -25,7 +24,7 @@ async function enrollUser(email: string, name: string): Promise { profile: { name: name }, }) try { - Accounts.sendEnrollmentEmail(unprotectString(id), email) + await Accounts.sendEnrollmentEmail(unprotectString(id), email) } catch (error) { logger.error('Accounts.sendEnrollmentEmail') logger.error(error) @@ -63,11 +62,13 @@ async function sendVerificationEmail(userId: UserId) { const user = await Users.findOneAsync(userId) if (!user) throw new Meteor.Error(404, `User "${userId}" not found!`) try { - _.each(user.emails, (email) => { - if (!email.verified) { - Accounts.sendVerificationEmail(unprotectString(user._id), email.address) - } - }) + await Promise.all( + user.emails.map(async (email) => { + if (!email.verified) { + await Accounts.sendVerificationEmail(unprotectString(user._id), email.address) + } + }) + ) } catch (error) { logger.error('ERROR sending email verification') logger.error(error) @@ -79,7 +80,7 @@ async function requestResetPassword(email: string): Promise { const meteorUser = Accounts.findUserByEmail(email) as unknown const user = meteorUser as User if (!user) return false - Accounts.sendResetPasswordEmail(unprotectString(user._id)) + await 
Accounts.sendResetPasswordEmail(unprotectString(user._id)) return true } diff --git a/meteor/server/collections/collection.ts b/meteor/server/collections/collection.ts index 7f13f1839a6..5a81d597c2e 100644 --- a/meteor/server/collections/collection.ts +++ b/meteor/server/collections/collection.ts @@ -5,7 +5,6 @@ import { Meteor } from 'meteor/meteor' import { Mongo } from 'meteor/mongo' import { NpmModuleMongodb } from 'meteor/npm-mongo' import { PromisifyCallbacks } from '@sofie-automation/shared-lib/dist/lib/types' -import { waitForPromise } from '../lib/lib' import type { AnyBulkWriteOperation, Collection as RawCollection } from 'mongodb' import { CollectionName } from '@sofie-automation/corelib/dist/dataModel/Collections' import { registerCollection } from './lib' @@ -15,22 +14,22 @@ import { WrappedReadOnlyMongoCollection } from './implementations/readonlyWrappe import { FieldNames, IndexSpecifier, - MongoCursor, ObserveCallbacks, ObserveChangesCallbacks, UpdateOptions, UpsertOptions, } from '@sofie-automation/meteor-lib/dist/collections/lib' +import { MinimalMongoCursor } from './implementations/asyncCollection' export interface MongoAllowRules { - insert?: (userId: UserId, doc: DBInterface) => Promise | boolean + insert?: (userId: UserId | null, doc: DBInterface) => Promise | boolean update?: ( - userId: UserId, + userId: UserId | null, doc: DBInterface, fieldNames: FieldNames, modifier: MongoModifier ) => Promise | boolean - remove?: (userId: UserId, doc: DBInterface) => Promise | boolean + remove?: (userId: UserId | null, doc: DBInterface) => Promise | boolean } /** @@ -117,7 +116,7 @@ function wrapMeteorCollectionIntoAsyncCollection(collection, name) } else { // Override the default mongodb methods, because the errors thrown by them doesn't contain the proper call stack @@ -129,33 +128,32 @@ function setupCollectionAllowRules, args: MongoAllowRules | false ) { - if (args) { - const { insert: origInsert, update: origUpdate, remove: origRemove } = args - - 
const options: Parameters['allow']>[0] = { - insert: origInsert ? (userId, doc) => waitForPromise(origInsert(protectString(userId), doc)) : () => false, - update: origUpdate - ? (userId, doc, fieldNames, modifier) => - waitForPromise(origUpdate(protectString(userId), doc, fieldNames as any, modifier)) - : () => false, - remove: origRemove ? (userId, doc) => waitForPromise(origRemove(protectString(userId), doc)) : () => false, - } - - collection.allow(options) - } else { - // Block all client mutations - collection.allow({ - insert(): boolean { - return false - }, - update() { - return false - }, - remove() { - return false - }, - }) + if (!args) { + // Mutations are disabled by default + return + } + + const { insert: origInsert, update: origUpdate, remove: origRemove } = args + + // These methods behave weirdly, we need to mangle this a bit. + // See https://github.com/meteor/meteor/issues/13444 for a full explanation + const options: any /*Parameters['allow']>[0]*/ = { + insert: () => false, + insertAsync: origInsert + ? (userId: string | null, doc: DBInterface) => origInsert(protectString(userId), doc) as any + : () => false, + update: () => false, + updateAsync: origUpdate + ? (userId: string | null, doc: DBInterface, fieldNames: string[], modifier: any) => + origUpdate(protectString(userId), doc, fieldNames as any, modifier) as any + : () => false, + remove: () => false, + removeAsync: origRemove + ? 
(userId: string | null, doc: DBInterface) => origRemove(protectString(userId), doc) as any + : () => false, } + + collection.allow(options) } /** @@ -273,7 +271,7 @@ export interface AsyncOnlyReadOnlyMongoCollection | DBInterface['_id'], options?: FindOptions - ): Promise> + ): Promise> /** * Observe changes on this collection @@ -301,5 +299,5 @@ export interface AsyncOnlyReadOnlyMongoCollection, options?: FindOptions): Promise - _ensureIndex(keys: IndexSpecifier | string, options?: NpmModuleMongodb.CreateIndexesOptions): void + createIndex(indexSpec: IndexSpecifier, options?: NpmModuleMongodb.CreateIndexesOptions): void } diff --git a/meteor/server/collections/implementations/asyncCollection.ts b/meteor/server/collections/implementations/asyncCollection.ts index 864748a5b78..db05a469eee 100644 --- a/meteor/server/collections/implementations/asyncCollection.ts +++ b/meteor/server/collections/implementations/asyncCollection.ts @@ -1,51 +1,137 @@ import { MongoModifier, MongoQuery } from '@sofie-automation/corelib/dist/mongo' -import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' +import { ProtectedString, protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { Meteor } from 'meteor/meteor' +import { Mongo } from 'meteor/mongo' import { UpdateOptions, UpsertOptions, - FindOptions, + IndexSpecifier, MongoCursor, + FindOptions, ObserveChangesCallbacks, ObserveCallbacks, } from '@sofie-automation/meteor-lib/dist/collections/lib' +import type { AnyBulkWriteOperation, Collection as RawCollection, Db as RawDb } from 'mongodb' +import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { NpmModuleMongodb } from 'meteor/npm-mongo' +import { profiler } from '../../api/profiler' import { PromisifyCallbacks } from '@sofie-automation/shared-lib/dist/lib/types' -import { makePromise } from '../../lib/lib' -import type { AnyBulkWriteOperation } from 'mongodb' -import { 
WrappedMongoCollectionBase, dePromiseObjectOfFunctions } from './base' import { AsyncOnlyMongoCollection } from '../collection' +export type MinimalMongoCursor }> = Pick< + MongoCursor, + 'fetchAsync' | 'observeChangesAsync' | 'observeAsync' | 'countAsync' + // | 'forEach' | 'map' | +> + +export type MinimalMeteorMongoCollection }> = Pick< + Mongo.Collection, + // | 'find' + 'insertAsync' | 'removeAsync' | 'updateAsync' | 'upsertAsync' | 'rawCollection' | 'rawDatabase' | 'createIndex' +> & { + find: (...args: Parameters['find']>) => MinimalMongoCursor +} + export class WrappedAsyncMongoCollection }> - extends WrappedMongoCollectionBase implements AsyncOnlyMongoCollection { + protected readonly _collection: MinimalMeteorMongoCollection + + public readonly name: string | null + + constructor(collection: Mongo.Collection, name: string | null) { + this._collection = collection as any + this.name = name + } + + protected get _isMock(): boolean { + // @ts-expect-error re-export private property + return this._collection._isMock + } + + public get mockCollection(): MinimalMeteorMongoCollection { + return this._collection + } + get mutableCollection(): AsyncOnlyMongoCollection { return this } + protected wrapMongoError(e: unknown): never { + const str = stringifyError(e) || 'Unknown MongoDB Error' + throw new Meteor.Error(e instanceof Meteor.Error ? 
e.error : 500, `Collection "${this.name}": ${str}`) + } + + rawCollection(): RawCollection { + return this._collection.rawCollection() as any + } + protected rawDatabase(): RawDb { + return this._collection.rawDatabase() as any + } + async findFetchAsync( selector: MongoQuery | DBInterface['_id'], options?: FindOptions ): Promise> { - // Make the collection fethcing in another Fiber: - return makePromise(() => { - return this.find(selector as any, options).fetch() - }) + const span = profiler.startSpan(`MongoCollection.${this.name}.findFetch`) + if (span) { + span.addLabels({ + collection: this.name, + query: JSON.stringify(selector), + }) + } + try { + const res = await this._collection.find((selector ?? {}) as any, options as any).fetchAsync() + if (span) span.end() + return res + } catch (e) { + if (span) span.end() + this.wrapMongoError(e) + } } async findOneAsync( selector: MongoQuery | DBInterface['_id'], options?: FindOptions ): Promise { - const arr = await this.findFetchAsync(selector, { ...options, limit: 1 }) - return arr[0] + const span = profiler.startSpan(`MongoCollection.${this.name}.findOne`) + if (span) { + span.addLabels({ + collection: this.name, + query: JSON.stringify(selector), + }) + } + try { + const arr = await this._collection + .find((selector ?? {}) as any, { ...(options as any), limit: 1 }) + .fetchAsync() + if (span) span.end() + return arr[0] + } catch (e) { + if (span) span.end() + this.wrapMongoError(e) + } } async findWithCursor( selector?: MongoQuery | DBInterface['_id'], options?: FindOptions - ): Promise> { - return this.find(selector as any, options) + ): Promise> { + const span = profiler.startSpan(`MongoCollection.${this.name}.findCursor`) + if (span) { + span.addLabels({ + collection: this.name, + query: JSON.stringify(selector), + }) + } + try { + const res = this._collection.find((selector ?? 
{}) as any, options as any) + if (span) span.end() + return res + } catch (e) { + if (span) span.end() + this.wrapMongoError(e) + } } async observeChanges( @@ -53,7 +139,23 @@ export class WrappedAsyncMongoCollection>, options?: FindOptions ): Promise { - return this.find(selector as any, options).observeChangesAsync(dePromiseObjectOfFunctions(callbacks)) + const span = profiler.startSpan(`MongoCollection.${this.name}.observeChanges`) + if (span) { + span.addLabels({ + collection: this.name, + query: JSON.stringify(selector), + }) + } + try { + const res = await this._collection + .find((selector ?? {}) as any, options as any) + .observeChangesAsync(callbacks) + if (span) span.end() + return res + } catch (e) { + if (span) span.end() + this.wrapMongoError(e) + } } async observe( @@ -61,37 +163,130 @@ export class WrappedAsyncMongoCollection>, options?: FindOptions ): Promise { - return this.find(selector as any, options).observeAsync(dePromiseObjectOfFunctions(callbacks)) + const span = profiler.startSpan(`MongoCollection.${this.name}.observe`) + if (span) { + span.addLabels({ + collection: this.name, + query: JSON.stringify(selector), + }) + } + try { + const res = await this._collection.find((selector ?? {}) as any, options as any).observeAsync(callbacks) + if (span) span.end() + return res + } catch (e) { + if (span) span.end() + this.wrapMongoError(e) + } } - async insertAsync(doc: DBInterface): Promise { - return makePromise(() => { - return this.insert(doc) - }) + public async countDocuments( + selector?: MongoQuery, + options?: FindOptions + ): Promise { + const span = profiler.startSpan(`MongoCollection.${this.name}.countDocuments`) + if (span) { + span.addLabels({ + collection: this.name, + query: JSON.stringify(selector), + }) + } + try { + const res = await this._collection.find((selector ?? 
{}) as any, options as any).countAsync() + if (span) span.end() + return res + } catch (e) { + if (span) span.end() + this.wrapMongoError(e) + } + } + + public async insertAsync(doc: DBInterface): Promise { + const span = profiler.startSpan(`MongoCollection.${this.name}.insert`) + if (span) { + span.addLabels({ + collection: this.name, + id: unprotectString(doc._id), + }) + } + try { + const resultId = await this._collection.insertAsync(doc as unknown as Mongo.OptionalId) + if (span) span.end() + return protectString(resultId) + } catch (e) { + if (span) span.end() + this.wrapMongoError(e) + } } async insertManyAsync(docs: DBInterface[]): Promise> { - return Promise.all(docs.map((doc) => this.insert(doc))) + return Promise.all(docs.map(async (doc) => this.insertAsync(doc))) } - async updateAsync( + public async removeAsync(selector: MongoQuery | DBInterface['_id']): Promise { + const span = profiler.startSpan(`MongoCollection.${this.name}.remove`) + if (span) { + span.addLabels({ + collection: this.name, + query: JSON.stringify(selector), + }) + } + try { + const res = await this._collection.removeAsync(selector as any) + if (span) span.end() + return res + } catch (e) { + if (span) span.end() + this.wrapMongoError(e) + } + } + public async updateAsync( selector: MongoQuery | DBInterface['_id'] | { _id: DBInterface['_id'] }, modifier: MongoModifier, options?: UpdateOptions ): Promise { - return makePromise(() => { - return this.update(selector, modifier, options) - }) + const span = profiler.startSpan(`MongoCollection.${this.name}.update`) + if (span) { + span.addLabels({ + collection: this.name, + query: JSON.stringify(selector), + }) + } + try { + const res = await this._collection.updateAsync(selector as any, modifier as any, options) + if (span) span.end() + return res + } catch (e) { + if (span) span.end() + this.wrapMongoError(e) + } } - - async upsertAsync( + public async upsertAsync( selector: MongoQuery | DBInterface['_id'] | { _id: DBInterface['_id'] }, 
modifier: MongoModifier, options?: UpsertOptions - ): Promise<{ numberAffected?: number; insertedId?: DBInterface['_id'] }> { - return makePromise(() => { - return this.upsert(selector, modifier, options) - }) + ): Promise<{ + numberAffected?: number + insertedId?: DBInterface['_id'] + }> { + const span = profiler.startSpan(`MongoCollection.${this.name}.upsert`) + if (span) { + span.addLabels({ + collection: this.name, + query: JSON.stringify(selector), + }) + } + try { + const result = await this._collection.upsertAsync(selector as any, modifier as any, options) + if (span) span.end() + return { + numberAffected: result.numberAffected, + insertedId: protectString(result.insertedId), + } + } catch (e) { + if (span) span.end() + this.wrapMongoError(e) + } } async upsertManyAsync(docs: DBInterface[]): Promise<{ numberAffected: number; insertedIds: DBInterface['_id'][] }> { @@ -113,13 +308,15 @@ export class WrappedAsyncMongoCollection | DBInterface['_id']): Promise { - return makePromise(() => { - return this.remove(selector) - }) - } - async bulkWriteAsync(ops: Array>): Promise { + const span = profiler.startSpan(`MongoCollection.${this.name}.bulkWrite`) + if (span) { + span.addLabels({ + collection: this.name, + opCount: ops.length, + }) + } + if (ops.length > 0) { const rawCollection = this.rawCollection() const bulkWriteResult = await rawCollection.bulkWrite(ops, { @@ -131,15 +328,25 @@ export class WrappedAsyncMongoCollection, options?: FindOptions): Promise { - return makePromise(() => { - try { - return this._collection.find((selector ?? 
{}) as any, options as any).count() - } catch (e) { - this.wrapMongoError(e) - } - }) + createIndex(keys: IndexSpecifier | string, options?: NpmModuleMongodb.CreateIndexesOptions): void { + const span = profiler.startSpan(`MongoCollection.${this.name}.createIndex`) + if (span) { + span.addLabels({ + collection: this.name, + keys: JSON.stringify(keys), + }) + } + try { + const res = this._collection.createIndex(keys as any, options) + if (span) span.end() + return res + } catch (e) { + if (span) span.end() + this.wrapMongoError(e) + } } } diff --git a/meteor/server/collections/implementations/base.ts b/meteor/server/collections/implementations/base.ts deleted file mode 100644 index a6c731ba731..00000000000 --- a/meteor/server/collections/implementations/base.ts +++ /dev/null @@ -1,130 +0,0 @@ -import { MongoModifier, MongoQuery } from '@sofie-automation/corelib/dist/mongo' -import { ProtectedString, protectString } from '@sofie-automation/corelib/dist/protectedString' -import { Meteor } from 'meteor/meteor' -import { Mongo } from 'meteor/mongo' -import { - UpdateOptions, - UpsertOptions, - FindOptions, - IndexSpecifier, - MongoCursor, -} from '@sofie-automation/meteor-lib/dist/collections/lib' -import type { Collection as RawCollection, Db as RawDb } from 'mongodb' -import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { PromisifyCallbacks } from '@sofie-automation/shared-lib/dist/lib/types' -import { waitForPromise } from '../../lib/lib' -import { NpmModuleMongodb } from 'meteor/npm-mongo' - -export class WrappedMongoCollectionBase }> { - protected readonly _collection: Mongo.Collection - - public readonly name: string | null - - constructor(collection: Mongo.Collection, name: string | null) { - this._collection = collection - this.name = name - } - - protected get _isMock(): boolean { - // @ts-expect-error re-export private property - return this._collection._isMock - } - - public get mockCollection(): Mongo.Collection { - 
return this._collection - } - - protected wrapMongoError(e: unknown): never { - const str = stringifyError(e) || 'Unknown MongoDB Error' - throw new Meteor.Error(e instanceof Meteor.Error ? e.error : 500, `Collection "${this.name}": ${str}`) - } - - rawCollection(): RawCollection { - return this._collection.rawCollection() as any - } - rawDatabase(): RawDb { - return this._collection.rawDatabase() as any - } - - protected find( - selector?: MongoQuery | DBInterface['_id'], - options?: FindOptions - ): MongoCursor { - try { - return this._collection.find((selector ?? {}) as any, options as any) as MongoCursor - } catch (e) { - this.wrapMongoError(e) - } - } - - protected insert(doc: DBInterface): DBInterface['_id'] { - try { - const resultId = this._collection.insert(doc as unknown as Mongo.OptionalId) - return protectString(resultId) - } catch (e) { - this.wrapMongoError(e) - } - } - - protected remove(selector: MongoQuery | DBInterface['_id']): number { - try { - return this._collection.remove(selector as any) - } catch (e) { - this.wrapMongoError(e) - } - } - protected update( - selector: MongoQuery | DBInterface['_id'] | { _id: DBInterface['_id'] }, - modifier: MongoModifier, - options?: UpdateOptions - ): number { - try { - return this._collection.update(selector as any, modifier as any, options) - } catch (e) { - this.wrapMongoError(e) - } - } - protected upsert( - selector: MongoQuery | DBInterface['_id'] | { _id: DBInterface['_id'] }, - modifier: MongoModifier, - options?: UpsertOptions - ): { - numberAffected?: number - insertedId?: DBInterface['_id'] - } { - try { - const result = this._collection.upsert(selector as any, modifier as any, options) - return { - numberAffected: result.numberAffected, - insertedId: protectString(result.insertedId), - } - } catch (e) { - this.wrapMongoError(e) - } - } - - _ensureIndex(keys: IndexSpecifier | string, options?: NpmModuleMongodb.CreateIndexesOptions): void { - try { - return this._collection._ensureIndex(keys as 
any, options) - } catch (e) { - this.wrapMongoError(e) - } - } -} - -export function dePromiseObjectOfFunctions(input: PromisifyCallbacks): T { - return Object.fromEntries( - Object.entries(input).map(([id, fn]) => { - const fn2 = (...args: any[]) => { - try { - return waitForPromise(fn(...args)) - } catch (e) { - console.trace(e) - throw e - } - } - - return [id, fn2] - }) - ) as any -} diff --git a/meteor/server/collections/implementations/mock.ts b/meteor/server/collections/implementations/mock.ts index d8f0b6abbef..af5536967a9 100644 --- a/meteor/server/collections/implementations/mock.ts +++ b/meteor/server/collections/implementations/mock.ts @@ -1,136 +1,43 @@ -import { MongoModifier, MongoQuery } from '@sofie-automation/corelib/dist/mongo' +import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' import { Meteor } from 'meteor/meteor' -import { Mongo } from 'meteor/mongo' -import { - UpdateOptions, - UpsertOptions, - FindOptions, - MongoCursor, - ObserveChangesCallbacks, - ObserveCallbacks, -} from '@sofie-automation/meteor-lib/dist/collections/lib' -import { PromisifyCallbacks } from '@sofie-automation/shared-lib/dist/lib/types' -import type { AnyBulkWriteOperation } from 'mongodb' +import { FindOptions, MongoCursor } from '@sofie-automation/meteor-lib/dist/collections/lib' +import type { AnyBulkWriteOperation, Db as RawDb } from 'mongodb' import { AsyncOnlyMongoCollection } from '../collection' -import { WrappedMongoCollectionBase, dePromiseObjectOfFunctions } from './base' +import { WrappedAsyncMongoCollection } from './asyncCollection' +import { Mongo } from 'meteor/mongo' /** This is for the mock mongo collection, as internally it is sync and so we dont need or want to play around with fibers */ export class WrappedMockCollection }> - extends WrappedMongoCollectionBase + extends WrappedAsyncMongoCollection implements AsyncOnlyMongoCollection { - private readonly 
realSleep: (time: number) => Promise - constructor(collection: Mongo.Collection, name: string | null) { super(collection, name) if (!this._isMock) throw new Meteor.Error(500, 'WrappedMockCollection is only valid for a mock collection') - - const realSleep = (Meteor as any).sleepNoFakeTimers - if (!realSleep) throw new Error('Missing Meteor.sleepNoFakeTimers, looks like the mock is broken?') - this.realSleep = realSleep } get mutableCollection(): AsyncOnlyMongoCollection { return this } - async findFetchAsync( - selector: MongoQuery | DBInterface['_id'], - options?: FindOptions - ): Promise> { - await this.realSleep(0) - return this.find(selector as any, options).fetch() - } - - async findOneAsync( - selector: MongoQuery | DBInterface['_id'], - options?: FindOptions - ): Promise { - const arr = await this.findFetchAsync(selector, { ...options, limit: 1 }) - return arr[0] + protected override rawDatabase(): RawDb { + throw new Error('rawDatabase not supported in tests') } /** * Retrieve a cursor for use in a publication * @param selector A query describing the documents to find */ - async findWithCursor( + override async findWithCursor( _selector?: MongoQuery | DBInterface['_id'], _options?: FindOptions ): Promise> { throw new Error('findWithCursor not supported in tests') } - async observeChanges( - selector: MongoQuery | DBInterface['_id'], - callbacks: PromisifyCallbacks>, - options?: FindOptions - ): Promise { - return this.find(selector, options).observeChanges(dePromiseObjectOfFunctions(callbacks)) - } - - async observe( - selector: MongoQuery | DBInterface['_id'], - callbacks: PromisifyCallbacks>, - options?: FindOptions - ): Promise { - return this.find(selector, options).observe(dePromiseObjectOfFunctions(callbacks)) - } - - async insertAsync(doc: DBInterface): Promise { - await this.realSleep(0) - return this.insert(doc) - } - - async insertManyAsync(docs: DBInterface[]): Promise> { - await this.realSleep(0) - return Promise.all(docs.map((doc) => 
this.insert(doc))) - } - - async updateAsync( - selector: MongoQuery | DBInterface['_id'] | { _id: DBInterface['_id'] }, - modifier: MongoModifier, - options?: UpdateOptions - ): Promise { - await this.realSleep(0) - return this.update(selector, modifier, options) - } - - async upsertAsync( - selector: MongoQuery | DBInterface['_id'] | { _id: DBInterface['_id'] }, - modifier: MongoModifier, - options?: UpsertOptions - ): Promise<{ numberAffected?: number; insertedId?: DBInterface['_id'] }> { - await this.realSleep(0) - return this.upsert(selector, modifier, options) - } - - async upsertManyAsync(docs: DBInterface[]): Promise<{ numberAffected: number; insertedIds: DBInterface['_id'][] }> { - const result: { - numberAffected: number - insertedIds: DBInterface['_id'][] - } = { - numberAffected: 0, - insertedIds: [], - } - await Promise.all( - docs.map(async (doc) => { - const r = this.upsert(doc._id, { $set: doc }) - if (r.numberAffected) result.numberAffected += r.numberAffected - if (r.insertedId) result.insertedIds.push(r.insertedId) - }) - ) - return result - } - - async removeAsync(selector: MongoQuery | DBInterface['_id']): Promise { - await this.realSleep(0) - return this.remove(selector) - } - - async bulkWriteAsync(ops: Array>): Promise { + override async bulkWriteAsync(ops: Array>): Promise { if (ops.length > 0) { const rawCollection = this.rawCollection() const bulkWriteResult = await rawCollection.bulkWrite(ops, { @@ -144,13 +51,4 @@ export class WrappedMockCollection, options?: FindOptions): Promise { - await this.realSleep(0) - try { - return this._collection.find((selector ?? 
{}) as any, options as any).count() - } catch (e) { - this.wrapMongoError(e) - } - } } diff --git a/meteor/server/collections/implementations/readonlyWrapper.ts b/meteor/server/collections/implementations/readonlyWrapper.ts index d2e0cd89494..a4147afbd57 100644 --- a/meteor/server/collections/implementations/readonlyWrapper.ts +++ b/meteor/server/collections/implementations/readonlyWrapper.ts @@ -1,13 +1,13 @@ import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' import { Meteor } from 'meteor/meteor' -import { MongoCursor } from '@sofie-automation/meteor-lib/dist/collections/lib' import type { Collection } from 'mongodb' -import { AsyncOnlyMongoCollection, AsyncOnlyReadOnlyMongoCollection } from '../collection' +import type { AsyncOnlyMongoCollection, AsyncOnlyReadOnlyMongoCollection } from '../collection' +import type { MinimalMongoCursor } from './asyncCollection' export class WrappedReadOnlyMongoCollection }> implements AsyncOnlyReadOnlyMongoCollection { - #mutableCollection: AsyncOnlyMongoCollection + readonly #mutableCollection: AsyncOnlyMongoCollection constructor(collection: AsyncOnlyMongoCollection) { this.#mutableCollection = collection @@ -49,7 +49,7 @@ export class WrappedReadOnlyMongoCollection['findWithCursor']> - ): Promise> { + ): Promise> { return this.#mutableCollection.findWithCursor(...args) } @@ -71,7 +71,7 @@ export class WrappedReadOnlyMongoCollection['_ensureIndex']>): void { - return this.#mutableCollection._ensureIndex(...args) + createIndex(...args: Parameters['createIndex']>): void { + return this.#mutableCollection.createIndex(...args) } } diff --git a/meteor/server/collections/index.ts b/meteor/server/collections/index.ts index 5aead39e278..303096db7b3 100644 --- a/meteor/server/collections/index.ts +++ b/meteor/server/collections/index.ts @@ -28,7 +28,6 @@ import { DBUser } from '@sofie-automation/meteor-lib/dist/collections/Users' import { WorkerStatus } from 
'@sofie-automation/meteor-lib/dist/collections/Workers' import { registerIndex } from './indices' import { getCurrentTime } from '../lib/lib' -import { MeteorStartupAsync } from '../lib/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { createAsyncOnlyMongoCollection, @@ -320,7 +319,7 @@ const removeOldCommands = () => { logger.error(`Failed to cleanup old PeripheralDeviceCommands: ${stringifyError(e)}`) }) } -MeteorStartupAsync(async () => { +Meteor.startup(async () => { Meteor.setInterval(() => removeOldCommands(), 5 * 60 * 1000) await Promise.allSettled([ diff --git a/meteor/server/coreSystem/index.ts b/meteor/server/coreSystem/index.ts index fa1bb84d467..95f4b740800 100644 --- a/meteor/server/coreSystem/index.ts +++ b/meteor/server/coreSystem/index.ts @@ -1,6 +1,5 @@ import { SYSTEM_ID, GENESIS_SYSTEM_VERSION } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' import { parseVersion } from '../systemStatus/semverUtils' -import { MeteorStartupAsync } from '../lib/lib' import { getCurrentTime } from '../lib/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { Meteor } from 'meteor/meteor' @@ -9,7 +8,7 @@ import { CURRENT_SYSTEM_VERSION } from '../migration/currentSystemVersion' import { Blueprints, CoreSystem } from '../collections' import { getEnvLogLevel, logger, LogLevel, setLogLevel } from '../logging' const PackageInfo = require('../../package.json') -import Agent from 'meteor/julusian:meteor-elastic-apm' +import { startAgent } from '../api/profiler/apm' import { profiler } from '../api/profiler' import { TMP_TSR_VERSION } from '@sofie-automation/blueprints-integration' import { getAbsolutePath } from '../lib' @@ -146,7 +145,6 @@ async function startupMessage() { logger.info(`Core starting up`) logger.info(`Core system version: "${CURRENT_SYSTEM_VERSION}"`) - // @ts-expect-error Its not always defined if (global.gc) { logger.info(`Manual 
garbage-collection is enabled`) } else { @@ -173,22 +171,20 @@ async function startInstrumenting() { if (APM_HOST && system && system.apm) { logger.info(`APM agent starting up`) - Agent.start({ + startAgent({ serviceName: KIBANA_INDEX || 'tv-automation-server-core', hostname: APP_HOST, serverUrl: APM_HOST, secretToken: APM_SECRET, active: system.apm.enabled, transactionSampleRate: system.apm.transactionSampleRate, - disableMeteorInstrumentations: ['methods', 'http-out', 'session', 'async', 'metrics'], }) profiler.setActive(system.apm.enabled || false) } else { logger.info(`APM agent inactive`) - Agent.start({ + startAgent({ serviceName: 'tv-automation-server-core', active: false, - disableMeteorInstrumentations: ['methods', 'http-out', 'session', 'async', 'metrics'], }) } } @@ -203,7 +199,7 @@ async function updateLoggerLevel(startup: boolean) { } } -MeteorStartupAsync(async () => { +Meteor.startup(async () => { if (Meteor.isServer) { await startupMessage() await updateLoggerLevel(true) diff --git a/meteor/server/lib.ts b/meteor/server/lib.ts index 8b337fc8c73..a58d10c5347 100644 --- a/meteor/server/lib.ts +++ b/meteor/server/lib.ts @@ -4,12 +4,11 @@ import fs from 'fs' import path from 'path' import { logger } from './logging' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { Meteor } from 'meteor/meteor' /** Returns absolute path to programs/server directory of your compiled application, without trailing slash. 
*/ export function getAbsolutePath(): string { - // @ts-expect-error Meteor.absolutePath is injected by the package ostrio:meteor-root - return Meteor.absolutePath + const rootPath = path.resolve('.') + return rootPath.split(`${path.sep}.meteor`)[0] } export function extractFunctionSignature(f: Function): string[] | undefined { if (f) { diff --git a/meteor/server/lib/__tests__/lib.test.ts b/meteor/server/lib/__tests__/lib.test.ts index 0975875449c..61b68f5957d 100644 --- a/meteor/server/lib/__tests__/lib.test.ts +++ b/meteor/server/lib/__tests__/lib.test.ts @@ -8,14 +8,11 @@ import { serializeTimelineBlob, } from '@sofie-automation/corelib/dist/dataModel/Timeline' import { protectString } from '../tempLib' -import { testInFiber } from '../../../__mocks__/helpers/jest' import { Timeline } from '../../collections' import { SaveIntoDbHooks, saveIntoDb, sumChanges, anythingChanged } from '../database' -import { makePromise } from '../lib' -import { Meteor } from 'meteor/meteor' describe('server/lib', () => { - testInFiber('saveIntoDb', async () => { + test('saveIntoDb', async () => { const mystudioObjs: Array = [ { id: 'abc', @@ -164,7 +161,7 @@ describe('server/lib', () => { removed: 3, }) }) - testInFiber('anythingChanged', () => { + test('anythingChanged', () => { expect( anythingChanged({ added: 0, @@ -194,38 +191,4 @@ describe('server/lib', () => { }) ).toBeTruthy() }) - - testInFiber('makePromise', async () => { - let a = 0 - // Check that they are executed in order: - expect( - await Promise.all([ - makePromise(() => { - return a++ - }), - makePromise(() => { - return a++ - }), - ]) - ).toStrictEqual([0, 1]) - - // Handle an instant throw: - await expect( - makePromise(() => { - throw new Error('asdf') - }) - ).rejects.toMatchToString(/asdf/) - - // Handle a delayed throw: - const delayedThrow = Meteor.wrapAsync((callback: (err: any, result: any) => void) => { - setTimeout(() => { - callback(new Error('asdf'), null) - }, 10) - }) - await expect( - makePromise(() 
=> { - delayedThrow() - }) - ).rejects.toMatchToString(/asdf/) - }) }) diff --git a/meteor/server/lib/lib.ts b/meteor/server/lib/lib.ts index dee6d8aa8a3..5e4a955ba20 100644 --- a/meteor/server/lib/lib.ts +++ b/meteor/server/lib/lib.ts @@ -23,39 +23,6 @@ export function fixValidPath(path: string): string { return path.replace(/([^a-z0-9_.@()-])/gi, '_') } -/** - * Make Meteor.wrapAsync a bit more type safe - * The original version makes the callback be after the last non-undefined parameter, rather than after or replacing the last parameter. - * Which makes it incredibly hard to find without iterating over all the parameters. This does that for you, so you dont need to check as many places - */ -export function MeteorWrapAsync(func: Function, context?: Object): any { - // A variant of Meteor.wrapAsync to fix the bug - // https://github.com/meteor/meteor/issues/11120 - - return Meteor.wrapAsync((...args: any[]) => { - // Find the callback-function: - for (let i = args.length - 1; i >= 0; i--) { - if (typeof args[i] === 'function') { - if (i < args.length - 1) { - // The callback is not the last argument, make it so then: - const callback = args[i] - const fixedArgs = args - fixedArgs[i] = undefined - fixedArgs.push(callback) - - func.apply(context, fixedArgs) - return - } else { - // The callback is the last argument, that's okay - func.apply(context, args) - return - } - } - } - throw new Meteor.Error(500, `Error in MeteorWrapAsync: No callback found!`) - }) -} - const lazyIgnoreCache: { [name: string]: number } = {} export function lazyIgnore(name: string, f1: () => void, t: number): void { // Don't execute the function f1 until the time t has passed. 
@@ -75,60 +42,6 @@ export function lazyIgnore(name: string, f1: () => void, t: number): void { }, t) } -/** - * Make Meteor.startup support async functions - */ -export function MeteorStartupAsync(fcn: () => Promise): void { - Meteor.startup(() => waitForPromise(fcn())) -} - -/** - * Convert a promise to a "synchronous" Fiber function - * Makes the Fiber wait for the promise to resolve, then return the value of the promise. - * If the fiber rejects, the function in the Fiber will "throw" - */ -export const waitForPromise: (p: Promise | T) => Awaited = Meteor.wrapAsync(function waitForPromise( - p: Promise | T, - cb: (err: any | null, result?: any) => Awaited -) { - if (Meteor.isClient) throw new Meteor.Error(500, `waitForPromise can't be used client-side`) - if (cb === undefined && typeof p === 'function') { - cb = p as any - p = undefined as any - } - - Promise.resolve(p) - .then((result) => { - cb(null, result) - }) - .catch((e) => { - cb(e) - }) -}) as (p: Promise | T) => Awaited // `wrapAsync` has opaque `Function` type -/** - * Convert a Fiber function into a promise - * Makes the Fiber function to run in its own fiber and return a promise - */ -export async function makePromise(fcn: () => T): Promise { - const p = new Promise((resolve, reject) => { - Meteor.defer(() => { - try { - resolve(fcn()) - } catch (e) { - reject(e) - } - }) - }) - - return ( - await Promise.all([ - p, - // Pause the current Fiber briefly, in order to allow for the deferred Fiber to start executing: - sleep(0), - ]) - )[0] -} - export function deferAsync(fcn: () => Promise): void { Meteor.defer(() => { fcn().catch((e) => logger.error(stringifyError(e))) diff --git a/meteor/server/methods.ts b/meteor/server/methods.ts index 865928cb85a..49bee70b1af 100644 --- a/meteor/server/methods.ts +++ b/meteor/server/methods.ts @@ -3,7 +3,6 @@ import * as _ from 'underscore' import { logger } from './logging' import { extractFunctionSignature } from './lib' import { MethodContext, MethodContextAPI 
} from './api/methodContext' -import { waitForPromise } from './lib/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { Settings } from './Settings' import { isPromise } from '@sofie-automation/shared-lib/dist/lib/lib' @@ -155,7 +154,7 @@ export function MeteorDebugMethods(methods: { [key: string]: MeteorDebugMethod } for (const [key, fn] of Object.entries(methods)) { if (key && !!fn) { fiberMethods[key] = function (this: Meteor.MethodThisType, ...args: any[]) { - return waitForPromise(fn.call(this, ...args)) + return fn.call(this, ...args) } } } diff --git a/meteor/server/migration/__tests__/migrations.test.ts b/meteor/server/migration/__tests__/migrations.test.ts index f4ec9ac82c7..276eed40541 100644 --- a/meteor/server/migration/__tests__/migrations.test.ts +++ b/meteor/server/migration/__tests__/migrations.test.ts @@ -1,6 +1,5 @@ import * as _ from 'underscore' import { setupEmptyEnvironment } from '../../../__mocks__/helpers/database' -import { testInFiber } from '../../../__mocks__/helpers/jest' import { ICoreSystem, GENESIS_SYSTEM_VERSION } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' import { clearMigrationSteps, addMigrationSteps, prepareMigration, PreparedMigration } from '../databaseMigration' import { CURRENT_SYSTEM_VERSION } from '../currentSystemVersion' @@ -63,7 +62,7 @@ describe('Migrations', () => { }) ) } - testInFiber('System migrations, initial setup', async () => { + test('System migrations, initial setup', async () => { expect((await getSystem()).version).toEqual(GENESIS_SYSTEM_VERSION) const migrationStatus0: GetMigrationStatusResult = await MeteorCall.migration.getMigrationStatus() @@ -101,7 +100,7 @@ describe('Migrations', () => { expect((await getSystem()).version).toEqual(CURRENT_SYSTEM_VERSION) }) - testInFiber('Ensure migrations run in correct order', async () => { + test('Ensure migrations run in correct order', async () => { await 
MeteorCall.migration.resetDatabaseVersions() expect((await getSystem()).version).toEqual(GENESIS_SYSTEM_VERSION) diff --git a/meteor/server/migration/databaseMigration.ts b/meteor/server/migration/databaseMigration.ts index b01bdfe5fe5..42b0d76b1e1 100644 --- a/meteor/server/migration/databaseMigration.ts +++ b/meteor/server/migration/databaseMigration.ts @@ -43,11 +43,7 @@ import { GENESIS_SYSTEM_VERSION } from '@sofie-automation/meteor-lib/dist/collec import { clone, getHash, omit, protectString, unprotectString } from '../lib/tempLib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { evalBlueprint } from '../api/blueprints/cache' -import { - MigrationContextShowStyle, - MigrationContextStudio, - MigrationContextSystem, -} from '../api/blueprints/migrationContext' +import { MigrationContextSystem } from '../api/blueprints/migrationContext' import { CURRENT_SYSTEM_VERSION } from './currentSystemVersion' import { SnapshotId, ShowStyleBaseId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { Blueprints, CoreSystem, ShowStyleBases, Studios } from '../collections' @@ -387,7 +383,7 @@ export async function prepareMigration(returnAllChunks?: boolean): Promise { if (chunk.sourceType !== MigrationStepType.SHOWSTYLE) @@ -820,5 +817,6 @@ async function getMigrationShowStyleContext(chunk: MigrationChunk): Promise { } describe('validateConfigForShowStyleBase', () => { - testInFiber('Missing id', async () => { + test('Missing id', async () => { await expect(validateConfigForShowStyleBase(protectString('fakeId'))).rejects.toThrowMeteor( 404, `ShowStyleBase "fakeId" not found!` ) }) - testInFiber('Missing config preset', async () => { + test('Missing config preset', async () => { const blueprint = await setupMockShowStyleBlueprint(protectString('')) const showStyleBase = await setupMockShowStyleBase(blueprint._id) @@ -83,7 +82,7 @@ describe('ShowStyleBase upgrades', () => { ) }) - testInFiber('Missing blueprint', 
async () => { + test('Missing blueprint', async () => { const showStyleBase = await setupMockShowStyleBase(protectString('fakeId'), { blueprintConfigPresetId: 'fake-preset', }) @@ -94,7 +93,7 @@ describe('ShowStyleBase upgrades', () => { ) }) - testInFiber('Unsupported blueprint', async () => { + test('Unsupported blueprint', async () => { const blueprint = await setupMockShowStyleBlueprint(protectString('')) const showStyleBase = await setupMockShowStyleBase(blueprint._id, { blueprintConfigPresetId: 'fake-preset', @@ -106,7 +105,7 @@ describe('ShowStyleBase upgrades', () => { ) }) - testInFiber('Success: no messages', async () => { + test('Success: no messages', async () => { const blueprint = await setupMockShowStyleBlueprint(protectString('')) const showStyleBase = await setupMockShowStyleBase(blueprint._id, { blueprintConfigPresetId: 'fake-preset', @@ -123,7 +122,7 @@ describe('ShowStyleBase upgrades', () => { expect(result.messages).toHaveLength(0) }) - testInFiber('Success: some messages', async () => { + test('Success: some messages', async () => { const blueprint = await setupMockShowStyleBlueprint(protectString('')) const showStyleBase = await setupMockShowStyleBase(blueprint._id, { blueprintConfigPresetId: 'fake-preset', @@ -167,14 +166,14 @@ describe('ShowStyleBase upgrades', () => { }) describe('runUpgradeForShowStyleBase', () => { - testInFiber('Missing id', async () => { + test('Missing id', async () => { await expect(runUpgradeForShowStyleBase(protectString('fakeId'))).rejects.toThrowMeteor( 404, `ShowStyleBase "fakeId" not found!` ) }) - testInFiber('Missing config preset', async () => { + test('Missing config preset', async () => { const blueprint = await setupMockShowStyleBlueprint(protectString('')) const showStyleBase = await setupMockShowStyleBase(blueprint._id) @@ -184,7 +183,7 @@ describe('ShowStyleBase upgrades', () => { ) }) - testInFiber('Missing blueprint', async () => { + test('Missing blueprint', async () => { const showStyleBase = 
await setupMockShowStyleBase(protectString('fakeId'), { blueprintConfigPresetId: 'fake-preset', }) @@ -195,7 +194,7 @@ describe('ShowStyleBase upgrades', () => { ) }) - testInFiber('Unsupported blueprint', async () => { + test('Unsupported blueprint', async () => { const blueprint = await setupMockShowStyleBlueprint(protectString('')) const showStyleBase = await setupMockShowStyleBase(blueprint._id, { blueprintConfigPresetId: 'fake-preset', @@ -207,7 +206,7 @@ describe('ShowStyleBase upgrades', () => { ) }) - testInFiber('Success', async () => { + test('Success', async () => { const blueprint = await setupMockShowStyleBlueprint(protectString('')) const showStyleBase = clone( await setupMockShowStyleBase(blueprint._id, { diff --git a/meteor/server/performanceMonitor.ts b/meteor/server/performanceMonitor.ts index ea875c300f5..50ad29cafa5 100644 --- a/meteor/server/performanceMonitor.ts +++ b/meteor/server/performanceMonitor.ts @@ -1,6 +1,5 @@ import { Meteor } from 'meteor/meteor' import * as _ from 'underscore' -import { MeteorStartupAsync } from './lib/lib' import { getCoreSystemAsync } from './coreSystem/collection' import { logger } from './logging' import { getRunningMethods, resetRunningMethods } from './methods' @@ -198,7 +197,7 @@ const monitorBlockedThread = () => { }, PERMORMANCE_CHECK_INTERVAL) } -MeteorStartupAsync(async () => { +Meteor.startup(async () => { const coreSystem = await getCoreSystemAsync() if (coreSystem?.enableMonitorBlockedThread) { Meteor.setTimeout(() => { diff --git a/meteor/server/publications/lib/ReactiveCacheCollection.ts b/meteor/server/publications/lib/ReactiveCacheCollection.ts index e78a3e18275..ab905492127 100644 --- a/meteor/server/publications/lib/ReactiveCacheCollection.ts +++ b/meteor/server/publications/lib/ReactiveCacheCollection.ts @@ -7,6 +7,10 @@ import { MongoModifier, MongoQuery } from '@sofie-automation/corelib/dist/mongo' type Reaction = () => void export class ReactiveCacheCollection }> { + /** + * The collection 
still works in sync mode when operating on `null` in Meteor 3.0 + * It may break in a later update, but this is fine for now. + */ readonly #collection: Mongo.Collection constructor(public collectionName: string, private reaction?: Reaction) { diff --git a/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts b/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts index d07d69cf515..ffeb44577b2 100644 --- a/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts +++ b/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts @@ -2,7 +2,7 @@ import { RundownId, RundownPlaylistId, StudioId } from '@sofie-automation/coreli import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { Rundowns } from '../../../collections' -import { runAllTimers, runTimersUntilNow, testInFiber, waitUntil } from '../../../../__mocks__/helpers/jest' +import { runAllTimers, runTimersUntilNow, waitUntil } from '../../../../__mocks__/helpers/jest' import { MongoMock } from '../../../../__mocks__/mongo' import { RundownsObserver } from '../rundownsObserver' @@ -15,7 +15,7 @@ describe('RundownsObserver', () => { jest.useFakeTimers() }) - testInFiber('create and destroy observer', async () => { + test('create and destroy observer', async () => { const studioId = protectString('studio0') const playlistId = protectString('playlist0') @@ -68,7 +68,7 @@ describe('RundownsObserver', () => { } }) - testInFiber('add a document', async () => { + test('add a document', async () => { const studioId = protectString('studio0') const playlistId = protectString('playlist0') @@ -122,7 +122,7 @@ describe('RundownsObserver', () => { } }) - testInFiber('change a document', async () => { + test('change a document', async () => { const studioId = protectString('studio0') const playlistId = protectString('playlist0') @@ -176,7 +176,7 @@ describe('RundownsObserver', () => { 
} }) - testInFiber('sequence of updates', async () => { + test('sequence of updates', async () => { const studioId = protectString('studio0') const playlistId = protectString('playlist0') diff --git a/meteor/server/publications/lib/lib.ts b/meteor/server/publications/lib/lib.ts index 993fd4e1996..57f953c0316 100644 --- a/meteor/server/publications/lib/lib.ts +++ b/meteor/server/publications/lib/lib.ts @@ -5,7 +5,6 @@ import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { ResolvedCredentials, resolveCredentials } from '../../security/lib/credentials' import { Settings } from '../../Settings' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { MongoCursor } from '@sofie-automation/meteor-lib/dist/collections/lib' import { OrganizationId, PeripheralDeviceId, @@ -16,6 +15,7 @@ import { protectStringObject } from '../../lib/tempLib' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { PeripheralDevices, ShowStyleBases } from '../../collections' import { MetricsGauge } from '@sofie-automation/corelib/dist/prometheus' +import { MinimalMongoCursor } from '../../collections/implementations/asyncCollection' export const MeteorPublicationSignatures: { [key: string]: string[] } = {} export const MeteorPublications: { [key: string]: Function } = {} @@ -75,7 +75,7 @@ export function meteorPublish( callback: ( this: SubscriptionContext, ...args: Parameters - ) => Promise> | null> + ) => Promise> | null> ): void { meteorPublishUnsafe(name, callback) } diff --git a/meteor/server/publications/lib/observerChain.ts b/meteor/server/publications/lib/observerChain.ts index abbbd49467d..76d73ed7b24 100644 --- a/meteor/server/publications/lib/observerChain.ts +++ b/meteor/server/publications/lib/observerChain.ts @@ -1,10 +1,10 @@ import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' import { Meteor } from 'meteor/meteor' -import { MongoCursor } from 
'@sofie-automation/meteor-lib/dist/collections/lib' import { Simplify } from 'type-fest' import { assertNever } from '../../lib/tempLib' import { logger } from '../../logging' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { MinimalMongoCursor } from '../../collections/implementations/asyncCollection' /** * https://stackoverflow.com/a/66011942 @@ -19,7 +19,7 @@ type Not = Yes extends Not ? never : Yes type Link = { next: }>( key: Not, - cursorChain: (state: T) => Promise | null> + cursorChain: (state: T) => Promise | null> ) => Link]: K }>> end: (complete: (state: T | null) => void) => Meteor.LiveQueryHandle @@ -28,7 +28,7 @@ type Link = { export function observerChain(): Pick, 'next'> { function createNextLink(baseCollectorObject: Record, liveQueryHandle: Meteor.LiveQueryHandle) { let mode: 'next' | 'end' | undefined - let chainedCursor: (state: Record) => Promise | null> + let chainedCursor: (state: Record) => Promise | null> let completeFunction: (state: Record | null) => void let chainedKey: string | undefined = undefined let previousObserver: Meteor.LiveQueryHandle | null = null diff --git a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts index 8dbb0916751..4e3604807d4 100644 --- a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts +++ b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts @@ -33,7 +33,6 @@ import { MediaStreamType, } from '@sofie-automation/shared-lib/dist/core/model/MediaObjects' import { defaultStudio } from '../../../../__mocks__/defaultCollectionObjects' -import { testInFiber } from '../../../../__mocks__/helpers/jest' import { MediaObjects } from '../../../collections' import { PieceDependencies } from '../common' import { DEFAULT_MINIMUM_TAKE_SPAN } from 
'@sofie-automation/shared-lib/dist/core/constants' @@ -245,7 +244,7 @@ describe('lib/mediaObjects', () => { expect(mediaId3).toEqual(undefined) }) - testInFiber('checkPieceContentStatus', async () => { + test('checkPieceContentStatus', async () => { const mockStudioSettings: IStudioSettings = { supportedMediaFormats: '1920x1080i5000, 1280x720, i5000, i5000tff', mediaPreviewsUrl: '', @@ -265,7 +264,7 @@ describe('lib/mediaObjects', () => { packageContainers: applyAndValidateOverrides(mockDefaultStudio.packageContainersWithOverrides).obj, } - mockMediaObjectsCollection.insert( + await mockMediaObjectsCollection.insertAsync( literal({ _id: protectString(''), _attachments: {}, @@ -352,7 +351,7 @@ describe('lib/mediaObjects', () => { type: SourceLayerType.LIVE_SPEAK, }) - mockMediaObjectsCollection.insert( + await mockMediaObjectsCollection.insertAsync( literal({ _id: protectString(''), _attachments: {}, diff --git a/meteor/server/publications/segmentPartNotesUI/__tests__/publication.test.ts b/meteor/server/publications/segmentPartNotesUI/__tests__/publication.test.ts index a7f9023aaeb..f232e383710 100644 --- a/meteor/server/publications/segmentPartNotesUI/__tests__/publication.test.ts +++ b/meteor/server/publications/segmentPartNotesUI/__tests__/publication.test.ts @@ -1,7 +1,6 @@ import { RundownId, RundownPlaylistId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ProtectedString, protectString } from '@sofie-automation/corelib/dist/protectedString' import { UISegmentPartNote } from '@sofie-automation/meteor-lib/dist/api/rundownNotifications' -import { testInFiber } from '../../../../__mocks__/helpers/jest' import { CustomPublishCollection } from '../../../lib/customPublication' import { ReactiveCacheCollection } from '../../lib/ReactiveCacheCollection' import { manipulateUISegmentPartNotesPublicationData, UISegmentPartNotesState } from '../publication' @@ -120,7 +119,7 @@ describe('manipulateUISegmentPartNotesPublicationData', () => { }, ] - 
testInFiber('basic call', async () => { + test('basic call', async () => { const state: Partial = {} const collection = createSpyPublishCollection() @@ -142,7 +141,7 @@ describe('manipulateUISegmentPartNotesPublicationData', () => { expect(collection.remove).toHaveBeenLastCalledWith(null) }) - testInFiber('first cache', async () => { + test('first cache', async () => { const playlistId = protectString('playlist0') const state: Partial = {} const collection = createSpyPublishCollection() @@ -167,7 +166,7 @@ describe('manipulateUISegmentPartNotesPublicationData', () => { expect(collection.replace).toHaveBeenCalledTimes(2) }) - testInFiber('replace cache', async () => { + test('replace cache', async () => { const playlistId = protectString('playlist0') const state: Partial = {} const collection = createSpyPublishCollection() @@ -212,7 +211,7 @@ describe('manipulateUISegmentPartNotesPublicationData', () => { expect(generateNotesForSegment.generateNotesForSegment).toHaveBeenCalledTimes(3) }) - testInFiber('update no reported changes', async () => { + test('update no reported changes', async () => { const playlistId = protectString('playlist0') const state: Partial = {} const collection = createSpyPublishCollection() @@ -249,7 +248,7 @@ describe('manipulateUISegmentPartNotesPublicationData', () => { expect(generateNotesForSegment.generateNotesForSegment).toHaveBeenCalledTimes(2) }) - testInFiber('rundown changed', async () => { + test('rundown changed', async () => { const playlistId = protectString('playlist0') const state: Partial = {} const collection = createSpyPublishCollection() @@ -287,7 +286,7 @@ describe('manipulateUISegmentPartNotesPublicationData', () => { expect(generateNotesForSegment.generateNotesForSegment).toHaveBeenCalledTimes(4) }) - testInFiber('segment changed', async () => { + test('segment changed', async () => { const playlistId = protectString('playlist0') const state: Partial = {} const collection = createSpyPublishCollection() @@ -366,7 +365,7 
@@ describe('manipulateUISegmentPartNotesPublicationData', () => { part: 'part' as any, }) - testInFiber('segment changed', async () => { + test('segment changed', async () => { const playlistId = protectString('playlist0') const state: Partial = {} const collection = createSpyPublishCollection() diff --git a/meteor/server/security/__tests__/security.test.ts b/meteor/server/security/__tests__/security.test.ts index ebecf11b988..595791d812f 100644 --- a/meteor/server/security/__tests__/security.test.ts +++ b/meteor/server/security/__tests__/security.test.ts @@ -6,7 +6,6 @@ import { User } from '@sofie-automation/meteor-lib/dist/collections/Users' import { protectString } from '../../lib/tempLib' import { Settings } from '../../Settings' import { DefaultEnvironment, setupDefaultStudioEnvironment } from '../../../__mocks__/helpers/database' -import { beforeAllInFiber, testInFiber } from '../../../__mocks__/helpers/jest' import { BucketsAPI } from '../../api/buckets' import { storeSystemSnapshot } from '../../api/snapshot' import { BucketSecurity } from '../buckets' @@ -128,7 +127,7 @@ describe('Security', () => { return expect(fcn()).resolves.not.toBeUndefined() } let env: DefaultEnvironment - beforeAllInFiber(async () => { + beforeAll(async () => { env = await setupDefaultStudioEnvironment(org0._id) await Organizations.insertAsync(org0) @@ -142,7 +141,8 @@ describe('Security', () => { await Users.insertAsync({ ...getUser(idSuperAdminInOtherOrg, org2._id), superAdmin: true }) }) - testInFiber('Buckets', async () => { + // eslint-disable-next-line jest/expect-expect + test('Buckets', async () => { const access = await StudioContentWriteAccess.bucket(creator, env.studio._id) const bucket = await BucketsAPI.createNewBucket(access, 'myBucket') @@ -187,12 +187,14 @@ describe('Security', () => { }) }) - testInFiber('NoSecurity', async () => { + // eslint-disable-next-line jest/expect-expect + test('NoSecurity', async () => { await changeEnableUserAccounts(async () => { 
await expectAllowed(async () => NoSecurityReadAccess.any()) }) }) - testInFiber('Organization', async () => { + // eslint-disable-next-line jest/expect-expect + test('Organization', async () => { const token = generateToken() const snapshotId = await storeSystemSnapshot(superAdmin, hashSingleUseToken(token), env.studio._id, 'for test') diff --git a/meteor/server/security/lib/security.ts b/meteor/server/security/lib/security.ts index 2406c6430e8..ed27ed18461 100644 --- a/meteor/server/security/lib/security.ts +++ b/meteor/server/security/lib/security.ts @@ -53,7 +53,7 @@ export async function allowAccessToCoreSystem(cred: ResolvedCredentials): Promis */ export async function allowAccessToCurrentUser( cred0: Credentials | ResolvedCredentials, - userId: UserId + userId: UserId | null ): Promise> { if (!Settings.enableUserAccounts) return allAccess(null, 'No security') if (!userId) return noAccess('userId missing') diff --git a/meteor/server/security/lib/securityVerify.ts b/meteor/server/security/lib/securityVerify.ts index 7124987417d..edde48cb35d 100644 --- a/meteor/server/security/lib/securityVerify.ts +++ b/meteor/server/security/lib/securityVerify.ts @@ -62,7 +62,7 @@ export async function verifyAllMethods(): Promise { // Verify all Meteor methods let ok = true for (const methodName of AllMeteorMethods) { - ok = ok && verifyMethod(methodName) + ok = ok && (await verifyMethod(methodName)) if (!ok) return false // Bail on first error @@ -70,7 +70,7 @@ export async function verifyAllMethods(): Promise { } return ok } -function verifyMethod(methodName: string) { +async function verifyMethod(methodName: string) { let ok = true suppressExtraErrorLogging(true) try { @@ -78,7 +78,7 @@ function verifyMethod(methodName: string) { testWriteAccess() // Pass some fake args, to ensure that any trying to do a `arg.val` don't throw const fakeArgs = [{}, {}, {}, {}, {}] - Meteor.call(methodName, ...fakeArgs) + await Meteor.callAsync(methodName, ...fakeArgs) } catch (e) { const 
errStr = stringifyError(e) if (errStr.match(/triggerWriteAccess/i)) { diff --git a/meteor/server/security/system.ts b/meteor/server/security/system.ts index 5163c362e6e..d7d13b760e8 100644 --- a/meteor/server/security/system.ts +++ b/meteor/server/security/system.ts @@ -48,7 +48,7 @@ export namespace SystemWriteAccess { return true } /** Check if access is allowed to modify a User, and that user is the current User */ - export async function currentUser(userId: UserId, cred: Credentials): Promise { + export async function currentUser(userId: UserId | null, cred: Credentials): Promise { const access = await allowAccessToCurrentUser(cred, userId) if (!access.update) return logNotAllowed('Current user', access.reason) diff --git a/meteor/server/systemStatus/__tests__/api.test.ts b/meteor/server/systemStatus/__tests__/api.test.ts index 66ade93a28e..690be9cf62b 100644 --- a/meteor/server/systemStatus/__tests__/api.test.ts +++ b/meteor/server/systemStatus/__tests__/api.test.ts @@ -38,7 +38,7 @@ describe('systemStatus API', () => { test('REST /health with state BAD', async () => { env = await setupDefaultStudioEnvironment() - MeteorMock.mockRunMeteorStartup() + await MeteorMock.mockRunMeteorStartup() await MeteorMock.sleepNoFakeTimers(200) // The system is uninitialized, the status will be BAD @@ -73,7 +73,7 @@ describe('systemStatus API', () => { test('REST /health with state GOOD', async () => { env = await setupDefaultStudioEnvironment() - MeteorMock.mockRunMeteorStartup() + await MeteorMock.mockRunMeteorStartup() await MeteorMock.sleepNoFakeTimers(200) // simulate initialized system diff --git a/meteor/server/systemStatus/__tests__/systemStatus.test.ts b/meteor/server/systemStatus/__tests__/systemStatus.test.ts index 6b62e100b76..bd3b77146cf 100644 --- a/meteor/server/systemStatus/__tests__/systemStatus.test.ts +++ b/meteor/server/systemStatus/__tests__/systemStatus.test.ts @@ -1,5 +1,4 @@ import '../../../__mocks__/_extendJest' -import { testInFiber } from 
'../../../__mocks__/helpers/jest' import { setupDefaultStudioEnvironment, DefaultEnvironment } from '../../../__mocks__/helpers/database' import { generateTranslation, literal, protectString, unprotectString } from '../../lib/tempLib' import { MeteorMock } from '../../../__mocks__/meteor' @@ -34,7 +33,7 @@ describe('systemStatus', () => { }) let env: DefaultEnvironment - testInFiber('getSystemStatus: before startup', async () => { + test('getSystemStatus: before startup', async () => { // Before starting the system up, the system status will be unknown const expectedStatus0 = StatusCode.UNKNOWN const result0: StatusResponse = await MeteorCall.systemStatus.getSystemStatus() @@ -44,9 +43,9 @@ describe('systemStatus', () => { }) expect(result0.checks).toHaveLength(0) }) - testInFiber('getSystemStatus: after startup', async () => { + test('getSystemStatus: after startup', async () => { env = await setupDefaultStudioEnvironment() - MeteorMock.mockRunMeteorStartup() + await MeteorMock.mockRunMeteorStartup() await MeteorMock.sleepNoFakeTimers(200) const result0: StatusResponse = await MeteorCall.systemStatus.getSystemStatus() @@ -64,7 +63,7 @@ describe('systemStatus', () => { status: status2ExternalStatus(StatusCode.BAD), }) }) - testInFiber('getSystemStatus: after all migrations completed', async () => { + test('getSystemStatus: after all migrations completed', async () => { // simulate migrations completed setSystemStatus('databaseVersion', { statusCode: StatusCode.GOOD, @@ -84,7 +83,7 @@ describe('systemStatus', () => { status: status2ExternalStatus(StatusCode.GOOD), }) }) - testInFiber('getSystemStatus: a component has a fault', async () => { + test('getSystemStatus: a component has a fault', async () => { // simulate device failure await PeripheralDevices.updateAsync(env.ingestDevice._id, { $set: { @@ -110,7 +109,7 @@ describe('systemStatus', () => { status: status2ExternalStatus(expectedStatus0), }) }) - testInFiber('getSystemStatus: a component has a library 
version mismatch', async () => { + test('getSystemStatus: a component has a library version mismatch', async () => { // simulate device failure await PeripheralDevices.updateAsync(env.ingestDevice._id, { $set: { @@ -238,7 +237,7 @@ describe('systemStatus', () => { }) }) - testInFiber('getSystemStatus: blueprint upgrades need running', async () => { + test('getSystemStatus: blueprint upgrades need running', async () => { { // Ensure we start with a status of GOOD const result: StatusResponse = await MeteorCall.systemStatus.getSystemStatus() diff --git a/meteor/server/typings/meteor-kschingiz-elastic-apm.d.ts b/meteor/server/typings/meteor-kschingiz-elastic-apm.d.ts deleted file mode 100644 index f5e6619843c..00000000000 --- a/meteor/server/typings/meteor-kschingiz-elastic-apm.d.ts +++ /dev/null @@ -1,355 +0,0 @@ -/** - * This is mostly copied from https://github.com/elastic/apm-agent-nodejs/blob/master/index.d.ts - * As they do not export any of the inner types, adding `disableMeteorInstrumentations` to `AgentConfigOptions` is not trivial without just copying it all - */ - -declare module 'meteor/julusian:meteor-elastic-apm' { - /// - - import { IncomingMessage, ServerResponse } from 'http' - - export = agent - - declare const agent: Agent - - declare class Agent implements Taggable, StartSpanFn { - // Configuration - start(options?: AgentConfigOptions): Agent - isStarted(): boolean - setFramework(options: { name?: string; version?: string; overwrite?: boolean }): void - addPatch(modules: string | Array, handler: string | PatchHandler): void - removePatch(modules: string | Array, handler: string | PatchHandler): void - clearPatches(modules: string | Array): void - - // Data collection hooks - middleware: { connect(): Connect.ErrorHandleFunction } - lambda(handler: AwsLambda.Handler): AwsLambda.Handler - lambda(type: string, handler: AwsLambda.Handler): AwsLambda.Handler - handleUncaughtExceptions(fn?: (err: Error) => void): void - - // Errors - captureError(err: 
Error | string | ParameterizedMessageObject, callback?: CaptureErrorCallback): void - captureError( - err: Error | string | ParameterizedMessageObject, - options?: CaptureErrorOptions, - callback?: CaptureErrorCallback - ): void - - // Distributed Tracing - currentTraceparent: string | null - currentTraceIds: { - 'trace.id'?: string - 'transaction.id'?: string - 'span.id'?: string - } - - // Transactions - startTransaction(name?: string | null, options?: TransactionOptions): Transaction | null - startTransaction(name: string | null, type: string | null, options?: TransactionOptions): Transaction | null - startTransaction( - name: string | null, - type: string | null, - subtype: string | null, - options?: TransactionOptions - ): Transaction | null - startTransaction( - name: string | null, - type: string | null, - subtype: string | null, - action: string | null, - options?: TransactionOptions - ): Transaction | null - setTransactionName(name: string): void - endTransaction(result?: string | number, endTime?: number): void - currentTransaction: Transaction | null - - // Spans - startSpan(name?: string | null, options?: SpanOptions): Span | null - startSpan(name: string | null, type: string | null, options?: SpanOptions): Span | null - startSpan(name: string | null, type: string | null, subtype: string | null, options?: SpanOptions): Span | null - startSpan( - name: string | null, - type: string | null, - subtype: string | null, - action: string | null, - options?: SpanOptions - ): Span | null - currentSpan: Span | null - - // Context - setLabel(name: string, value: LabelValue): boolean - addLabels(labels: Labels): boolean - setUserContext(user: UserObject): void - setCustomContext(custom: object): void - - // Transport - addFilter(fn: FilterFn): void - addErrorFilter(fn: FilterFn): void - addSpanFilter(fn: FilterFn): void - addTransactionFilter(fn: FilterFn): void - flush(callback?: Function): void - destroy(): void - - // Utils - logger: Logger - - // Custom metrics 
- registerMetric(name: string, callback: Function): void - registerMetric(name: string, labels: Labels, callback: Function): void - } - - declare class GenericSpan implements Taggable { - // The following properties and methods are currently not documented as their API isn't considered official: - // timestamp, ended, id, traceId, parentId, sampled, duration() - - type: string | null - subtype: string | null - action: string | null - traceparent: string - - setType(type?: string | null, subtype?: string | null, action?: string | null): void - setLabel(name: string, value: LabelValue): boolean - addLabels(labels: Labels): boolean - } - - declare class Transaction extends GenericSpan implements StartSpanFn { - // The following properties and methods are currently not documented as their API isn't considered official: - // setUserContext(), setCustomContext(), toJSON(), setDefaultName(), setDefaultNameFromRequest() - - name: string - result: string | number - - startSpan(name?: string | null, options?: SpanOptions): Span | null - startSpan(name: string | null, type: string | null, options?: SpanOptions): Span | null - startSpan(name: string | null, type: string | null, subtype: string | null, options?: SpanOptions): Span | null - startSpan( - name: string | null, - type: string | null, - subtype: string | null, - action: string | null, - options?: SpanOptions - ): Span | null - ensureParentId(): string - end(result?: string | number | null, endTime?: number): void - } - - declare class Span extends GenericSpan { - // The following properties and methods are currently not documented as their API isn't considered official: - // customStackTrace(), setDbContext() - - transaction: Transaction - name: string - - end(endTime?: number): void - } - - interface AgentConfigOptions { - disableMeteorInstrumentations?: string[] - abortedErrorThreshold?: string // Also support `number`, but as we're removing this functionality soon, there's no need to advertise it - active?: 
boolean - addPatch?: KeyValueConfig - apiRequestSize?: string // Also support `number`, but as we're removing this functionality soon, there's no need to advertise it - apiRequestTime?: string // Also support `number`, but as we're removing this functionality soon, there's no need to advertise it - asyncHooks?: boolean - captureBody?: CaptureBody - captureErrorLogStackTraces?: CaptureErrorLogStackTraces - captureExceptions?: boolean - captureHeaders?: boolean - captureSpanStackTraces?: boolean - containerId?: string - disableInstrumentations?: string | string[] - environment?: string - errorMessageMaxLength?: string // Also support `number`, but as we're removing this functionality soon, there's no need to advertise it - errorOnAbortedRequests?: boolean - filterHttpHeaders?: boolean - frameworkName?: string - frameworkVersion?: string - globalLabels?: KeyValueConfig - hostname?: string - ignoreUrls?: Array - ignoreUserAgents?: Array - instrument?: boolean - instrumentIncomingHTTPRequests?: boolean - kubernetesNamespace?: string - kubernetesNodeName?: string - kubernetesPodName?: string - kubernetesPodUID?: string - logLevel?: LogLevel - logUncaughtExceptions?: boolean - logger?: Logger - metricsInterval?: string // Also support `number`, but as we're removing this functionality soon, there's no need to advertise it - payloadLogFile?: string - centralConfig?: boolean - secretToken?: string - serverCaCertFile?: string - serverTimeout?: string // Also support `number`, but as we're removing this functionality soon, there's no need to advertise it - serverUrl?: string - serviceName?: string - serviceVersion?: string - sourceLinesErrorAppFrames?: number - sourceLinesErrorLibraryFrames?: number - sourceLinesSpanAppFrames?: number - sourceLinesSpanLibraryFrames?: number - stackTraceLimit?: number - transactionMaxSpans?: number - transactionSampleRate?: number - usePathAsTransactionName?: boolean - verifyServerCert?: boolean - } - - interface CaptureErrorOptions { - 
request?: IncomingMessage - response?: ServerResponse - timestamp?: number - handled?: boolean - user?: UserObject - labels?: Labels - tags?: Labels - custom?: object - message?: string - } - - interface Labels { - [key: string]: LabelValue - } - - interface UserObject { - id?: string | number - username?: string - email?: string - } - - interface ParameterizedMessageObject { - message: string - params: Array - } - - interface Logger { - fatal(msg: string, ...args: any[]): void - fatal(obj: {}, msg?: string, ...args: any[]): void - error(msg: string, ...args: any[]): void - error(obj: {}, msg?: string, ...args: any[]): void - warn(msg: string, ...args: any[]): void - warn(obj: {}, msg?: string, ...args: any[]): void - info(msg: string, ...args: any[]): void - info(obj: {}, msg?: string, ...args: any[]): void - debug(msg: string, ...args: any[]): void - debug(obj: {}, msg?: string, ...args: any[]): void - trace(msg: string, ...args: any[]): void - trace(obj: {}, msg?: string, ...args: any[]): void - [propName: string]: any - } - - interface TransactionOptions { - startTime?: number - childOf?: Transaction | Span | string - } - - interface SpanOptions { - childOf?: Transaction | Span | string - } - - type CaptureBody = 'off' | 'errors' | 'transactions' | 'all' - type CaptureErrorLogStackTraces = 'never' | 'messages' | 'always' - type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'fatal' - - type CaptureErrorCallback = (err: Error | null, id: string) => void - type FilterFn = (payload: Payload) => Payload | boolean | void - type LabelValue = string | number | boolean | null | undefined - type KeyValueConfig = string | Labels | Array> - - type Payload = { [propName: string]: any } - - type PatchHandler = (exports: any, agent: Agent, options: PatchOptions) => any - - interface PatchOptions { - version: string | undefined - enabled: boolean - } - - interface Taggable { - setLabel(name: string, value: LabelValue): boolean - addLabels(labels: Labels): boolean 
- } - - interface StartSpanFn { - startSpan(name?: string | null, options?: SpanOptions): Span | null - startSpan(name: string | null, type: string | null, options?: SpanOptions): Span | null - startSpan(name: string | null, type: string | null, subtype: string | null, options?: SpanOptions): Span | null - startSpan( - name: string | null, - type: string | null, - subtype: string | null, - action: string | null, - options?: SpanOptions - ): Span | null - } - - // Inlined from @types/aws-lambda - start - declare namespace AwsLambda { - interface CognitoIdentity { - cognitoIdentityId: string - cognitoIdentityPoolId: string - } - - interface ClientContext { - client: ClientContextClient - custom?: any - env: ClientContextEnv - } - - interface ClientContextClient { - installationId: string - appTitle: string - appVersionName: string - appVersionCode: string - appPackageName: string - } - - interface ClientContextEnv { - platformVersion: string - platform: string - make: string - model: string - locale: string - } - - type Callback = (error?: Error | null | string, result?: TResult) => void - - interface Context { - // Properties - callbackWaitsForEmptyEventLoop: boolean - functionName: string - functionVersion: string - invokedFunctionArn: string - memoryLimitInMB: number - awsRequestId: string - logGroupName: string - logStreamName: string - identity?: CognitoIdentity - clientContext?: ClientContext - - // Functions - getRemainingTimeInMillis(): number - - // Functions for compatibility with earlier Node.js Runtime v0.10.42 - // For more details see http://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-using-old-runtime.html#nodejs-prog-model-oldruntime-context-methods - done(error?: Error, result?: any): void - fail(error: Error | string): void - succeed(messageOrObject: any): void - succeed(message: string, object: any): void - } - - type Handler = ( - event: TEvent, - context: Context, - callback: Callback - ) => void | Promise - } - - // Inlined from 
@types/connect - start - declare namespace Connect { - type NextFunction = (err?: any) => void - type ErrorHandleFunction = (err: any, req: IncomingMessage, res: ServerResponse, next: NextFunction) => void - } -} diff --git a/meteor/server/worker/worker.ts b/meteor/server/worker/worker.ts index f5e3c82885f..6a4b8651cff 100644 --- a/meteor/server/worker/worker.ts +++ b/meteor/server/worker/worker.ts @@ -9,7 +9,7 @@ import { threadedClass, Promisify, ThreadedClassManager } from 'threadedclass' import type { JobSpec } from '@sofie-automation/job-worker/dist/main' import type { IpcJobWorker } from '@sofie-automation/job-worker/dist/ipc' import { createManualPromise, getRandomString, ManualPromise, Time } from '../lib/tempLib' -import { MeteorStartupAsync, getCurrentTime } from '../lib/lib' +import { getCurrentTime } from '../lib/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { UserActionsLogItem } from '@sofie-automation/meteor-lib/dist/collections/UserActionsLog' import { triggerFastTrackObserver, FastTrackObservers } from '../publications/fastTrack' @@ -262,7 +262,7 @@ async function logLine(msg: LogEntry): Promise { } let worker: Promisify | undefined -MeteorStartupAsync(async () => { +Meteor.startup(async () => { if (Meteor.isDevelopment) { // Ensure meteor restarts when the _force_restart file changes try { diff --git a/meteor/yarn.lock b/meteor/yarn.lock index 233d3a8179a..a01cc3dc27a 100644 --- a/meteor/yarn.lock +++ b/meteor/yarn.lock @@ -473,21 +473,19 @@ __metadata: languageName: node linkType: hard -"@elastic/ecs-helpers@npm:^1.1.0": - version: 1.1.0 - resolution: "@elastic/ecs-helpers@npm:1.1.0" - dependencies: - fast-json-stringify: ^2.4.1 - checksum: 8f64e86fe3cfe67540fd8c2a62e0b7db1f4e8cab8c4a63e2f49ce295c3d3b629d0f3363b0107fe530650898a89b5b1d86190717447a481932856651911e6ef61 +"@elastic/ecs-helpers@npm:^2.1.1": + version: 2.1.1 + resolution: "@elastic/ecs-helpers@npm:2.1.1" + checksum: 
80db727963e26a28312c67e47cbc40bfe5441ff8937dc27157a7f968fcd20475345a19a6588cc1c6b97b2eeaf5990f2285eaf2f03e206f3c2cd9965160d3fdaa languageName: node linkType: hard -"@elastic/ecs-pino-format@npm:^1.2.0": - version: 1.3.0 - resolution: "@elastic/ecs-pino-format@npm:1.3.0" +"@elastic/ecs-pino-format@npm:^1.5.0": + version: 1.5.0 + resolution: "@elastic/ecs-pino-format@npm:1.5.0" dependencies: - "@elastic/ecs-helpers": ^1.1.0 - checksum: 1543b80b84e3f35b6be35b73b5d0153d267c357df750ba77af2c8d7b07097df8095f104d283e46b6500c902815327a1ad0005aa2e3855afbddbac0b683b72c6c + "@elastic/ecs-helpers": ^2.1.1 + checksum: e66a1801ecafa5d1f56037df8dafa9da9c302440c8254c636374e489a5a50e85166b77c951c1f8d3edd99cb77f43b5967c3f31a68cac23b2a4b95a87ce72b066 languageName: node linkType: hard @@ -1205,7 +1203,7 @@ __metadata: "@sofie-automation/shared-lib": 1.52.0-in-development amqplib: ^0.10.3 deepmerge: ^4.3.1 - elastic-apm-node: ^3.51.0 + elastic-apm-node: ^4.8.0 eventemitter3: ^4.0.7 mongodb: ^5.9.2 node-fetch: ^2.7.0 @@ -1395,13 +1393,6 @@ __metadata: languageName: node linkType: hard -"@types/fibers@npm:^3.1.4": - version: 3.1.4 - resolution: "@types/fibers@npm:3.1.4" - checksum: 36bb70198fb5b7f99b010c006ad0e77d473061cf03d4f63f1842040b7ecb62e8800041c9509b927ec59a1d5ac374d5e7c86b2fe0de2a9813f1538ab0248e5dea - languageName: node - linkType: hard - "@types/graceful-fs@npm:^4.1.3": version: 4.1.6 resolution: "@types/graceful-fs@npm:4.1.6" @@ -1573,10 +1564,12 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:^14.18.63": - version: 14.18.63 - resolution: "@types/node@npm:14.18.63" - checksum: be909061a54931778c71c49dc562586c32f909c4b6197e3d71e6dac726d8bd9fccb9f599c0df99f52742b68153712b5097c0f00cac4e279fa894b0ea6719a8fd +"@types/node@npm:^20.17.6": + version: 20.17.6 + resolution: "@types/node@npm:20.17.6" + dependencies: + undici-types: ~6.19.2 + checksum: 
d51dbb9881c94d0310b32b5fd8013e3261595c61bc888fa27258469c93c3dc0b3c4d20a9f28f3f5f79562f6737e28e7f3dd04940dc8b4d966d34aaf318f7f69b languageName: node linkType: hard @@ -1849,6 +1842,15 @@ __metadata: languageName: node linkType: hard +"abort-controller@npm:^3.0.0": + version: 3.0.0 + resolution: "abort-controller@npm:3.0.0" + dependencies: + event-target-shim: ^5.0.0 + checksum: 170bdba9b47b7e65906a28c8ce4f38a7a369d78e2271706f020849c1bfe0ee2067d4261df8bbb66eb84f79208fd5b710df759d64191db58cfba7ce8ef9c54b75 + languageName: node + linkType: hard + "accepts@npm:^1.3.5, accepts@npm:^1.3.7": version: 1.3.8 resolution: "accepts@npm:1.3.8" @@ -1870,12 +1872,12 @@ __metadata: languageName: node linkType: hard -"acorn-import-assertions@npm:^1.9.0": - version: 1.9.0 - resolution: "acorn-import-assertions@npm:1.9.0" +"acorn-import-attributes@npm:^1.9.5": + version: 1.9.5 + resolution: "acorn-import-attributes@npm:1.9.5" peerDependencies: acorn: ^8 - checksum: 944fb2659d0845c467066bdcda2e20c05abe3aaf11972116df457ce2627628a81764d800dd55031ba19de513ee0d43bb771bc679cc0eda66dc8b4fade143bc0c + checksum: 1c0c49b6a244503964ae46ae850baccf306e84caf99bc2010ed6103c69a423987b07b520a6c619f075d215388bd4923eccac995886a54309eda049ab78a4be95 languageName: node linkType: hard @@ -1990,7 +1992,7 @@ __metadata: languageName: node linkType: hard -"ajv@npm:^6.11.0, ajv@npm:^6.12.4": +"ajv@npm:^6.12.4": version: 6.12.6 resolution: "ajv@npm:6.12.6" dependencies: @@ -2230,15 +2232,6 @@ __metadata: languageName: node linkType: hard -"async-cache@npm:^1.1.0": - version: 1.1.0 - resolution: "async-cache@npm:1.1.0" - dependencies: - lru-cache: ^4.0.0 - checksum: 3f55cc78b3ddc745b6604dd144fc7bca2e21c7ba4c5ea18d312234dc625133511723dff6c71b2283582421f95d591bdb24bf89ce4c4869151e4ecedbdad4acf2 - languageName: node - linkType: hard - "async-value-promise@npm:^1.1.1": version: 1.1.1 resolution: "async-value-promise@npm:1.1.1" @@ -2298,14 +2291,13 @@ __metadata: "@types/app-root-path": ^1.2.8 "@types/body-parser": 
^1.19.5 "@types/deep-extend": ^0.6.2 - "@types/fibers": ^3.1.4 "@types/jest": ^29.5.11 "@types/koa": ^2.14.0 "@types/koa-bodyparser": ^4.3.12 "@types/koa-static": ^4.0.4 "@types/koa__cors": ^5.0.0 "@types/koa__router": ^12.0.4 - "@types/node": ^14.18.63 + "@types/node": ^20.17.6 "@types/request": ^2.48.12 "@types/semver": ^7.5.6 "@types/underscore": ^1.11.15 @@ -2319,14 +2311,13 @@ __metadata: deep-extend: 0.6.0 deepmerge: ^4.3.1 ejson: ^2.2.3 + elastic-apm-node: ^4.8.0 eslint: ^8.56.0 eslint-config-prettier: ^8.10.0 - eslint-plugin-custom-rules: "link:eslint-rules" eslint-plugin-jest: ^27.6.3 eslint-plugin-node: ^11.1.0 eslint-plugin-prettier: ^4.2.1 fast-clone: ^1.5.13 - fibers-npm: "npm:fibers@5.0.3" glob: ^8.1.0 i18next: ^21.10.0 i18next-conv: ^10.2.0 @@ -2338,7 +2329,6 @@ __metadata: koa-static: ^5.0.0 legally: ^3.5.10 meteor-node-stubs: ^1.2.7 - meteor-promise: 0.9.0 moment: ^2.30.1 nanoid: ^3.3.7 node-gyp: ^9.4.1 @@ -2493,6 +2483,13 @@ __metadata: languageName: node linkType: hard +"bignumber.js@npm:^9.0.0": + version: 9.1.2 + resolution: "bignumber.js@npm:9.1.2" + checksum: 582c03af77ec9cb0ebd682a373ee6c66475db94a4325f92299621d544aa4bd45cb45fd60001610e94aef8ae98a0905fa538241d9638d4422d57abbeeac6fadaf + languageName: node + linkType: hard + "binary-search@npm:^1.3.3": version: 1.3.6 resolution: "binary-search@npm:1.3.6" @@ -3468,10 +3465,10 @@ __metadata: languageName: node linkType: hard -"cookie@npm:^0.5.0": - version: 0.5.0 - resolution: "cookie@npm:0.5.0" - checksum: 1f4bd2ca5765f8c9689a7e8954183f5332139eb72b6ff783d8947032ec1fdf43109852c178e21a953a30c0dd42257828185be01b49d1eb1a67fd054ca588a180 +"cookie@npm:^0.7.1": + version: 0.7.2 + resolution: "cookie@npm:0.7.2" + checksum: 9bf8555e33530affd571ea37b615ccad9b9a34febbf2c950c86787088eb00a8973690833b0f8ebd6b69b753c62669ea60cec89178c1fb007bf0749abed74f93e languageName: node linkType: hard @@ -3819,15 +3816,6 @@ __metadata: languageName: node linkType: hard -"detect-libc@npm:^1.0.3": - version: 1.0.3 - 
resolution: "detect-libc@npm:1.0.3" - bin: - detect-libc: ./bin/detect-libc.js - checksum: daaaed925ffa7889bd91d56e9624e6c8033911bb60f3a50a74a87500680652969dbaab9526d1e200a4c94acf80fc862a22131841145a0a8482d60a99c24f4a3e - languageName: node - linkType: hard - "detect-libc@npm:^2.0.0": version: 2.0.2 resolution: "detect-libc@npm:2.0.2" @@ -3935,21 +3923,20 @@ __metadata: languageName: node linkType: hard -"elastic-apm-node@npm:^3.51.0": - version: 3.51.0 - resolution: "elastic-apm-node@npm:3.51.0" +"elastic-apm-node@npm:^4.8.0": + version: 4.8.0 + resolution: "elastic-apm-node@npm:4.8.0" dependencies: - "@elastic/ecs-pino-format": ^1.2.0 + "@elastic/ecs-pino-format": ^1.5.0 "@opentelemetry/api": ^1.4.1 "@opentelemetry/core": ^1.11.0 "@opentelemetry/sdk-metrics": ^1.12.0 after-all-results: ^2.0.0 agentkeepalive: ^4.2.1 - async-cache: ^1.1.0 async-value-promise: ^1.1.1 basic-auth: ^2.0.1 breadth-filter: ^2.0.0 - cookie: ^0.5.0 + cookie: ^0.7.1 core-util-is: ^1.0.2 end-of-stream: ^1.4.4 error-callsites: ^2.0.4 @@ -3958,26 +3945,26 @@ __metadata: fast-safe-stringify: ^2.0.7 fast-stream-to-buffer: ^1.0.0 http-headers: ^3.0.2 - import-in-the-middle: 1.4.2 - is-native: ^1.0.1 - lru-cache: ^6.0.0 + import-in-the-middle: 1.11.2 + json-bigint: ^1.0.0 + lru-cache: 10.2.0 measured-reporting: ^1.51.1 module-details-from-path: ^1.0.3 monitor-event-loop-delay: ^1.0.0 object-filter-sequence: ^1.0.0 object-identity-map: ^1.0.2 original-url: ^1.2.3 - pino: ^6.11.2 - readable-stream: ^3.4.0 + pino: ^8.15.0 + readable-stream: ^3.6.2 relative-microtime: ^2.0.0 require-in-the-middle: ^7.1.1 - semver: ^6.3.1 + semver: ^7.5.4 shallow-clone-shim: ^2.0.0 source-map: ^0.8.0-beta.0 sql-summary: ^1.0.1 stream-chopper: ^3.0.1 unicode-byte-truncate: ^1.0.0 - checksum: e6a801e731d6a5178e7450c76e88b9a519823129986365b2f59c4ee8e02c2a0e624deacebce6e85ae0964f6ea876a9f562901ca0b3538f4b0452a24d7f1b0303 + checksum: 
4c6534481540b08412096ff192c67b8dcc9501672b57bcc2f6ad159e84a61f5c32a63c6a7edfe8fb289a6854e33e1278e0aa04b5cadcbb1c9cc51325761be45d languageName: node linkType: hard @@ -4224,12 +4211,6 @@ __metadata: languageName: node linkType: hard -"eslint-plugin-custom-rules@link:eslint-rules::locator=automation-core%40workspace%3A.": - version: 0.0.0-use.local - resolution: "eslint-plugin-custom-rules@link:eslint-rules::locator=automation-core%40workspace%3A." - languageName: node - linkType: soft - "eslint-plugin-es@npm:^3.0.0": version: 3.0.1 resolution: "eslint-plugin-es@npm:3.0.1" @@ -4452,6 +4433,13 @@ __metadata: languageName: node linkType: hard +"event-target-shim@npm:^5.0.0": + version: 5.0.1 + resolution: "event-target-shim@npm:5.0.1" + checksum: 1ffe3bb22a6d51bdeb6bf6f7cf97d2ff4a74b017ad12284cc9e6a279e727dc30a5de6bb613e5596ff4dc3e517841339ad09a7eec44266eccb1aa201a30448166 + languageName: node + linkType: hard + "eventemitter3@npm:^4.0.4, eventemitter3@npm:^4.0.7": version: 4.0.7 resolution: "eventemitter3@npm:4.0.7" @@ -4593,18 +4581,6 @@ __metadata: languageName: node linkType: hard -"fast-json-stringify@npm:^2.4.1": - version: 2.7.13 - resolution: "fast-json-stringify@npm:2.7.13" - dependencies: - ajv: ^6.11.0 - deepmerge: ^4.2.2 - rfdc: ^1.2.0 - string-similarity: ^4.0.1 - checksum: f78ab25047c790de5b521c369e0b18c595055d48a106add36e9f86fe45be40226f168ff4708a226e187d0b46f1d6b32129842041728944bd9a03ca5efbbe4ccb - languageName: node - linkType: hard - "fast-levenshtein@npm:^2.0.6": version: 2.0.6 resolution: "fast-levenshtein@npm:2.0.6" @@ -4612,14 +4588,14 @@ __metadata: languageName: node linkType: hard -"fast-redact@npm:^3.0.0": - version: 3.3.0 - resolution: "fast-redact@npm:3.3.0" - checksum: 3f7becc70a5a2662a9cbfdc52a4291594f62ae998806ee00315af307f32d9559dbf512146259a22739ee34401950ef47598c1f4777d33b0ed5027203d67f549c +"fast-redact@npm:^3.1.1": + version: 3.5.0 + resolution: "fast-redact@npm:3.5.0" + checksum: 
ef03f0d1849da074a520a531ad299bf346417b790a643931ab4e01cb72275c8d55b60dc8512fb1f1818647b696790edefaa96704228db9f012da935faa1940af languageName: node linkType: hard -"fast-safe-stringify@npm:^2.0.7, fast-safe-stringify@npm:^2.0.8": +"fast-safe-stringify@npm:^2.0.7": version: 2.1.1 resolution: "fast-safe-stringify@npm:2.1.1" checksum: a851cbddc451745662f8f00ddb622d6766f9bd97642dabfd9a405fb0d646d69fc0b9a1243cbf67f5f18a39f40f6fa821737651ff1bceeba06c9992ca2dc5bd3d @@ -4660,15 +4636,6 @@ __metadata: languageName: node linkType: hard -"fibers-npm@npm:fibers@5.0.3": - version: 5.0.3 - resolution: "fibers@npm:5.0.3" - dependencies: - detect-libc: ^1.0.3 - checksum: d66c5e18a911aab3480b846e1c837e5c7cfacb27a2a5fe512919865eaecef33cdd4abc14d777191a6a93473dc52356d48549c91a2a7b8b3450544c44104b23f3 - languageName: node - linkType: hard - "figures@npm:^3.1.0": version: 3.2.0 resolution: "figures@npm:3.2.0" @@ -4766,13 +4733,6 @@ __metadata: languageName: node linkType: hard -"flatstr@npm:^1.0.12": - version: 1.0.12 - resolution: "flatstr@npm:1.0.12" - checksum: e1bb562c94b119e958bf37e55738b172b5f8aaae6532b9660ecd877779f8559dbbc89613ba6b29ccc13447e14c59277d41450f785cf75c30df9fce62f459e9a8 - languageName: node - linkType: hard - "flatted@npm:^3.2.7": version: 3.2.9 resolution: "flatted@npm:3.2.9" @@ -5617,15 +5577,15 @@ __metadata: languageName: node linkType: hard -"import-in-the-middle@npm:1.4.2": - version: 1.4.2 - resolution: "import-in-the-middle@npm:1.4.2" +"import-in-the-middle@npm:1.11.2": + version: 1.11.2 + resolution: "import-in-the-middle@npm:1.11.2" dependencies: acorn: ^8.8.2 - acorn-import-assertions: ^1.9.0 + acorn-import-attributes: ^1.9.5 cjs-module-lexer: ^1.2.2 module-details-from-path: ^1.0.3 - checksum: 52971f821e9a3c94834cd5cf0ab5178321c07d4f4babd547b3cb24c4de21670d05b42ca1523890e7e90525c3bba6b7db7e54cf45421919b0b2712a34faa96ea5 + checksum: 
06fb73100a918e00778779713119236cc8d3d4656aae9076a18159cfcd28eb0cc26e0a5040d11da309c5f8f8915c143b8d74e73c0734d3f5549b1813d1008bb9 languageName: node linkType: hard @@ -5933,16 +5893,6 @@ __metadata: languageName: node linkType: hard -"is-native@npm:^1.0.1": - version: 1.0.1 - resolution: "is-native@npm:1.0.1" - dependencies: - is-nil: ^1.0.0 - to-source-code: ^1.0.0 - checksum: 4967af8c4d7a06076cb16ef70fba5a5a2b61ef0a83d4d5dce437cf4c6b5315255cccf07db37d487bcdf2f0ded86edb166a62c46a712cfda1227532b70015029c - languageName: node - linkType: hard - "is-negated-glob@npm:^1.0.0": version: 1.0.0 resolution: "is-negated-glob@npm:1.0.0" @@ -5957,13 +5907,6 @@ __metadata: languageName: node linkType: hard -"is-nil@npm:^1.0.0": - version: 1.0.1 - resolution: "is-nil@npm:1.0.1" - checksum: e5b89c3b82068e719372381c5aaa5f3f28d09e6d501d7f7e4365f136433de1ae92f9f82eeedcb3c3282da1ccf374aad46cc06feab2647d2820067c4a35484760 - languageName: node - linkType: hard - "is-number-object@npm:^1.0.4": version: 1.0.7 resolution: "is-number-object@npm:1.0.7" @@ -6702,6 +6645,15 @@ __metadata: languageName: node linkType: hard +"json-bigint@npm:^1.0.0": + version: 1.0.0 + resolution: "json-bigint@npm:1.0.0" + dependencies: + bignumber.js: ^9.0.0 + checksum: c67bb93ccb3c291e60eb4b62931403e378906aab113ec1c2a8dd0f9a7f065ad6fd9713d627b732abefae2e244ac9ce1721c7a3142b2979532f12b258634ce6f6 + languageName: node + linkType: hard + "json-buffer@npm:3.0.1": version: 3.0.1 resolution: "json-buffer@npm:3.0.1" @@ -7113,13 +7065,10 @@ __metadata: languageName: node linkType: hard -"lru-cache@npm:^4.0.0": - version: 4.1.5 - resolution: "lru-cache@npm:4.1.5" - dependencies: - pseudomap: ^1.0.2 - yallist: ^2.1.2 - checksum: 4bb4b58a36cd7dc4dcec74cbe6a8f766a38b7426f1ff59d4cf7d82a2aa9b9565cd1cb98f6ff60ce5cd174524868d7bc9b7b1c294371851356066ca9ac4cf135a +"lru-cache@npm:10.2.0": + version: 10.2.0 + resolution: "lru-cache@npm:10.2.0" + checksum: 
eee7ddda4a7475deac51ac81d7dd78709095c6fa46e8350dc2d22462559a1faa3b81ed931d5464b13d48cbd7e08b46100b6f768c76833912bc444b99c37e25db languageName: node linkType: hard @@ -7396,13 +7345,6 @@ __metadata: languageName: node linkType: hard -"meteor-promise@npm:0.9.0": - version: 0.9.0 - resolution: "meteor-promise@npm:0.9.0" - checksum: 2837518debf173a2946d55c270f3d799fa9a9421c63291be42b06a24071b40f2e81120ac88feb268a49819e726b2c1e0fc9413f8aa747d945aab3d6e9ffaea2e - languageName: node - linkType: hard - "methods@npm:^1.1.2": version: 1.1.2 resolution: "methods@npm:1.1.2" @@ -8118,6 +8060,13 @@ __metadata: languageName: node linkType: hard +"on-exit-leak-free@npm:^2.1.0": + version: 2.1.2 + resolution: "on-exit-leak-free@npm:2.1.2" + checksum: 6ce7acdc7b9ceb51cf029b5239cbf41937ee4c8dcd9d4e475e1777b41702564d46caa1150a744e00da0ac6d923ab83471646a39a4470f97481cf6e2d8d253c3f + languageName: node + linkType: hard + "on-finished@npm:2.4.1, on-finished@npm:^2.3.0": version: 2.4.1 resolution: "on-finished@npm:2.4.1" @@ -8587,27 +8536,41 @@ __metadata: languageName: node linkType: hard -"pino-std-serializers@npm:^3.1.0": - version: 3.2.0 - resolution: "pino-std-serializers@npm:3.2.0" - checksum: 77e29675b116e42ae9fe6d4ef52ef3a082ffc54922b122d85935f93ddcc20277f0b0c873c5c6c5274a67b0409c672aaae3de6bcea10a2d84699718dda55ba95b +"pino-abstract-transport@npm:^1.2.0": + version: 1.2.0 + resolution: "pino-abstract-transport@npm:1.2.0" + dependencies: + readable-stream: ^4.0.0 + split2: ^4.0.0 + checksum: 3336c51fb91ced5ef8a4bfd70a96e41eb6deb905698e83350dc71eedffb34795db1286d2d992ce1da2f6cd330a68be3f7e2748775a6b8a2ee3416796070238d6 languageName: node linkType: hard -"pino@npm:^6.11.2": - version: 6.14.0 - resolution: "pino@npm:6.14.0" +"pino-std-serializers@npm:^6.0.0": + version: 6.2.2 + resolution: "pino-std-serializers@npm:6.2.2" + checksum: aeb0662edc46ec926de9961ed4780a4f0586bb7c37d212cd469c069639e7816887a62c5093bc93f260a4e0900322f44fc8ab1343b5a9fa2864a888acccdb22a4 + languageName: node + 
linkType: hard + +"pino@npm:^8.15.0": + version: 8.21.0 + resolution: "pino@npm:8.21.0" dependencies: - fast-redact: ^3.0.0 - fast-safe-stringify: ^2.0.8 - flatstr: ^1.0.12 - pino-std-serializers: ^3.1.0 - process-warning: ^1.0.0 + atomic-sleep: ^1.0.0 + fast-redact: ^3.1.1 + on-exit-leak-free: ^2.1.0 + pino-abstract-transport: ^1.2.0 + pino-std-serializers: ^6.0.0 + process-warning: ^3.0.0 quick-format-unescaped: ^4.0.3 - sonic-boom: ^1.0.2 + real-require: ^0.2.0 + safe-stable-stringify: ^2.3.1 + sonic-boom: ^3.7.0 + thread-stream: ^2.6.0 bin: pino: bin.js - checksum: eb13e12e3a3d682abe4a4da426455a9f4e041e55e4fa57d72d9677ee8d188a9c952f69347e728a3761c8262cdce76ef24bee29e1a53ab15aa9c5e851099163d0 + checksum: d895c37cfcb7ade33ad7ac4ca54c0497ab719ec726e42b7c7b9697e07572a09a7c7de18d751440769c3ea5ecbac2075fdac720cf182720a4764defe3de8a1411 languageName: node linkType: hard @@ -8670,10 +8633,10 @@ __metadata: languageName: node linkType: hard -"process-warning@npm:^1.0.0": - version: 1.0.0 - resolution: "process-warning@npm:1.0.0" - checksum: c708a03241deec3cabaeee39c4f9ee8c4d71f1c5ef9b746c8252cdb952a6059068cfcdaf348399775244cbc441b6ae5e26a9c87ed371f88335d84f26d19180f9 +"process-warning@npm:^3.0.0": + version: 3.0.0 + resolution: "process-warning@npm:3.0.0" + checksum: 1fc2eb4524041de3c18423334cc8b4e36bec5ad5472640ca1a936122c6e01da0864c1a4025858ef89aea93eabe7e77db93ccea225b10858617821cb6a8719efe languageName: node linkType: hard @@ -8720,13 +8683,6 @@ __metadata: languageName: node linkType: hard -"pseudomap@npm:^1.0.2": - version: 1.0.2 - resolution: "pseudomap@npm:1.0.2" - checksum: 856c0aae0ff2ad60881168334448e898ad7a0e45fe7386d114b150084254c01e200c957cf378378025df4e052c7890c5bd933939b0e0d2ecfcc1dc2f0b2991f5 - languageName: node - linkType: hard - "public-encrypt@npm:^4.0.0": version: 4.0.3 resolution: "public-encrypt@npm:4.0.3" @@ -8990,7 +8946,7 @@ __metadata: languageName: node linkType: hard -"readable-stream@npm:3, readable-stream@npm:^3.0.0, 
readable-stream@npm:^3.0.2, readable-stream@npm:^3.0.6, readable-stream@npm:^3.4.0, readable-stream@npm:^3.5.0, readable-stream@npm:^3.6.0": +"readable-stream@npm:3, readable-stream@npm:^3.0.0, readable-stream@npm:^3.0.2, readable-stream@npm:^3.0.6, readable-stream@npm:^3.4.0, readable-stream@npm:^3.5.0, readable-stream@npm:^3.6.0, readable-stream@npm:^3.6.2": version: 3.6.2 resolution: "readable-stream@npm:3.6.2" dependencies: @@ -9001,6 +8957,19 @@ __metadata: languageName: node linkType: hard +"readable-stream@npm:^4.0.0": + version: 4.5.2 + resolution: "readable-stream@npm:4.5.2" + dependencies: + abort-controller: ^3.0.0 + buffer: ^6.0.3 + events: ^3.3.0 + process: ^0.11.10 + string_decoder: ^1.3.0 + checksum: c4030ccff010b83e4f33289c535f7830190773e274b3fcb6e2541475070bdfd69c98001c3b0cb78763fc00c8b62f514d96c2b10a8bd35d5ce45203a25fa1d33a + languageName: node + linkType: hard + "readable-stream@npm:~2.3.6": version: 2.3.8 resolution: "readable-stream@npm:2.3.8" @@ -9037,6 +9006,13 @@ __metadata: languageName: node linkType: hard +"real-require@npm:^0.2.0": + version: 0.2.0 + resolution: "real-require@npm:0.2.0" + checksum: fa060f19f2f447adf678d1376928c76379dce5f72bd334da301685ca6cdcb7b11356813332cc243c88470796bc2e2b1e2917fc10df9143dd93c2ea608694971d + languageName: node + linkType: hard + "rechoir@npm:^0.6.2": version: 0.6.2 resolution: "rechoir@npm:0.6.2" @@ -9236,7 +9212,7 @@ __metadata: languageName: node linkType: hard -"rfdc@npm:^1.2.0, rfdc@npm:^1.3.0": +"rfdc@npm:^1.3.0": version: 1.3.0 resolution: "rfdc@npm:1.3.0" checksum: fb2ba8512e43519983b4c61bd3fa77c0f410eff6bae68b08614437bc3f35f91362215f7b4a73cbda6f67330b5746ce07db5dd9850ad3edc91271ad6deea0df32 @@ -9558,13 +9534,12 @@ __metadata: languageName: node linkType: hard -"sonic-boom@npm:^1.0.2": - version: 1.4.1 - resolution: "sonic-boom@npm:1.4.1" +"sonic-boom@npm:^3.7.0": + version: 3.8.1 + resolution: "sonic-boom@npm:3.8.1" dependencies: atomic-sleep: ^1.0.0 - flatstr: ^1.0.12 - checksum: 
189fa8fe5c2dc05d3513fc1a4926a2f16f132fa6fa0b511745a436010cdcd9c1d3b3cb6a9d7c05bd32a965dc77673a5ac0eb0992e920bdedd16330d95323124f + checksum: 79c90d7a2f928489fd3d4b68d8f8d747a426ca6ccf83c3b102b36f899d4524463dd310982ab7ab6d6bcfd34b7c7c281ad25e495ad71fbff8fd6fa86d6273fc6b languageName: node linkType: hard @@ -9682,6 +9657,13 @@ __metadata: languageName: node linkType: hard +"split2@npm:^4.0.0": + version: 4.2.0 + resolution: "split2@npm:4.2.0" + checksum: 05d54102546549fe4d2455900699056580cca006c0275c334611420f854da30ac999230857a85fdd9914dc2109ae50f80fda43d2a445f2aa86eccdc1dfce779d + languageName: node + linkType: hard + "split@npm:^1.0.0": version: 1.0.1 resolution: "split@npm:1.0.1" @@ -9851,13 +9833,6 @@ __metadata: languageName: node linkType: hard -"string-similarity@npm:^4.0.1": - version: 4.0.4 - resolution: "string-similarity@npm:4.0.4" - checksum: 797b41b24e1eb6b3b0ab896950b58c295a19a82933479c75f7b5279ffb63e0b456a8c8d10329c02f607ca1a50370e961e83d552aa468ff3b0fa15809abc9eff7 - languageName: node - linkType: hard - "string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^1.0.2 || 2 || 3 || 4, string-width@npm:^4.1.0, string-width@npm:^4.2.0, string-width@npm:^4.2.3": version: 4.2.3 resolution: "string-width@npm:4.2.3" @@ -10152,6 +10127,15 @@ __metadata: languageName: node linkType: hard +"thread-stream@npm:^2.6.0": + version: 2.7.0 + resolution: "thread-stream@npm:2.7.0" + dependencies: + real-require: ^0.2.0 + checksum: 75ab019cda628344c7779e5f5a88f7759764efd29d320327ad2e6c2622778b5f1c43a3966d76a9ee5744086d61c680b413548f5521030f9e9055487684436165 + languageName: node + linkType: hard + "threadedclass@npm:^1.2.2": version: 1.2.2 resolution: "threadedclass@npm:1.2.2" @@ -10245,15 +10229,6 @@ __metadata: languageName: node linkType: hard -"to-source-code@npm:^1.0.0": - version: 1.0.2 - resolution: "to-source-code@npm:1.0.2" - dependencies: - is-nil: ^1.0.0 - checksum: 
24fd24767f185ad11f81c1e020c2f789fba29471195227731530ec39b2697bb680c16e1f6f7d0d68bffba81e3d95e68dd6014f8c88371399bddcf8c4ad036de3 - languageName: node - linkType: hard - "to-through@npm:^3.0.0": version: 3.0.0 resolution: "to-through@npm:3.0.0" @@ -10596,6 +10571,13 @@ __metadata: languageName: node linkType: hard +"undici-types@npm:~6.19.2": + version: 6.19.8 + resolution: "undici-types@npm:6.19.8" + checksum: de51f1b447d22571cf155dfe14ff6d12c5bdaec237c765085b439c38ca8518fc360e88c70f99469162bf2e14188a7b0bcb06e1ed2dc031042b984b0bb9544017 + languageName: node + linkType: hard + "unicode-byte-truncate@npm:^1.0.0": version: 1.0.0 resolution: "unicode-byte-truncate@npm:1.0.0" @@ -11058,13 +11040,6 @@ __metadata: languageName: node linkType: hard -"yallist@npm:^2.1.2": - version: 2.1.2 - resolution: "yallist@npm:2.1.2" - checksum: 9ba99409209f485b6fcb970330908a6d41fa1c933f75e08250316cce19383179a6b70a7e0721b89672ebb6199cc377bf3e432f55100da6a7d6e11902b0a642cb - languageName: node - linkType: hard - "yallist@npm:^3.0.2": version: 3.1.1 resolution: "yallist@npm:3.1.1" diff --git a/package.json b/package.json index ae7452cdddb..8a279813ee3 100644 --- a/package.json +++ b/package.json @@ -4,12 +4,12 @@ "license": "MIT", "private": true, "engines": { - "node": "^14.19 || ^16.14 || ^18.12" + "node": ">=20.18" }, "scripts": { "postinstall": "run install:packages && run install:meteor", - "install:meteor": "cd meteor && meteor --version && meteor npm install -g yarn && node ../scripts/fix-windows-yarn.js && meteor yarn install", - "install:packages": "cd packages && (node is_node_14.js && yarn lerna run --ignore openapi install || yarn install)", + "install:meteor": "cd meteor && meteor --version && meteor npm install -g yarn && node ../scripts/fix-windows-yarn.js && yarn install", + "install:packages": "cd packages && yarn install", "start": "yarn install && run install-and-build && run dev", "install-and-build": "node ./scripts/install-and-build.mjs", "dev": "node 
./scripts/run.mjs", @@ -20,13 +20,13 @@ "unit:packages": "cd packages && run unit", "check-types:meteor": "cd meteor && run check-types", "test:meteor": "cd meteor && run test", - "lint:meteor": "cd meteor && meteor yarn lint", - "unit:meteor": "cd meteor && meteor yarn unit", - "meteor:run": "cd meteor && meteor yarn start", + "lint:meteor": "cd meteor && yarn lint", + "unit:meteor": "cd meteor && yarn unit", + "meteor:run": "cd meteor && yarn start", "lint": "run lint:meteor && run lint:packages", "unit": "run unit:meteor && run unit:packages", "validate:release": "yarn install && run install-and-build && run validate:release:packages && run validate:release:meteor", - "validate:release:meteor": "cd meteor && meteor yarn validate:prod-dependencies && meteor yarn license-validate && meteor yarn lint && meteor yarn test", + "validate:release:meteor": "cd meteor && yarn validate:prod-dependencies && yarn license-validate && yarn lint && yarn test", "validate:release:packages": "cd packages && run validate:dependencies && run test", "meteor": "cd meteor && meteor", "docs:serve": "cd packages && run docs:serve", diff --git a/packages/blueprints-integration/package.json b/packages/blueprints-integration/package.json index 7594583839a..d32f58ee438 100644 --- a/packages/blueprints-integration/package.json +++ b/packages/blueprints-integration/package.json @@ -29,7 +29,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=14.19" + "node": ">=20.18" }, "files": [ "/dist", diff --git a/packages/blueprints-integration/src/migrations.ts b/packages/blueprints-integration/src/migrations.ts index d7eb39e6222..6309e8cc909 100644 --- a/packages/blueprints-integration/src/migrations.ts +++ b/packages/blueprints-integration/src/migrations.ts @@ -24,7 +24,10 @@ export interface MigrationStepInputFilteredResult { } export type ValidateFunctionCore = (afterMigration: boolean) => Promise -export type ValidateFunctionSystem = (context: MigrationContextSystem, 
afterMigration: boolean) => boolean | string +export type ValidateFunctionSystem = ( + context: MigrationContextSystem, + afterMigration: boolean +) => Promise export type ValidateFunctionStudio = (context: MigrationContextStudio, afterMigration: boolean) => boolean | string export type ValidateFunctionShowStyle = ( context: MigrationContextShowStyle, @@ -37,7 +40,10 @@ export type ValidateFunction = | ValidateFunctionCore export type MigrateFunctionCore = (input: MigrationStepInputFilteredResult) => Promise -export type MigrateFunctionSystem = (context: MigrationContextSystem, input: MigrationStepInputFilteredResult) => void +export type MigrateFunctionSystem = ( + context: MigrationContextSystem, + input: MigrationStepInputFilteredResult +) => Promise export type MigrateFunctionStudio = (context: MigrationContextStudio, input: MigrationStepInputFilteredResult) => void export type MigrateFunctionShowStyle = ( context: MigrationContextShowStyle, @@ -77,11 +83,11 @@ export interface ShowStyleVariantPart { } interface MigrationContextWithTriggeredActions { - getAllTriggeredActions: () => IBlueprintTriggeredActions[] - getTriggeredAction: (triggeredActionId: string) => IBlueprintTriggeredActions | undefined + getAllTriggeredActions: () => Promise + getTriggeredAction: (triggeredActionId: string) => Promise getTriggeredActionId: (triggeredActionId: string) => string - setTriggeredAction: (triggeredActions: IBlueprintTriggeredActions) => void - removeTriggeredAction: (triggeredActionId: string) => void + setTriggeredAction: (triggeredActions: IBlueprintTriggeredActions) => Promise + removeTriggeredAction: (triggeredActionId: string) => Promise } export interface MigrationContextShowStyle extends MigrationContextWithTriggeredActions { diff --git a/packages/corelib/package.json b/packages/corelib/package.json index 5d3fb3a56b0..52533e7332a 100644 --- a/packages/corelib/package.json +++ b/packages/corelib/package.json @@ -30,7 +30,7 @@ "license-validate": "run -T 
sofie-licensecheck" }, "engines": { - "node": "^14.19 || ^16.14 || ^18.12" + "node": ">=20.18" }, "files": [ "/dist", diff --git a/packages/documentation/package.json b/packages/documentation/package.json index d144d945167..7fabd6dc65e 100644 --- a/packages/documentation/package.json +++ b/packages/documentation/package.json @@ -15,7 +15,7 @@ "write-heading-ids": "docusaurus write-heading-ids" }, "engines": { - "node": ">=18.0" + "node": ">=20.18" }, "devDependencies": { "@docusaurus/core": "3.2.1", diff --git a/packages/is_node_14.js b/packages/is_node_14.js deleted file mode 100644 index 21623c3f469..00000000000 --- a/packages/is_node_14.js +++ /dev/null @@ -1,5 +0,0 @@ -if (process.version.match(/^v14/) !== null) { - process.exit(0) -} else { - process.exit(1) -} \ No newline at end of file diff --git a/packages/job-worker/package.json b/packages/job-worker/package.json index 4f42b539d20..8a7e17f85b3 100644 --- a/packages/job-worker/package.json +++ b/packages/job-worker/package.json @@ -31,7 +31,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": "^14.19 || ^16.14 || ^18.12" + "node": ">=20.18" }, "files": [ "/dist", @@ -46,7 +46,7 @@ "@sofie-automation/shared-lib": "1.52.0-in-development", "amqplib": "^0.10.3", "deepmerge": "^4.3.1", - "elastic-apm-node": "^3.51.0", + "elastic-apm-node": "^4.8.0", "eventemitter3": "^4.0.7", "mongodb": "^5.9.2", "node-fetch": "^2.7.0", diff --git a/packages/job-worker/src/events/__tests__/externalMessageQueue.test.ts b/packages/job-worker/src/events/__tests__/externalMessageQueue.test.ts index 9d9e5129320..51d53812c3d 100644 --- a/packages/job-worker/src/events/__tests__/externalMessageQueue.test.ts +++ b/packages/job-worker/src/events/__tests__/externalMessageQueue.test.ts @@ -338,7 +338,7 @@ describe('Test sending messages to mocked endpoints', () => { expect(ExternalMessageQueue.findOne()).toBeFalsy() }) - testInFiber('fail to send a slack-type message', async () => { + test('fail to send a 
slack-type message', async () => { // setLogLevel(LogLevel.DEBUG) expect(ExternalMessageQueue.findOne()).toBeFalsy() @@ -372,14 +372,14 @@ describe('Test sending messages to mocked endpoints', () => { expect(message.sent).toBeUndefined() }) - testInFiber('does not try to send again immediately', async () => { + test('does not try to send again immediately', async () => { // setLogLevel(LogLevel.DEBUG) await runAllTimers() // Does not try to send again yet ... too close to lastTry expect(sendSlackMessageToWebhook).toHaveBeenCalledTimes(2) }) - testInFiber('after a minute, tries to resend', async () => { + test('after a minute, tries to resend', async () => { // setLogLevel(LogLevel.DEBUG) // Reset the last try clock const sendTime = getCurrentTime() @@ -400,7 +400,7 @@ describe('Test sending messages to mocked endpoints', () => { expect(message.sent).toBeUndefined() }) - testInFiber('does not retry to send if on hold', async () => { + test('does not retry to send if on hold', async () => { // setLogLevel(LogLevel.DEBUG) Meteor.call(ExternalMessageQueueAPIMethods.toggleHold, message._id) @@ -422,7 +422,7 @@ describe('Test sending messages to mocked endpoints', () => { expect(message.hold).toBe(false) }) - testInFiber('does not retry after retryUntil time', async () => { + test('does not retry after retryUntil time', async () => { // setLogLevel(LogLevel.DEBUG) ExternalMessageQueue.update(message._id, { @@ -435,7 +435,7 @@ describe('Test sending messages to mocked endpoints', () => { expect(sendSlackMessageToWebhook).toHaveBeenCalledTimes(3) }) - testInFiber('can be forced to retry manually once', async () => { + test('can be forced to retry manually once', async () => { // setLogLevel(LogLevel.DEBUG) Meteor.call(ExternalMessageQueueAPIMethods.toggleHold, message._id) @@ -460,7 +460,7 @@ describe('Test sending messages to mocked endpoints', () => { }) }) - testInFiber('send a soap-type message', async () => { + test('send a soap-type message', async () => { // 
setLogLevel(LogLevel.DEBUG) expect( ExternalMessageQueue.findOne({ @@ -504,7 +504,7 @@ describe('Test sending messages to mocked endpoints', () => { ).toBeFalsy() }) - testInFiber('fail to send a soap message', async () => { + test('fail to send a soap message', async () => { // setLogLevel(LogLevel.DEBUG) expect(ExternalMessageQueue.findOne()).toBeFalsy() @@ -546,7 +546,7 @@ describe('Test sending messages to mocked endpoints', () => { expect(ExternalMessageQueue.findOne()).toBeFalsy() }) - testInFiber('fatal error when sending a soap-type message', async () => { + test('fatal error when sending a soap-type message', async () => { // setLogLevel(LogLevel.DEBUG) expect(ExternalMessageQueue.findOne()).toBeFalsy() @@ -597,7 +597,7 @@ describe('Test sending messages to mocked endpoints', () => { expect(ExternalMessageQueue.findOne()).toBeFalsy() }) - testInFiber('send a rabbit MQ-type message', async () => { + test('send a rabbit MQ-type message', async () => { // setLogLevel(LogLevel.DEBUG) expect(ExternalMessageQueue.findOne()).toBeFalsy() @@ -634,7 +634,7 @@ describe('Test sending messages to mocked endpoints', () => { expect(ExternalMessageQueue.findOne()).toBeFalsy() }) - testInFiber('fail to send a rabbitMQ-type message', async () => { + test('fail to send a rabbitMQ-type message', async () => { // setLogLevel(LogLevel.DEBUG) expect(ExternalMessageQueue.findOne()).toBeFalsy() @@ -679,7 +679,7 @@ describe('Test sending messages to mocked endpoints', () => { expect(ExternalMessageQueue.findOne()).toBeFalsy() }) - testInFiber('does not send expired messages', async () => { + test('does not send expired messages', async () => { // setLogLevel(LogLevel.DEBUG) expect(ExternalMessageQueue.findOne()).toBeFalsy() diff --git a/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts b/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts index c3f7d174a74..0256447f515 100644 --- 
a/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts +++ b/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts @@ -341,7 +341,7 @@ describe('Lookahead', () => { await expectLookaheadForLayerMock(playlistId, [expectedCurrent, expectedNext], expectedPrevious, fakeParts) }) - // testInFiber('Pieces', () => { + // test('Pieces', () => { // const fakeParts = partIds.map((p) => ({ _id: p })) as Part[] // getOrderedPartsAfterPlayheadMock.mockReturnValue(fakeParts) diff --git a/packages/job-worker/src/playout/timings/events.ts b/packages/job-worker/src/playout/timings/events.ts index aa3b147f2ad..eeca604b2f0 100644 --- a/packages/job-worker/src/playout/timings/events.ts +++ b/packages/job-worker/src/playout/timings/events.ts @@ -6,7 +6,7 @@ import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyE const EVENT_WAIT_TIME = 500 -const partInstanceTimingDebounceFunctions = new Map() +const partInstanceTimingDebounceFunctions = new Map() /** * Queue a PartInstanceTimings event to be sent diff --git a/packages/live-status-gateway/Dockerfile b/packages/live-status-gateway/Dockerfile index c9b3548b45d..d6a46474ce2 100644 --- a/packages/live-status-gateway/Dockerfile +++ b/packages/live-status-gateway/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:experimental # BUILD IMAGE -FROM node:18 +FROM node:20 WORKDIR /opt COPY package.json lerna.json yarn.lock tsconfig.json ./ @@ -15,7 +15,7 @@ RUN yarn build RUN yarn install --check-files --frozen-lockfile --production --force --ignore-scripts # purge dev-dependencies # DEPLOY IMAGE -FROM node:18-alpine +FROM node:20-alpine RUN apk add --no-cache tzdata COPY --from=0 /opt/package.json /opt/package.json diff --git a/packages/live-status-gateway/Dockerfile.circle b/packages/live-status-gateway/Dockerfile.circle index 4c01a0391e6..9fbbf474e7d 100644 --- a/packages/live-status-gateway/Dockerfile.circle +++ b/packages/live-status-gateway/Dockerfile.circle @@ -1,4 +1,4 @@ -FROM 
node:18-alpine +FROM node:20-alpine RUN apk add --no-cache tzdata COPY package.json /opt/ diff --git a/packages/live-status-gateway/package.json b/packages/live-status-gateway/package.json index e5d15d6575d..bf768b847ee 100644 --- a/packages/live-status-gateway/package.json +++ b/packages/live-status-gateway/package.json @@ -37,7 +37,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": "^14.18 || ^16.14 || ^18.5" + "node": ">=20.18" }, "keywords": [ "broadcast", diff --git a/packages/meteor-lib/package.json b/packages/meteor-lib/package.json index e66255cdfef..0abbc9d4799 100644 --- a/packages/meteor-lib/package.json +++ b/packages/meteor-lib/package.json @@ -30,7 +30,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": "^14.19 || ^16.14 || ^18.12" + "node": ">=20.18" }, "files": [ "/dist", diff --git a/packages/mos-gateway/Dockerfile b/packages/mos-gateway/Dockerfile index 1efa36de621..fe28949d7a2 100644 --- a/packages/mos-gateway/Dockerfile +++ b/packages/mos-gateway/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:experimental # BUILD IMAGE -FROM node:18 +FROM node:20 WORKDIR /opt COPY . . 
@@ -13,7 +13,7 @@ RUN yarn plugin import workspace-tools RUN yarn workspaces focus mos-gateway --production # purge dev-dependencies # DEPLOY IMAGE -FROM node:18-alpine +FROM node:20-alpine RUN apk add --no-cache tzdata COPY --from=0 /opt/package.json /opt/package.json diff --git a/packages/mos-gateway/Dockerfile.circle b/packages/mos-gateway/Dockerfile.circle index e6789a9abc7..0d89e15f17a 100644 --- a/packages/mos-gateway/Dockerfile.circle +++ b/packages/mos-gateway/Dockerfile.circle @@ -1,4 +1,4 @@ -FROM node:18-alpine +FROM node:20-alpine RUN apk add --no-cache tzdata COPY package.json /opt/ diff --git a/packages/mos-gateway/package.json b/packages/mos-gateway/package.json index c9e13e2fa47..3b93188b065 100644 --- a/packages/mos-gateway/package.json +++ b/packages/mos-gateway/package.json @@ -48,7 +48,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": "^14.19 || ^16.14 || ^18.12" + "node": ">=20.18" }, "keywords": [ "mos", diff --git a/packages/openapi/package.json b/packages/openapi/package.json index 474841a0248..78e975d6f48 100644 --- a/packages/openapi/package.json +++ b/packages/openapi/package.json @@ -28,7 +28,7 @@ }, "prettier": "@sofie-automation/code-standard-preset/.prettierrc.json", "engines": { - "node": ">=16.0.0" + "node": ">=20.18" }, "files": [ "/api", diff --git a/packages/package.json b/packages/package.json index 13435f2a653..fbe2eeb83be 100644 --- a/packages/package.json +++ b/packages/package.json @@ -14,7 +14,7 @@ ] }, "scripts": { - "prepare": "(node is_node_14.js && lerna run prepare --ignore @sofie-automation/openapi || lerna run prepare)", + "prepare": "lerna run prepare", "postinstall": "cd .. 
&& \"$PROJECT_CWD/node_modules/.bin/husky\" install", "build": "lerna run build --ignore @sofie-automation/openapi", "build:try": "lerna run --no-bail build --ignore @sofie-automation/openapi", @@ -45,7 +45,7 @@ "@types/ejson": "^2.2.2", "@types/got": "^9.6.12", "@types/jest": "^29.5.11", - "@types/node": "^14.18.63", + "@types/node": "^20.17.6", "@types/node-fetch": "^2.6.11", "@types/object-path": "^0.11.4", "@types/underscore": "^1.11.15", diff --git a/packages/playout-gateway/Dockerfile b/packages/playout-gateway/Dockerfile index cf3d4516fd6..604a3587484 100644 --- a/packages/playout-gateway/Dockerfile +++ b/packages/playout-gateway/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:experimental # BUILD IMAGE -FROM node:18 +FROM node:20 WORKDIR /opt COPY . . @@ -13,7 +13,7 @@ RUN yarn plugin import workspace-tools RUN yarn workspaces focus playout-gateway --production # purge dev-dependencies # DEPLOY IMAGE -FROM node:18-alpine +FROM node:20-alpine RUN apk add --no-cache tzdata COPY --from=0 /opt/package.json /opt/package.json diff --git a/packages/playout-gateway/Dockerfile.circle b/packages/playout-gateway/Dockerfile.circle index 89e32da5d8d..8ceaeb4c3a7 100644 --- a/packages/playout-gateway/Dockerfile.circle +++ b/packages/playout-gateway/Dockerfile.circle @@ -1,4 +1,4 @@ -FROM node:18-alpine +FROM node:20-alpine RUN apk add --no-cache tzdata COPY package.json /opt/ diff --git a/packages/playout-gateway/package.json b/packages/playout-gateway/package.json index cb6bfdc773f..a4864865f42 100644 --- a/packages/playout-gateway/package.json +++ b/packages/playout-gateway/package.json @@ -40,7 +40,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": "^14.19 || ^16.14 || ^18.12" + "node": ">=20.18" }, "keywords": [ "broadcast", diff --git a/packages/playout-gateway/src/tsrHandler.ts b/packages/playout-gateway/src/tsrHandler.ts index 40def381031..f1c647977d2 100644 --- a/packages/playout-gateway/src/tsrHandler.ts +++ 
b/packages/playout-gateway/src/tsrHandler.ts @@ -828,7 +828,7 @@ export class TSRHandler { } private changedResults: PeripheralDeviceAPI.PlayoutChangedResults | undefined = undefined - private sendCallbacksTimeout: NodeJS.Timer | undefined = undefined + private sendCallbacksTimeout: NodeJS.Timeout | undefined = undefined private sendChangedResults = (): void => { this.sendCallbacksTimeout = undefined diff --git a/packages/server-core-integration/package.json b/packages/server-core-integration/package.json index 98605c6d68d..9b2a3cc289f 100644 --- a/packages/server-core-integration/package.json +++ b/packages/server-core-integration/package.json @@ -48,7 +48,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=14.19" + "node": ">=20.18" }, "files": [ "/dist", diff --git a/packages/server-core-integration/src/lib/methods.ts b/packages/server-core-integration/src/lib/methods.ts index d20d2248e95..9c2c616d507 100644 --- a/packages/server-core-integration/src/lib/methods.ts +++ b/packages/server-core-integration/src/lib/methods.ts @@ -53,7 +53,7 @@ interface QueuedMethodCall { } export class ConnectionMethodsQueue { - private _triggerDoQueueTimer: NodeJS.Timer | null = null + private _triggerDoQueueTimer: NodeJS.Timeout | null = null private _timeLastMethodCall = 0 private _timeLastMethodReply = 0 private queuedMethodCalls: Array = [] diff --git a/packages/server-core-integration/src/lib/ping.ts b/packages/server-core-integration/src/lib/ping.ts index 56c8eb1cf29..3c005020831 100644 --- a/packages/server-core-integration/src/lib/ping.ts +++ b/packages/server-core-integration/src/lib/ping.ts @@ -1,5 +1,5 @@ export class CorePinger { - private _pingTimeout: NodeJS.Timer | null = null + private _pingTimeout: NodeJS.Timeout | null = null private _connected = false private _destroyed = false diff --git a/packages/server-core-integration/src/lib/watchDog.ts b/packages/server-core-integration/src/lib/watchDog.ts index 18090587374..17fc1e59a8f 
100644 --- a/packages/server-core-integration/src/lib/watchDog.ts +++ b/packages/server-core-integration/src/lib/watchDog.ts @@ -15,8 +15,8 @@ export type WatchDogEvents = { */ export class WatchDog extends EventEmitter { public timeout: number - private _checkTimeout: NodeJS.Timer | null = null - private _dieTimeout: NodeJS.Timer | null = null + private _checkTimeout: NodeJS.Timeout | null = null + private _dieTimeout: NodeJS.Timeout | null = null private _watching = false private _checkFunctions: WatchDogCheckFunction[] = [] private _runningChecks = false diff --git a/packages/shared-lib/package.json b/packages/shared-lib/package.json index 40c506ca0c8..ca62e23353c 100644 --- a/packages/shared-lib/package.json +++ b/packages/shared-lib/package.json @@ -29,7 +29,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=14.19" + "node": ">=20.18" }, "files": [ "/dist", diff --git a/packages/webui/.eslintrc.cjs b/packages/webui/.eslintrc.cjs index 17d7af8b276..f2437f72558 100644 --- a/packages/webui/.eslintrc.cjs +++ b/packages/webui/.eslintrc.cjs @@ -70,7 +70,6 @@ const tsBase = { allowModules: ['meteor', 'mongodb'], }, ], - 'jest/no-standalone-expect': 'off', // testInFiber confuses the rule ...tmpRules, 'react/react-in-jsx-scope': 'off', diff --git a/packages/webui/package.json b/packages/webui/package.json index a0761dac9f0..90458dd48e9 100644 --- a/packages/webui/package.json +++ b/packages/webui/package.json @@ -108,6 +108,6 @@ "xml2js": "^0.6.2" }, "engines": { - "node": ">=18" + "node": ">=20.18" } } diff --git a/packages/webui/src/__mocks__/mongo.ts b/packages/webui/src/__mocks__/mongo.ts index 160a603b46c..2f31d6400bc 100644 --- a/packages/webui/src/__mocks__/mongo.ts +++ b/packages/webui/src/__mocks__/mongo.ts @@ -246,7 +246,7 @@ export namespace MongoMock { return docs.length } - _ensureIndex(_obj: any) { + createIndex(_obj: any) { // todo } allow() { diff --git 
a/packages/webui/src/client/lib/data/mos/__tests__/plugin-support.test.ts b/packages/webui/src/client/lib/data/mos/__tests__/plugin-support.test.ts index 91fdfb7d947..e6e8dadc32f 100644 --- a/packages/webui/src/client/lib/data/mos/__tests__/plugin-support.test.ts +++ b/packages/webui/src/client/lib/data/mos/__tests__/plugin-support.test.ts @@ -42,6 +42,7 @@ describe('createMosAppInfoXmlString', () => { let ncsAppInfo: any beforeAll(async () => { ncsAppInfo = mos.ncsAppInfo + // eslint-disable-next-line jest/no-standalone-expect expect(ncsAppInfo).toHaveLength(1) ncsAppInfo = ncsAppInfo[0] }) @@ -49,6 +50,7 @@ describe('createMosAppInfoXmlString', () => { let ncsInformation: any beforeAll(async () => { ncsInformation = ncsAppInfo.ncsInformation + // eslint-disable-next-line jest/no-standalone-expect expect(ncsInformation).toHaveLength(1) ncsInformation = ncsInformation[0] }) diff --git a/packages/webui/src/client/lib/viewPort.ts b/packages/webui/src/client/lib/viewPort.ts index 61edc9e6e5a..30ecfed3b7a 100644 --- a/packages/webui/src/client/lib/viewPort.ts +++ b/packages/webui/src/client/lib/viewPort.ts @@ -9,7 +9,7 @@ import { logger } from './logging' const HEADER_MARGIN = 24 // TODOSYNC: TV2 uses 15. If it's needed to be different, it needs to be made generic somehow.. 
const FALLBACK_HEADER_HEIGHT = 65 -let focusInterval: NodeJS.Timer | undefined +let focusInterval: NodeJS.Timeout | undefined let _dontClearInterval = false export function maintainFocusOnPartInstance( diff --git a/packages/webui/src/client/ui/Prompter/PrompterView.tsx b/packages/webui/src/client/ui/Prompter/PrompterView.tsx index 0a473fa9707..535881934eb 100644 --- a/packages/webui/src/client/ui/Prompter/PrompterView.tsx +++ b/packages/webui/src/client/ui/Prompter/PrompterView.tsx @@ -636,7 +636,7 @@ const PrompterContent = withTranslation()( Translated & IPrompterTrackedProps>, {} > { - private _debounceUpdate: NodeJS.Timer | undefined + private _debounceUpdate: NodeJS.Timeout | undefined constructor(props: Translated & IPrompterTrackedProps>) { super(props) diff --git a/packages/webui/src/client/ui/SegmentTimeline/Parts/SegmentTimelinePart.tsx b/packages/webui/src/client/ui/SegmentTimeline/Parts/SegmentTimelinePart.tsx index 1d8f31ce31e..a34cffc5a01 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/Parts/SegmentTimelinePart.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/Parts/SegmentTimelinePart.tsx @@ -245,7 +245,7 @@ export class SegmentTimelinePartClass extends React.Component { if (e && e.partId === this.props.part.partId && !e.pieceId) { diff --git a/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.tsx b/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.tsx index d93b94ce03b..eff3f7b6893 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/SegmentTimeline.tsx @@ -300,7 +300,7 @@ export class SegmentTimelineClass extends React.Component { if (e.segmentId === this.props.segment._id && !e.partId && !e.pieceId) { diff --git a/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx b/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx index 5e456672180..94b71a65908 100644 --- 
a/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx @@ -411,7 +411,7 @@ export const SourceLayerItem = withTranslation()( // } // } - private highlightTimeout: NodeJS.Timer | undefined + private highlightTimeout: NodeJS.Timeout | undefined private onHighlight = (e: HighlightEvent) => { if ( diff --git a/packages/webui/src/client/ui/Shelf/TimelineDashboardPanel.tsx b/packages/webui/src/client/ui/Shelf/TimelineDashboardPanel.tsx index 79512a894d7..da19f6b97ec 100644 --- a/packages/webui/src/client/ui/Shelf/TimelineDashboardPanel.tsx +++ b/packages/webui/src/client/ui/Shelf/TimelineDashboardPanel.tsx @@ -39,7 +39,7 @@ export const TimelineDashboardPanel = React.memo( const TimelineDashboardPanelContent = withTranslation()( class TimelineDashboardPanelContent extends DashboardPanelInner { liveLine: HTMLDivElement | null = null - scrollIntoViewTimeout: NodeJS.Timer | undefined = undefined + scrollIntoViewTimeout: NodeJS.Timeout | undefined = undefined constructor(props: Translated) { super(props) diff --git a/packages/webui/src/meteor/meteor.js b/packages/webui/src/meteor/meteor.js index 7213f22d1c2..ada02242205 100644 --- a/packages/webui/src/meteor/meteor.js +++ b/packages/webui/src/meteor/meteor.js @@ -3,13 +3,6 @@ const Meteor = { console.debug(...args) }, - _suppressed_log_expected: () => { - return true - }, - _suppress_log: (i) => { - // - }, - _setImmediate: (cb) => { return setTimeout(cb, 0) }, diff --git a/packages/webui/src/meteor/tracker.js b/packages/webui/src/meteor/tracker.js index 632352b1a50..297ff7fc668 100644 --- a/packages/webui/src/meteor/tracker.js +++ b/packages/webui/src/meteor/tracker.js @@ -51,18 +51,6 @@ function _debugFunc() { : function () {} } -function _maybeSuppressMoreLogs(messagesLength) { - // Sometimes when running tests, we intentionally suppress logs on expected - // printed errors. 
Since the current implementation of _throwOrLog can log - // multiple separate log messages, suppress all of them if at least one suppress - // is expected as we still want them to count as one. - if (typeof Meteor !== 'undefined') { - if (Meteor._suppressed_log_expected()) { - Meteor._suppress_log(messagesLength - 1) - } - } -} - function _throwOrLog(from, e) { if (throwFirstError) { throw e @@ -78,7 +66,6 @@ function _throwOrLog(from, e) { } } printArgs.push(e.stack) - _maybeSuppressMoreLogs(printArgs.length) for (var i = 0; i < printArgs.length; i++) { _debugFunc()(printArgs[i]) diff --git a/packages/yarn.lock b/packages/yarn.lock index ac3dffc9e20..b831a95ad1b 100644 --- a/packages/yarn.lock +++ b/packages/yarn.lock @@ -2637,21 +2637,19 @@ __metadata: languageName: node linkType: hard -"@elastic/ecs-helpers@npm:^1.1.0": - version: 1.1.0 - resolution: "@elastic/ecs-helpers@npm:1.1.0" - dependencies: - fast-json-stringify: ^2.4.1 - checksum: 8f64e86fe3cfe67540fd8c2a62e0b7db1f4e8cab8c4a63e2f49ce295c3d3b629d0f3363b0107fe530650898a89b5b1d86190717447a481932856651911e6ef61 +"@elastic/ecs-helpers@npm:^2.1.1": + version: 2.1.1 + resolution: "@elastic/ecs-helpers@npm:2.1.1" + checksum: 80db727963e26a28312c67e47cbc40bfe5441ff8937dc27157a7f968fcd20475345a19a6588cc1c6b97b2eeaf5990f2285eaf2f03e206f3c2cd9965160d3fdaa languageName: node linkType: hard -"@elastic/ecs-pino-format@npm:^1.2.0": - version: 1.3.0 - resolution: "@elastic/ecs-pino-format@npm:1.3.0" +"@elastic/ecs-pino-format@npm:^1.5.0": + version: 1.5.0 + resolution: "@elastic/ecs-pino-format@npm:1.5.0" dependencies: - "@elastic/ecs-helpers": ^1.1.0 - checksum: 1543b80b84e3f35b6be35b73b5d0153d267c357df750ba77af2c8d7b07097df8095f104d283e46b6500c902815327a1ad0005aa2e3855afbddbac0b683b72c6c + "@elastic/ecs-helpers": ^2.1.1 + checksum: e66a1801ecafa5d1f56037df8dafa9da9c302440c8254c636374e489a5a50e85166b77c951c1f8d3edd99cb77f43b5967c3f31a68cac23b2a4b95a87ce72b066 languageName: node linkType: hard @@ -5216,7 +5214,7 @@ 
__metadata: "@sofie-automation/shared-lib": 1.52.0-in-development amqplib: ^0.10.3 deepmerge: ^4.3.1 - elastic-apm-node: ^3.51.0 + elastic-apm-node: ^4.8.0 eventemitter3: ^4.0.7 jest-mock-extended: ^3.0.5 mongodb: ^5.9.2 @@ -6467,19 +6465,12 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:*, @types/node@npm:>=12.0.0, @types/node@npm:>=13.7.0": - version: 20.11.22 - resolution: "@types/node@npm:20.11.22" +"@types/node@npm:*, @types/node@npm:>=12.0.0, @types/node@npm:>=13.7.0, @types/node@npm:^20.17.6": + version: 20.17.6 + resolution: "@types/node@npm:20.17.6" dependencies: - undici-types: ~5.26.4 - checksum: ef8fd0b561c3c9810f3c23c990c856619232934e54308c84e79d4e39d44b66668eceb6eca89c64ebcbc78fb514446661ad58b0f8e6b5fa3d9ed9ff0983aac4ef - languageName: node - linkType: hard - -"@types/node@npm:^14.18.63": - version: 14.18.63 - resolution: "@types/node@npm:14.18.63" - checksum: be909061a54931778c71c49dc562586c32f909c4b6197e3d71e6dac726d8bd9fccb9f599c0df99f52742b68153712b5097c0f00cac4e279fa894b0ea6719a8fd + undici-types: ~6.19.2 + checksum: d51dbb9881c94d0310b32b5fd8013e3261595c61bc888fa27258469c93c3dc0b3c4d20a9f28f3f5f79562f6737e28e7f3dd04940dc8b4d966d34aaf318f7f69b languageName: node linkType: hard @@ -7367,6 +7358,15 @@ __metadata: languageName: node linkType: hard +"acorn-import-attributes@npm:^1.9.5": + version: 1.9.5 + resolution: "acorn-import-attributes@npm:1.9.5" + peerDependencies: + acorn: ^8 + checksum: 1c0c49b6a244503964ae46ae850baccf306e84caf99bc2010ed6103c69a423987b07b520a6c619f075d215388bd4923eccac995886a54309eda049ab78a4be95 + languageName: node + linkType: hard + "acorn-jsx@npm:^5.0.0, acorn-jsx@npm:^5.3.2": version: 5.3.2 resolution: "acorn-jsx@npm:5.3.2" @@ -7547,7 +7547,7 @@ __metadata: languageName: node linkType: hard -"ajv@npm:^6.11.0, ajv@npm:^6.12.2, ajv@npm:^6.12.4, ajv@npm:^6.12.5": +"ajv@npm:^6.12.2, ajv@npm:^6.12.4, ajv@npm:^6.12.5": version: 6.12.6 resolution: "ajv@npm:6.12.6" dependencies: @@ -8027,15 +8027,6 @@ 
asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"async-cache@npm:^1.1.0": - version: 1.1.0 - resolution: "async-cache@npm:1.1.0" - dependencies: - lru-cache: ^4.0.0 - checksum: 3f55cc78b3ddc745b6604dd144fc7bca2e21c7ba4c5ea18d312234dc625133511723dff6c71b2283582421f95d591bdb24bf89ce4c4869151e4ecedbdad4acf2 - languageName: node - linkType: hard - "async-value-promise@npm:^1.1.1": version: 1.1.1 resolution: "async-value-promise@npm:1.1.1" @@ -8430,6 +8421,13 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard +"bignumber.js@npm:^9.0.0": + version: 9.1.2 + resolution: "bignumber.js@npm:9.1.2" + checksum: 582c03af77ec9cb0ebd682a373ee6c66475db94a4325f92299621d544aa4bd45cb45fd60001610e94aef8ae98a0905fa538241d9638d4422d57abbeeac6fadaf + languageName: node + linkType: hard + "bin-links@npm:^3.0.3": version: 3.0.3 resolution: "bin-links@npm:3.0.3" @@ -9997,13 +9995,20 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"cookie@npm:0.5.0, cookie@npm:^0.5.0": +"cookie@npm:0.5.0": version: 0.5.0 resolution: "cookie@npm:0.5.0" checksum: 1f4bd2ca5765f8c9689a7e8954183f5332139eb72b6ff783d8947032ec1fdf43109852c178e21a953a30c0dd42257828185be01b49d1eb1a67fd054ca588a180 languageName: node linkType: hard +"cookie@npm:^0.7.1": + version: 0.7.2 + resolution: "cookie@npm:0.7.2" + checksum: 9bf8555e33530affd571ea37b615ccad9b9a34febbf2c950c86787088eb00a8973690833b0f8ebd6b69b753c62669ea60cec89178c1fb007bf0749abed74f93e + languageName: node + linkType: hard + "copy-text-to-clipboard@npm:^3.2.0": version: 3.2.0 resolution: "copy-text-to-clipboard@npm:3.2.0" @@ -11792,21 +11797,20 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"elastic-apm-node@npm:^3.51.0": - version: 3.51.0 - resolution: "elastic-apm-node@npm:3.51.0" +"elastic-apm-node@npm:^4.8.0": + version: 4.8.0 + resolution: "elastic-apm-node@npm:4.8.0" dependencies: - "@elastic/ecs-pino-format": ^1.2.0 + "@elastic/ecs-pino-format": ^1.5.0 "@opentelemetry/api": ^1.4.1 
"@opentelemetry/core": ^1.11.0 "@opentelemetry/sdk-metrics": ^1.12.0 after-all-results: ^2.0.0 agentkeepalive: ^4.2.1 - async-cache: ^1.1.0 async-value-promise: ^1.1.1 basic-auth: ^2.0.1 breadth-filter: ^2.0.0 - cookie: ^0.5.0 + cookie: ^0.7.1 core-util-is: ^1.0.2 end-of-stream: ^1.4.4 error-callsites: ^2.0.4 @@ -11815,26 +11819,26 @@ asn1@evs-broadcast/node-asn1: fast-safe-stringify: ^2.0.7 fast-stream-to-buffer: ^1.0.0 http-headers: ^3.0.2 - import-in-the-middle: 1.4.2 - is-native: ^1.0.1 - lru-cache: ^6.0.0 + import-in-the-middle: 1.11.2 + json-bigint: ^1.0.0 + lru-cache: 10.2.0 measured-reporting: ^1.51.1 module-details-from-path: ^1.0.3 monitor-event-loop-delay: ^1.0.0 object-filter-sequence: ^1.0.0 object-identity-map: ^1.0.2 original-url: ^1.2.3 - pino: ^6.11.2 - readable-stream: ^3.4.0 + pino: ^8.15.0 + readable-stream: ^3.6.2 relative-microtime: ^2.0.0 require-in-the-middle: ^7.1.1 - semver: ^6.3.1 + semver: ^7.5.4 shallow-clone-shim: ^2.0.0 source-map: ^0.8.0-beta.0 sql-summary: ^1.0.1 stream-chopper: ^3.0.1 unicode-byte-truncate: ^1.0.0 - checksum: e6a801e731d6a5178e7450c76e88b9a519823129986365b2f59c4ee8e02c2a0e624deacebce6e85ae0964f6ea876a9f562901ca0b3538f4b0452a24d7f1b0303 + checksum: 4c6534481540b08412096ff192c67b8dcc9501672b57bcc2f6ad159e84a61f5c32a63c6a7edfe8fb289a6854e33e1278e0aa04b5cadcbb1c9cc51325761be45d languageName: node linkType: hard @@ -13116,18 +13120,6 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"fast-json-stringify@npm:^2.4.1": - version: 2.7.13 - resolution: "fast-json-stringify@npm:2.7.13" - dependencies: - ajv: ^6.11.0 - deepmerge: ^4.2.2 - rfdc: ^1.2.0 - string-similarity: ^4.0.1 - checksum: f78ab25047c790de5b521c369e0b18c595055d48a106add36e9f86fe45be40226f168ff4708a226e187d0b46f1d6b32129842041728944bd9a03ca5efbbe4ccb - languageName: node - linkType: hard - "fast-levenshtein@npm:^2.0.6": version: 2.0.6 resolution: "fast-levenshtein@npm:2.0.6" @@ -13142,14 +13134,14 @@ asn1@evs-broadcast/node-asn1: 
languageName: node linkType: hard -"fast-redact@npm:^3.0.0": - version: 3.3.0 - resolution: "fast-redact@npm:3.3.0" - checksum: 3f7becc70a5a2662a9cbfdc52a4291594f62ae998806ee00315af307f32d9559dbf512146259a22739ee34401950ef47598c1f4777d33b0ed5027203d67f549c +"fast-redact@npm:^3.1.1": + version: 3.5.0 + resolution: "fast-redact@npm:3.5.0" + checksum: ef03f0d1849da074a520a531ad299bf346417b790a643931ab4e01cb72275c8d55b60dc8512fb1f1818647b696790edefaa96704228db9f012da935faa1940af languageName: node linkType: hard -"fast-safe-stringify@npm:2.1.1, fast-safe-stringify@npm:^2.0.7, fast-safe-stringify@npm:^2.0.8": +"fast-safe-stringify@npm:2.1.1, fast-safe-stringify@npm:^2.0.7": version: 2.1.1 resolution: "fast-safe-stringify@npm:2.1.1" checksum: a851cbddc451745662f8f00ddb622d6766f9bd97642dabfd9a405fb0d646d69fc0b9a1243cbf67f5f18a39f40f6fa821737651ff1bceeba06c9992ca2dc5bd3d @@ -13426,13 +13418,6 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"flatstr@npm:^1.0.12": - version: 1.0.12 - resolution: "flatstr@npm:1.0.12" - checksum: e1bb562c94b119e958bf37e55738b172b5f8aaae6532b9660ecd877779f8559dbbc89613ba6b29ccc13447e14c59277d41450f785cf75c30df9fce62f459e9a8 - languageName: node - linkType: hard - "flatted@npm:^3.2.7": version: 3.2.9 resolution: "flatted@npm:3.2.9" @@ -15129,15 +15114,15 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"import-in-the-middle@npm:1.4.2": - version: 1.4.2 - resolution: "import-in-the-middle@npm:1.4.2" +"import-in-the-middle@npm:1.11.2": + version: 1.11.2 + resolution: "import-in-the-middle@npm:1.11.2" dependencies: acorn: ^8.8.2 - acorn-import-assertions: ^1.9.0 + acorn-import-attributes: ^1.9.5 cjs-module-lexer: ^1.2.2 module-details-from-path: ^1.0.3 - checksum: 52971f821e9a3c94834cd5cf0ab5178321c07d4f4babd547b3cb24c4de21670d05b42ca1523890e7e90525c3bba6b7db7e54cf45421919b0b2712a34faa96ea5 + checksum: 
06fb73100a918e00778779713119236cc8d3d4656aae9076a18159cfcd28eb0cc26e0a5040d11da309c5f8f8915c143b8d74e73c0734d3f5549b1813d1008bb9 languageName: node linkType: hard @@ -15686,16 +15671,6 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"is-native@npm:^1.0.1": - version: 1.0.1 - resolution: "is-native@npm:1.0.1" - dependencies: - is-nil: ^1.0.0 - to-source-code: ^1.0.0 - checksum: 4967af8c4d7a06076cb16ef70fba5a5a2b61ef0a83d4d5dce437cf4c6b5315255cccf07db37d487bcdf2f0ded86edb166a62c46a712cfda1227532b70015029c - languageName: node - linkType: hard - "is-negative-zero@npm:^2.0.3": version: 2.0.3 resolution: "is-negative-zero@npm:2.0.3" @@ -15703,13 +15678,6 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"is-nil@npm:^1.0.0": - version: 1.0.1 - resolution: "is-nil@npm:1.0.1" - checksum: e5b89c3b82068e719372381c5aaa5f3f28d09e6d501d7f7e4365f136433de1ae92f9f82eeedcb3c3282da1ccf374aad46cc06feab2647d2820067c4a35484760 - languageName: node - linkType: hard - "is-npm@npm:^6.0.0": version: 6.0.0 resolution: "is-npm@npm:6.0.0" @@ -16834,6 +16802,15 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard +"json-bigint@npm:^1.0.0": + version: 1.0.0 + resolution: "json-bigint@npm:1.0.0" + dependencies: + bignumber.js: ^9.0.0 + checksum: c67bb93ccb3c291e60eb4b62931403e378906aab113ec1c2a8dd0f9a7f065ad6fd9713d627b732abefae2e244ac9ce1721c7a3142b2979532f12b258634ce6f6 + languageName: node + linkType: hard + "json-buffer@npm:3.0.1": version: 3.0.1 resolution: "json-buffer@npm:3.0.1" @@ -17699,13 +17676,10 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"lru-cache@npm:^4.0.0": - version: 4.1.5 - resolution: "lru-cache@npm:4.1.5" - dependencies: - pseudomap: ^1.0.2 - yallist: ^2.1.2 - checksum: 4bb4b58a36cd7dc4dcec74cbe6a8f766a38b7426f1ff59d4cf7d82a2aa9b9565cd1cb98f6ff60ce5cd174524868d7bc9b7b1c294371851356066ca9ac4cf135a +"lru-cache@npm:10.2.0, lru-cache@npm:^9.1.1 || ^10.0.0": + version: 10.2.0 + resolution: 
"lru-cache@npm:10.2.0" + checksum: eee7ddda4a7475deac51ac81d7dd78709095c6fa46e8350dc2d22462559a1faa3b81ed931d5464b13d48cbd7e08b46100b6f768c76833912bc444b99c37e25db languageName: node linkType: hard @@ -17734,13 +17708,6 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"lru-cache@npm:^9.1.1 || ^10.0.0": - version: 10.0.1 - resolution: "lru-cache@npm:10.0.1" - checksum: 06f8d0e1ceabd76bb6f644a26dbb0b4c471b79c7b514c13c6856113879b3bf369eb7b497dad4ff2b7e2636db202412394865b33c332100876d838ad1372f0181 - languageName: node - linkType: hard - "lru-queue@npm:^0.1.0": version: 0.1.0 resolution: "lru-queue@npm:0.1.0" @@ -20583,6 +20550,13 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard +"on-exit-leak-free@npm:^2.1.0": + version: 2.1.2 + resolution: "on-exit-leak-free@npm:2.1.2" + checksum: 6ce7acdc7b9ceb51cf029b5239cbf41937ee4c8dcd9d4e475e1777b41702564d46caa1150a744e00da0ac6d923ab83471646a39a4470f97481cf6e2d8d253c3f + languageName: node + linkType: hard + "on-finished@npm:2.4.1": version: 2.4.1 resolution: "on-finished@npm:2.4.1" @@ -21052,7 +21026,7 @@ asn1@evs-broadcast/node-asn1: "@types/ejson": ^2.2.2 "@types/got": ^9.6.12 "@types/jest": ^29.5.11 - "@types/node": ^14.18.63 + "@types/node": ^20.17.6 "@types/node-fetch": ^2.6.11 "@types/object-path": ^0.11.4 "@types/underscore": ^1.11.15 @@ -21625,27 +21599,41 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"pino-std-serializers@npm:^3.1.0": - version: 3.2.0 - resolution: "pino-std-serializers@npm:3.2.0" - checksum: 77e29675b116e42ae9fe6d4ef52ef3a082ffc54922b122d85935f93ddcc20277f0b0c873c5c6c5274a67b0409c672aaae3de6bcea10a2d84699718dda55ba95b +"pino-abstract-transport@npm:^1.2.0": + version: 1.2.0 + resolution: "pino-abstract-transport@npm:1.2.0" + dependencies: + readable-stream: ^4.0.0 + split2: ^4.0.0 + checksum: 3336c51fb91ced5ef8a4bfd70a96e41eb6deb905698e83350dc71eedffb34795db1286d2d992ce1da2f6cd330a68be3f7e2748775a6b8a2ee3416796070238d6 languageName: node 
linkType: hard -"pino@npm:^6.11.2": - version: 6.14.0 - resolution: "pino@npm:6.14.0" +"pino-std-serializers@npm:^6.0.0": + version: 6.2.2 + resolution: "pino-std-serializers@npm:6.2.2" + checksum: aeb0662edc46ec926de9961ed4780a4f0586bb7c37d212cd469c069639e7816887a62c5093bc93f260a4e0900322f44fc8ab1343b5a9fa2864a888acccdb22a4 + languageName: node + linkType: hard + +"pino@npm:^8.15.0": + version: 8.21.0 + resolution: "pino@npm:8.21.0" dependencies: - fast-redact: ^3.0.0 - fast-safe-stringify: ^2.0.8 - flatstr: ^1.0.12 - pino-std-serializers: ^3.1.0 - process-warning: ^1.0.0 + atomic-sleep: ^1.0.0 + fast-redact: ^3.1.1 + on-exit-leak-free: ^2.1.0 + pino-abstract-transport: ^1.2.0 + pino-std-serializers: ^6.0.0 + process-warning: ^3.0.0 quick-format-unescaped: ^4.0.3 - sonic-boom: ^1.0.2 + real-require: ^0.2.0 + safe-stable-stringify: ^2.3.1 + sonic-boom: ^3.7.0 + thread-stream: ^2.6.0 bin: pino: bin.js - checksum: eb13e12e3a3d682abe4a4da426455a9f4e041e55e4fa57d72d9677ee8d188a9c952f69347e728a3761c8262cdce76ef24bee29e1a53ab15aa9c5e851099163d0 + checksum: d895c37cfcb7ade33ad7ac4ca54c0497ab719ec726e42b7c7b9697e07572a09a7c7de18d751440769c3ea5ecbac2075fdac720cf182720a4764defe3de8a1411 languageName: node linkType: hard @@ -22298,10 +22286,10 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"process-warning@npm:^1.0.0": - version: 1.0.0 - resolution: "process-warning@npm:1.0.0" - checksum: c708a03241deec3cabaeee39c4f9ee8c4d71f1c5ef9b746c8252cdb952a6059068cfcdaf348399775244cbc441b6ae5e26a9c87ed371f88335d84f26d19180f9 +"process-warning@npm:^3.0.0": + version: 3.0.0 + resolution: "process-warning@npm:3.0.0" + checksum: 1fc2eb4524041de3c18423334cc8b4e36bec5ad5472640ca1a936122c6e01da0864c1a4025858ef89aea93eabe7e77db93ccea225b10858617821cb6a8719efe languageName: node linkType: hard @@ -22477,13 +22465,6 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"pseudomap@npm:^1.0.2": - version: 1.0.2 - resolution: "pseudomap@npm:1.0.2" - checksum: 
856c0aae0ff2ad60881168334448e898ad7a0e45fe7386d114b150084254c01e200c957cf378378025df4e052c7890c5bd933939b0e0d2ecfcc1dc2f0b2991f5 - languageName: node - linkType: hard - "psl@npm:^1.1.33": version: 1.9.0 resolution: "psl@npm:1.9.0" @@ -23402,7 +23383,7 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"readable-stream@npm:3, readable-stream@npm:^3.0.0, readable-stream@npm:^3.0.2, readable-stream@npm:^3.0.6, readable-stream@npm:^3.1.1, readable-stream@npm:^3.4.0, readable-stream@npm:^3.5.0, readable-stream@npm:^3.6.0": +"readable-stream@npm:3, readable-stream@npm:^3.0.0, readable-stream@npm:^3.0.2, readable-stream@npm:^3.0.6, readable-stream@npm:^3.1.1, readable-stream@npm:^3.4.0, readable-stream@npm:^3.5.0, readable-stream@npm:^3.6.0, readable-stream@npm:^3.6.2": version: 3.6.2 resolution: "readable-stream@npm:3.6.2" dependencies: @@ -23428,16 +23409,16 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"readable-stream@npm:^4.1.0": - version: 4.4.2 - resolution: "readable-stream@npm:4.4.2" +"readable-stream@npm:^4.0.0, readable-stream@npm:^4.1.0": + version: 4.5.2 + resolution: "readable-stream@npm:4.5.2" dependencies: abort-controller: ^3.0.0 buffer: ^6.0.3 events: ^3.3.0 process: ^0.11.10 string_decoder: ^1.3.0 - checksum: 6f4063763dbdb52658d22d3f49ca976420e1fbe16bbd241f744383715845350b196a2f08b8d6330f8e219153dff34b140aeefd6296da828e1041a7eab1f20d5e + checksum: c4030ccff010b83e4f33289c535f7830190773e274b3fcb6e2541475070bdfd69c98001c3b0cb78763fc00c8b62f514d96c2b10a8bd35d5ce45203a25fa1d33a languageName: node linkType: hard @@ -23490,6 +23471,13 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard +"real-require@npm:^0.2.0": + version: 0.2.0 + resolution: "real-require@npm:0.2.0" + checksum: fa060f19f2f447adf678d1376928c76379dce5f72bd334da301685ca6cdcb7b11356813332cc243c88470796bc2e2b1e2917fc10df9143dd93c2ea608694971d + languageName: node + linkType: hard + "rechoir@npm:^0.6.2": version: 0.6.2 resolution: 
"rechoir@npm:0.6.2" @@ -23998,7 +23986,7 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"rfdc@npm:^1.2.0, rfdc@npm:^1.3.0": +"rfdc@npm:^1.3.0": version: 1.3.0 resolution: "rfdc@npm:1.3.0" checksum: fb2ba8512e43519983b4c61bd3fa77c0f410eff6bae68b08614437bc3f35f91362215f7b4a73cbda6f67330b5746ce07db5dd9850ad3edc91271ad6deea0df32 @@ -24947,13 +24935,12 @@ asn1@evs-broadcast/node-asn1: languageName: unknown linkType: soft -"sonic-boom@npm:^1.0.2": - version: 1.4.1 - resolution: "sonic-boom@npm:1.4.1" +"sonic-boom@npm:^3.7.0": + version: 3.8.1 + resolution: "sonic-boom@npm:3.8.1" dependencies: atomic-sleep: ^1.0.0 - flatstr: ^1.0.12 - checksum: 189fa8fe5c2dc05d3513fc1a4926a2f16f132fa6fa0b511745a436010cdcd9c1d3b3cb6a9d7c05bd32a965dc77673a5ac0eb0992e920bdedd16330d95323124f + checksum: 79c90d7a2f928489fd3d4b68d8f8d747a426ca6ccf83c3b102b36f899d4524463dd310982ab7ab6d6bcfd34b7c7c281ad25e495ad71fbff8fd6fa86d6273fc6b languageName: node linkType: hard @@ -25159,6 +25146,13 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard +"split2@npm:^4.0.0": + version: 4.2.0 + resolution: "split2@npm:4.2.0" + checksum: 05d54102546549fe4d2455900699056580cca006c0275c334611420f854da30ac999230857a85fdd9914dc2109ae50f80fda43d2a445f2aa86eccdc1dfce779d + languageName: node + linkType: hard + "split@npm:^1.0.0": version: 1.0.1 resolution: "split@npm:1.0.1" @@ -25344,13 +25338,6 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"string-similarity@npm:^4.0.1": - version: 4.0.4 - resolution: "string-similarity@npm:4.0.4" - checksum: 797b41b24e1eb6b3b0ab896950b58c295a19a82933479c75f7b5279ffb63e0b456a8c8d10329c02f607ca1a50370e961e83d552aa468ff3b0fa15809abc9eff7 - languageName: node - linkType: hard - "string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^1.0.2 || 2 || 3 || 4, string-width@npm:^4.1.0, string-width@npm:^4.2.0, string-width@npm:^4.2.3": version: 4.2.3 resolution: "string-width@npm:4.2.3" @@ -25976,6 +25963,15 @@ 
asn1@evs-broadcast/node-asn1: languageName: node linkType: hard +"thread-stream@npm:^2.6.0": + version: 2.7.0 + resolution: "thread-stream@npm:2.7.0" + dependencies: + real-require: ^0.2.0 + checksum: 75ab019cda628344c7779e5f5a88f7759764efd29d320327ad2e6c2622778b5f1c43a3966d76a9ee5744086d61c680b413548f5521030f9e9055487684436165 + languageName: node + linkType: hard + "threadedclass@npm:^1.2.1, threadedclass@npm:^1.2.2": version: 1.2.2 resolution: "threadedclass@npm:1.2.2" @@ -26164,15 +26160,6 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"to-source-code@npm:^1.0.0": - version: 1.0.2 - resolution: "to-source-code@npm:1.0.2" - dependencies: - is-nil: ^1.0.0 - checksum: 24fd24767f185ad11f81c1e020c2f789fba29471195227731530ec39b2697bb680c16e1f6f7d0d68bffba81e3d95e68dd6014f8c88371399bddcf8c4ad036de3 - languageName: node - linkType: hard - "toidentifier@npm:1.0.1": version: 1.0.1 resolution: "toidentifier@npm:1.0.1" @@ -26847,10 +26834,10 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"undici-types@npm:~5.26.4": - version: 5.26.5 - resolution: "undici-types@npm:5.26.5" - checksum: 3192ef6f3fd5df652f2dc1cd782b49d6ff14dc98e5dced492aa8a8c65425227da5da6aafe22523c67f035a272c599bb89cfe803c1db6311e44bed3042fc25487 +"undici-types@npm:~6.19.2": + version: 6.19.8 + resolution: "undici-types@npm:6.19.8" + checksum: de51f1b447d22571cf155dfe14ff6d12c5bdaec237c765085b439c38ca8518fc360e88c70f99469162bf2e14188a7b0bcb06e1ed2dc031042b984b0bb9544017 languageName: node linkType: hard @@ -28356,13 +28343,6 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"yallist@npm:^2.1.2": - version: 2.1.2 - resolution: "yallist@npm:2.1.2" - checksum: 9ba99409209f485b6fcb970330908a6d41fa1c933f75e08250316cce19383179a6b70a7e0721b89672ebb6199cc377bf3e432f55100da6a7d6e11902b0a642cb - languageName: node - linkType: hard - "yallist@npm:^3.0.2": version: 3.1.1 resolution: "yallist@npm:3.1.1" diff --git a/scripts/fixTestFibers.js 
b/scripts/fixTestFibers.js deleted file mode 100644 index 2cad9c0e6bc..00000000000 --- a/scripts/fixTestFibers.js +++ /dev/null @@ -1,21 +0,0 @@ -// Fix fibers implementation, so it runs with Jest: - -const fs = require('fs') - -const filePath = './node_modules/fibers-npm/fibers.js' - -const stringToInsert = '\t\tif (process.env.JEST_WORKER_ID !== undefined ) modPath += \'.node\'' -const insertLineNumber = 13 - -const lines = fs.readFileSync(filePath).toString().split('\n') - -// Insert line: -if (lines[insertLineNumber].trim() !== stringToInsert.trim() ) { - console.log(`Inserting Jest-fix line into ${filePath}`) - lines.splice(insertLineNumber, 0, stringToInsert) -} -const text = lines.join('\n') - -fs.writeFile(filePath, text, function (err) { - if (err) return console.log(err) -}) diff --git a/scripts/run.mjs b/scripts/run.mjs index bc102ea8293..3d15810fc55 100644 --- a/scripts/run.mjs +++ b/scripts/run.mjs @@ -31,13 +31,13 @@ function watchWorker() { function watchMeteor() { return [ { - command: "meteor yarn watch-types --preserveWatchOutput", + command: "yarn watch-types --preserveWatchOutput", cwd: "meteor", name: "METEOR-TSC", prefixColor: "blue", }, { - command: `meteor yarn debug${config.inspectMeteor ? " --inspect" : ""}${ + command: `yarn debug${config.inspectMeteor ? " --inspect" : ""}${ config.verbose ? " --verbose" : "" }`, cwd: "meteor", diff --git a/sonar-project.properties b/sonar-project.properties index f07f1552af2..3811d59372d 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -11,7 +11,7 @@ sonar.organization=nrkno # Encoding of the source code. 
Default is default system encoding #sonar.sourceEncoding=UTF-8 -sonar.exclusions=meteor/__mocks__/check/**,meteor/eslint-rules/*.js,packages/documentation/** +sonar.exclusions=meteor/__mocks__/check/**,packages/documentation/** sonar.issue.ignore.multicriteria=ternary,todo,nullish,redundantalias,switchstatement3cases,preferoptionalchain From 7d12edf3d8bf041efcc61b8cefc651e5c5fbdd06 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 6 Nov 2024 08:33:00 +0000 Subject: [PATCH 57/81] fix: publications not becoming ready when stopping evaluation early --- meteor/server/publications/organization.ts | 10 ++- .../server/publications/peripheralDevice.ts | 5 +- meteor/server/publications/rundown.ts | 85 +++++++++++++++---- meteor/server/publications/rundownPlaylist.ts | 10 ++- meteor/server/publications/showStyle.ts | 20 ++++- meteor/server/publications/studio.ts | 20 ++++- 6 files changed, 120 insertions(+), 30 deletions(-) diff --git a/meteor/server/publications/organization.ts b/meteor/server/publications/organization.ts index f596d8b3c6f..2ee8baecbb4 100644 --- a/meteor/server/publications/organization.ts +++ b/meteor/server/publications/organization.ts @@ -18,7 +18,10 @@ import { getCurrentTime } from '../lib/lib' meteorPublish( MeteorPubSub.organization, async function (organizationId: OrganizationId | null, token: string | undefined) { - if (!organizationId) return null + if (!organizationId) { + this.ready() + return null + } const { cred, selector } = await AutoFillSelector.organizationId(this.userId, { _id: organizationId }, token) const modifier: FindOptions = { @@ -43,7 +46,10 @@ meteorPublish(CorelibPubSub.blueprints, async function (blueprintIds: BlueprintI check(blueprintIds, Match.Maybe(Array)) // If values were provided, they must have values - if (blueprintIds && blueprintIds.length === 0) return null + if (blueprintIds && blueprintIds.length === 0) { + this.ready() + return null + } const { cred, selector } = await 
AutoFillSelector.organizationId(this.userId, {}, token) diff --git a/meteor/server/publications/peripheralDevice.ts b/meteor/server/publications/peripheralDevice.ts index 1ead8e0e6d3..4983dbda004 100644 --- a/meteor/server/publications/peripheralDevice.ts +++ b/meteor/server/publications/peripheralDevice.ts @@ -44,7 +44,10 @@ meteorPublish( check(peripheralDeviceIds, Match.Maybe(Array)) // If values were provided, they must have values - if (peripheralDeviceIds && peripheralDeviceIds.length === 0) return null + if (peripheralDeviceIds && peripheralDeviceIds.length === 0) { + this.ready() + return null + } const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) diff --git a/meteor/server/publications/rundown.ts b/meteor/server/publications/rundown.ts index f939a9baffa..f3b9710c1ba 100644 --- a/meteor/server/publications/rundown.ts +++ b/meteor/server/publications/rundown.ts @@ -65,7 +65,10 @@ meteorPublish(PeripheralDevicePubSub.rundownsForDevice, async function (deviceId throw new Meteor.Error(403, 'Publication can only be used by authorized PeripheralDevices') // No studio, then no rundowns - if (!resolvedCred.device.studioId) return null + if (!resolvedCred.device.studioId) { + this.ready() + return null + } selector.studioId = resolvedCred.device.studioId @@ -87,7 +90,10 @@ meteorPublish( check(playlistIds, Array) // If values were provided, they must have values - if (playlistIds.length === 0) return null + if (playlistIds.length === 0) { + this.ready() + return null + } const { cred, selector } = await AutoFillSelector.organizationId( this.userId, @@ -121,7 +127,10 @@ meteorPublish( async function (showStyleBaseIds: ShowStyleBaseId[], token: string | undefined) { check(showStyleBaseIds, Array) - if (showStyleBaseIds.length === 0) return null + if (showStyleBaseIds.length === 0) { + this.ready() + return null + } const { cred, selector } = await AutoFillSelector.organizationId( this.userId, @@ -156,7 +165,10 @@ meteorPublish( 
async function (rundownIds: RundownId[], filter: { omitHidden?: boolean } | undefined, token: string | undefined) { check(rundownIds, Array) - if (rundownIds.length === 0) return null + if (rundownIds.length === 0) { + this.ready() + return null + } const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -183,8 +195,14 @@ meteorPublish( check(rundownIds, Array) check(segmentIds, Match.Maybe(Array)) - if (rundownIds.length === 0) return null - if (segmentIds && segmentIds.length === 0) return null + if (rundownIds.length === 0) { + this.ready() + return null + } + if (segmentIds && segmentIds.length === 0) { + this.ready() + return null + } const modifier: FindOptions = { fields: { @@ -219,7 +237,10 @@ meteorPublish( check(rundownIds, Array) check(playlistActivationId, Match.Maybe(String)) - if (rundownIds.length === 0 || !playlistActivationId) return null + if (rundownIds.length === 0 || !playlistActivationId) { + this.ready() + return null + } const modifier: FindOptions = { fields: { @@ -252,7 +273,10 @@ meteorPublish( ) { check(rundownIds, Array) - if (rundownIds.length === 0) return null + if (rundownIds.length === 0) { + this.ready() + return null + } const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -290,7 +314,10 @@ meteorPublish( check(partIds, Match.Maybe(Array)) // If values were provided, they must have values - if (partIds && partIds.length === 0) return null + if (partIds && partIds.length === 0) { + this.ready() + return null + } const selector: MongoQuery = { startRundownId: { $in: rundownIds }, @@ -361,7 +388,10 @@ const adlibPiecesSubFields: MongoFieldSpecifierZeroes = { meteorPublish(CorelibPubSub.adLibPieces, async function (rundownIds: RundownId[], token: string | undefined) { check(rundownIds, Array) - if (rundownIds.length === 0) return null + if (rundownIds.length === 0) { + this.ready() + return null + } const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -417,8 +447,14 @@ meteorPublish( 
check(partInstanceIds, Match.Maybe(Array)) // If values were provided, they must have values - if (rundownIds.length === 0) return null - if (partInstanceIds && partInstanceIds.length === 0) return null + if (rundownIds.length === 0) { + this.ready() + return null + } + if (partInstanceIds && partInstanceIds.length === 0) { + this.ready() + return null + } const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -485,7 +521,10 @@ meteorPublish( ) { check(rundownIds, Array) - if (rundownIds.length === 0) return null + if (rundownIds.length === 0) { + this.ready() + return null + } const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -522,7 +561,10 @@ meteorPublish( if (!peripheralDevice) throw new Meteor.Error(`PeripheralDevice "${deviceId}" not found`) const studioId = peripheralDevice.studioId - if (!studioId) return null + if (!studioId) { + this.ready() + return null + } return ExpectedPlayoutItems.findWithCursor({ studioId }) } @@ -551,7 +593,10 @@ meteorPublish( async function (rundownIds: RundownId[], token: string | undefined) { check(rundownIds, Array) - if (rundownIds.length === 0) return null + if (rundownIds.length === 0) { + this.ready() + return null + } const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -579,7 +624,10 @@ const adlibActionSubFields: MongoFieldSpecifierZeroes = { meteorPublish(CorelibPubSub.adLibActions, async function (rundownIds: RundownId[], token: string | undefined) { check(rundownIds, Array) - if (rundownIds.length === 0) return null + if (rundownIds.length === 0) { + this.ready() + return null + } const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -618,7 +666,10 @@ meteorPublish( async function (rundownIds: RundownId[], token: string | undefined) { check(rundownIds, Array) - if (rundownIds.length === 0) return null + if (rundownIds.length === 0) { + this.ready() + return null + } const selector: MongoQuery = { rundownId: { $in: rundownIds }, diff --git 
a/meteor/server/publications/rundownPlaylist.ts b/meteor/server/publications/rundownPlaylist.ts index 89378b15875..71637f219f9 100644 --- a/meteor/server/publications/rundownPlaylist.ts +++ b/meteor/server/publications/rundownPlaylist.ts @@ -24,8 +24,14 @@ meteorPublish( check(studioIds, Match.Maybe(Array)) // If values were provided, they must have values - if (rundownPlaylistIds && rundownPlaylistIds.length === 0) return null - if (studioIds && studioIds.length === 0) return null + if (rundownPlaylistIds && rundownPlaylistIds.length === 0) { + this.ready() + return null + } + if (studioIds && studioIds.length === 0) { + this.ready() + return null + } const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) diff --git a/meteor/server/publications/showStyle.ts b/meteor/server/publications/showStyle.ts index 99b3099e508..121e55a46f5 100644 --- a/meteor/server/publications/showStyle.ts +++ b/meteor/server/publications/showStyle.ts @@ -19,7 +19,10 @@ meteorPublish( check(showStyleBaseIds, Match.Maybe(Array)) // If values were provided, they must have values - if (showStyleBaseIds && showStyleBaseIds.length === 0) return null + if (showStyleBaseIds && showStyleBaseIds.length === 0) { + this.ready() + return null + } const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) @@ -50,8 +53,14 @@ meteorPublish( check(showStyleVariantIds, Match.Maybe(Array)) // If values were provided, they must have values - if (showStyleBaseIds && showStyleBaseIds.length === 0) return null - if (showStyleVariantIds && showStyleVariantIds.length === 0) return null + if (showStyleBaseIds && showStyleBaseIds.length === 0) { + this.ready() + return null + } + if (showStyleVariantIds && showStyleVariantIds.length === 0) { + this.ready() + return null + } const { cred, selector } = await AutoFillSelector.showStyleBaseId(this.userId, {}, token) @@ -77,7 +86,10 @@ meteorPublish( check(showStyleBaseIds, Match.Maybe(Array)) // If values 
were provided, they must have values - if (showStyleBaseIds && showStyleBaseIds.length === 0) return null + if (showStyleBaseIds && showStyleBaseIds.length === 0) { + this.ready() + return null + } const selector0: MongoQuery = {} if (showStyleBaseIds) selector0.showStyleBaseId = { $in: showStyleBaseIds } diff --git a/meteor/server/publications/studio.ts b/meteor/server/publications/studio.ts index 08002e6938a..6060b7ee993 100644 --- a/meteor/server/publications/studio.ts +++ b/meteor/server/publications/studio.ts @@ -42,7 +42,10 @@ meteorPublish(CorelibPubSub.studios, async function (studioIds: StudioId[] | nul check(studioIds, Match.Maybe(Array)) // If values were provided, they must have values - if (studioIds && studioIds.length === 0) return null + if (studioIds && studioIds.length === 0) { + this.ready() + return null + } const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) @@ -78,7 +81,10 @@ meteorPublish(CorelibPubSub.expectedPackages, async function (studioIds: StudioI // Note: This differs from the expected packages sent to the Package Manager, instead @see PubSub.expectedPackagesForDevice check(studioIds, Array) - if (studioIds.length === 0) return null + if (studioIds.length === 0) { + this.ready() + return null + } if (await StudioReadAccess.studioContent(studioIds, { userId: this.userId, token })) { return ExpectedPackages.findWithCursor({ @@ -92,7 +98,10 @@ meteorPublish( async function (studioIds: StudioId[], token: string | undefined) { check(studioIds, Array) - if (studioIds.length === 0) return null + if (studioIds.length === 0) { + this.ready() + return null + } if (await StudioReadAccess.studioContent(studioIds, { userId: this.userId, token })) { return ExpectedPackageWorkStatuses.findWithCursor({ @@ -107,7 +116,10 @@ meteorPublish( async function (studioIds: StudioId[], token: string | undefined) { check(studioIds, Array) - if (studioIds.length === 0) return null + if (studioIds.length === 0) { + 
this.ready() + return null + } if (await StudioReadAccess.studioContent(studioIds, { userId: this.userId, token })) { return PackageContainerStatuses.findWithCursor({ From f9dfb740eda1bfba840a17a21e6ded88005e1661 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 6 Nov 2024 08:49:28 +0000 Subject: [PATCH 58/81] Revert "fix: publications not becoming ready when stopping evaluation early" This reverts commit 7d12edf3d8bf041efcc61b8cefc651e5c5fbdd06. --- meteor/server/publications/organization.ts | 10 +-- .../server/publications/peripheralDevice.ts | 5 +- meteor/server/publications/rundown.ts | 85 ++++--------------- meteor/server/publications/rundownPlaylist.ts | 10 +-- meteor/server/publications/showStyle.ts | 20 +---- meteor/server/publications/studio.ts | 20 +---- 6 files changed, 30 insertions(+), 120 deletions(-) diff --git a/meteor/server/publications/organization.ts b/meteor/server/publications/organization.ts index 2ee8baecbb4..f596d8b3c6f 100644 --- a/meteor/server/publications/organization.ts +++ b/meteor/server/publications/organization.ts @@ -18,10 +18,7 @@ import { getCurrentTime } from '../lib/lib' meteorPublish( MeteorPubSub.organization, async function (organizationId: OrganizationId | null, token: string | undefined) { - if (!organizationId) { - this.ready() - return null - } + if (!organizationId) return null const { cred, selector } = await AutoFillSelector.organizationId(this.userId, { _id: organizationId }, token) const modifier: FindOptions = { @@ -46,10 +43,7 @@ meteorPublish(CorelibPubSub.blueprints, async function (blueprintIds: BlueprintI check(blueprintIds, Match.Maybe(Array)) // If values were provided, they must have values - if (blueprintIds && blueprintIds.length === 0) { - this.ready() - return null - } + if (blueprintIds && blueprintIds.length === 0) return null const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) diff --git a/meteor/server/publications/peripheralDevice.ts 
b/meteor/server/publications/peripheralDevice.ts index 4983dbda004..1ead8e0e6d3 100644 --- a/meteor/server/publications/peripheralDevice.ts +++ b/meteor/server/publications/peripheralDevice.ts @@ -44,10 +44,7 @@ meteorPublish( check(peripheralDeviceIds, Match.Maybe(Array)) // If values were provided, they must have values - if (peripheralDeviceIds && peripheralDeviceIds.length === 0) { - this.ready() - return null - } + if (peripheralDeviceIds && peripheralDeviceIds.length === 0) return null const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) diff --git a/meteor/server/publications/rundown.ts b/meteor/server/publications/rundown.ts index f3b9710c1ba..f939a9baffa 100644 --- a/meteor/server/publications/rundown.ts +++ b/meteor/server/publications/rundown.ts @@ -65,10 +65,7 @@ meteorPublish(PeripheralDevicePubSub.rundownsForDevice, async function (deviceId throw new Meteor.Error(403, 'Publication can only be used by authorized PeripheralDevices') // No studio, then no rundowns - if (!resolvedCred.device.studioId) { - this.ready() - return null - } + if (!resolvedCred.device.studioId) return null selector.studioId = resolvedCred.device.studioId @@ -90,10 +87,7 @@ meteorPublish( check(playlistIds, Array) // If values were provided, they must have values - if (playlistIds.length === 0) { - this.ready() - return null - } + if (playlistIds.length === 0) return null const { cred, selector } = await AutoFillSelector.organizationId( this.userId, @@ -127,10 +121,7 @@ meteorPublish( async function (showStyleBaseIds: ShowStyleBaseId[], token: string | undefined) { check(showStyleBaseIds, Array) - if (showStyleBaseIds.length === 0) { - this.ready() - return null - } + if (showStyleBaseIds.length === 0) return null const { cred, selector } = await AutoFillSelector.organizationId( this.userId, @@ -165,10 +156,7 @@ meteorPublish( async function (rundownIds: RundownId[], filter: { omitHidden?: boolean } | undefined, token: string | undefined) { 
check(rundownIds, Array) - if (rundownIds.length === 0) { - this.ready() - return null - } + if (rundownIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -195,14 +183,8 @@ meteorPublish( check(rundownIds, Array) check(segmentIds, Match.Maybe(Array)) - if (rundownIds.length === 0) { - this.ready() - return null - } - if (segmentIds && segmentIds.length === 0) { - this.ready() - return null - } + if (rundownIds.length === 0) return null + if (segmentIds && segmentIds.length === 0) return null const modifier: FindOptions = { fields: { @@ -237,10 +219,7 @@ meteorPublish( check(rundownIds, Array) check(playlistActivationId, Match.Maybe(String)) - if (rundownIds.length === 0 || !playlistActivationId) { - this.ready() - return null - } + if (rundownIds.length === 0 || !playlistActivationId) return null const modifier: FindOptions = { fields: { @@ -273,10 +252,7 @@ meteorPublish( ) { check(rundownIds, Array) - if (rundownIds.length === 0) { - this.ready() - return null - } + if (rundownIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -314,10 +290,7 @@ meteorPublish( check(partIds, Match.Maybe(Array)) // If values were provided, they must have values - if (partIds && partIds.length === 0) { - this.ready() - return null - } + if (partIds && partIds.length === 0) return null const selector: MongoQuery = { startRundownId: { $in: rundownIds }, @@ -388,10 +361,7 @@ const adlibPiecesSubFields: MongoFieldSpecifierZeroes = { meteorPublish(CorelibPubSub.adLibPieces, async function (rundownIds: RundownId[], token: string | undefined) { check(rundownIds, Array) - if (rundownIds.length === 0) { - this.ready() - return null - } + if (rundownIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -447,14 +417,8 @@ meteorPublish( check(partInstanceIds, Match.Maybe(Array)) // If values were provided, they must have values - if (rundownIds.length === 0) { - 
this.ready() - return null - } - if (partInstanceIds && partInstanceIds.length === 0) { - this.ready() - return null - } + if (rundownIds.length === 0) return null + if (partInstanceIds && partInstanceIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -521,10 +485,7 @@ meteorPublish( ) { check(rundownIds, Array) - if (rundownIds.length === 0) { - this.ready() - return null - } + if (rundownIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -561,10 +522,7 @@ meteorPublish( if (!peripheralDevice) throw new Meteor.Error(`PeripheralDevice "${deviceId}" not found`) const studioId = peripheralDevice.studioId - if (!studioId) { - this.ready() - return null - } + if (!studioId) return null return ExpectedPlayoutItems.findWithCursor({ studioId }) } @@ -593,10 +551,7 @@ meteorPublish( async function (rundownIds: RundownId[], token: string | undefined) { check(rundownIds, Array) - if (rundownIds.length === 0) { - this.ready() - return null - } + if (rundownIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -624,10 +579,7 @@ const adlibActionSubFields: MongoFieldSpecifierZeroes = { meteorPublish(CorelibPubSub.adLibActions, async function (rundownIds: RundownId[], token: string | undefined) { check(rundownIds, Array) - if (rundownIds.length === 0) { - this.ready() - return null - } + if (rundownIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, @@ -666,10 +618,7 @@ meteorPublish( async function (rundownIds: RundownId[], token: string | undefined) { check(rundownIds, Array) - if (rundownIds.length === 0) { - this.ready() - return null - } + if (rundownIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, diff --git a/meteor/server/publications/rundownPlaylist.ts b/meteor/server/publications/rundownPlaylist.ts index 71637f219f9..89378b15875 100644 --- 
a/meteor/server/publications/rundownPlaylist.ts +++ b/meteor/server/publications/rundownPlaylist.ts @@ -24,14 +24,8 @@ meteorPublish( check(studioIds, Match.Maybe(Array)) // If values were provided, they must have values - if (rundownPlaylistIds && rundownPlaylistIds.length === 0) { - this.ready() - return null - } - if (studioIds && studioIds.length === 0) { - this.ready() - return null - } + if (rundownPlaylistIds && rundownPlaylistIds.length === 0) return null + if (studioIds && studioIds.length === 0) return null const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) diff --git a/meteor/server/publications/showStyle.ts b/meteor/server/publications/showStyle.ts index 121e55a46f5..99b3099e508 100644 --- a/meteor/server/publications/showStyle.ts +++ b/meteor/server/publications/showStyle.ts @@ -19,10 +19,7 @@ meteorPublish( check(showStyleBaseIds, Match.Maybe(Array)) // If values were provided, they must have values - if (showStyleBaseIds && showStyleBaseIds.length === 0) { - this.ready() - return null - } + if (showStyleBaseIds && showStyleBaseIds.length === 0) return null const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) @@ -53,14 +50,8 @@ meteorPublish( check(showStyleVariantIds, Match.Maybe(Array)) // If values were provided, they must have values - if (showStyleBaseIds && showStyleBaseIds.length === 0) { - this.ready() - return null - } - if (showStyleVariantIds && showStyleVariantIds.length === 0) { - this.ready() - return null - } + if (showStyleBaseIds && showStyleBaseIds.length === 0) return null + if (showStyleVariantIds && showStyleVariantIds.length === 0) return null const { cred, selector } = await AutoFillSelector.showStyleBaseId(this.userId, {}, token) @@ -86,10 +77,7 @@ meteorPublish( check(showStyleBaseIds, Match.Maybe(Array)) // If values were provided, they must have values - if (showStyleBaseIds && showStyleBaseIds.length === 0) { - this.ready() - return null - } + if 
(showStyleBaseIds && showStyleBaseIds.length === 0) return null const selector0: MongoQuery = {} if (showStyleBaseIds) selector0.showStyleBaseId = { $in: showStyleBaseIds } diff --git a/meteor/server/publications/studio.ts b/meteor/server/publications/studio.ts index 6060b7ee993..08002e6938a 100644 --- a/meteor/server/publications/studio.ts +++ b/meteor/server/publications/studio.ts @@ -42,10 +42,7 @@ meteorPublish(CorelibPubSub.studios, async function (studioIds: StudioId[] | nul check(studioIds, Match.Maybe(Array)) // If values were provided, they must have values - if (studioIds && studioIds.length === 0) { - this.ready() - return null - } + if (studioIds && studioIds.length === 0) return null const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) @@ -81,10 +78,7 @@ meteorPublish(CorelibPubSub.expectedPackages, async function (studioIds: StudioI // Note: This differs from the expected packages sent to the Package Manager, instead @see PubSub.expectedPackagesForDevice check(studioIds, Array) - if (studioIds.length === 0) { - this.ready() - return null - } + if (studioIds.length === 0) return null if (await StudioReadAccess.studioContent(studioIds, { userId: this.userId, token })) { return ExpectedPackages.findWithCursor({ @@ -98,10 +92,7 @@ meteorPublish( async function (studioIds: StudioId[], token: string | undefined) { check(studioIds, Array) - if (studioIds.length === 0) { - this.ready() - return null - } + if (studioIds.length === 0) return null if (await StudioReadAccess.studioContent(studioIds, { userId: this.userId, token })) { return ExpectedPackageWorkStatuses.findWithCursor({ @@ -116,10 +107,7 @@ meteorPublish( async function (studioIds: StudioId[], token: string | undefined) { check(studioIds, Array) - if (studioIds.length === 0) { - this.ready() - return null - } + if (studioIds.length === 0) return null if (await StudioReadAccess.studioContent(studioIds, { userId: this.userId, token })) { return 
PackageContainerStatuses.findWithCursor({ From f80b7d550b434f0133ba5afdbca66007e706711b Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 6 Nov 2024 08:51:58 +0000 Subject: [PATCH 59/81] fix: publications not becoming ready when returning null --- meteor/server/publications/lib/lib.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/meteor/server/publications/lib/lib.ts b/meteor/server/publications/lib/lib.ts index 57f953c0316..b0fda6dcc6d 100644 --- a/meteor/server/publications/lib/lib.ts +++ b/meteor/server/publications/lib/lib.ts @@ -51,11 +51,13 @@ export function meteorPublishUnsafe( const publicationGauge = MeteorPublicationsGauge.labels({ publication: name }) - Meteor.publish(name, function (...args: any[]): any { + Meteor.publish(name, async function (...args: any[]): Promise { publicationGauge.inc() this.onStop(() => publicationGauge.dec()) - return callback.apply(protectStringObject(this), args) || [] + const callbackRes = await callback.apply(protectStringObject(this), args) + // If no value is returned, return an empty array so that meteor marks the subscription as ready + return callbackRes || [] }) } From 1fd2445da0f77de5130ab8adbdb3bb3243baa98f Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 6 Nov 2024 10:16:43 +0000 Subject: [PATCH 60/81] fix: simpler websocket url --- meteor/__mocks__/meteor.ts | 3 - packages/webui/src/meteor/meteor.js | 72 ------------------- .../src/meteor/socket-stream-client/urls.js | 4 -- 3 files changed, 79 deletions(-) diff --git a/meteor/__mocks__/meteor.ts b/meteor/__mocks__/meteor.ts index 1b2ec694180..a63b6181f43 100644 --- a/meteor/__mocks__/meteor.ts +++ b/meteor/__mocks__/meteor.ts @@ -212,9 +212,6 @@ export namespace MeteorMock { // but it'll do for now: return callAsync(methodName, ...args) } - export function absoluteUrl(path?: string): string { - return path + '' // todo - } export function setTimeout(fcn: () => void | Promise, time: number): number { return $.setTimeout(() => 
{ Promise.resolve() diff --git a/packages/webui/src/meteor/meteor.js b/packages/webui/src/meteor/meteor.js index ada02242205..049cad2f65f 100644 --- a/packages/webui/src/meteor/meteor.js +++ b/packages/webui/src/meteor/meteor.js @@ -234,78 +234,6 @@ Meteor.Error.prototype.clone = function () { return new Meteor.Error(self.error, self.reason, self.details) } -/** - * @summary Generate an absolute URL pointing to the application. The server reads from the `ROOT_URL` environment variable to determine where it is running. This is taken care of automatically for apps deployed to Galaxy, but must be provided when using `meteor build`. - * @locus Anywhere - * @param {String} [path] A path to append to the root URL. Do not include a leading "`/`". - * @param {Object} [options] - * @param {Boolean} options.secure Create an HTTPS URL. - * @param {Boolean} options.replaceLocalhost Replace localhost with 127.0.0.1. Useful for services that don't recognize localhost as a domain name. - * @param {String} options.rootUrl Override the default ROOT_URL from the server environment. For example: "`http://foo.example.com`" - */ -Meteor.absoluteUrl = function (path, options) { - // path is optional - if (!options && typeof path === 'object') { - options = path - path = undefined - } - // merge options with defaults - options = Object.assign({}, Meteor.absoluteUrl.defaultOptions, options || {}) - - var url = options.rootUrl - if (!url) throw new Error('Must pass options.rootUrl or set ROOT_URL in the server environment') - - if (!/^http[s]?:\/\//i.test(url)) - // url starts with 'http://' or 'https://' - url = 'http://' + url // we will later fix to https if options.secure is set - - if (!url.endsWith('/')) { - url += '/' - } - - if (path) { - // join url and path with a / separator - while (path.startsWith('/')) { - path = path.slice(1) - } - url += path - } - - // turn http to https if secure option is set, and we're not talking - // to localhost. 
- if ( - options.secure && - /^http:/.test(url) && // url starts with 'http:' - !/http:\/\/localhost[:\/]/.test(url) && // doesn't match localhost - !/http:\/\/127\.0\.0\.1[:\/]/.test(url) - ) - // or 127.0.0.1 - url = url.replace(/^http:/, 'https:') - - if (options.replaceLocalhost) url = url.replace(/^http:\/\/localhost([:\/].*)/, 'http://127.0.0.1$1') - - return url -} - -// allow later packages to override default options -var defaultOptions = (Meteor.absoluteUrl.defaultOptions = {}) - -// available only in a browser environment -var location = typeof window === 'object' && window.location - -if (typeof window.__meteor_runtime_config__ === 'object' && window.__meteor_runtime_config__.ROOT_URL) { - defaultOptions.rootUrl = window.__meteor_runtime_config__.ROOT_URL -} else if (location && location.protocol && location.host) { - defaultOptions.rootUrl = location.protocol + '//' + location.host -} - -// Make absolute URLs use HTTPS by default if the current window.location -// uses HTTPS. Since this is just a default, it can be overridden by -// passing { secure: false } if necessary. 
-if (location && location.protocol === 'https:') { - defaultOptions.secure = true -} - Meteor._relativeToSiteRootUrl = function (link) { if (typeof window.__meteor_runtime_config__ === 'object' && link.substr(0, 1) === '/') link = (window.__meteor_runtime_config__.ROOT_URL_PATH_PREFIX || '') + link diff --git a/packages/webui/src/meteor/socket-stream-client/urls.js b/packages/webui/src/meteor/socket-stream-client/urls.js index 232c9bc247b..d5bb41e57ba 100644 --- a/packages/webui/src/meteor/socket-stream-client/urls.js +++ b/packages/webui/src/meteor/socket-stream-client/urls.js @@ -12,10 +12,6 @@ function translateUrl(url, newSchemeBase, subPath) { newSchemeBase = 'http'; } - if (subPath !== "sockjs" && url.startsWith("/")) { - url = Meteor.absoluteUrl(url.substr(1)); - } - var ddpUrlMatch = url.match(/^ddp(i?)\+sockjs:\/\//); var httpUrlMatch = url.match(/^http(s?):\/\//); var newScheme; From 3a526b7688b002c6f29f7510257700e917e2f8d7 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 6 Nov 2024 13:42:01 +0000 Subject: [PATCH 61/81] chore: fix dev websocket --- packages/webui/vite.config.mts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/webui/vite.config.mts b/packages/webui/vite.config.mts index 69b894f48e5..ed80e5b1df8 100644 --- a/packages/webui/vite.config.mts +++ b/packages/webui/vite.config.mts @@ -66,6 +66,10 @@ export default defineConfig({ '/api': 'http://127.0.0.1:3000', '/site.webmanifest': 'http://127.0.0.1:3000', '/meteor-runtime-config.js': 'http://127.0.0.1:3000', + '/websocket': { + target: `ws://127.0.0.1:3000`, + ws: true, + }, }, }, From 460a2a97b6565dedefe5a1d67ac38b8439a82e53 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 20 Nov 2024 16:34:04 +0000 Subject: [PATCH 62/81] feat: configure Core system/studio settings via blueprints SOFIE-192 (#33) --- meteor/__mocks__/defaultCollectionObjects.ts | 4 +- meteor/__mocks__/helpers/database.ts | 19 + .../serviceMessagesApi.test.ts | 3 + meteor/server/api/evaluations.ts | 4 
+- meteor/server/api/rest/v1/typeConversion.ts | 10 +- meteor/server/api/studio/api.ts | 4 +- meteor/server/collections/index.ts | 5 +- meteor/server/coreSystem/index.ts | 25 +- meteor/server/cronjobs.ts | 21 +- meteor/server/logo.ts | 2 +- meteor/server/migration/0_1_0.ts | 444 +----------- meteor/server/migration/X_X_X.ts | 151 ++++- .../migration/__tests__/migrations.test.ts | 12 +- meteor/server/migration/api.ts | 9 +- meteor/server/migration/databaseMigration.ts | 4 + meteor/server/migration/upgrades/context.ts | 16 +- .../upgrades/defaultSystemActionTriggers.ts | 416 ++++++++++++ meteor/server/migration/upgrades/lib.ts | 76 +++ .../migration/upgrades/showStyleBase.ts | 80 +-- meteor/server/migration/upgrades/system.ts | 108 +++ .../blueprintUpgradeStatus/checkStatus.ts | 68 +- .../blueprintUpgradeStatus/publication.ts | 58 +- .../reactiveContentCache.ts | 21 + .../upgradesContentObserver.ts | 6 +- meteor/server/publications/lib/quickLoop.ts | 12 +- .../partInstancesUI/publication.ts | 8 +- .../partInstancesUI/reactiveContentCache.ts | 16 +- .../partInstancesUI/rundownContentObserver.ts | 27 +- .../publications/partsUI/publication.ts | 8 +- .../partsUI/reactiveContentCache.ts | 16 +- .../partsUI/rundownContentObserver.ts | 27 +- .../checkPieceContentStatus.ts | 4 +- .../pieceContentStatusUI/common.ts | 6 +- meteor/server/publications/studioUI.ts | 6 +- meteor/server/publications/system.ts | 4 +- .../blueprints-integration/src/api/studio.ts | 5 +- .../blueprints-integration/src/api/system.ts | 22 +- .../src/context/systemApplyConfigContext.ts | 6 + packages/blueprints-integration/src/index.ts | 2 + .../blueprints-integration/src/triggers.ts | 24 + packages/corelib/src/dataModel/Blueprint.ts | 2 +- .../corelib/src/dataModel/RundownPlaylist.ts | 10 +- packages/corelib/src/dataModel/Studio.ts | 64 +- packages/corelib/src/studio/baseline.ts | 4 +- packages/job-worker/src/__mocks__/context.ts | 12 +- .../src/__mocks__/defaultCollectionObjects.ts | 4 +- 
.../src/blueprints/__tests__/config.test.ts | 16 +- .../src/blueprints/__tests__/context.test.ts | 3 +- packages/job-worker/src/blueprints/config.ts | 9 +- .../context/OnTimelineGenerateContext.ts | 5 +- .../blueprints/context/PartEventContext.ts | 5 +- .../src/blueprints/context/RundownContext.ts | 5 +- .../blueprints/context/RundownEventContext.ts | 5 +- .../blueprints/context/ShowStyleContext.ts | 5 +- .../src/blueprints/context/StudioContext.ts | 13 +- .../blueprints/context/StudioUserContext.ts | 4 +- .../SyncIngestUpdateToPartInstanceContext.ts | 5 +- .../src/blueprints/context/adlibActions.ts | 4 +- .../src/ingest/__tests__/ingest.test.ts | 7 +- .../__tests__/selectShowStyleVariant.test.ts | 18 +- .../job-worker/src/ingest/expectedPackages.ts | 17 +- .../mosDevice/__tests__/mosIngest.test.ts | 2 +- packages/job-worker/src/jobs/index.ts | 9 +- packages/job-worker/src/jobs/studio.ts | 58 ++ .../src/playout/__tests__/playout.test.ts | 7 +- .../playout/__tests__/selectNextPart.test.ts | 3 +- .../src/playout/abPlayback/index.ts | 3 +- .../playout/abPlayback/routeSetDisabling.ts | 5 +- .../lookahead/__tests__/lookahead.test.ts | 8 +- .../playout/lookahead/__tests__/util.test.ts | 5 +- .../job-worker/src/playout/lookahead/index.ts | 5 +- .../model/services/QuickLoopService.ts | 2 +- .../job-worker/src/playout/selectNextPart.ts | 7 +- .../src/playout/timeline/generate.ts | 5 +- packages/job-worker/src/playout/upgrade.ts | 15 +- packages/job-worker/src/rundownPlaylists.ts | 12 +- packages/job-worker/src/workers/caches.ts | 40 +- .../src/workers/context/JobContextImpl.ts | 10 +- .../workers/context/StudioCacheContextImpl.ts | 24 +- .../workers/context/StudioRouteSetUpdater.ts | 59 +- .../__tests__/StudioRouteSetUpdater.spec.ts | 64 +- .../job-worker/src/workers/events/child.ts | 4 +- .../job-worker/src/workers/ingest/child.ts | 4 +- .../job-worker/src/workers/studio/child.ts | 4 +- packages/meteor-lib/src/api/migration.ts | 18 +- 
packages/meteor-lib/src/api/upgradeStatus.ts | 16 +- .../meteor-lib/src/collections/CoreSystem.ts | 36 +- .../src/core/model/CoreSystemSettings.ts | 23 + .../src/core/model/StudioSettings.ts | 66 ++ ...{sofie-logo.svg => sofie-logo-default.svg} | 0 .../src/__mocks__/defaultCollectionObjects.ts | 4 +- .../webui/src/__mocks__/helpers/database.ts | 19 + .../lib/Components/LabelAndOverrides.tsx | 12 +- .../lib/Components/MultiLineTextInput.tsx | 18 +- .../lib/__tests__/rundownTiming.test.ts | 7 +- .../lib/forms/SchemaFormWithOverrides.tsx | 2 - .../src/client/ui/AfterBroadcastForm.tsx | 9 +- .../BlueprintConfiguration/index.tsx | 3 +- .../ui/Settings/ShowStyle/OutputLayer.tsx | 12 +- .../ui/Settings/ShowStyle/SourceLayer.tsx | 23 +- .../Studio/BlueprintConfiguration/index.tsx | 3 +- .../Studio/Devices/GenericSubDevices.tsx | 2 - .../src/client/ui/Settings/Studio/Generic.tsx | 636 ++++++++++-------- .../client/ui/Settings/Studio/Mappings.tsx | 7 - .../PackageManager/AccessorTableRow.tsx | 23 - .../PackageManager/PackageContainers.tsx | 2 - .../Studio/Routings/ExclusivityGroups.tsx | 1 - .../Studio/Routings/RouteSetAbPlayers.tsx | 2 - .../ui/Settings/Studio/Routings/RouteSets.tsx | 11 - .../client/ui/Settings/SystemManagement.tsx | 260 ++++--- .../Settings/SystemManagement/Blueprint.tsx | 99 +++ .../ui/Settings/Upgrades/Components.tsx | 74 ++ .../src/client/ui/Settings/Upgrades/View.tsx | 26 +- .../TriggeredActionsEditor.tsx | 14 +- packages/webui/src/client/ui/SupportPopUp.tsx | 7 +- 115 files changed, 2397 insertions(+), 1369 deletions(-) create mode 100644 meteor/server/migration/upgrades/defaultSystemActionTriggers.ts create mode 100644 meteor/server/migration/upgrades/lib.ts create mode 100644 meteor/server/migration/upgrades/system.ts create mode 100644 packages/blueprints-integration/src/context/systemApplyConfigContext.ts create mode 100644 packages/job-worker/src/jobs/studio.ts create mode 100644 packages/shared-lib/src/core/model/CoreSystemSettings.ts create 
mode 100644 packages/shared-lib/src/core/model/StudioSettings.ts rename packages/webui/public/images/{sofie-logo.svg => sofie-logo-default.svg} (100%) create mode 100644 packages/webui/src/client/ui/Settings/SystemManagement/Blueprint.tsx diff --git a/meteor/__mocks__/defaultCollectionObjects.ts b/meteor/__mocks__/defaultCollectionObjects.ts index d2349c80ec6..62420caaac4 100644 --- a/meteor/__mocks__/defaultCollectionObjects.ts +++ b/meteor/__mocks__/defaultCollectionObjects.ts @@ -105,12 +105,12 @@ export function defaultStudio(_id: StudioId): DBStudio { mappingsWithOverrides: wrapDefaultObject({}), supportedShowStyleBase: [], blueprintConfigWithOverrides: wrapDefaultObject({}), - settings: { + settingsWithOverrides: wrapDefaultObject({ frameRate: 25, mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, fallbackPartDuration: DEFAULT_FALLBACK_PART_DURATION, - }, + }), _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), diff --git a/meteor/__mocks__/helpers/database.ts b/meteor/__mocks__/helpers/database.ts index 6abd5a60bff..7c4bd82c412 100644 --- a/meteor/__mocks__/helpers/database.ts +++ b/meteor/__mocks__/helpers/database.ts @@ -171,6 +171,25 @@ export async function setupMockCore(doc?: Partial): Promise { diff --git a/meteor/server/api/evaluations.ts b/meteor/server/api/evaluations.ts index 3034110a653..3be9d578182 100644 --- a/meteor/server/api/evaluations.ts +++ b/meteor/server/api/evaluations.ts @@ -10,6 +10,7 @@ import { sendSlackMessageToWebhook } from './integration/slack' import { OrganizationId, UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { Evaluations, RundownPlaylists } from '../collections' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' export async function saveEvaluation( 
credentials: { @@ -33,8 +34,9 @@ export async function saveEvaluation( deferAsync(async () => { const studio = await fetchStudioLight(evaluation.studioId) if (!studio) throw new Meteor.Error(500, `Studio ${evaluation.studioId} not found!`) + const studioSettings = applyAndValidateOverrides(studio.settingsWithOverrides).obj - const webhookUrls = _.compact((studio.settings.slackEvaluationUrls || '').split(',')) + const webhookUrls = _.compact((studioSettings.slackEvaluationUrls || '').split(',')) if (webhookUrls.length) { // Only send notes if not everything is OK diff --git a/meteor/server/api/rest/v1/typeConversion.ts b/meteor/server/api/rest/v1/typeConversion.ts index a9c43b9bf04..7b442546878 100644 --- a/meteor/server/api/rest/v1/typeConversion.ts +++ b/meteor/server/api/rest/v1/typeConversion.ts @@ -38,7 +38,7 @@ import { DEFAULT_FALLBACK_PART_DURATION, } from '@sofie-automation/shared-lib/dist/core/constants' import { Bucket } from '@sofie-automation/meteor-lib/dist/collections/Buckets' -import { ForceQuickLoopAutoNext } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { ForceQuickLoopAutoNext } from '@sofie-automation/shared-lib/dist/core/model/StudioSettings' /* This file contains functions that convert between the internal Sofie-Core types and types exposed to the external API. @@ -266,13 +266,17 @@ export async function studioFrom(apiStudio: APIStudio, existingId?: StudioId): P ? updateOverrides(studio.blueprintConfigWithOverrides, apiStudio.config as IBlueprintConfig) : wrapDefaultObject({}) + const studioSettings = studioSettingsFrom(apiStudio.settings) + return { _id: existingId ?? getRandomId(), name: apiStudio.name, blueprintId: blueprint?._id, blueprintConfigPresetId: apiStudio.blueprintConfigPresetId, blueprintConfigWithOverrides: blueprintConfig, - settings: studioSettingsFrom(apiStudio.settings), + settingsWithOverrides: studio + ? 
updateOverrides(studio.settingsWithOverrides, studioSettings) + : wrapDefaultObject(studioSettings), supportedShowStyleBase: apiStudio.supportedShowStyleBase?.map((id) => protectString(id)) ?? [], organizationId: null, mappingsWithOverrides: wrapDefaultObject({}), @@ -293,7 +297,7 @@ export async function studioFrom(apiStudio: APIStudio, existingId?: StudioId): P } export function APIStudioFrom(studio: DBStudio): APIStudio { - const studioSettings = APIStudioSettingsFrom(studio.settings) + const studioSettings = APIStudioSettingsFrom(applyAndValidateOverrides(studio.settingsWithOverrides).obj) return { name: studio.name, diff --git a/meteor/server/api/studio/api.ts b/meteor/server/api/studio/api.ts index 94ac811d40d..fa5d7c1c372 100644 --- a/meteor/server/api/studio/api.ts +++ b/meteor/server/api/studio/api.ts @@ -45,11 +45,11 @@ export async function insertStudioInner(organizationId: OrganizationId | null, n supportedShowStyleBase: [], blueprintConfigWithOverrides: wrapDefaultObject({}), // testToolsConfig?: ITestToolsConfig - settings: { + settingsWithOverrides: wrapDefaultObject({ frameRate: 25, mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, - }, + }), _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), diff --git a/meteor/server/collections/index.ts b/meteor/server/collections/index.ts index 303096db7b3..9b574870c93 100644 --- a/meteor/server/collections/index.ts +++ b/meteor/server/collections/index.ts @@ -67,14 +67,13 @@ export const CoreSystem = createAsyncOnlyMongoCollection(Collection if (!access.update) return logNotAllowed('CoreSystem', access.reason) return allowOnlyFields(doc, fields, [ - 'support', 'systemInfo', 'name', 'logLevel', 'apm', - 'cron', 'logo', - 'evaluations', + 'blueprintId', + 'settingsWithOverrides', ]) }, }) diff --git a/meteor/server/coreSystem/index.ts b/meteor/server/coreSystem/index.ts index 95f4b740800..85a2586745a 100644 --- 
a/meteor/server/coreSystem/index.ts +++ b/meteor/server/coreSystem/index.ts @@ -10,13 +10,14 @@ import { getEnvLogLevel, logger, LogLevel, setLogLevel } from '../logging' const PackageInfo = require('../../package.json') import { startAgent } from '../api/profiler/apm' import { profiler } from '../api/profiler' -import { TMP_TSR_VERSION } from '@sofie-automation/blueprints-integration' +import { ICoreSystemSettings, TMP_TSR_VERSION } from '@sofie-automation/blueprints-integration' import { getAbsolutePath } from '../lib' import * as fs from 'fs/promises' import path from 'path' import { checkDatabaseVersions } from './checkDatabaseVersions' import PLazy from 'p-lazy' import { getCoreSystemAsync } from './collection' +import { wrapDefaultObject } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' export { PackageInfo } @@ -59,11 +60,25 @@ async function initializeCoreSystem() { enabled: false, transactionSampleRate: -1, }, - cron: { - casparCGRestart: { - enabled: true, + settingsWithOverrides: wrapDefaultObject({ + cron: { + casparCGRestart: { + enabled: true, + }, + storeRundownSnapshots: { + enabled: false, + }, }, - }, + support: { + message: '', + }, + evaluationsMessage: { + enabled: false, + heading: '', + message: '', + }, + }), + lastBlueprintConfig: undefined, }) if (!isRunningInJest()) { diff --git a/meteor/server/cronjobs.ts b/meteor/server/cronjobs.ts index 88bb16c8516..7b9d0d1ffb6 100644 --- a/meteor/server/cronjobs.ts +++ b/meteor/server/cronjobs.ts @@ -18,13 +18,14 @@ import { deferAsync, normalizeArrayToMap } from '@sofie-automation/corelib/dist/ import { getCoreSystemAsync } from './coreSystem/collection' import { cleanupOldDataInner } from './api/cleanup' import { CollectionCleanupResult } from '@sofie-automation/meteor-lib/dist/api/system' -import { ICoreSystem } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' +import { ICoreSystemSettings } from '@sofie-automation/shared-lib/dist/core/model/CoreSystemSettings' 
import { executePeripheralDeviceFunctionWithCustomTimeout } from './api/peripheralDevice/executeFunction' import { interpollateTranslation, isTranslatableMessage, translateMessage, } from '@sofie-automation/corelib/dist/TranslatableMessage' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' const lowPrioFcn = (fcn: () => any) => { // Do it at a random time in the future: @@ -49,15 +50,17 @@ export async function nightlyCronjobInner(): Promise { logger.info('Nightly cronjob: starting...') const system = await getCoreSystemAsync() + const systemSettings = system && applyAndValidateOverrides(system.settingsWithOverrides).obj + await Promise.allSettled([ cleanupOldDataCronjob().catch((error) => { logger.error(`Cronjob: Error when cleaning up old data: ${stringifyError(error)}`) logger.error(error) }), - restartCasparCG(system, previousLastNightlyCronjob).catch((e) => { + restartCasparCG(systemSettings, previousLastNightlyCronjob).catch((e) => { logger.error(`Cron: Restart CasparCG error: ${stringifyError(e)}`) }), - storeSnapshots(system).catch((e) => { + storeSnapshots(systemSettings).catch((e) => { logger.error(`Cron: Rundown Snapshots error: ${stringifyError(e)}`) }), ]) @@ -81,8 +84,8 @@ async function cleanupOldDataCronjob() { const CASPARCG_LAST_SEEN_PERIOD_MS = 3 * 60 * 1000 // Note: this must be higher than the ping interval used by playout-gateway -async function restartCasparCG(system: ICoreSystem | undefined, previousLastNightlyCronjob: number) { - if (!system?.cron?.casparCGRestart?.enabled) return +async function restartCasparCG(systemSettings: ICoreSystemSettings | undefined, previousLastNightlyCronjob: number) { + if (!systemSettings?.cron?.casparCGRestart?.enabled) return let shouldRetryAttempt = false const ps: Array> = [] @@ -176,10 +179,10 @@ async function restartCasparCG(system: ICoreSystem | undefined, previousLastNigh } } -async function storeSnapshots(system: ICoreSystem | undefined) { - if 
(system?.cron?.storeRundownSnapshots?.enabled) { - const filter = system.cron.storeRundownSnapshots.rundownNames?.length - ? { name: { $in: system.cron.storeRundownSnapshots.rundownNames } } +async function storeSnapshots(systemSettings: ICoreSystemSettings | undefined) { + if (systemSettings?.cron?.storeRundownSnapshots?.enabled) { + const filter = systemSettings.cron.storeRundownSnapshots.rundownNames?.length + ? { name: { $in: systemSettings.cron.storeRundownSnapshots.rundownNames } } : {} const playlists = await RundownPlaylists.findFetchAsync(filter) diff --git a/meteor/server/logo.ts b/meteor/server/logo.ts index 26536e65b87..2e7910bae6c 100644 --- a/meteor/server/logo.ts +++ b/meteor/server/logo.ts @@ -13,7 +13,7 @@ logoRouter.get('/', async (ctx) => { const logo = core?.logo ?? SofieLogo.Default const paths: Record = { - [SofieLogo.Default]: '/images/sofie-logo.svg', + [SofieLogo.Default]: '/images/sofie-logo-default.svg', [SofieLogo.Pride]: '/images/sofie-logo-pride.svg', [SofieLogo.Norway]: '/images/sofie-logo-norway.svg', [SofieLogo.Christmas]: '/images/sofie-logo-christmas.svg', diff --git a/meteor/server/migration/0_1_0.ts b/meteor/server/migration/0_1_0.ts index 80247e212bf..f4a6abf7ad3 100644 --- a/meteor/server/migration/0_1_0.ts +++ b/meteor/server/migration/0_1_0.ts @@ -1,15 +1,9 @@ import { addMigrationSteps } from './databaseMigration' import { logger } from '../logging' -import { getRandomId, protectString, generateTranslation as t, getHash } from '../lib/tempLib' +import { getRandomId, protectString } from '../lib/tempLib' import { wrapDefaultObject } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { ShowStyleVariantId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { ShowStyleBases, ShowStyleVariants, Studios, TriggeredActions } from '../collections' -import { - IBlueprintTriggeredActions, - ClientActions, - TriggerType, - PlayoutActions, -} from '@sofie-automation/blueprints-integration' +import { 
ShowStyleBases, ShowStyleVariants, Studios } from '../collections' import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/core/constants' /** @@ -17,408 +11,6 @@ import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/cor * These files are combined with / overridden by migration steps defined in the blueprints. */ -let j = 0 - -const DEFAULT_CORE_TRIGGERS: IBlueprintTriggeredActions[] = [ - { - _id: 'core_toggleShelf', - actions: { - '0': { - action: ClientActions.shelf, - filterChain: [ - { - object: 'view', - }, - ], - state: 'toggle', - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'Tab', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Toggle Shelf'), - }, - { - _id: 'core_activateRundownPlaylist', - actions: { - '0': { - action: PlayoutActions.activateRundownPlaylist, - rehearsal: false, - filterChain: [ - { - object: 'view', - }, - ], - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'Backquote', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Activate (On-Air)'), - }, - { - _id: 'core_activateRundownPlaylist_rehearsal', - actions: { - '0': { - action: PlayoutActions.activateRundownPlaylist, - rehearsal: true, - filterChain: [ - { - object: 'view', - }, - ], - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'Control+Backquote', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Activate (Rehearsal)'), - }, - { - _id: 'core_deactivateRundownPlaylist', - actions: { - '0': { - action: PlayoutActions.deactivateRundownPlaylist, - filterChain: [ - { - object: 'view', - }, - ], - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'Control+Shift+Backquote', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Deactivate'), - }, - { - _id: 'core_take', - actions: { - '0': { - action: PlayoutActions.take, - filterChain: [ - { - object: 'view', - }, - ], - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'NumpadEnter', - 
up: true, - }, - '1': { - type: TriggerType.hotkey, - keys: 'F12', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Take'), - }, - { - _id: 'core_hold', - actions: { - '0': { - action: PlayoutActions.hold, - filterChain: [ - { - object: 'view', - }, - ], - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'KeyH', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Hold'), - }, - { - _id: 'core_hold_undo', - actions: { - '0': { - action: PlayoutActions.hold, - undo: true, - filterChain: [ - { - object: 'view', - }, - ], - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'Shift+KeyH', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Undo Hold'), - }, - { - _id: 'core_reset_rundown_playlist', - actions: { - '0': { - action: PlayoutActions.resetRundownPlaylist, - filterChain: [ - { - object: 'view', - }, - ], - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'Control+Shift+F12', - up: true, - }, - '1': { - type: TriggerType.hotkey, - keys: 'Control+Shift+AnyEnter', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Reset Rundown'), - }, - { - _id: 'core_disable_next_piece', - actions: { - '0': { - action: PlayoutActions.disableNextPiece, - filterChain: [ - { - object: 'view', - }, - ], - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'KeyG', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Disable the next element'), - }, - { - _id: 'core_disable_next_piece_undo', - actions: { - '0': { - action: PlayoutActions.disableNextPiece, - filterChain: [ - { - object: 'view', - }, - ], - undo: true, - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'Shift+KeyG', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Undo Disable the next element'), - }, - { - _id: 'core_create_snapshot_for_debug', - actions: { - '0': { - action: PlayoutActions.createSnapshotForDebug, - filterChain: [ - { - object: 'view', - }, - ], - }, - }, - triggers: { - '0': { - type: 
TriggerType.hotkey, - keys: 'Backspace', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Store Snapshot'), - }, - { - _id: 'core_move_next_part', - actions: { - '0': { - action: PlayoutActions.moveNext, - filterChain: [ - { - object: 'view', - }, - ], - parts: 1, - segments: 0, - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'F9', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Move Next forwards'), - }, - { - _id: 'core_move_next_segment', - actions: { - '0': { - action: PlayoutActions.moveNext, - filterChain: [ - { - object: 'view', - }, - ], - parts: 0, - segments: 1, - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'F10', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Move Next to the following segment'), - }, - { - _id: 'core_move_previous_part', - actions: { - '0': { - action: PlayoutActions.moveNext, - filterChain: [ - { - object: 'view', - }, - ], - parts: -1, - segments: 0, - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'Shift+F9', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Move Next backwards'), - }, - { - _id: 'core_move_previous_segment', - actions: { - '0': { - action: PlayoutActions.moveNext, - filterChain: [ - { - object: 'view', - }, - ], - parts: 0, - segments: -1, - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'Shift+F10', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Move Next to the previous segment'), - }, - { - _id: 'core_go_to_onAir_line', - actions: { - '0': { - action: ClientActions.goToOnAirLine, - filterChain: [ - { - object: 'view', - }, - ], - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 'Control+Home', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Go to On Air line'), - }, - { - _id: 'core_rewind_segments', - actions: { - '0': { - action: ClientActions.rewindSegments, - filterChain: [ - { - object: 'view', - }, - ], - }, - }, - triggers: { - '0': { - type: TriggerType.hotkey, - keys: 
'Shift+Home', - up: true, - }, - }, - _rank: ++j * 1000, - name: t('Rewind segments to start'), - }, -] - // 0.1.0: These are the "base" migration steps, setting up a default system export const addSteps = addMigrationSteps('0.1.0', [ { @@ -437,11 +29,11 @@ export const addSteps = addMigrationSteps('0.1.0', [ name: 'Default studio', organizationId: null, supportedShowStyleBase: [], - settings: { + settingsWithOverrides: wrapDefaultObject({ frameRate: 25, mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, - }, + }), mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), _rundownVersionHash: '', @@ -538,32 +130,4 @@ export const addSteps = addMigrationSteps('0.1.0', [ } }, }, - { - id: 'TriggeredActions.core', - canBeRunAutomatically: true, - validate: async () => { - const coreTriggeredActionsCount = await TriggeredActions.countDocuments({ - showStyleBaseId: null, - }) - - if (coreTriggeredActionsCount === 0) { - return `No system-wide triggered actions set up.` - } - - return false - }, - migrate: async () => { - for (const triggeredAction of DEFAULT_CORE_TRIGGERS) { - await TriggeredActions.insertAsync({ - _id: protectString(getHash(triggeredAction._id)), - _rank: triggeredAction._rank, - name: triggeredAction.name, - blueprintUniqueId: null, - showStyleBaseId: null, - actionsWithOverrides: wrapDefaultObject(triggeredAction.actions), - triggersWithOverrides: wrapDefaultObject(triggeredAction.triggers), - }) - } - }, - }, ]) diff --git a/meteor/server/migration/X_X_X.ts b/meteor/server/migration/X_X_X.ts index 37862b391d7..62abd9a8f84 100644 --- a/meteor/server/migration/X_X_X.ts +++ b/meteor/server/migration/X_X_X.ts @@ -1,12 +1,19 @@ import { addMigrationSteps } from './databaseMigration' import { CURRENT_SYSTEM_VERSION } from './currentSystemVersion' -import { Studios } from '../collections' -import { convertObjectIntoOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import 
{ CoreSystem, Studios, TriggeredActions } from '../collections' +import { + convertObjectIntoOverrides, + wrapDefaultObject, +} from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { StudioRouteSet, StudioRouteSetExclusivityGroup, StudioPackageContainer, + IStudioSettings, } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { DEFAULT_CORE_TRIGGER_IDS } from './upgrades/defaultSystemActionTriggers' +import { ICoreSystem } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' +import { ICoreSystemSettings } from '@sofie-automation/shared-lib/dist/core/model/CoreSystemSettings' /* * ************************************************************************************** @@ -187,4 +194,144 @@ export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [ } }, }, + { + id: 'TriggeredActions.remove old systemwide', + canBeRunAutomatically: true, + validate: async () => { + const coreTriggeredActionsCount = await TriggeredActions.countDocuments({ + showStyleBaseId: null, + blueprintUniqueId: null, + _id: { $in: DEFAULT_CORE_TRIGGER_IDS }, + }) + + if (coreTriggeredActionsCount > 0) { + return `System-wide triggered actions needing removal.` + } + + return false + }, + migrate: async () => { + await TriggeredActions.removeAsync({ + showStyleBaseId: null, + blueprintUniqueId: null, + _id: { $in: DEFAULT_CORE_TRIGGER_IDS }, + }) + }, + }, + + { + id: `convert studio.settings to ObjectWithOverrides`, + canBeRunAutomatically: true, + validate: async () => { + const studios = await Studios.findFetchAsync({ + settings: { $exists: true }, + settingsWithOverrides: { $exists: false }, + }) + + for (const studio of studios) { + //@ts-expect-error settings is not typed as ObjectWithOverrides + if (studio.settings) { + return 'settings must be converted to an ObjectWithOverrides' + } + } + + return false + }, + migrate: async () => { + const studios = await Studios.findFetchAsync({ + settings: { $exists: true }, + settingsWithOverrides: { 
$exists: false }, + }) + + for (const studio of studios) { + //@ts-expect-error settings is typed as Record + const oldSettings = studio.settings + + const newSettings = wrapDefaultObject(oldSettings || {}) + + await Studios.updateAsync(studio._id, { + $set: { + settingsWithOverrides: newSettings, + }, + $unset: { + // settings: 1, + }, + }) + } + }, + }, + + { + id: `convert CoreSystem.settingsWithOverrides`, + canBeRunAutomatically: true, + validate: async () => { + const systems = await CoreSystem.findFetchAsync({ + settingsWithOverrides: { $exists: false }, + }) + + if (systems.length > 0) { + return 'settings must be converted to an ObjectWithOverrides' + } + + return false + }, + migrate: async () => { + const systems = await CoreSystem.findFetchAsync({ + settingsWithOverrides: { $exists: false }, + }) + + for (const system of systems) { + const oldSystem = system as ICoreSystem as PartialOldICoreSystem + + const newSettings = wrapDefaultObject({ + cron: { + casparCGRestart: { + enabled: false, + }, + storeRundownSnapshots: { + enabled: false, + }, + ...oldSystem.cron, + }, + support: oldSystem.support ?? { message: '' }, + evaluationsMessage: oldSystem.evaluations ?? 
{ enabled: false, heading: '', message: '' }, + }) + + await CoreSystem.updateAsync(system._id, { + $set: { + settingsWithOverrides: newSettings, + }, + $unset: { + cron: 1, + support: 1, + evaluations: 1, + }, + }) + } + }, + }, ]) + +interface PartialOldICoreSystem { + /** Support info */ + support?: { + message: string + } + + evaluations?: { + enabled: boolean + heading: string + message: string + } + + /** Cron jobs running nightly */ + cron?: { + casparCGRestart?: { + enabled: boolean + } + storeRundownSnapshots?: { + enabled: boolean + rundownNames?: string[] + } + } +} diff --git a/meteor/server/migration/__tests__/migrations.test.ts b/meteor/server/migration/__tests__/migrations.test.ts index 276eed40541..62967260fe8 100644 --- a/meteor/server/migration/__tests__/migrations.test.ts +++ b/meteor/server/migration/__tests__/migrations.test.ts @@ -121,11 +121,11 @@ describe('Migrations', () => { name: 'Default studio', organizationId: null, supportedShowStyleBase: [], - settings: { + settingsWithOverrides: wrapDefaultObject({ mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, - }, + }), mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), _rundownVersionHash: '', @@ -159,11 +159,11 @@ describe('Migrations', () => { name: 'Default studio', organizationId: null, supportedShowStyleBase: [], - settings: { + settingsWithOverrides: wrapDefaultObject({ mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, - }, + }), mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), _rundownVersionHash: '', @@ -197,11 +197,11 @@ describe('Migrations', () => { name: 'Default studio', organizationId: null, supportedShowStyleBase: [], - settings: { + settingsWithOverrides: wrapDefaultObject({ mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, - }, + }), mappingsWithOverrides: wrapDefaultObject({}), 
blueprintConfigWithOverrides: wrapDefaultObject({}), _rundownVersionHash: '', diff --git a/meteor/server/migration/api.ts b/meteor/server/migration/api.ts index fd4fac48e11..23a1169759f 100644 --- a/meteor/server/migration/api.ts +++ b/meteor/server/migration/api.ts @@ -20,8 +20,9 @@ import { validateConfigForShowStyleBase, validateConfigForStudio, } from './upgrades' -import { ShowStyleBaseId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { CoreSystemId, ShowStyleBaseId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { BlueprintValidateConfigForStudioResult } from '@sofie-automation/corelib/dist/worker/studio' +import { runUpgradeForCoreSystem } from './upgrades/system' class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { async getMigrationStatus() { @@ -123,5 +124,11 @@ class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { return runUpgradeForShowStyleBase(showStyleBaseId) } + + async runUpgradeForCoreSystem(coreSystemId: CoreSystemId): Promise { + await SystemWriteAccess.migrations(this) + + return runUpgradeForCoreSystem(coreSystemId) + } } registerClassToMeteorMethods(MigrationAPIMethods, ServerMigrationAPI, false) diff --git a/meteor/server/migration/databaseMigration.ts b/meteor/server/migration/databaseMigration.ts index 42b0d76b1e1..b342be20fd3 100644 --- a/meteor/server/migration/databaseMigration.ts +++ b/meteor/server/migration/databaseMigration.ts @@ -254,6 +254,10 @@ export async function prepareMigration(returnAllChunks?: boolean): Promise(DEFAULT_CORE_TRIGGERS).map( + (triggeredAction) => protectString(getHash(triggeredAction._id)) +) diff --git a/meteor/server/migration/upgrades/lib.ts b/meteor/server/migration/upgrades/lib.ts new file mode 100644 index 00000000000..ce825f2fd77 --- /dev/null +++ b/meteor/server/migration/upgrades/lib.ts @@ -0,0 +1,76 @@ +import type { ShowStyleBaseId, TriggeredActionId } from 
'@sofie-automation/corelib/dist/dataModel/Ids' +import { TriggeredActions } from '../../collections' +import { Complete, getRandomId, literal, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import type { DBTriggeredActions } from '@sofie-automation/meteor-lib/dist/collections/TriggeredActions' +import type { AnyBulkWriteOperation } from 'mongodb' +import { wrapDefaultObject } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import type { IBlueprintTriggeredActions } from '@sofie-automation/blueprints-integration' + +export async function updateTriggeredActionsForShowStyleBaseId( + showStyleBaseId: ShowStyleBaseId | null, + triggeredActions: IBlueprintTriggeredActions[] +): Promise { + const oldTriggeredActionsArray = await TriggeredActions.findFetchAsync({ + showStyleBaseId: showStyleBaseId, + blueprintUniqueId: { $ne: null }, + }) + const oldTriggeredActions = normalizeArrayToMap(oldTriggeredActionsArray, 'blueprintUniqueId') + + const newDocIds: TriggeredActionId[] = [] + const bulkOps: AnyBulkWriteOperation[] = [] + + for (const newTriggeredAction of triggeredActions) { + const oldValue = oldTriggeredActions.get(newTriggeredAction._id) + if (oldValue) { + // Update an existing TriggeredAction + newDocIds.push(oldValue._id) + bulkOps.push({ + updateOne: { + filter: { + _id: oldValue._id, + }, + update: { + $set: { + _rank: newTriggeredAction._rank, + name: newTriggeredAction.name, + 'triggersWithOverrides.defaults': newTriggeredAction.triggers, + 'actionsWithOverrides.defaults': newTriggeredAction.actions, + }, + }, + }, + }) + } else { + // Insert a new TriggeredAction + const newDocId = getRandomId() + newDocIds.push(newDocId) + bulkOps.push({ + insertOne: { + document: literal>({ + _id: newDocId, + _rank: newTriggeredAction._rank, + name: newTriggeredAction.name, + showStyleBaseId: showStyleBaseId, + blueprintUniqueId: newTriggeredAction._id, + triggersWithOverrides: wrapDefaultObject(newTriggeredAction.triggers), + 
actionsWithOverrides: wrapDefaultObject(newTriggeredAction.actions), + styleClassNames: newTriggeredAction.styleClassNames, + }), + }, + }) + } + } + + // Remove any removed TriggeredAction + // Future: should this orphan them or something? Will that cause issues if they get re-added? + bulkOps.push({ + deleteMany: { + filter: { + showStyleBaseId: showStyleBaseId, + blueprintUniqueId: { $ne: null }, + _id: { $nin: newDocIds }, + }, + }, + }) + + await TriggeredActions.bulkWriteAsync(bulkOps) +} diff --git a/meteor/server/migration/upgrades/showStyleBase.ts b/meteor/server/migration/upgrades/showStyleBase.ts index d2281043ae3..bcbe0156170 100644 --- a/meteor/server/migration/upgrades/showStyleBase.ts +++ b/meteor/server/migration/upgrades/showStyleBase.ts @@ -3,25 +3,21 @@ import { JSONBlobParse, ShowStyleBlueprintManifest, } from '@sofie-automation/blueprints-integration' -import { ShowStyleBaseId, TriggeredActionId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { normalizeArray, normalizeArrayToMap, getRandomId, literal, Complete } from '@sofie-automation/corelib/dist/lib' -import { - applyAndValidateOverrides, - wrapDefaultObject, -} from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { ShowStyleBaseId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { normalizeArray } from '@sofie-automation/corelib/dist/lib' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage' import { BlueprintValidateConfigForStudioResult } from '@sofie-automation/corelib/dist/worker/studio' import { Meteor } from 'meteor/meteor' -import { Blueprints, ShowStyleBases, TriggeredActions } from '../../collections' +import { Blueprints, ShowStyleBases } from '../../collections' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' -import { 
DBTriggeredActions } from '@sofie-automation/meteor-lib/dist/collections/TriggeredActions' import { evalBlueprint } from '../../api/blueprints/cache' import { logger } from '../../logging' import { CommonContext } from './context' -import type { AnyBulkWriteOperation } from 'mongodb' import { FixUpBlueprintConfigContext } from '@sofie-automation/corelib/dist/fixUpBlueprintConfig/context' import { Blueprint } from '@sofie-automation/corelib/dist/dataModel/Blueprint' import { BlueprintFixUpConfigMessage } from '@sofie-automation/meteor-lib/dist/api/migration' +import { updateTriggeredActionsForShowStyleBaseId } from './lib' export async function fixupConfigForShowStyleBase( showStyleBaseId: ShowStyleBaseId @@ -100,7 +96,7 @@ export async function validateConfigForShowStyleBase( throwIfNeedsFixupConfigRunning(showStyleBase, blueprint, blueprintManifest) const blueprintContext = new CommonContext( - 'applyConfig', + 'validateConfig', `showStyleBase:${showStyleBaseId},blueprint:${blueprint._id}` ) const rawBlueprintConfig = applyAndValidateOverrides(showStyleBase.blueprintConfigWithOverrides).obj @@ -146,69 +142,7 @@ export async function runUpgradeForShowStyleBase(showStyleBaseId: ShowStyleBaseI }, }) - const oldTriggeredActionsArray = await TriggeredActions.findFetchAsync({ - showStyleBaseId: showStyleBaseId, - blueprintUniqueId: { $ne: null }, - }) - const oldTriggeredActions = normalizeArrayToMap(oldTriggeredActionsArray, 'blueprintUniqueId') - - const newDocIds: TriggeredActionId[] = [] - const bulkOps: AnyBulkWriteOperation[] = [] - - for (const newTriggeredAction of result.triggeredActions) { - const oldValue = oldTriggeredActions.get(newTriggeredAction._id) - if (oldValue) { - // Update an existing TriggeredAction - newDocIds.push(oldValue._id) - bulkOps.push({ - updateOne: { - filter: { - _id: oldValue._id, - }, - update: { - $set: { - _rank: newTriggeredAction._rank, - name: newTriggeredAction.name, - 'triggersWithOverrides.defaults': 
newTriggeredAction.triggers, - 'actionsWithOverrides.defaults': newTriggeredAction.actions, - }, - }, - }, - }) - } else { - // Insert a new TriggeredAction - const newDocId = getRandomId() - newDocIds.push(newDocId) - bulkOps.push({ - insertOne: { - document: literal>({ - _id: newDocId, - _rank: newTriggeredAction._rank, - name: newTriggeredAction.name, - showStyleBaseId: showStyleBaseId, - blueprintUniqueId: newTriggeredAction._id, - triggersWithOverrides: wrapDefaultObject(newTriggeredAction.triggers), - actionsWithOverrides: wrapDefaultObject(newTriggeredAction.actions), - styleClassNames: newTriggeredAction.styleClassNames, - }), - }, - }) - } - } - - // Remove any removed TriggeredAction - // Future: should this orphan them or something? Will that cause issues if they get re-added? - bulkOps.push({ - deleteMany: { - filter: { - showStyleBaseId: showStyleBaseId, - blueprintUniqueId: { $ne: null }, - _id: { $nin: newDocIds }, - }, - }, - }) - - await TriggeredActions.bulkWriteAsync(bulkOps) + await updateTriggeredActionsForShowStyleBaseId(showStyleBaseId, result.triggeredActions) } async function loadShowStyleAndBlueprint(showStyleBaseId: ShowStyleBaseId) { diff --git a/meteor/server/migration/upgrades/system.ts b/meteor/server/migration/upgrades/system.ts new file mode 100644 index 00000000000..15ae90bea81 --- /dev/null +++ b/meteor/server/migration/upgrades/system.ts @@ -0,0 +1,108 @@ +import { Meteor } from 'meteor/meteor' +import { logger } from '../../logging' +import { Blueprints, CoreSystem } from '../../collections' +import { + BlueprintManifestType, + BlueprintResultApplySystemConfig, + IBlueprintTriggeredActions, + SystemBlueprintManifest, +} from '@sofie-automation/blueprints-integration' +import { evalBlueprint } from '../../api/blueprints/cache' +import { CoreSystemApplyConfigContext } from './context' +import { updateTriggeredActionsForShowStyleBaseId } from './lib' +import { CoreSystemId } from '@sofie-automation/corelib/dist/dataModel/Ids' 
+import { DEFAULT_CORE_TRIGGERS } from './defaultSystemActionTriggers' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { ICoreSystemSettings } from '@sofie-automation/shared-lib/dist/core/model/CoreSystemSettings' + +export async function runUpgradeForCoreSystem(coreSystemId: CoreSystemId): Promise { + logger.info(`Running upgrade for CoreSystem`) + + const { coreSystem, blueprint, blueprintManifest } = await loadCoreSystemAndBlueprint(coreSystemId) + + let result: BlueprintResultApplySystemConfig + + if (blueprintManifest && typeof blueprintManifest.applyConfig === 'function') { + const blueprintContext = new CoreSystemApplyConfigContext( + 'applyConfig', + `coreSystem:${coreSystem._id},blueprint:${blueprint.blueprintId}` + ) + + result = blueprintManifest.applyConfig(blueprintContext) + } else { + // Ensure some defaults are populated when no blueprint method is present + result = generateDefaultSystemConfig() + } + + const coreSystemSettings: ICoreSystemSettings = result.settings + + await CoreSystem.updateAsync(coreSystemId, { + $set: { + 'settingsWithOverrides.defaults': coreSystemSettings, + lastBlueprintConfig: { + blueprintHash: blueprint?.blueprintHash ?? protectString('default'), + blueprintId: blueprint?._id ?? 
protectString('default'), + blueprintConfigPresetId: undefined, + config: {}, + }, + }, + }) + + await updateTriggeredActionsForShowStyleBaseId(null, result.triggeredActions) +} + +async function loadCoreSystemAndBlueprint(coreSystemId: CoreSystemId) { + const coreSystem = await CoreSystem.findOneAsync(coreSystemId) + if (!coreSystem) throw new Meteor.Error(404, `CoreSystem "${coreSystemId}" not found!`) + + if (!coreSystem.blueprintId) { + // No blueprint is valid + return { + coreSystem, + blueprint: undefined, + blueprintHash: undefined, + } + } + + // if (!showStyleBase.blueprintConfigPresetId) throw new Meteor.Error(500, 'ShowStyleBase is missing config preset') + + const blueprint = await Blueprints.findOneAsync({ + _id: coreSystem.blueprintId, + blueprintType: BlueprintManifestType.SYSTEM, + }) + if (!blueprint) throw new Meteor.Error(404, `Blueprint "${coreSystem.blueprintId}" not found!`) + + if (!blueprint.blueprintHash) throw new Meteor.Error(500, 'Blueprint is not valid') + + const blueprintManifest = evalBlueprint(blueprint) as SystemBlueprintManifest + + return { + coreSystem, + blueprint, + blueprintManifest, + } +} + +function generateDefaultSystemConfig(): BlueprintResultApplySystemConfig { + return { + settings: { + cron: { + casparCGRestart: { + enabled: true, + }, + storeRundownSnapshots: { + enabled: false, + }, + }, + support: { + message: '', + }, + evaluationsMessage: { + enabled: false, + heading: '', + message: '', + }, + }, + triggeredActions: Object.values(DEFAULT_CORE_TRIGGERS), + } +} diff --git a/meteor/server/publications/blueprintUpgradeStatus/checkStatus.ts b/meteor/server/publications/blueprintUpgradeStatus/checkStatus.ts index 5567ab0fb38..5fde9762ab8 100644 --- a/meteor/server/publications/blueprintUpgradeStatus/checkStatus.ts +++ b/meteor/server/publications/blueprintUpgradeStatus/checkStatus.ts @@ -14,12 +14,13 @@ import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowSt import { DBStudio } from 
'@sofie-automation/corelib/dist/dataModel/Studio' import { joinObjectPathFragments, objectPathGet } from '@sofie-automation/corelib/dist/lib' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { generateTranslation } from '../../lib/tempLib' +import { generateTranslation, protectString } from '../../lib/tempLib' import { logger } from '../../logging' -import { ShowStyleBaseFields, StudioFields } from './reactiveContentCache' +import { CoreSystemFields, ShowStyleBaseFields, StudioFields } from './reactiveContentCache' import _ from 'underscore' import { UIBlueprintUpgradeStatusBase } from '@sofie-automation/meteor-lib/dist/api/upgradeStatus' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { ICoreSystem } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' export interface BlueprintMapEntry { _id: BlueprintId @@ -39,7 +40,7 @@ export function checkDocUpgradeStatus( // Studio blueprint is missing/invalid return { invalidReason: generateTranslation('Invalid blueprint: "{{blueprintId}}"', { - blueprintId: doc.blueprintId, + blueprintId: doc.blueprintId ?? 
'undefined', }), pendingRunOfFixupFunction: false, changes: [], @@ -101,7 +102,7 @@ export function checkDocUpgradeStatus( changes.push(generateTranslation('Blueprint has a new version')) } - if (doc.lastBlueprintConfig) { + if (doc.lastBlueprintConfig && doc.blueprintConfigWithOverrides) { // Check if the config blob has changed since last run const newConfig = applyAndValidateOverrides(doc.blueprintConfigWithOverrides).obj const oldConfig = doc.lastBlueprintConfig.config @@ -135,6 +136,65 @@ export function checkDocUpgradeStatus( } } +export function checkSystemUpgradeStatus( + blueprintMap: Map, + doc: Pick +): Pick { + const changes: ITranslatableMessage[] = [] + + // Check the blueprintId is valid + if (doc.blueprintId) { + const blueprint = blueprintMap.get(doc.blueprintId) + if (!blueprint || !blueprint.configPresets) { + // Studio blueprint is missing/invalid + return { + invalidReason: generateTranslation('Invalid blueprint: "{{blueprintId}}"', { + blueprintId: doc.blueprintId ?? 'undefined', + }), + pendingRunOfFixupFunction: false, + changes: [], + } + } + + // Some basic property checks + if (!doc.lastBlueprintConfig) { + changes.push(generateTranslation('Config has not been applied before')) + } else if (doc.lastBlueprintConfig.blueprintId !== doc.blueprintId) { + changes.push( + generateTranslation('Blueprint has been changed. 
From "{{ oldValue }}", to "{{ newValue }}"', { + oldValue: doc.lastBlueprintConfig.blueprintId || '', + newValue: doc.blueprintId || '', + }) + ) + } else if (doc.lastBlueprintConfig.blueprintHash !== blueprint.blueprintHash) { + changes.push(generateTranslation('Blueprint has a new version')) + } + } else { + // No blueprint assigned + + const defaultId = protectString('default') + + // Some basic property checks + if (!doc.lastBlueprintConfig) { + changes.push(generateTranslation('Config has not been applied before')) + } else if (doc.lastBlueprintConfig.blueprintId !== defaultId) { + changes.push( + generateTranslation('Blueprint has been changed. From "{{ oldValue }}", to "{{ newValue }}"', { + oldValue: doc.lastBlueprintConfig.blueprintId || '', + newValue: defaultId, + }) + ) + } else if (doc.lastBlueprintConfig.blueprintHash !== defaultId) { + changes.push(generateTranslation('Blueprint has a new version')) + } + } + + return { + changes, + pendingRunOfFixupFunction: false, + } +} + /** * This is a slightly crude diffing of objects based on a jsonschema. Only keys in the schema will be compared. 
* For now this has some limitations such as not looking inside of arrays, but this could be expanded later on diff --git a/meteor/server/publications/blueprintUpgradeStatus/publication.ts b/meteor/server/publications/blueprintUpgradeStatus/publication.ts index 568f9b07567..9ea8d72fe5d 100644 --- a/meteor/server/publications/blueprintUpgradeStatus/publication.ts +++ b/meteor/server/publications/blueprintUpgradeStatus/publication.ts @@ -13,9 +13,15 @@ import { import { logger } from '../../logging' import { resolveCredentials } from '../../security/lib/credentials' import { NoSecurityReadAccess } from '../../security/noSecurity' -import { ContentCache, createReactiveContentCache, ShowStyleBaseFields, StudioFields } from './reactiveContentCache' +import { + ContentCache, + CoreSystemFields, + createReactiveContentCache, + ShowStyleBaseFields, + StudioFields, +} from './reactiveContentCache' import { UpgradesContentObserver } from './upgradesContentObserver' -import { BlueprintMapEntry, checkDocUpgradeStatus } from './checkStatus' +import { BlueprintMapEntry, checkDocUpgradeStatus, checkSystemUpgradeStatus } from './checkStatus' import { BlueprintManifestType } from '@sofie-automation/blueprints-integration' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' @@ -23,6 +29,7 @@ import { UIBlueprintUpgradeStatus, UIBlueprintUpgradeStatusId, } from '@sofie-automation/meteor-lib/dist/api/upgradeStatus' +import { ICoreSystem } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' type BlueprintUpgradeStatusArgs = Record @@ -33,6 +40,7 @@ export interface BlueprintUpgradeStatusState { interface BlueprintUpgradeStatusUpdateProps { newCache: ContentCache + invalidateSystem: boolean invalidateStudioIds: StudioId[] invalidateShowStyleBaseIds: ShowStyleBaseId[] invalidateBlueprintIds: BlueprintId[] @@ -54,6 +62,11 @@ async function 
setupBlueprintUpgradeStatusPublicationObservers( return [ mongoObserver, + cache.CoreSystem.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateSystem: true }), + changed: () => triggerUpdate({ invalidateSystem: true }), + removed: () => triggerUpdate({ invalidateSystem: true }), + }), cache.Studios.find({}).observeChanges({ added: (id) => triggerUpdate({ invalidateStudioIds: [protectString(id)] }), changed: (id) => triggerUpdate({ invalidateStudioIds: [protectString(id)] }), @@ -72,7 +85,10 @@ async function setupBlueprintUpgradeStatusPublicationObservers( ] } -function getDocumentId(type: 'studio' | 'showStyle', id: ProtectedString): UIBlueprintUpgradeStatusId { +function getDocumentId( + type: 'coreSystem' | 'studio' | 'showStyle', + id: ProtectedString +): UIBlueprintUpgradeStatusId { return protectString(`${type}:${id}`) } @@ -100,6 +116,7 @@ export async function manipulateBlueprintUpgradeStatusPublicationData( const studioBlueprintsMap = new Map() const showStyleBlueprintsMap = new Map() + const systemBlueprintsMap = new Map() state.contentCache.Blueprints.find({}).forEach((blueprint) => { switch (blueprint.blueprintType) { case BlueprintManifestType.SHOWSTYLE: @@ -120,6 +137,15 @@ export async function manipulateBlueprintUpgradeStatusPublicationData( hasFixUpFunction: blueprint.hasFixUpFunction, }) break + case BlueprintManifestType.SYSTEM: + systemBlueprintsMap.set(blueprint._id, { + _id: blueprint._id, + configPresets: {}, + configSchema: undefined, // TODO + blueprintHash: blueprint.blueprintHash, + hasFixUpFunction: false, + }) + break // TODO - default? 
} }) @@ -136,6 +162,10 @@ export async function manipulateBlueprintUpgradeStatusPublicationData( state.contentCache.ShowStyleBases.find({}).forEach((showStyleBase) => { updateShowStyleUpgradeStatus(collection, showStyleBlueprintsMap, showStyleBase) }) + + state.contentCache.CoreSystem.find({}).forEach((coreSystem) => { + updateCoreSystemUpgradeStatus(collection, systemBlueprintsMap, coreSystem) + }) } else { const regenerateForStudioIds = new Set(updateProps.invalidateStudioIds) const regenerateForShowStyleBaseIds = new Set(updateProps.invalidateShowStyleBaseIds) @@ -181,9 +211,31 @@ export async function manipulateBlueprintUpgradeStatusPublicationData( collection.remove(getDocumentId('showStyle', showStyleBaseId)) } } + + if (updateProps.invalidateSystem) { + state.contentCache.CoreSystem.find({}).forEach((coreSystem) => { + updateCoreSystemUpgradeStatus(collection, systemBlueprintsMap, coreSystem) + }) + } } } +function updateCoreSystemUpgradeStatus( + collection: CustomPublishCollection, + blueprintsMap: Map, + coreSystem: Pick +) { + const status = checkSystemUpgradeStatus(blueprintsMap, coreSystem) + + collection.replace({ + ...status, + _id: getDocumentId('coreSystem', coreSystem._id), + documentType: 'coreSystem', + documentId: coreSystem._id, + name: coreSystem.name ?? 
'System', + }) +} + function updateStudioUpgradeStatus( collection: CustomPublishCollection, blueprintsMap: Map, diff --git a/meteor/server/publications/blueprintUpgradeStatus/reactiveContentCache.ts b/meteor/server/publications/blueprintUpgradeStatus/reactiveContentCache.ts index 501e6780623..1aa34747204 100644 --- a/meteor/server/publications/blueprintUpgradeStatus/reactiveContentCache.ts +++ b/meteor/server/publications/blueprintUpgradeStatus/reactiveContentCache.ts @@ -4,6 +4,25 @@ import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mo import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { Blueprint } from '@sofie-automation/corelib/dist/dataModel/Blueprint' +import { ICoreSystem } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' + +export type CoreSystemFields = + | '_id' + | 'blueprintId' + | 'blueprintConfigPresetId' + | 'lastBlueprintConfig' + | 'blueprintConfigWithOverrides' + | 'lastBlueprintFixUpHash' + | 'name' +export const coreSystemFieldsSpecifier = literal>>({ + _id: 1, + blueprintId: 1, + blueprintConfigPresetId: 1, + lastBlueprintConfig: 1, + lastBlueprintFixUpHash: 1, + blueprintConfigWithOverrides: 1, + name: 1, +}) export type StudioFields = | '_id' @@ -64,6 +83,7 @@ export const blueprintFieldSpecifier = literal> Studios: ReactiveCacheCollection> ShowStyleBases: ReactiveCacheCollection> Blueprints: ReactiveCacheCollection> @@ -71,6 +91,7 @@ export interface ContentCache { export function createReactiveContentCache(): ContentCache { const cache: ContentCache = { + CoreSystem: new ReactiveCacheCollection>('coreSystem'), Studios: new ReactiveCacheCollection>('studios'), ShowStyleBases: new ReactiveCacheCollection>('showStyleBases'), Blueprints: new ReactiveCacheCollection>('blueprints'), diff --git a/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts 
b/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts index a88ba8575b9..e8f8d6281a6 100644 --- a/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts +++ b/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts @@ -3,10 +3,11 @@ import { logger } from '../../logging' import { blueprintFieldSpecifier, ContentCache, + coreSystemFieldsSpecifier, showStyleFieldSpecifier, studioFieldSpecifier, } from './reactiveContentCache' -import { Blueprints, ShowStyleBases, Studios } from '../../collections' +import { Blueprints, CoreSystem, ShowStyleBases, Studios } from '../../collections' import { waitForAllObserversReady } from '../lib/lib' export class UpgradesContentObserver { @@ -22,6 +23,9 @@ export class UpgradesContentObserver { logger.silly(`Creating UpgradesContentObserver`) const observers = await waitForAllObserversReady([ + CoreSystem.observeChanges({}, cache.CoreSystem.link(), { + projection: coreSystemFieldsSpecifier, + }), Studios.observeChanges({}, cache.Studios.link(), { projection: studioFieldSpecifier, }), diff --git a/meteor/server/publications/lib/quickLoop.ts b/meteor/server/publications/lib/quickLoop.ts index 967d4ac745b..6178ddad6c8 100644 --- a/meteor/server/publications/lib/quickLoop.ts +++ b/meteor/server/publications/lib/quickLoop.ts @@ -1,16 +1,16 @@ import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBRundownPlaylist, - ForceQuickLoopAutoNext, QuickLoopMarker, QuickLoopMarkerType, } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { ForceQuickLoopAutoNext } from '@sofie-automation/shared-lib/dist/core/model/StudioSettings' import { MarkerPosition, compareMarkerPositions } from '@sofie-automation/corelib/dist/playout/playlist' import { ProtectedString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { DEFAULT_FALLBACK_PART_DURATION } from '@sofie-automation/shared-lib/dist/core/constants' import { 
getCurrentTime } from '../../lib/lib' import { generateTranslation } from '@sofie-automation/corelib/dist/lib' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { IStudioSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { ReadonlyObjectDeep } from 'type-fest/source/readonly-deep' @@ -47,7 +47,7 @@ export function modifyPartForQuickLoop( segmentRanks: Record, rundownRanks: Record, playlist: Pick, - studio: Pick, + studioSettings: IStudioSettings, quickLoopStartPosition: MarkerPosition | undefined, quickLoopEndPosition: MarkerPosition | undefined, canSetAutoNext = () => true @@ -60,7 +60,7 @@ export function modifyPartForQuickLoop( compareMarkerPositions(quickLoopStartPosition, partPosition) >= 0 && compareMarkerPositions(partPosition, quickLoopEndPosition) >= 0 - const fallbackPartDuration = studio.settings.fallbackPartDuration ?? DEFAULT_FALLBACK_PART_DURATION + const fallbackPartDuration = studioSettings.fallbackPartDuration ?? DEFAULT_FALLBACK_PART_DURATION if (isLoopingOverriden && (part.expectedDuration ?? 
0) < fallbackPartDuration) { if (playlist.quickLoop?.forceAutoNext === ForceQuickLoopAutoNext.ENABLED_FORCING_MIN_DURATION) { @@ -82,7 +82,7 @@ export function modifyPartInstanceForQuickLoop( segmentRanks: Record, rundownRanks: Record, playlist: Pick, - studio: Pick, + studioSettings: IStudioSettings, quickLoopStartPosition: MarkerPosition | undefined, quickLoopEndPosition: MarkerPosition | undefined ): void { @@ -107,7 +107,7 @@ export function modifyPartInstanceForQuickLoop( segmentRanks, rundownRanks, playlist, - studio, + studioSettings, quickLoopStartPosition, quickLoopEndPosition, canAutoNext // do not adjust the part instance if we have passed the time where we can still enable auto next diff --git a/meteor/server/publications/partInstancesUI/publication.ts b/meteor/server/publications/partInstancesUI/publication.ts index 01a21711f21..11e017ae164 100644 --- a/meteor/server/publications/partInstancesUI/publication.ts +++ b/meteor/server/publications/partInstancesUI/publication.ts @@ -106,7 +106,7 @@ async function setupUIPartInstancesPublicationObservers( changed: () => triggerUpdate({ invalidateQuickLoop: true }), removed: () => triggerUpdate({ invalidateQuickLoop: true }), }), - cache.Studios.find({}).observeChanges({ + cache.StudioSettings.find({}).observeChanges({ added: () => triggerUpdate({ invalidateQuickLoop: true }), changed: () => triggerUpdate({ invalidateQuickLoop: true }), removed: () => triggerUpdate({ invalidateQuickLoop: true }), @@ -148,8 +148,8 @@ export async function manipulateUIPartInstancesPublicationData( const playlist = state.contentCache.RundownPlaylists.findOne({}) if (!playlist) return - const studio = state.contentCache.Studios.findOne({}) - if (!studio) return + const studioSettings = state.contentCache.StudioSettings.findOne({}) + if (!studioSettings) return const rundownRanks = stringsToIndexLookup(playlist.rundownIdsInOrder as unknown as string[]) const segmentRanks = extractRanks(state.contentCache.Segments.find({}).fetch()) 
@@ -191,7 +191,7 @@ export async function manipulateUIPartInstancesPublicationData( segmentRanks, rundownRanks, playlist, - studio, + studioSettings.settings, quickLoopStartPosition, quickLoopEndPosition ) diff --git a/meteor/server/publications/partInstancesUI/reactiveContentCache.ts b/meteor/server/publications/partInstancesUI/reactiveContentCache.ts index b7c01b627ab..aac741cbb4f 100644 --- a/meteor/server/publications/partInstancesUI/reactiveContentCache.ts +++ b/meteor/server/publications/partInstancesUI/reactiveContentCache.ts @@ -3,9 +3,10 @@ import { ReactiveCacheCollection } from '../lib/ReactiveCacheCollection' import { literal } from '@sofie-automation/corelib/dist/lib' import { MongoFieldSpecifierOnesStrict, MongoFieldSpecifierZeroes } from '@sofie-automation/corelib/dist/mongo' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { DBStudio, IStudioSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' +import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' export type RundownPlaylistCompact = Pick export const rundownPlaylistFieldSpecifier = literal>({ @@ -36,14 +37,19 @@ export const partInstanceFieldSpecifier = literal>>({ _id: 1, - settings: 1, + settingsWithOverrides: 1, }) +export interface StudioSettingsDoc { + _id: StudioId + settings: IStudioSettings +} + export interface ContentCache { - Studios: ReactiveCacheCollection> + StudioSettings: ReactiveCacheCollection Segments: ReactiveCacheCollection> Parts: ReactiveCacheCollection> PartInstances: ReactiveCacheCollection> @@ -52,7 +58,7 @@ export interface ContentCache { export function createReactiveContentCache(): ContentCache { const cache: ContentCache = { - Studios: new 
ReactiveCacheCollection>('studios'), + StudioSettings: new ReactiveCacheCollection('studioSettings'), Segments: new ReactiveCacheCollection>('segments'), Parts: new ReactiveCacheCollection>('parts'), PartInstances: new ReactiveCacheCollection>('partInstances'), diff --git a/meteor/server/publications/partInstancesUI/rundownContentObserver.ts b/meteor/server/publications/partInstancesUI/rundownContentObserver.ts index a2f14e6c447..2acff0301c0 100644 --- a/meteor/server/publications/partInstancesUI/rundownContentObserver.ts +++ b/meteor/server/publications/partInstancesUI/rundownContentObserver.ts @@ -7,10 +7,21 @@ import { partInstanceFieldSpecifier, rundownPlaylistFieldSpecifier, segmentFieldSpecifier, + StudioFields, studioFieldSpecifier, + StudioSettingsDoc, } from './reactiveContentCache' import { PartInstances, Parts, RundownPlaylists, Segments, Studios } from '../../collections' import { waitForAllObserversReady } from '../lib/lib' +import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' + +function convertStudioSettingsDoc(doc: Pick): StudioSettingsDoc { + return { + _id: doc._id, + settings: applyAndValidateOverrides(doc.settingsWithOverrides).obj, + } +} export class RundownContentObserver { readonly #cache: ContentCache @@ -31,11 +42,23 @@ export class RundownContentObserver { logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) const observers = await waitForAllObserversReady([ - Studios.observeChanges( + Studios.observe( { _id: studioId, }, - cache.Studios.link(), + { + added: (doc) => { + const newDoc = convertStudioSettingsDoc(doc) + cache.StudioSettings.upsert(doc._id, { $set: newDoc as Partial }) + }, + changed: (doc) => { + const newDoc = convertStudioSettingsDoc(doc) + cache.StudioSettings.upsert(doc._id, { $set: newDoc as Partial }) + }, + removed: (doc) => { + 
cache.StudioSettings.remove(doc._id) + }, + }, { fields: studioFieldSpecifier, } diff --git a/meteor/server/publications/partsUI/publication.ts b/meteor/server/publications/partsUI/publication.ts index 31af1ed0319..6e5b0515536 100644 --- a/meteor/server/publications/partsUI/publication.ts +++ b/meteor/server/publications/partsUI/publication.ts @@ -93,7 +93,7 @@ async function setupUIPartsPublicationObservers( changed: () => triggerUpdate({ invalidateQuickLoop: true }), removed: () => triggerUpdate({ invalidateQuickLoop: true }), }), - cache.Studios.find({}).observeChanges({ + cache.StudioSettings.find({}).observeChanges({ added: () => triggerUpdate({ invalidateQuickLoop: true }), changed: () => triggerUpdate({ invalidateQuickLoop: true }), removed: () => triggerUpdate({ invalidateQuickLoop: true }), @@ -135,8 +135,8 @@ export async function manipulateUIPartsPublicationData( const playlist = state.contentCache.RundownPlaylists.findOne({}) if (!playlist) return - const studio = state.contentCache.Studios.findOne({}) - if (!studio) return + const studioSettings = state.contentCache.StudioSettings.findOne({}) + if (!studioSettings) return const rundownRanks = stringsToIndexLookup(playlist.rundownIdsInOrder as unknown as string[]) const segmentRanks = extractRanks(state.contentCache.Segments.find({}).fetch()) @@ -178,7 +178,7 @@ export async function manipulateUIPartsPublicationData( segmentRanks, rundownRanks, playlist, - studio, + studioSettings.settings, quickLoopStartPosition, quickLoopEndPosition ) diff --git a/meteor/server/publications/partsUI/reactiveContentCache.ts b/meteor/server/publications/partsUI/reactiveContentCache.ts index 13361fd51c5..12d9423e8e4 100644 --- a/meteor/server/publications/partsUI/reactiveContentCache.ts +++ b/meteor/server/publications/partsUI/reactiveContentCache.ts @@ -4,7 +4,8 @@ import { ReactiveCacheCollection } from '../lib/ReactiveCacheCollection' import { literal } from '@sofie-automation/corelib/dist/lib' import { 
MongoFieldSpecifierOnesStrict, MongoFieldSpecifierZeroes } from '@sofie-automation/corelib/dist/mongo' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { DBStudio, IStudioSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' export type RundownPlaylistCompact = Pick export const rundownPlaylistFieldSpecifier = literal>({ @@ -26,14 +27,19 @@ export const partFieldSpecifier = literal>>({ _id: 1, - settings: 1, + settingsWithOverrides: 1, }) +export interface StudioSettingsDoc { + _id: StudioId + settings: IStudioSettings +} + export interface ContentCache { - Studios: ReactiveCacheCollection> + StudioSettings: ReactiveCacheCollection Segments: ReactiveCacheCollection> Parts: ReactiveCacheCollection> RundownPlaylists: ReactiveCacheCollection @@ -41,7 +47,7 @@ export interface ContentCache { export function createReactiveContentCache(): ContentCache { const cache: ContentCache = { - Studios: new ReactiveCacheCollection>('studios'), + StudioSettings: new ReactiveCacheCollection('studioSettings'), Segments: new ReactiveCacheCollection>('segments'), Parts: new ReactiveCacheCollection>('parts'), RundownPlaylists: new ReactiveCacheCollection('rundownPlaylists'), diff --git a/meteor/server/publications/partsUI/rundownContentObserver.ts b/meteor/server/publications/partsUI/rundownContentObserver.ts index ee7e92c7d62..8a8032ecf5c 100644 --- a/meteor/server/publications/partsUI/rundownContentObserver.ts +++ b/meteor/server/publications/partsUI/rundownContentObserver.ts @@ -6,10 +6,21 @@ import { partFieldSpecifier, rundownPlaylistFieldSpecifier, segmentFieldSpecifier, + StudioFields, studioFieldSpecifier, + StudioSettingsDoc, } from './reactiveContentCache' import { Parts, RundownPlaylists, Segments, Studios } from '../../collections' import { waitForAllObserversReady 
} from '../lib/lib' +import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' + +function convertStudioSettingsDoc(doc: Pick): StudioSettingsDoc { + return { + _id: doc._id, + settings: applyAndValidateOverrides(doc.settingsWithOverrides).obj, + } +} export class RundownContentObserver { readonly #cache: ContentCache @@ -29,11 +40,23 @@ export class RundownContentObserver { logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) const observers = await waitForAllObserversReady([ - Studios.observeChanges( + Studios.observe( { _id: studioId, }, - cache.Studios.link(), + { + added: (doc) => { + const newDoc = convertStudioSettingsDoc(doc) + cache.StudioSettings.upsert(doc._id, { $set: newDoc as Partial }) + }, + changed: (doc) => { + const newDoc = convertStudioSettingsDoc(doc) + cache.StudioSettings.upsert(doc._id, { $set: newDoc as Partial }) + }, + removed: (doc) => { + cache.StudioSettings.remove(doc._id) + }, + }, { fields: studioFieldSpecifier, } diff --git a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts index 536c41fbed9..159830fbc5c 100644 --- a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts +++ b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts @@ -185,7 +185,7 @@ export type PieceContentStatusPiece = Pick { + extends Pick { /** Mappings between the physical devices / outputs and logical ones */ mappings: MappingsExt /** Route sets with overrides */ @@ -194,6 +194,8 @@ export interface PieceContentStatusStudio * (These are used by the Package Manager and the Expected Packages) */ packageContainers: Record + + settings: IStudioSettings } export async function checkPieceContentStatusAndDependencies( diff --git 
a/meteor/server/publications/pieceContentStatusUI/common.ts b/meteor/server/publications/pieceContentStatusUI/common.ts index 591f1eb16ec..32cf9328d10 100644 --- a/meteor/server/publications/pieceContentStatusUI/common.ts +++ b/meteor/server/publications/pieceContentStatusUI/common.ts @@ -13,7 +13,7 @@ import { PieceContentStatusStudio } from './checkPieceContentStatus' export type StudioFields = | '_id' - | 'settings' + | 'settingsWithOverrides' | 'packageContainersWithOverrides' | 'previewContainerIds' | 'thumbnailContainerIds' @@ -21,7 +21,7 @@ export type StudioFields = | 'routeSetsWithOverrides' export const studioFieldSpecifier = literal>>({ _id: 1, - settings: 1, + settingsWithOverrides: 1, packageContainersWithOverrides: 1, previewContainerIds: 1, thumbnailContainerIds: 1, @@ -112,7 +112,7 @@ export async function fetchStudio(studioId: StudioId): Promise): UIStudio { name: studio.name, mappings: applyAndValidateOverrides(studio.mappingsWithOverrides).obj, - settings: studio.settings, + settings: applyAndValidateOverrides(studio.settingsWithOverrides).obj, routeSets: applyAndValidateOverrides(studio.routeSetsWithOverrides).obj, routeSetExclusivityGroups: applyAndValidateOverrides(studio.routeSetExclusivityGroupsWithOverrides).obj, @@ -47,14 +47,14 @@ type StudioFields = | '_id' | 'name' | 'mappingsWithOverrides' - | 'settings' + | 'settingsWithOverrides' | 'routeSetsWithOverrides' | 'routeSetExclusivityGroupsWithOverrides' const fieldSpecifier = literal>>({ _id: 1, name: 1, mappingsWithOverrides: 1, - settings: 1, + settingsWithOverrides: 1, routeSetsWithOverrides: 1, routeSetExclusivityGroupsWithOverrides: 1, }) diff --git a/meteor/server/publications/system.ts b/meteor/server/publications/system.ts index e494807e859..d1f615a9dfa 100644 --- a/meteor/server/publications/system.ts +++ b/meteor/server/publications/system.ts @@ -15,16 +15,14 @@ meteorPublish(MeteorPubSub.coreSystem, async function (token: string | undefined fields: { // Include only specific 
fields in the result documents: _id: 1, - support: 1, systemInfo: 1, apm: 1, name: 1, logLevel: 1, serviceMessages: 1, blueprintId: 1, - cron: 1, logo: 1, - evaluations: 1, + settingsWithOverrides: 1, }, }) } diff --git a/packages/blueprints-integration/src/api/studio.ts b/packages/blueprints-integration/src/api/studio.ts index f4a3b6525bc..198e7c554e4 100644 --- a/packages/blueprints-integration/src/api/studio.ts +++ b/packages/blueprints-integration/src/api/studio.ts @@ -27,7 +27,8 @@ import type { StudioRouteSet, StudioRouteSetExclusivityGroup, } from '@sofie-automation/shared-lib/dist/core/model/StudioRouteSet' -import { StudioPackageContainer } from '@sofie-automation/shared-lib/dist/core/model/PackageContainer' +import type { StudioPackageContainer } from '@sofie-automation/shared-lib/dist/core/model/PackageContainer' +import type { IStudioSettings } from '@sofie-automation/shared-lib/dist/core/model/StudioSettings' export interface StudioBlueprintManifest extends BlueprintManifestBase { @@ -142,6 +143,8 @@ export interface BlueprintResultApplyStudioConfig { routeSetExclusivityGroups?: Record /** Package Containers */ packageContainers?: Record + + studioSettings?: IStudioSettings } export interface IStudioConfigPreset { diff --git a/packages/blueprints-integration/src/api/system.ts b/packages/blueprints-integration/src/api/system.ts index 078ab6ffed1..a4c544d5718 100644 --- a/packages/blueprints-integration/src/api/system.ts +++ b/packages/blueprints-integration/src/api/system.ts @@ -1,12 +1,32 @@ +import type { IBlueprintTriggeredActions } from '../triggers' import type { MigrationStepSystem } from '../migrations' import type { BlueprintManifestBase, BlueprintManifestType } from './base' +import type { ICoreSystemApplyConfigContext } from '../context/systemApplyConfigContext' +import type { ICoreSystemSettings } from '@sofie-automation/shared-lib/dist/core/model/CoreSystemSettings' export interface SystemBlueprintManifest extends BlueprintManifestBase { 
blueprintType: BlueprintManifestType.SYSTEM - /** A list of Migration steps related to the Core system */ + /** A list of Migration steps related to the Core system + * @deprecated This has been replaced with `applyConfig` + */ coreMigrations: MigrationStepSystem[] /** Translations connected to the studio (as stringified JSON) */ translations?: string + + /** + * Apply the config by generating the data to be saved into the db. + * This should be written to give a predictable and stable result, it can be called with the same config multiple times + */ + applyConfig?: ( + context: ICoreSystemApplyConfigContext + // config: TRawConfig, + ) => BlueprintResultApplySystemConfig +} + +export interface BlueprintResultApplySystemConfig { + settings: ICoreSystemSettings + + triggeredActions: IBlueprintTriggeredActions[] } diff --git a/packages/blueprints-integration/src/context/systemApplyConfigContext.ts b/packages/blueprints-integration/src/context/systemApplyConfigContext.ts new file mode 100644 index 00000000000..c1878ed75c0 --- /dev/null +++ b/packages/blueprints-integration/src/context/systemApplyConfigContext.ts @@ -0,0 +1,6 @@ +import type { IBlueprintDefaultCoreSystemTriggers } from '../triggers' +import type { ICommonContext } from './baseContext' + +export interface ICoreSystemApplyConfigContext extends ICommonContext { + getDefaultSystemActionTriggers(): IBlueprintDefaultCoreSystemTriggers +} diff --git a/packages/blueprints-integration/src/index.ts b/packages/blueprints-integration/src/index.ts index d5196e59f74..4eb1fa41a56 100644 --- a/packages/blueprints-integration/src/index.ts +++ b/packages/blueprints-integration/src/index.ts @@ -28,3 +28,5 @@ export { JSONSchema } from '@sofie-automation/shared-lib/dist/lib/JSONSchemaType export * from '@sofie-automation/shared-lib/dist/lib/JSONBlob' export * from '@sofie-automation/shared-lib/dist/lib/JSONSchemaUtil' export * from '@sofie-automation/shared-lib/dist/core/model/StudioRouteSet' +export * from 
'@sofie-automation/shared-lib/dist/core/model/StudioSettings' +export * from '@sofie-automation/shared-lib/dist/core/model/CoreSystemSettings' diff --git a/packages/blueprints-integration/src/triggers.ts b/packages/blueprints-integration/src/triggers.ts index 3b7a54db85d..c360fa65676 100644 --- a/packages/blueprints-integration/src/triggers.ts +++ b/packages/blueprints-integration/src/triggers.ts @@ -340,3 +340,27 @@ export interface IBlueprintTriggeredActions { } export { SomeActionIdentifier, ClientActions, PlayoutActions } + +export enum IBlueprintDefaultCoreSystemTriggersType { + toggleShelf = 'toggleShelf', + activateRundownPlaylist = 'activateRundownPlaylist', + activateRundownPlaylistRehearsal = 'activateRundownPlaylistRehearsal', + deactivateRundownPlaylist = 'deactivateRundownPlaylist', + take = 'take', + hold = 'hold', + holdUndo = 'holdUndo', + resetRundownPlaylist = 'resetRundownPlaylist', + disableNextPiece = 'disableNextPiece', + disableNextPieceUndo = 'disableNextPieceUndo', + createSnapshotForDebug = 'createSnapshotForDebug', + moveNextPart = 'moveNextPart', + moveNextSegment = 'moveNextSegment', + movePreviousPart = 'movePreviousPart', + movePreviousSegment = 'movePreviousSegment', + goToOnAirLine = 'goToOnAirLine', + rewindSegments = 'rewindSegments', +} + +export type IBlueprintDefaultCoreSystemTriggers = { + [key in IBlueprintDefaultCoreSystemTriggersType]: IBlueprintTriggeredActions +} diff --git a/packages/corelib/src/dataModel/Blueprint.ts b/packages/corelib/src/dataModel/Blueprint.ts index 99e025bfdf8..32ba8af5e5c 100644 --- a/packages/corelib/src/dataModel/Blueprint.ts +++ b/packages/corelib/src/dataModel/Blueprint.ts @@ -64,7 +64,7 @@ export interface Blueprint { export interface LastBlueprintConfig { blueprintId: BlueprintId blueprintHash: BlueprintHash - blueprintConfigPresetId: string + blueprintConfigPresetId: string | undefined config: IBlueprintConfig } diff --git a/packages/corelib/src/dataModel/RundownPlaylist.ts 
b/packages/corelib/src/dataModel/RundownPlaylist.ts index 241e0c38959..f9ff938c024 100644 --- a/packages/corelib/src/dataModel/RundownPlaylist.ts +++ b/packages/corelib/src/dataModel/RundownPlaylist.ts @@ -11,6 +11,7 @@ import { RundownId, } from './Ids' import { RundownPlaylistNote } from './Notes' +import { ForceQuickLoopAutoNext } from '@sofie-automation/shared-lib/dist/core/model/StudioSettings' /** Details of an ab-session requested by the blueprints in onTimelineGenerate */ export interface ABSessionInfo { @@ -80,15 +81,6 @@ export type QuickLoopMarker = | QuickLoopRundownMarker | QuickLoopPlaylistMarker -export enum ForceQuickLoopAutoNext { - /** Parts will auto-next only when explicitly set by the NRCS/blueprints */ - DISABLED = 'disabled', - /** Parts will auto-next when the expected duration is set and within range */ - ENABLED_WHEN_VALID_DURATION = 'enabled_when_valid_duration', - /** All parts will auto-next. If expected duration is undefined or low, the default display duration will be used */ - ENABLED_FORCING_MIN_DURATION = 'enabled_forcing_min_duration', -} - export interface QuickLoopProps { /** The Start marker */ start?: QuickLoopMarker diff --git a/packages/corelib/src/dataModel/Studio.ts b/packages/corelib/src/dataModel/Studio.ts index 78ee077c196..5a440f9d186 100644 --- a/packages/corelib/src/dataModel/Studio.ts +++ b/packages/corelib/src/dataModel/Studio.ts @@ -3,7 +3,6 @@ import { ObjectWithOverrides } from '../settings/objectWithOverrides' import { StudioId, OrganizationId, BlueprintId, ShowStyleBaseId, MappingsHash, PeripheralDeviceId } from './Ids' import { BlueprintHash, LastBlueprintConfig } from './Blueprint' import { MappingsExt, MappingExt } from '@sofie-automation/shared-lib/dist/core/model/Timeline' -import { ForceQuickLoopAutoNext } from './RundownPlaylist' import { ResultingMappingRoute, RouteMapping, @@ -15,8 +14,9 @@ import { StudioAbPlayerDisabling, } from '@sofie-automation/shared-lib/dist/core/model/StudioRouteSet' import { 
StudioPackageContainer } from '@sofie-automation/shared-lib/dist/core/model/PackageContainer' +import { IStudioSettings } from '@sofie-automation/shared-lib/dist/core/model/StudioSettings' -export { MappingsExt, MappingExt, MappingsHash } +export { MappingsExt, MappingExt, MappingsHash, IStudioSettings } // RouteSet functions has been moved to shared-lib: // So we need to re-export them here: @@ -32,64 +32,6 @@ export { StudioPackageContainer, } -export interface IStudioSettings { - /** The framerate (frames per second) used to convert internal timing information (in milliseconds) - * into timecodes and timecode-like strings and interpret timecode user input - * Default: 25 - */ - frameRate: number - - /** URL to endpoint where media preview are exposed */ - mediaPreviewsUrl: string // (former media_previews_url in config) - /** URLs for slack webhook to send evaluations */ - slackEvaluationUrls?: string // (former slack_evaluation in config) - - /** Media Resolutions supported by the studio for media playback */ - supportedMediaFormats?: string // (former mediaResolutions in config) - /** Audio Stream Formats supported by the studio for media playback */ - supportedAudioStreams?: string // (former audioStreams in config) - - /** Should the play from anywhere feature be enabled in this studio */ - enablePlayFromAnywhere?: boolean - - /** - * If set, forces the multi-playout-gateway mode (aka set "now"-time right away) - * for single playout-gateways setups - */ - forceMultiGatewayMode?: boolean - - /** How much extra delay to add to the Now-time (used for the "multi-playout-gateway" feature) . - * A higher value adds delays in playout, but reduces the risk of missed frames. 
*/ - multiGatewayNowSafeLatency?: number - - /** Allow resets while a rundown is on-air */ - allowRundownResetOnAir?: boolean - - /** Preserve unsynced segments psoition in the rundown, relative to the other segments */ - preserveOrphanedSegmentPositionInRundown?: boolean - - /** - * The minimum amount of time, in milliseconds, that must pass after a take before another take may be performed. - * Default: 1000 - */ - minimumTakeSpan: number - - /** Whether to allow adlib testing mode, before a Part is playing in a Playlist */ - allowAdlibTestingSegment?: boolean - - /** Should QuickLoop context menu options be available to the users. It does not affect Playlist loop enabled by the NRCS. */ - enableQuickLoop?: boolean - - /** If and how to force auto-nexting in a looping Playlist */ - forceQuickLoopAutoNext?: ForceQuickLoopAutoNext - - /** - * The duration to apply on too short Parts Within QuickLoop when ForceQuickLoopAutoNext.ENABLED_FORCING_MIN_DURATION is selected - * Default: 3000 - */ - fallbackPartDuration?: number -} - export type StudioLight = Omit /** A set of available layer groups in a given installation */ @@ -122,7 +64,7 @@ export interface DBStudio { /** Config values are used by the Blueprints */ blueprintConfigWithOverrides: ObjectWithOverrides - settings: IStudioSettings + settingsWithOverrides: ObjectWithOverrides _rundownVersionHash: string diff --git a/packages/corelib/src/studio/baseline.ts b/packages/corelib/src/studio/baseline.ts index d1766e12457..86492cf75c6 100644 --- a/packages/corelib/src/studio/baseline.ts +++ b/packages/corelib/src/studio/baseline.ts @@ -1,4 +1,4 @@ -import { StudioLight } from '../dataModel/Studio' +import { DBStudio } from '../dataModel/Studio' import { TimelineComplete } from '../dataModel/Timeline' import { ReadonlyDeep } from 'type-fest' import { unprotectString } from '../protectedString' @@ -6,7 +6,7 @@ import { Blueprint } from '../dataModel/Blueprint' export function shouldUpdateStudioBaselineInner( 
coreVersion: string, - studio: ReadonlyDeep, + studio: Pick, studioTimeline: ReadonlyDeep | null, studioBlueprint: Pick | null ): string | false { diff --git a/packages/job-worker/src/__mocks__/context.ts b/packages/job-worker/src/__mocks__/context.ts index 57a861bb9ff..d06fac51541 100644 --- a/packages/job-worker/src/__mocks__/context.ts +++ b/packages/job-worker/src/__mocks__/context.ts @@ -42,6 +42,7 @@ import { IDirectCollections } from '../db' import { ApmSpan, JobContext, + JobStudio, ProcessedShowStyleBase, ProcessedShowStyleCompound, ProcessedShowStyleVariant, @@ -56,6 +57,7 @@ import { JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlo import { removeRundownPlaylistFromDb } from '../ingest/__tests__/lib' import { processShowStyleBase, processShowStyleVariant } from '../jobs/showStyle' import { defaultStudio } from './defaultCollectionObjects' +import { convertStudioToJobStudio } from '../jobs/studio' export function setupDefaultJobEnvironment(studioId?: StudioId): MockJobContext { const { mockCollections, jobCollections } = getMockCollections() @@ -75,6 +77,7 @@ export class MockJobContext implements JobContext { #jobCollections: Readonly #mockCollections: Readonly #studio: ReadonlyDeep + #jobStudio: ReadonlyDeep #studioBlueprint: ReadonlyDeep #showStyleBlueprint: ReadonlyDeep @@ -87,6 +90,7 @@ export class MockJobContext implements JobContext { this.#jobCollections = jobCollections this.#mockCollections = mockCollections this.#studio = studio + this.#jobStudio = convertStudioToJobStudio(clone(studio)) this.#studioBlueprint = MockStudioBlueprint() this.#showStyleBlueprint = MockShowStyleBlueprint() @@ -103,7 +107,10 @@ export class MockJobContext implements JobContext { get studioId(): StudioId { return this.#studio._id } - get studio(): ReadonlyDeep { + get studio(): ReadonlyDeep { + return this.#jobStudio + } + get rawStudio(): ReadonlyDeep { return this.#studio } @@ -219,7 +226,7 @@ export class MockJobContext implements JobContext 
{ } } getShowStyleBlueprintConfig(showStyle: ReadonlyDeep): ProcessedShowStyleConfig { - return preprocessShowStyleConfig(showStyle, this.#showStyleBlueprint, this.#studio.settings) + return preprocessShowStyleConfig(showStyle, this.#showStyleBlueprint, this.studio.settings) } hackPublishTimelineToFastTrack(_newTimeline: TimelineComplete): void { @@ -244,6 +251,7 @@ export class MockJobContext implements JobContext { setStudio(studio: ReadonlyDeep): void { this.#studio = clone(studio) + this.#jobStudio = convertStudioToJobStudio(clone(studio)) } setShowStyleBlueprint(blueprint: ReadonlyDeep): void { this.#showStyleBlueprint = blueprint diff --git a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts index 5f13ba6285b..752b8f9eb6a 100644 --- a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts @@ -102,12 +102,12 @@ export function defaultStudio(_id: StudioId): DBStudio { mappingsWithOverrides: wrapDefaultObject({}), supportedShowStyleBase: [], blueprintConfigWithOverrides: wrapDefaultObject({}), - settings: { + settingsWithOverrides: wrapDefaultObject({ frameRate: 25, mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, allowAdlibTestingSegment: true, - }, + }), routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), packageContainersWithOverrides: wrapDefaultObject({}), diff --git a/packages/job-worker/src/blueprints/__tests__/config.test.ts b/packages/job-worker/src/blueprints/__tests__/config.test.ts index 2e77bd5dd87..5e142a6bec3 100644 --- a/packages/job-worker/src/blueprints/__tests__/config.test.ts +++ b/packages/job-worker/src/blueprints/__tests__/config.test.ts @@ -10,12 +10,12 @@ describe('Test blueprint config', () => { test('compileStudioConfig', () => { const jobContext = setupDefaultJobEnvironment() jobContext.setStudio({ - 
...jobContext.studio, - settings: { + ...jobContext.rawStudio, + settingsWithOverrides: wrapDefaultObject({ mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, - }, + }), blueprintConfigWithOverrides: wrapDefaultObject({ sdfsdf: 'one', another: 5 }), }) jobContext.updateStudioBlueprint({ @@ -33,12 +33,12 @@ describe('Test blueprint config', () => { test('compileStudioConfig with function', () => { const jobContext = setupDefaultJobEnvironment() jobContext.setStudio({ - ...jobContext.studio, - settings: { + ...jobContext.rawStudio, + settingsWithOverrides: wrapDefaultObject({ mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, - }, + }), blueprintConfigWithOverrides: wrapDefaultObject({ sdfsdf: 'one', another: 5 }), }) jobContext.updateStudioBlueprint({ @@ -136,7 +136,7 @@ describe('Test blueprint config', () => { const studioId = jobContext.studioId jobContext.setStudio({ - ...jobContext.studio, + ...jobContext.rawStudio, blueprintConfigWithOverrides: wrapDefaultObject({ two: 'abc', number: 99, @@ -183,7 +183,7 @@ describe('Test blueprint config', () => { }, }) jobContext.setStudio({ - ...jobContext.studio, + ...jobContext.rawStudio, supportedShowStyleBase: [showStyle._id], }) jobContext.updateShowStyleBlueprint({ diff --git a/packages/job-worker/src/blueprints/__tests__/context.test.ts b/packages/job-worker/src/blueprints/__tests__/context.test.ts index 307289c2dfc..5388b213035 100644 --- a/packages/job-worker/src/blueprints/__tests__/context.test.ts +++ b/packages/job-worker/src/blueprints/__tests__/context.test.ts @@ -1,6 +1,5 @@ import { getHash } from '@sofie-automation/corelib/dist/lib' import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' -import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { MockJobContext, setupDefaultJobEnvironment } from '../../__mocks__/context' import { getShowStyleConfigRef, getStudioConfigRef 
} from '../configRefs' import { CommonContext } from '../context/CommonContext' @@ -81,7 +80,7 @@ describe('Test blueprint api context', () => { expect(context.studio).toBe(studio) expect(context.getStudioConfig()).toBe(studioConfig) - expect(context.getStudioMappings()).toEqual(applyAndValidateOverrides(studio.mappingsWithOverrides).obj) + expect(context.getStudioMappings()).toEqual(studio.mappings) }) test('getStudioConfigRef', () => { const context = new StudioContext( diff --git a/packages/job-worker/src/blueprints/config.ts b/packages/job-worker/src/blueprints/config.ts index 77ae34389db..78b755419f0 100644 --- a/packages/job-worker/src/blueprints/config.ts +++ b/packages/job-worker/src/blueprints/config.ts @@ -11,10 +11,9 @@ import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyE import _ = require('underscore') import { logger } from '../logging' import { CommonContext } from './context' -import { DBStudio, IStudioSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { IStudioSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' import { protectString } from '@sofie-automation/corelib/dist/protectedString' -import { ProcessedShowStyleCompound, StudioCacheContext } from '../jobs' -import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { JobStudio, ProcessedShowStyleCompound, StudioCacheContext } from '../jobs' /** * Parse a string containing BlueprintConfigRefs (`${studio.studio0.myConfigField}`) to replace the refs with the current values @@ -100,10 +99,10 @@ export function compileCoreConfigValues(studioSettings: ReadonlyDeep, + studio: ReadonlyDeep, blueprint: ReadonlyDeep ): ProcessedStudioConfig { - let res: any = applyAndValidateOverrides(studio.blueprintConfigWithOverrides).obj + let res: any = studio.blueprintConfig try { if (blueprint.preprocessConfig) { diff --git a/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts 
b/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts index 6c3f8cd30d8..c71a4d33fe5 100644 --- a/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts +++ b/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts @@ -4,7 +4,6 @@ import { ITimelineEventContext, } from '@sofie-automation/blueprints-integration' import { ReadonlyDeep } from 'type-fest' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { OnGenerateTimelineObjExt } from '@sofie-automation/corelib/dist/dataModel/Timeline' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { clone } from '@sofie-automation/corelib/dist/lib' @@ -14,7 +13,7 @@ import { getCurrentTime } from '../../lib' import { PieceInstance, ResolvedPieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { ProcessedStudioConfig, ProcessedShowStyleConfig } from '../config' import _ = require('underscore') -import { ProcessedShowStyleCompound } from '../../jobs' +import { JobStudio, ProcessedShowStyleCompound } from '../../jobs' import { convertPartInstanceToBlueprints, createBlueprintQuickLoopInfo } from './lib' import { RundownContext } from './RundownContext' import { AbSessionHelper } from '../../playout/abPlayback/abSessionHelper' @@ -33,7 +32,7 @@ export class OnTimelineGenerateContext extends RundownContext implements ITimeli readonly #pieceInstanceCache = new Map>() constructor( - studio: ReadonlyDeep, + studio: ReadonlyDeep, studioBlueprintConfig: ProcessedStudioConfig, showStyleCompound: ReadonlyDeep, showStyleBlueprintConfig: ProcessedShowStyleConfig, diff --git a/packages/job-worker/src/blueprints/context/PartEventContext.ts b/packages/job-worker/src/blueprints/context/PartEventContext.ts index 880aa4b923a..34722ec3ac9 100644 --- a/packages/job-worker/src/blueprints/context/PartEventContext.ts +++ b/packages/job-worker/src/blueprints/context/PartEventContext.ts @@ -1,11 
+1,10 @@ import { IBlueprintPartInstance, IPartEventContext } from '@sofie-automation/blueprints-integration' import { ReadonlyDeep } from 'type-fest' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { getCurrentTime } from '../../lib' import { ProcessedStudioConfig, ProcessedShowStyleConfig } from '../config' -import { ProcessedShowStyleCompound } from '../../jobs' +import { JobStudio, ProcessedShowStyleCompound } from '../../jobs' import { convertPartInstanceToBlueprints } from './lib' import { RundownContext } from './RundownContext' @@ -14,7 +13,7 @@ export class PartEventContext extends RundownContext implements IPartEventContex constructor( eventName: string, - studio: ReadonlyDeep, + studio: ReadonlyDeep, studioBlueprintConfig: ProcessedStudioConfig, showStyleCompound: ReadonlyDeep, showStyleBlueprintConfig: ProcessedShowStyleConfig, diff --git a/packages/job-worker/src/blueprints/context/RundownContext.ts b/packages/job-worker/src/blueprints/context/RundownContext.ts index 8faaefeba18..c84a27ac708 100644 --- a/packages/job-worker/src/blueprints/context/RundownContext.ts +++ b/packages/job-worker/src/blueprints/context/RundownContext.ts @@ -1,10 +1,9 @@ import { IRundownContext, IBlueprintSegmentRundown } from '@sofie-automation/blueprints-integration' import { ReadonlyDeep } from 'type-fest' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { ProcessedStudioConfig, ProcessedShowStyleConfig } from '../config' -import { ProcessedShowStyleCompound } from '../../jobs' +import { JobStudio, ProcessedShowStyleCompound } from '../../jobs' import { 
convertRundownToBlueprintSegmentRundown } from './lib' import { ContextInfo } from './CommonContext' import { ShowStyleContext } from './ShowStyleContext' @@ -19,7 +18,7 @@ export class RundownContext extends ShowStyleContext implements IRundownContext constructor( contextInfo: ContextInfo, - studio: ReadonlyDeep, + studio: ReadonlyDeep, studioBlueprintConfig: ProcessedStudioConfig, showStyleCompound: ReadonlyDeep, showStyleBlueprintConfig: ProcessedShowStyleConfig, diff --git a/packages/job-worker/src/blueprints/context/RundownEventContext.ts b/packages/job-worker/src/blueprints/context/RundownEventContext.ts index b28f533063a..9852e0dd72f 100644 --- a/packages/job-worker/src/blueprints/context/RundownEventContext.ts +++ b/packages/job-worker/src/blueprints/context/RundownEventContext.ts @@ -1,15 +1,14 @@ import { IEventContext } from '@sofie-automation/blueprints-integration' import { ReadonlyDeep } from 'type-fest' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { getCurrentTime } from '../../lib' import { ProcessedStudioConfig, ProcessedShowStyleConfig } from '../config' -import { ProcessedShowStyleCompound } from '../../jobs' +import { JobStudio, ProcessedShowStyleCompound } from '../../jobs' import { RundownContext } from './RundownContext' export class RundownEventContext extends RundownContext implements IEventContext { constructor( - studio: ReadonlyDeep, + studio: ReadonlyDeep, studioBlueprintConfig: ProcessedStudioConfig, showStyleCompound: ReadonlyDeep, showStyleBlueprintConfig: ProcessedShowStyleConfig, diff --git a/packages/job-worker/src/blueprints/context/ShowStyleContext.ts b/packages/job-worker/src/blueprints/context/ShowStyleContext.ts index 7a243320b5d..1310c72f720 100644 --- a/packages/job-worker/src/blueprints/context/ShowStyleContext.ts +++ b/packages/job-worker/src/blueprints/context/ShowStyleContext.ts @@ -1,9 +1,8 @@ import { 
IOutputLayer, IShowStyleContext, ISourceLayer } from '@sofie-automation/blueprints-integration' import { ReadonlyDeep } from 'type-fest' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { ProcessedStudioConfig, ProcessedShowStyleConfig } from '../config' import { getShowStyleConfigRef } from '../configRefs' -import { ProcessedShowStyleCompound } from '../../jobs' +import { JobStudio, ProcessedShowStyleCompound } from '../../jobs' import { ContextInfo } from './CommonContext' import { StudioContext } from './StudioContext' @@ -12,7 +11,7 @@ import { StudioContext } from './StudioContext' export class ShowStyleContext extends StudioContext implements IShowStyleContext { constructor( contextInfo: ContextInfo, - studio: ReadonlyDeep, + studio: ReadonlyDeep, studioBlueprintConfig: ProcessedStudioConfig, public readonly showStyleCompound: ReadonlyDeep, public readonly showStyleBlueprintConfig: ProcessedShowStyleConfig diff --git a/packages/job-worker/src/blueprints/context/StudioContext.ts b/packages/job-worker/src/blueprints/context/StudioContext.ts index f1627c483b9..8d5915d338a 100644 --- a/packages/job-worker/src/blueprints/context/StudioContext.ts +++ b/packages/job-worker/src/blueprints/context/StudioContext.ts @@ -1,21 +1,18 @@ import { IStudioContext, BlueprintMappings } from '@sofie-automation/blueprints-integration' import { ReadonlyDeep } from 'type-fest' -import { DBStudio, MappingsExt } from '@sofie-automation/corelib/dist/dataModel/Studio' import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ProcessedStudioConfig } from '../config' import { getStudioConfigRef } from '../configRefs' -import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { CommonContext, ContextInfo } from './CommonContext' +import { JobStudio } from '../../jobs' /** Studio */ export class 
StudioContext extends CommonContext implements IStudioContext { - #processedMappings: ReadonlyDeep | undefined - constructor( contextInfo: ContextInfo, - public readonly studio: ReadonlyDeep, + public readonly studio: ReadonlyDeep, public readonly studioBlueprintConfig: ProcessedStudioConfig ) { super(contextInfo) @@ -36,10 +33,8 @@ export class StudioContext extends CommonContext implements IStudioContext { return getStudioConfigRef(this.studio._id, configKey) } getStudioMappings(): Readonly { - if (!this.#processedMappings) { - this.#processedMappings = applyAndValidateOverrides(this.studio.mappingsWithOverrides).obj - } + const mappings = this.studio.mappings // @ts-expect-error ProtectedString deviceId not compatible with string - return this.#processedMappings + return mappings } } diff --git a/packages/job-worker/src/blueprints/context/StudioUserContext.ts b/packages/job-worker/src/blueprints/context/StudioUserContext.ts index be2c471dc46..fff5232cd11 100644 --- a/packages/job-worker/src/blueprints/context/StudioUserContext.ts +++ b/packages/job-worker/src/blueprints/context/StudioUserContext.ts @@ -1,17 +1,17 @@ import { IStudioUserContext, NoteSeverity } from '@sofie-automation/blueprints-integration' import { ReadonlyDeep } from 'type-fest' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { ProcessedStudioConfig } from '../config' import { INoteBase } from '@sofie-automation/corelib/dist/dataModel/Notes' import { ContextInfo } from './CommonContext' import { StudioContext } from './StudioContext' +import { JobStudio } from '../../jobs' export class StudioUserContext extends StudioContext implements IStudioUserContext { public readonly notes: INoteBase[] = [] constructor( contextInfo: ContextInfo, - studio: ReadonlyDeep, + studio: ReadonlyDeep, studioBlueprintConfig: ProcessedStudioConfig ) { super(contextInfo, studio, studioBlueprintConfig) diff --git 
a/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts b/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts index 1515ae71ecc..efef608cc8d 100644 --- a/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts +++ b/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts @@ -25,8 +25,7 @@ import { convertPartialBlueprintMutablePartToCore, } from './lib' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { JobContext, ProcessedShowStyleCompound } from '../../jobs' +import { JobContext, JobStudio, ProcessedShowStyleCompound } from '../../jobs' import { PieceTimelineObjectsBlob, serializePieceTimelineObjectsBlob, @@ -44,7 +43,7 @@ export class SyncIngestUpdateToPartInstanceContext constructor( private readonly _context: JobContext, contextInfo: ContextInfo, - studio: ReadonlyDeep, + studio: ReadonlyDeep, showStyleCompound: ReadonlyDeep, rundown: ReadonlyDeep, partInstance: PlayoutPartInstanceModel, diff --git a/packages/job-worker/src/blueprints/context/adlibActions.ts b/packages/job-worker/src/blueprints/context/adlibActions.ts index 2ff5e499792..8ce4882fd81 100644 --- a/packages/job-worker/src/blueprints/context/adlibActions.ts +++ b/packages/job-worker/src/blueprints/context/adlibActions.ts @@ -31,7 +31,6 @@ import { executePeripheralDeviceAction, listPlayoutDevices } from '../../periphe import { ActionPartChange, PartAndPieceInstanceActionService } from './services/PartAndPieceInstanceActionService' import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' import { setNextPartFromPart } from '../../playout/setNext' -import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' export class DatastoreActionExecutionContext extends ShowStyleUserContext 
@@ -201,7 +200,8 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct } async listRouteSets(): Promise> { - return applyAndValidateOverrides(this._context.studio.routeSetsWithOverrides).obj + // Discard ReadonlyDeep wrapper + return this._context.studio.routeSets as Record } async switchRouteSet(routeSetId: string, state: boolean | 'toggle'): Promise { diff --git a/packages/job-worker/src/ingest/__tests__/ingest.test.ts b/packages/job-worker/src/ingest/__tests__/ingest.test.ts index e46a0f827bf..a9d96c0c97a 100644 --- a/packages/job-worker/src/ingest/__tests__/ingest.test.ts +++ b/packages/job-worker/src/ingest/__tests__/ingest.test.ts @@ -47,6 +47,7 @@ import { UserErrorMessage } from '@sofie-automation/corelib/dist/error' import { PlayoutPartInstanceModel } from '../../playout/model/PlayoutPartInstanceModel' import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { wrapGenericIngestJob, wrapGenericIngestJobWithPrecheck } from '../jobWrappers' +import { wrapDefaultObject } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' const handleRemovedRundownWrapped = wrapGenericIngestJob(handleRemovedRundown) const handleUpdatedRundownWrapped = wrapGenericIngestJob(handleUpdatedRundown) @@ -121,11 +122,11 @@ describe('Test ingest actions for rundowns and segments', () => { const showStyleCompound = await setupMockShowStyleCompound(context) context.setStudio({ - ...context.studio, - settings: { + ...context.rawStudio, + settingsWithOverrides: wrapDefaultObject({ ...context.studio.settings, minimumTakeSpan: 0, - }, + }), supportedShowStyleBase: [showStyleCompound._id], }) diff --git a/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts b/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts index 2c6e7d8ce4d..cb63251f090 100644 --- a/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts +++ 
b/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts @@ -35,7 +35,7 @@ describe('selectShowStyleVariant', () => { const context = setupDefaultJobEnvironment() const showStyleCompound = await setupMockShowStyleCompound(context) context.setStudio({ - ...context.studio, + ...context.rawStudio, supportedShowStyleBase: [showStyleCompound._id], }) @@ -57,7 +57,7 @@ describe('selectShowStyleVariant', () => { const context = setupDefaultJobEnvironment() const showStyleCompound = await setupMockShowStyleCompound(context) context.setStudio({ - ...context.studio, + ...context.rawStudio, supportedShowStyleBase: [], }) @@ -76,7 +76,7 @@ describe('selectShowStyleVariant', () => { const context = setupDefaultJobEnvironment() const showStyleCompound = await setupMockShowStyleCompound(context) context.setStudio({ - ...context.studio, + ...context.rawStudio, supportedShowStyleBase: [showStyleCompound._id], }) @@ -118,7 +118,7 @@ describe('selectShowStyleVariant', () => { const showStyleCompoundVariant2 = await setupMockShowStyleVariant(context, showStyleCompound._id) const showStyleCompound2 = await setupMockShowStyleCompound(context) context.setStudio({ - ...context.studio, + ...context.rawStudio, supportedShowStyleBase: [showStyleCompound._id, showStyleCompound2._id], }) @@ -153,7 +153,7 @@ describe('selectShowStyleVariant', () => { test('no show style bases', async () => { const context = setupDefaultJobEnvironment() context.setStudio({ - ...context.studio, + ...context.rawStudio, supportedShowStyleBase: [protectString('fakeId')], }) @@ -176,7 +176,7 @@ describe('selectShowStyleVariant', () => { const context = setupDefaultJobEnvironment() const showStyleCompound = await setupMockShowStyleCompound(context) context.setStudio({ - ...context.studio, + ...context.rawStudio, supportedShowStyleBase: [showStyleCompound._id], }) @@ -201,7 +201,7 @@ describe('selectShowStyleVariant', () => { const context = setupDefaultJobEnvironment() const showStyleCompound = await 
setupMockShowStyleCompound(context) context.setStudio({ - ...context.studio, + ...context.rawStudio, supportedShowStyleBase: [showStyleCompound._id], }) @@ -226,7 +226,7 @@ describe('selectShowStyleVariant', () => { const context = setupDefaultJobEnvironment() const showStyleCompound = await setupMockShowStyleCompound(context) context.setStudio({ - ...context.studio, + ...context.rawStudio, supportedShowStyleBase: [showStyleCompound._id], }) @@ -251,7 +251,7 @@ describe('selectShowStyleVariant', () => { const context = setupDefaultJobEnvironment() const showStyleCompound = await setupMockShowStyleCompound(context) context.setStudio({ - ...context.studio, + ...context.rawStudio, supportedShowStyleBase: [showStyleCompound._id], }) diff --git a/packages/job-worker/src/ingest/expectedPackages.ts b/packages/job-worker/src/ingest/expectedPackages.ts index c1d6099a9e1..b94c6498b98 100644 --- a/packages/job-worker/src/ingest/expectedPackages.ts +++ b/packages/job-worker/src/ingest/expectedPackages.ts @@ -41,9 +41,8 @@ import { updateExpectedPlayoutItemsForPartModel, updateExpectedPlayoutItemsForRundownBaseline, } from './expectedPlayoutItems' -import { JobContext } from '../jobs' +import { JobContext, JobStudio } from '../jobs' import { ExpectedPackageForIngestModelBaseline, IngestModel } from './model/IngestModel' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { IngestPartModel } from './model/IngestPartModel' import { clone } from '@sofie-automation/corelib/dist/lib' @@ -160,7 +159,7 @@ export async function updateExpectedPackagesForRundownBaseline( } function generateExpectedPackagesForPiece( - studio: ReadonlyDeep, + studio: ReadonlyDeep, rundownId: RundownId, segmentId: SegmentId, pieces: ReadonlyDeep[], @@ -186,7 +185,7 @@ function generateExpectedPackagesForPiece( return packages } function generateExpectedPackagesForBaselineAdlibPiece( - studio: ReadonlyDeep, + studio: ReadonlyDeep, rundownId: RundownId, pieces: ReadonlyDeep ) { 
@@ -207,7 +206,7 @@ function generateExpectedPackagesForBaselineAdlibPiece( return packages } function generateExpectedPackagesForAdlibAction( - studio: ReadonlyDeep, + studio: ReadonlyDeep, rundownId: RundownId, segmentId: SegmentId, actions: ReadonlyDeep @@ -231,7 +230,7 @@ function generateExpectedPackagesForAdlibAction( return packages } function generateExpectedPackagesForBaselineAdlibAction( - studio: ReadonlyDeep, + studio: ReadonlyDeep, rundownId: RundownId, actions: ReadonlyDeep ) { @@ -251,7 +250,7 @@ function generateExpectedPackagesForBaselineAdlibAction( } return packages } -function generateExpectedPackagesForBucketAdlib(studio: ReadonlyDeep, adlibs: BucketAdLib[]) { +function generateExpectedPackagesForBucketAdlib(studio: ReadonlyDeep, adlibs: BucketAdLib[]) { const packages: ExpectedPackageDBFromBucketAdLib[] = [] for (const adlib of adlibs) { if (adlib.expectedPackages) { @@ -270,7 +269,7 @@ function generateExpectedPackagesForBucketAdlib(studio: ReadonlyDeep, return packages } function generateExpectedPackagesForBucketAdlibAction( - studio: ReadonlyDeep, + studio: ReadonlyDeep, adlibActions: BucketAdLibAction[] ) { const packages: ExpectedPackageDBFromBucketAdLibAction[] = [] @@ -291,7 +290,7 @@ function generateExpectedPackagesForBucketAdlibAction( return packages } function generateExpectedPackageBases( - studio: ReadonlyDeep, + studio: ReadonlyDeep, ownerId: | PieceId | AdLibActionId diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts b/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts index c96a18890d3..d6e675ba1f5 100644 --- a/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts +++ b/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts @@ -87,7 +87,7 @@ describe('Test recieved mos ingest payloads', () => { const showStyleCompound = await setupMockShowStyleCompound(context) context.setStudio({ - ...context.studio, + ...context.rawStudio, supportedShowStyleBase: 
[showStyleCompound._id], }) diff --git a/packages/job-worker/src/jobs/index.ts b/packages/job-worker/src/jobs/index.ts index f8ab90e9f4c..41103a4c23f 100644 --- a/packages/job-worker/src/jobs/index.ts +++ b/packages/job-worker/src/jobs/index.ts @@ -18,9 +18,11 @@ import { PlaylistLock, RundownLock } from './lock' import { BaseModel } from '../modelBase' import { TimelineComplete } from '@sofie-automation/corelib/dist/dataModel/Timeline' import { ProcessedShowStyleBase, ProcessedShowStyleVariant, ProcessedShowStyleCompound } from './showStyle' +import { JobStudio } from './studio' export { ApmSpan } export { ProcessedShowStyleVariant, ProcessedShowStyleBase, ProcessedShowStyleCompound } +export { JobStudio } /** * Context for any job run in the job-worker @@ -104,7 +106,12 @@ export interface StudioCacheContext { /** * The Studio the job belongs to */ - readonly studio: ReadonlyDeep + readonly studio: ReadonlyDeep + + /** + * The Studio the job belongs to + */ + readonly rawStudio: ReadonlyDeep /** * Blueprint for the studio the job belongs to diff --git a/packages/job-worker/src/jobs/studio.ts b/packages/job-worker/src/jobs/studio.ts new file mode 100644 index 00000000000..00a2f878931 --- /dev/null +++ b/packages/job-worker/src/jobs/studio.ts @@ -0,0 +1,58 @@ +import type { + IBlueprintConfig, + StudioRouteSet, + StudioRouteSetExclusivityGroup, +} from '@sofie-automation/blueprints-integration' +import type { DBStudio, IStudioSettings, MappingsExt } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { omit } from '@sofie-automation/corelib/dist/lib' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' + +/** + * A lightly processed version of DBStudio, with any ObjectWithOverrides pre-flattened + */ +export interface JobStudio + extends Omit< + DBStudio, + | 'mappingsWithOverrides' + | 'blueprintConfigWithOverrides' + | 'settingsWithOverrides' + | 'routeSetsWithOverrides' + | 
'routeSetExclusivityGroupsWithOverrides' + | 'packageContainersWithOverrides' + > { + /** Mappings between the physical devices / outputs and logical ones */ + mappings: MappingsExt + + /** Config values are used by the Blueprints */ + blueprintConfig: IBlueprintConfig + + settings: IStudioSettings + + routeSets: Record + routeSetExclusivityGroups: Record + + // /** Contains settings for which Package Containers are present in the studio. + // * (These are used by the Package Manager and the Expected Packages) + // */ + // packageContainers: Record +} + +export function convertStudioToJobStudio(studio: DBStudio): JobStudio { + return { + ...omit( + studio, + 'mappingsWithOverrides', + 'blueprintConfigWithOverrides', + 'settingsWithOverrides', + 'routeSetsWithOverrides', + 'routeSetExclusivityGroupsWithOverrides', + 'packageContainersWithOverrides' + ), + mappings: applyAndValidateOverrides(studio.mappingsWithOverrides).obj, + blueprintConfig: applyAndValidateOverrides(studio.blueprintConfigWithOverrides).obj, + settings: applyAndValidateOverrides(studio.settingsWithOverrides).obj, + routeSets: applyAndValidateOverrides(studio.routeSetsWithOverrides).obj, + routeSetExclusivityGroups: applyAndValidateOverrides(studio.routeSetExclusivityGroupsWithOverrides).obj, + // packageContainers: applyAndValidateOverrides(studio.packageContainersWithOverrides).obj, + } +} diff --git a/packages/job-worker/src/playout/__tests__/playout.test.ts b/packages/job-worker/src/playout/__tests__/playout.test.ts index 01c63b2d175..93ab3ef7535 100644 --- a/packages/job-worker/src/playout/__tests__/playout.test.ts +++ b/packages/job-worker/src/playout/__tests__/playout.test.ts @@ -47,6 +47,7 @@ import { PlayoutChangedType } from '@sofie-automation/shared-lib/dist/peripheral import { ProcessedShowStyleCompound } from '../../jobs' import { handleOnPlayoutPlaybackChanged } from '../timings' import { sleep } from '@sofie-automation/shared-lib/dist/lib/lib' +import { wrapDefaultObject } from 
'@sofie-automation/corelib/dist/settings/objectWithOverrides' // const mockGetCurrentTime = jest.spyOn(lib, 'getCurrentTime') const mockExecutePeripheralDeviceFunction = jest @@ -98,11 +99,11 @@ describe('Playout API', () => { context = setupDefaultJobEnvironment() context.setStudio({ - ...context.studio, - settings: { + ...context.rawStudio, + settingsWithOverrides: wrapDefaultObject({ ...context.studio.settings, minimumTakeSpan: 0, - }, + }), }) // Ignore event jobs diff --git a/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts b/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts index 2d5f84d4be3..387f21b6e8e 100644 --- a/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts +++ b/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts @@ -8,7 +8,8 @@ import { MockJobContext, setupDefaultJobEnvironment } from '../../__mocks__/cont import { PlayoutSegmentModelImpl } from '../model/implementation/PlayoutSegmentModelImpl' import { PlayoutSegmentModel } from '../model/PlayoutSegmentModel' import { selectNextPart } from '../selectNextPart' -import { ForceQuickLoopAutoNext, QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { ForceQuickLoopAutoNext } from '@sofie-automation/shared-lib/dist/core/model/StudioSettings' class MockPart { constructor( diff --git a/packages/job-worker/src/playout/abPlayback/index.ts b/packages/job-worker/src/playout/abPlayback/index.ts index 9ab7534d06b..f3b0c80c8e3 100644 --- a/packages/job-worker/src/playout/abPlayback/index.ts +++ b/packages/job-worker/src/playout/abPlayback/index.ts @@ -17,7 +17,6 @@ import { AbSessionHelper } from './abSessionHelper' import { ShowStyleContext } from '../../blueprints/context' import { logger } from '../../logging' import { ABPlayerDefinition, NoteSeverity } from '@sofie-automation/blueprints-integration' -import { 
applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { abPoolFilterDisabled, findPlayersInRouteSets } from './routeSetDisabling' import type { INotification } from '../../notifications/NotificationsModel' import { generateTranslation } from '@sofie-automation/corelib/dist/lib' @@ -85,7 +84,7 @@ export function applyAbPlaybackForTimeline( const notifications: INotification[] = [] const abConfiguration = blueprint.blueprint.getAbResolverConfiguration(blueprintContext) - const routeSetMembers = findPlayersInRouteSets(applyAndValidateOverrides(context.studio.routeSetsWithOverrides).obj) + const routeSetMembers = findPlayersInRouteSets(context.studio.routeSets) for (const [poolName, players] of Object.entries(abConfiguration.pools)) { // Filter out offline devices diff --git a/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts b/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts index d6be0a4ab65..9f71f1d0acc 100644 --- a/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts +++ b/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts @@ -1,6 +1,7 @@ import type { ABPlayerDefinition } from '@sofie-automation/blueprints-integration' import type { StudioRouteSet } from '@sofie-automation/corelib/dist/dataModel/Studio' import { logger } from '../../logging' +import { ReadonlyDeep } from 'type-fest' /** * Map> @@ -8,9 +9,9 @@ import { logger } from '../../logging' */ type MembersOfRouteSets = Map> -export function findPlayersInRouteSets(routeSets: Record): MembersOfRouteSets { +export function findPlayersInRouteSets(routeSets: ReadonlyDeep>): MembersOfRouteSets { const routeSetEnabledPlayers: MembersOfRouteSets = new Map() - for (const [_key, routeSet] of Object.entries(routeSets)) { + for (const [_key, routeSet] of Object.entries>(routeSets)) { for (const abPlayer of routeSet.abPlayers) { let poolEntry = routeSetEnabledPlayers.get(abPlayer.poolName) if (!poolEntry) { diff --git 
a/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts b/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts index 0256447f515..1a925310bbf 100644 --- a/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts +++ b/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts @@ -51,7 +51,7 @@ describe('Lookahead', () => { } } context.setStudio({ - ...context.studio, + ...context.rawStudio, mappingsWithOverrides: wrapDefaultObject(mappings), }) @@ -222,7 +222,7 @@ describe('Lookahead', () => { // Set really low { - const studio = clone(context.studio) + const studio = clone(context.rawStudio) studio.mappingsWithOverrides.defaults['WHEN_CLEAR'].lookaheadMaxSearchDistance = 0 studio.mappingsWithOverrides.defaults['PRELOAD'].lookaheadMaxSearchDistance = 0 context.setStudio(studio) @@ -236,7 +236,7 @@ describe('Lookahead', () => { // really high getOrderedPartsAfterPlayheadMock.mockClear() { - const studio = clone(context.studio) + const studio = clone(context.rawStudio) studio.mappingsWithOverrides.defaults['WHEN_CLEAR'].lookaheadMaxSearchDistance = -1 studio.mappingsWithOverrides.defaults['PRELOAD'].lookaheadMaxSearchDistance = 2000 context.setStudio(studio) @@ -250,7 +250,7 @@ describe('Lookahead', () => { // unset getOrderedPartsAfterPlayheadMock.mockClear() { - const studio = clone(context.studio) + const studio = clone(context.rawStudio) studio.mappingsWithOverrides.defaults['WHEN_CLEAR'].lookaheadMaxSearchDistance = undefined studio.mappingsWithOverrides.defaults['PRELOAD'].lookaheadMaxSearchDistance = -1 context.setStudio(studio) diff --git a/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts b/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts index 4c3cc76bd80..969506ce1ae 100644 --- a/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts +++ b/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts @@ -11,7 +11,8 @@ import { defaultRundownPlaylist } from 
'../../../__mocks__/defaultCollectionObje import _ = require('underscore') import { wrapPartToTemporaryInstance } from '../../../__mocks__/partinstance' import { wrapDefaultObject } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { ForceQuickLoopAutoNext, QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { ForceQuickLoopAutoNext } from '@sofie-automation/shared-lib/dist/core/model/StudioSettings' describe('getOrderedPartsAfterPlayhead', () => { let context!: MockJobContext @@ -37,7 +38,7 @@ describe('getOrderedPartsAfterPlayhead', () => { } } context.setStudio({ - ...context.studio, + ...context.rawStudio, mappingsWithOverrides: wrapDefaultObject(mappings), }) diff --git a/packages/job-worker/src/playout/lookahead/index.ts b/packages/job-worker/src/playout/lookahead/index.ts index 893d6eb174e..12ac4935ec6 100644 --- a/packages/job-worker/src/playout/lookahead/index.ts +++ b/packages/job-worker/src/playout/lookahead/index.ts @@ -22,7 +22,6 @@ import { LOOKAHEAD_DEFAULT_SEARCH_DISTANCE } from '@sofie-automation/shared-lib/ import { prefixSingleObjectId } from '../lib' import { LookaheadTimelineObject } from './findObjects' import { hasPieceInstanceDefinitelyEnded } from '../timeline/lib' -import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { ReadonlyDeep } from 'type-fest' @@ -65,8 +64,8 @@ export async function getLookeaheadObjects( partInstancesInfo0: SelectedPartInstancesTimelineInfo ): Promise> { const span = context.startSpan('getLookeaheadObjects') - const allMappings = applyAndValidateOverrides(context.studio.mappingsWithOverrides) - const mappingsToConsider = Object.entries(allMappings.obj).filter( + const allMappings = context.studio.mappings + const 
mappingsToConsider = Object.entries(allMappings).filter( ([_id, map]) => map.lookahead !== LookaheadMode.NONE && map.lookahead !== undefined ) if (mappingsToConsider.length === 0) { diff --git a/packages/job-worker/src/playout/model/services/QuickLoopService.ts b/packages/job-worker/src/playout/model/services/QuickLoopService.ts index eb7bb0c6e11..b9d252ca7ad 100644 --- a/packages/job-worker/src/playout/model/services/QuickLoopService.ts +++ b/packages/job-worker/src/playout/model/services/QuickLoopService.ts @@ -1,11 +1,11 @@ import { MarkerPosition, compareMarkerPositions } from '@sofie-automation/corelib/dist/playout/playlist' import { PlayoutModelReadonly } from '../PlayoutModel' import { - ForceQuickLoopAutoNext, QuickLoopMarker, QuickLoopMarkerType, QuickLoopProps, } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { ForceQuickLoopAutoNext } from '@sofie-automation/shared-lib/dist/core/model/StudioSettings' import { ReadonlyObjectDeep } from 'type-fest/source/readonly-deep' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' diff --git a/packages/job-worker/src/playout/selectNextPart.ts b/packages/job-worker/src/playout/selectNextPart.ts index d07abbf4467..48d495d85c7 100644 --- a/packages/job-worker/src/playout/selectNextPart.ts +++ b/packages/job-worker/src/playout/selectNextPart.ts @@ -1,11 +1,8 @@ import { DBPart, isPartPlayable } from '@sofie-automation/corelib/dist/dataModel/Part' import { JobContext } from '../jobs' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' -import { - DBRundownPlaylist, - ForceQuickLoopAutoNext, - QuickLoopMarkerType, -} from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { DBRundownPlaylist, QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { ForceQuickLoopAutoNext } from 
'@sofie-automation/shared-lib/dist/core/model/StudioSettings' import { SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReadonlyDeep } from 'type-fest' import { PlayoutSegmentModel } from './model/PlayoutSegmentModel' diff --git a/packages/job-worker/src/playout/timeline/generate.ts b/packages/job-worker/src/playout/timeline/generate.ts index 8c71749fd68..535fac659eb 100644 --- a/packages/job-worker/src/playout/timeline/generate.ts +++ b/packages/job-worker/src/playout/timeline/generate.ts @@ -1,5 +1,5 @@ import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { JobContext } from '../../jobs' +import { JobContext, JobStudio } from '../../jobs' import { ReadonlyDeep } from 'type-fest' import { BlueprintResultBaseline, @@ -36,7 +36,6 @@ import { WatchedPackagesHelper } from '../../blueprints/context/watchedPackages' import { postProcessStudioBaselineObjects } from '../../blueprints/postProcess' import { updateBaselineExpectedPackagesOnStudio } from '../../ingest/expectedPackages' import { endTrace, sendTrace, startTrace } from '@sofie-automation/corelib/dist/influxdb' -import { StudioLight } from '@sofie-automation/corelib/dist/dataModel/Studio' import { deserializePieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { convertResolvedPieceInstanceToBlueprints } from '../../blueprints/context/lib' import { buildTimelineObjsForRundown, RundownTimelineTimingContext } from './rundown' @@ -54,7 +53,7 @@ function isModelForStudio(model: StudioPlayoutModelBase): model is StudioPlayout } function generateTimelineVersions( - studio: ReadonlyDeep, + studio: ReadonlyDeep, blueprintId: BlueprintId | undefined, blueprintVersion: string ): TimelineCompleteGenerationVersions { diff --git a/packages/job-worker/src/playout/upgrade.ts b/packages/job-worker/src/playout/upgrade.ts index 66fba34cfa1..4e2cf3dece3 100644 --- a/packages/job-worker/src/playout/upgrade.ts +++ 
b/packages/job-worker/src/playout/upgrade.ts @@ -1,6 +1,7 @@ import { BlueprintMapping, BlueprintMappings, + IStudioSettings, JSONBlobParse, StudioRouteBehavior, TSR, @@ -26,6 +27,7 @@ import { compileCoreConfigValues } from '../blueprints/config' import { CommonContext } from '../blueprints/context' import { JobContext } from '../jobs' import { FixUpBlueprintConfigContext } from '@sofie-automation/corelib/dist/fixUpBlueprintConfig/context' +import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/core/constants' /** * Run the Blueprint applyConfig for the studio @@ -41,7 +43,7 @@ export async function handleBlueprintUpgradeForStudio(context: JobContext, _data name: 'applyConfig', identifier: `studio:${context.studioId},blueprint:${blueprint.blueprintId}`, }) - const rawBlueprintConfig = applyAndValidateOverrides(context.studio.blueprintConfigWithOverrides).obj + const rawBlueprintConfig = context.studio.blueprintConfig const result = blueprint.blueprint.applyConfig( blueprintContext, @@ -109,8 +111,15 @@ export async function handleBlueprintUpgradeForStudio(context: JobContext, _data ]) ) + const studioSettings: IStudioSettings = result.studioSettings ?? 
{ + frameRate: 25, + mediaPreviewsUrl: '', + minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + } + await context.directCollections.Studios.update(context.studioId, { $set: { + 'settingsWithOverrides.defaults': studioSettings, 'mappingsWithOverrides.defaults': translateMappings(result.mappings), 'peripheralDeviceSettings.playoutDevices.defaults': playoutDevices, 'peripheralDeviceSettings.ingestDevices.defaults': ingestDevices, @@ -158,7 +167,7 @@ export async function handleBlueprintValidateConfigForStudio( name: 'validateConfig', identifier: `studio:${context.studioId},blueprint:${blueprint.blueprintId}`, }) - const rawBlueprintConfig = applyAndValidateOverrides(context.studio.blueprintConfigWithOverrides).obj + const rawBlueprintConfig = applyAndValidateOverrides(context.rawStudio.blueprintConfigWithOverrides).obj // This clone seems excessive, but without it a DataCloneError is generated when posting the result to the parent const messages = clone(blueprint.blueprint.validateConfig(blueprintContext, rawBlueprintConfig)) @@ -200,7 +209,7 @@ export async function handleBlueprintFixUpConfigForStudio( const blueprintContext = new FixUpBlueprintConfigContext( commonContext, JSONBlobParse(blueprint.blueprint.studioConfigSchema), - context.studio.blueprintConfigWithOverrides + context.rawStudio.blueprintConfigWithOverrides ) blueprint.blueprint.fixUpConfig(blueprintContext) diff --git a/packages/job-worker/src/rundownPlaylists.ts b/packages/job-worker/src/rundownPlaylists.ts index 9306cc458a8..31e8e90ffd8 100644 --- a/packages/job-worker/src/rundownPlaylists.ts +++ b/packages/job-worker/src/rundownPlaylists.ts @@ -1,10 +1,7 @@ import { RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBRundown, Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' -import { - DBRundownPlaylist, - ForceQuickLoopAutoNext, - QuickLoopMarkerType, -} from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { DBRundownPlaylist, 
QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { ForceQuickLoopAutoNext } from '@sofie-automation/shared-lib/dist/core/model/StudioSettings' import { clone, getHash, @@ -27,12 +24,11 @@ import { IBlueprintRundown, NoteSeverity, } from '@sofie-automation/blueprints-integration' -import { JobContext } from './jobs' +import { JobContext, JobStudio } from './jobs' import { logger } from './logging' import { resetRundownPlaylist } from './playout/lib' import { runJobWithPlaylistLock, runWithPlayoutModel } from './playout/lock' import { updateTimeline } from './playout/timeline/generate' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { WrappedStudioBlueprint } from './blueprints/cache' import { StudioUserContext } from './blueprints/context' import { getCurrentTime } from './lib' @@ -329,7 +325,7 @@ export function produceRundownPlaylistInfoFromRundown( function defaultPlaylistForRundown( rundown: ReadonlyDeep, - studio: ReadonlyDeep, + studio: ReadonlyDeep, existingPlaylist?: ReadonlyDeep ): Omit { return { diff --git a/packages/job-worker/src/workers/caches.ts b/packages/job-worker/src/workers/caches.ts index f252a9a0de6..6fee8f1aed5 100644 --- a/packages/job-worker/src/workers/caches.ts +++ b/packages/job-worker/src/workers/caches.ts @@ -15,8 +15,9 @@ import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { clone, deepFreeze } from '@sofie-automation/corelib/dist/lib' import { logger } from '../logging' import deepmerge = require('deepmerge') -import { ProcessedShowStyleBase, ProcessedShowStyleVariant, StudioCacheContext } from '../jobs' +import { JobStudio, ProcessedShowStyleBase, ProcessedShowStyleVariant, StudioCacheContext } from '../jobs' import { StudioCacheContextImpl } from './context/StudioCacheContextImpl' +import { convertStudioToJobStudio } from '../jobs/studio' /** * A Wrapper to maintain a cache and provide a context using the cache when 
appropriate @@ -43,7 +44,7 @@ export class WorkerDataCacheWrapperImpl implements WorkerDataCacheWrapper { * The StudioId the cache is maintained for */ get studioId(): StudioId { - return this.#dataCache.studio._id + return this.#dataCache.rawStudio._id } constructor(collections: IDirectCollections, dataCache: WorkerDataCache) { @@ -99,7 +100,8 @@ export class WorkerDataCacheWrapperImpl implements WorkerDataCacheWrapper { * This is a reusable cache of these properties */ export interface WorkerDataCache { - studio: ReadonlyDeep + rawStudio: ReadonlyDeep + jobStudio: ReadonlyDeep studioBlueprint: ReadonlyDeep studioBlueprintConfig: ProcessedStudioConfig | undefined @@ -133,12 +135,16 @@ export async function loadWorkerDataCache( studioId: StudioId ): Promise { // Load some 'static' data from the db - const studio = deepFreeze(await collections.Studios.findOne(studioId)) - if (!studio) throw new Error('Missing studio') + const dbStudio = await collections.Studios.findOne(studioId) + if (!dbStudio) throw new Error('Missing studio') + const studio = deepFreeze(dbStudio) const studioBlueprint = await loadStudioBlueprintOrPlaceholder(collections, studio) + const jobStudio = deepFreeze(convertStudioToJobStudio(dbStudio)) + return { - studio, + rawStudio: studio, + jobStudio: jobStudio, studioBlueprint, studioBlueprintConfig: undefined, @@ -157,11 +163,12 @@ export async function invalidateWorkerDataCache( if (data.forceAll) { // Clear everything! 
- const newStudio = await collections.Studios.findOne(cache.studio._id) + const newStudio = await collections.Studios.findOne(cache.rawStudio._id) if (!newStudio) throw new Error(`Studio is missing during cache invalidation!`) - cache.studio = deepFreeze(newStudio) + cache.rawStudio = deepFreeze(newStudio) + cache.jobStudio = deepFreeze(convertStudioToJobStudio(newStudio)) - cache.studioBlueprint = await loadStudioBlueprintOrPlaceholder(collections, cache.studio) + cache.studioBlueprint = await loadStudioBlueprintOrPlaceholder(collections, cache.rawStudio) cache.studioBlueprintConfig = undefined cache.showStyleBases.clear() @@ -176,25 +183,26 @@ export async function invalidateWorkerDataCache( if (data.studio) { logger.debug('WorkerDataCache: Reloading studio') - const newStudio = await collections.Studios.findOne(cache.studio._id) + const newStudio = await collections.Studios.findOne(cache.rawStudio._id) if (!newStudio) throw new Error(`Studio is missing during cache invalidation!`) // If studio blueprintId changed, then force it to be reloaded - if (newStudio.blueprintId !== cache.studio.blueprintId) updateStudioBlueprint = true + if (newStudio.blueprintId !== cache.rawStudio.blueprintId) updateStudioBlueprint = true - cache.studio = deepFreeze(newStudio) + cache.rawStudio = deepFreeze(newStudio) + cache.jobStudio = deepFreeze(convertStudioToJobStudio(newStudio)) cache.studioBlueprintConfig = undefined } // Check if studio blueprint was in the changed list - if (!updateStudioBlueprint && cache.studio.blueprintId) { - updateStudioBlueprint = data.blueprints.includes(cache.studio.blueprintId) + if (!updateStudioBlueprint && cache.rawStudio.blueprintId) { + updateStudioBlueprint = data.blueprints.includes(cache.rawStudio.blueprintId) } // Reload studioBlueprint if (updateStudioBlueprint) { logger.debug('WorkerDataCache: Reloading studioBlueprint') - cache.studioBlueprint = await loadStudioBlueprintOrPlaceholder(collections, cache.studio) + cache.studioBlueprint = 
await loadStudioBlueprintOrPlaceholder(collections, cache.rawStudio) cache.studioBlueprintConfig = undefined } @@ -210,7 +218,7 @@ export async function invalidateWorkerDataCache( if (data.studio) { // Ensure showStyleBases & showStyleVariants are all still valid for the studio - const allowedBases = new Set(cache.studio.supportedShowStyleBase) + const allowedBases = new Set(cache.rawStudio.supportedShowStyleBase) for (const id of Array.from(cache.showStyleBases.keys())) { if (!allowedBases.has(id)) { diff --git a/packages/job-worker/src/workers/context/JobContextImpl.ts b/packages/job-worker/src/workers/context/JobContextImpl.ts index 7be35b55f26..8cd15572dce 100644 --- a/packages/job-worker/src/workers/context/JobContextImpl.ts +++ b/packages/job-worker/src/workers/context/JobContextImpl.ts @@ -1,5 +1,5 @@ import { IDirectCollections } from '../../db' -import { JobContext } from '../../jobs' +import { JobContext, JobStudio } from '../../jobs' import { WorkerDataCache } from '../caches' import { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { getIngestQueueName, IngestJobFunc } from '@sofie-automation/corelib/dist/worker/ingest' @@ -41,8 +41,12 @@ export class JobContextImpl extends StudioCacheContextImpl implements JobContext this.studioRouteSetUpdater = new StudioRouteSetUpdater(directCollections, cacheData) } - get studio(): ReadonlyDeep { - return this.studioRouteSetUpdater.studioWithChanges ?? super.studio + get studio(): ReadonlyDeep { + return this.studioRouteSetUpdater.jobStudioWithChanges ?? super.studio + } + + get rawStudio(): ReadonlyDeep { + return this.studioRouteSetUpdater.rawStudioWithChanges ?? 
super.rawStudio } trackCache(cache: BaseModel): void { diff --git a/packages/job-worker/src/workers/context/StudioCacheContextImpl.ts b/packages/job-worker/src/workers/context/StudioCacheContextImpl.ts index dff38b6e883..183a89d01be 100644 --- a/packages/job-worker/src/workers/context/StudioCacheContextImpl.ts +++ b/packages/job-worker/src/workers/context/StudioCacheContextImpl.ts @@ -4,6 +4,7 @@ import { ProcessedShowStyleVariant, ProcessedShowStyleCompound, StudioCacheContext, + JobStudio, } from '../../jobs' import { ReadonlyDeep } from 'type-fest' import { WorkerDataCache } from '../caches' @@ -30,9 +31,14 @@ export class StudioCacheContextImpl implements StudioCacheContext { protected readonly cacheData: WorkerDataCache ) {} - get studio(): ReadonlyDeep { + get studio(): ReadonlyDeep { // This is frozen at the point of populating the cache - return this.cacheData.studio + return this.cacheData.jobStudio + } + + get rawStudio(): ReadonlyDeep { + // This is frozen at the point of populating the cache + return this.cacheData.rawStudio } get studioId(): StudioId { @@ -47,7 +53,9 @@ export class StudioCacheContextImpl implements StudioCacheContext { getStudioBlueprintConfig(): ProcessedStudioConfig { if (!this.cacheData.studioBlueprintConfig) { this.cacheData.studioBlueprintConfig = deepFreeze( - clone(preprocessStudioConfig(this.cacheData.studio, this.cacheData.studioBlueprint.blueprint) ?? null) + clone( + preprocessStudioConfig(this.cacheData.jobStudio, this.cacheData.studioBlueprint.blueprint) ?? 
null + ) ) } @@ -59,7 +67,7 @@ export class StudioCacheContextImpl implements StudioCacheContext { const loadedDocs: Array> = [] // Figure out what is already cached, and what needs loading - for (const id of this.cacheData.studio.supportedShowStyleBase) { + for (const id of this.cacheData.jobStudio.supportedShowStyleBase) { const doc = this.cacheData.showStyleBases.get(id) if (doc === undefined) { docsToLoad.push(id) @@ -95,7 +103,7 @@ export class StudioCacheContextImpl implements StudioCacheContext { async getShowStyleBase(id: ShowStyleBaseId): Promise> { // Check if allowed - if (!this.cacheData.studio.supportedShowStyleBase.includes(id)) { + if (!this.cacheData.jobStudio.supportedShowStyleBase.includes(id)) { throw new Error(`ShowStyleBase "${id}" is not allowed in studio`) } @@ -123,7 +131,7 @@ export class StudioCacheContextImpl implements StudioCacheContext { async getShowStyleVariants(id: ShowStyleBaseId): Promise>> { // Check if allowed - if (!this.cacheData.studio.supportedShowStyleBase.includes(id)) { + if (!this.cacheData.jobStudio.supportedShowStyleBase.includes(id)) { throw new Error(`ShowStyleBase "${id}" is not allowed in studio`) } @@ -172,7 +180,7 @@ export class StudioCacheContextImpl implements StudioCacheContext { const doc0 = await this.directCollections.ShowStyleVariants.findOne(id) // Check allowed - if (doc0 && !this.cacheData.studio.supportedShowStyleBase.includes(doc0.showStyleBaseId)) { + if (doc0 && !this.cacheData.jobStudio.supportedShowStyleBase.includes(doc0.showStyleBaseId)) { throw new Error(`ShowStyleVariant "${id}" is not allowed in studio`) } @@ -187,7 +195,7 @@ export class StudioCacheContextImpl implements StudioCacheContext { if (doc) { // Check allowed - if (!this.cacheData.studio.supportedShowStyleBase.includes(doc.showStyleBaseId)) { + if (!this.cacheData.jobStudio.supportedShowStyleBase.includes(doc.showStyleBaseId)) { throw new Error(`ShowStyleVariant "${id}" is not allowed in studio`) } diff --git 
a/packages/job-worker/src/workers/context/StudioRouteSetUpdater.ts b/packages/job-worker/src/workers/context/StudioRouteSetUpdater.ts index cea5c9e53b5..9de2f486f4a 100644 --- a/packages/job-worker/src/workers/context/StudioRouteSetUpdater.ts +++ b/packages/job-worker/src/workers/context/StudioRouteSetUpdater.ts @@ -10,26 +10,39 @@ import { logger } from '../../logging' import type { ReadonlyDeep } from 'type-fest' import type { WorkerDataCache } from '../caches' import type { IDirectCollections } from '../../db' +import { JobStudio } from '../../jobs' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' export class StudioRouteSetUpdater { readonly #directCollections: Readonly - readonly #cacheData: Pick + readonly #cacheData: Pick - constructor(directCollections: Readonly, cacheData: Pick) { + constructor( + directCollections: Readonly, + cacheData: Pick + ) { this.#directCollections = directCollections this.#cacheData = cacheData } // Future: this could store a Map, if the context exposed a simplified view of DBStudio - #studioWithRouteSetChanges: ReadonlyDeep | undefined = undefined - - get studioWithChanges(): ReadonlyDeep | undefined { - return this.#studioWithRouteSetChanges + #studioWithRouteSetChanges: + | { + rawStudio: ReadonlyDeep + jobStudio: ReadonlyDeep + } + | undefined = undefined + + get rawStudioWithChanges(): ReadonlyDeep | undefined { + return this.#studioWithRouteSetChanges?.rawStudio + } + get jobStudioWithChanges(): ReadonlyDeep | undefined { + return this.#studioWithRouteSetChanges?.jobStudio } setRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): boolean { - const currentStudio = this.#studioWithRouteSetChanges ?? this.#cacheData.studio - const currentRouteSets = getAllCurrentItemsFromOverrides(currentStudio.routeSetsWithOverrides, null) + const currentStudios = this.#studioWithRouteSetChanges ?? 
this.#cacheData + const currentRouteSets = getAllCurrentItemsFromOverrides(currentStudios.rawStudio.routeSetsWithOverrides, null) const routeSet = currentRouteSets.find((routeSet) => routeSet.id === routeSetId) if (!routeSet) throw new Error(`RouteSet "${routeSetId}" not found!`) @@ -41,10 +54,10 @@ export class StudioRouteSetUpdater { if (routeSet.computed.behavior === StudioRouteBehavior.ACTIVATE_ONLY && !isActive) throw new Error(`RouteSet "${routeSet.id}" is ACTIVATE_ONLY`) - const overrideHelper = new OverrideOpHelperImpl(null, currentStudio.routeSetsWithOverrides) + const overrideHelper = new OverrideOpHelperImpl(null, currentStudios.rawStudio.routeSetsWithOverrides) // Update the pending changes - logger.debug(`switchRouteSet "${this.#cacheData.studio._id}" "${routeSet.id}"=${isActive}`) + logger.debug(`switchRouteSet "${this.#cacheData.rawStudio._id}" "${routeSet.id}"=${isActive}`) overrideHelper.setItemValue(routeSetId, 'active', isActive) let mayAffectTimeline = couldRoutesetAffectTimelineGeneration(routeSet) @@ -54,7 +67,9 @@ export class StudioRouteSetUpdater { for (const otherRouteSet of Object.values>(currentRouteSets)) { if (otherRouteSet.id === routeSet.id) continue if (otherRouteSet.computed?.exclusivityGroup === routeSet.computed.exclusivityGroup) { - logger.debug(`switchRouteSet Other ID "${this.#cacheData.studio._id}" "${otherRouteSet.id}"=false`) + logger.debug( + `switchRouteSet Other ID "${this.#cacheData.rawStudio._id}" "${otherRouteSet.id}"=false` + ) overrideHelper.setItemValue(otherRouteSet.id, 'active', false) mayAffectTimeline = mayAffectTimeline || couldRoutesetAffectTimelineGeneration(otherRouteSet) @@ -65,13 +80,22 @@ export class StudioRouteSetUpdater { const updatedOverrideOps = overrideHelper.getPendingOps() // Update the cached studio - this.#studioWithRouteSetChanges = Object.freeze({ - ...currentStudio, + const updatedRawStudio: ReadonlyDeep = Object.freeze({ + ...currentStudios.rawStudio, routeSetsWithOverrides: 
Object.freeze({ - ...currentStudio.routeSetsWithOverrides, + ...currentStudios.rawStudio.routeSetsWithOverrides, overrides: deepFreeze(updatedOverrideOps), }), }) + const updatedJobStudio: ReadonlyDeep = Object.freeze({ + ...currentStudios.jobStudio, + routeSets: deepFreeze(applyAndValidateOverrides(updatedRawStudio.routeSetsWithOverrides).obj), + }) + + this.#studioWithRouteSetChanges = { + rawStudio: updatedRawStudio, + jobStudio: updatedJobStudio, + } return mayAffectTimeline } @@ -83,18 +107,19 @@ export class StudioRouteSetUpdater { // This is technically a little bit of a race condition, if someone uses the config pages but no more so than the rest of the system await this.#directCollections.Studios.update( { - _id: this.#cacheData.studio._id, + _id: this.#cacheData.rawStudio._id, }, { $set: { 'routeSetsWithOverrides.overrides': - this.#studioWithRouteSetChanges.routeSetsWithOverrides.overrides, + this.#studioWithRouteSetChanges.rawStudio.routeSetsWithOverrides.overrides, }, } ) // Pretend that the studio as reported by the database has changed, this will be fixed after this job by the ChangeStream firing - this.#cacheData.studio = this.#studioWithRouteSetChanges + this.#cacheData.rawStudio = this.#studioWithRouteSetChanges.rawStudio + this.#cacheData.jobStudio = this.#studioWithRouteSetChanges.jobStudio this.#studioWithRouteSetChanges = undefined } diff --git a/packages/job-worker/src/workers/context/__tests__/StudioRouteSetUpdater.spec.ts b/packages/job-worker/src/workers/context/__tests__/StudioRouteSetUpdater.spec.ts index 77692f4072a..e6ef0fe40df 100644 --- a/packages/job-worker/src/workers/context/__tests__/StudioRouteSetUpdater.spec.ts +++ b/packages/job-worker/src/workers/context/__tests__/StudioRouteSetUpdater.spec.ts @@ -6,11 +6,15 @@ import { wrapDefaultObject } from '@sofie-automation/corelib/dist/settings/objec function setupTest(routeSets: Record) { const context = setupDefaultJobEnvironment() - const mockCache: Pick = { - studio: { - 
...context.studio, + const mockCache: Pick = { + rawStudio: { + ...context.rawStudio, routeSetsWithOverrides: wrapDefaultObject(routeSets), }, + jobStudio: { + ...context.studio, + routeSets: routeSets, + }, } const mockCollection = context.mockCollections.Studios const routeSetHelper = new StudioRouteSetUpdater(context.directCollections, mockCache) @@ -197,11 +201,13 @@ describe('StudioRouteSetUpdater', () => { routeSetHelper.setRouteSetActive('one', true) - expect(routeSetHelper.studioWithChanges).toBeTruthy() + expect(routeSetHelper.rawStudioWithChanges).toBeTruthy() + expect(routeSetHelper.jobStudioWithChanges).toBeTruthy() routeSetHelper.discardRouteSetChanges() - expect(routeSetHelper.studioWithChanges).toBeFalsy() + expect(routeSetHelper.rawStudioWithChanges).toBeFalsy() + expect(routeSetHelper.jobStudioWithChanges).toBeFalsy() expect(mockCollection.operations).toHaveLength(0) await routeSetHelper.saveRouteSetChanges() @@ -211,54 +217,70 @@ describe('StudioRouteSetUpdater', () => { it('save should update mockCache', async () => { const { mockCache, mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) - const studioBefore = mockCache.studio - expect(routeSetHelper.studioWithChanges).toBeFalsy() + const rawStudioBefore = mockCache.rawStudio + const jobStudioBefore = mockCache.jobStudio + expect(routeSetHelper.rawStudioWithChanges).toBeFalsy() + expect(routeSetHelper.jobStudioWithChanges).toBeFalsy() routeSetHelper.setRouteSetActive('one', true) - expect(routeSetHelper.studioWithChanges).toBeTruthy() + expect(routeSetHelper.rawStudioWithChanges).toBeTruthy() + expect(routeSetHelper.jobStudioWithChanges).toBeTruthy() expect(mockCollection.operations).toHaveLength(0) await routeSetHelper.saveRouteSetChanges() expect(mockCollection.operations).toHaveLength(1) // Object should have changed - expect(mockCache.studio).not.toBe(studioBefore) + expect(mockCache.rawStudio).not.toBe(rawStudioBefore) + expect(mockCache.jobStudio).not.toBe(jobStudioBefore) // 
Object should not be equal - expect(mockCache.studio).not.toEqual(studioBefore) - expect(routeSetHelper.studioWithChanges).toBeFalsy() + expect(mockCache.rawStudio).not.toEqual(rawStudioBefore) + expect(mockCache.jobStudio).not.toEqual(jobStudioBefore) + expect(routeSetHelper.rawStudioWithChanges).toBeFalsy() + expect(routeSetHelper.jobStudioWithChanges).toBeFalsy() }) it('no changes should not update mockCache', async () => { const { mockCache, mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) - const studioBefore = mockCache.studio - expect(routeSetHelper.studioWithChanges).toBeFalsy() + const rawStudioBefore = mockCache.rawStudio + const jobStudioBefore = mockCache.jobStudio + expect(routeSetHelper.rawStudioWithChanges).toBeFalsy() + expect(routeSetHelper.jobStudioWithChanges).toBeFalsy() expect(mockCollection.operations).toHaveLength(0) await routeSetHelper.saveRouteSetChanges() expect(mockCollection.operations).toHaveLength(0) - expect(mockCache.studio).toBe(studioBefore) - expect(routeSetHelper.studioWithChanges).toBeFalsy() + expect(mockCache.rawStudio).toBe(rawStudioBefore) + expect(mockCache.jobStudio).toBe(jobStudioBefore) + expect(routeSetHelper.rawStudioWithChanges).toBeFalsy() + expect(routeSetHelper.jobStudioWithChanges).toBeFalsy() }) it('discard changes should not update mockCache', async () => { const { mockCache, mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET) - const studioBefore = mockCache.studio - expect(routeSetHelper.studioWithChanges).toBeFalsy() + const rawStudioBefore = mockCache.rawStudio + const jobStudioBefore = mockCache.jobStudio + expect(routeSetHelper.rawStudioWithChanges).toBeFalsy() + expect(routeSetHelper.jobStudioWithChanges).toBeFalsy() routeSetHelper.setRouteSetActive('one', true) - expect(routeSetHelper.studioWithChanges).toBeTruthy() + expect(routeSetHelper.rawStudioWithChanges).toBeTruthy() + expect(routeSetHelper.jobStudioWithChanges).toBeTruthy() routeSetHelper.discardRouteSetChanges() - 
expect(routeSetHelper.studioWithChanges).toBeFalsy() + expect(routeSetHelper.rawStudioWithChanges).toBeFalsy() + expect(routeSetHelper.jobStudioWithChanges).toBeFalsy() expect(mockCollection.operations).toHaveLength(0) await routeSetHelper.saveRouteSetChanges() expect(mockCollection.operations).toHaveLength(0) - expect(mockCache.studio).toBe(studioBefore) - expect(routeSetHelper.studioWithChanges).toBeFalsy() + expect(mockCache.rawStudio).toBe(rawStudioBefore) + expect(mockCache.jobStudio).toBe(jobStudioBefore) + expect(routeSetHelper.rawStudioWithChanges).toBeFalsy() + expect(routeSetHelper.jobStudioWithChanges).toBeFalsy() }) it('ACTIVATE_ONLY routeset can be activated', async () => { diff --git a/packages/job-worker/src/workers/events/child.ts b/packages/job-worker/src/workers/events/child.ts index 76d95c4c316..d6c0a59da15 100644 --- a/packages/job-worker/src/workers/events/child.ts +++ b/packages/job-worker/src/workers/events/child.ts @@ -98,7 +98,7 @@ export class EventsWorkerChild { const transaction = startTransaction('invalidateCaches', 'worker-studio') if (transaction) { - transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.studio._id)) + transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.jobStudio._id)) } try { @@ -118,7 +118,7 @@ export class EventsWorkerChild { const trace = startTrace('studioWorker' + jobName) const transaction = startTransaction(jobName, 'worker-studio') if (transaction) { - transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.studio._id)) + transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.jobStudio._id)) } const context = new JobContextImpl( diff --git a/packages/job-worker/src/workers/ingest/child.ts b/packages/job-worker/src/workers/ingest/child.ts index 86af4b86343..41ebc90eaf2 100644 --- a/packages/job-worker/src/workers/ingest/child.ts +++ b/packages/job-worker/src/workers/ingest/child.ts @@ -81,7 +81,7 @@ export class 
IngestWorkerChild { const transaction = startTransaction('invalidateCaches', 'worker-studio') if (transaction) { - transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.studio._id)) + transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.jobStudio._id)) } try { @@ -99,7 +99,7 @@ export class IngestWorkerChild { const trace = startTrace('ingestWorker:' + jobName) const transaction = startTransaction(jobName, 'worker-ingest') if (transaction) { - transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.studio._id)) + transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.jobStudio._id)) // transaction.setLabel('rundownId', unprotectString(staticData.rundownId)) } diff --git a/packages/job-worker/src/workers/studio/child.ts b/packages/job-worker/src/workers/studio/child.ts index 40903527c6e..cd468780ee8 100644 --- a/packages/job-worker/src/workers/studio/child.ts +++ b/packages/job-worker/src/workers/studio/child.ts @@ -82,7 +82,7 @@ export class StudioWorkerChild { const transaction = startTransaction('invalidateCaches', 'worker-studio') if (transaction) { - transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.studio._id)) + transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.jobStudio._id)) } try { @@ -100,7 +100,7 @@ export class StudioWorkerChild { const trace = startTrace('studioWorker:' + jobName) const transaction = startTransaction(jobName, 'worker-studio') if (transaction) { - transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.studio._id)) + transaction.setLabel('studioId', unprotectString(this.#staticData.dataCache.jobStudio._id)) } const context = new JobContextImpl( diff --git a/packages/meteor-lib/src/api/migration.ts b/packages/meteor-lib/src/api/migration.ts index 40df1b77a01..80f64856679 100644 --- a/packages/meteor-lib/src/api/migration.ts +++ b/packages/meteor-lib/src/api/migration.ts @@ -1,5 
+1,11 @@ import { MigrationStepInput, MigrationStepInputResult } from '@sofie-automation/blueprints-integration' -import { BlueprintId, ShowStyleBaseId, SnapshotId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + BlueprintId, + CoreSystemId, + ShowStyleBaseId, + SnapshotId, + StudioId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' import { ITranslatableMessage } from '@sofie-automation/corelib/dist/TranslatableMessage' import { BlueprintValidateConfigForStudioResult } from '@sofie-automation/corelib/dist/worker/studio' @@ -64,10 +70,15 @@ export interface NewMigrationAPI { validateConfigForShowStyleBase(showStyleBaseId: ShowStyleBaseId): Promise /** - * Run `applyConfig` on the blueprint for a Studio, and store the results into the db - * @param studioId Id of the Studio + * Run `applyConfig` on the blueprint for a ShowStyleBase, and store the results into the db + * @param showStyleBaseId Id of the ShowStyleBase */ runUpgradeForShowStyleBase(showStyleBaseId: ShowStyleBaseId): Promise + + /** + * Run `applyConfig` on the blueprint for the CoreSystem, and store the results into the db + */ + runUpgradeForCoreSystem(coreSystemId: CoreSystemId): Promise } export enum MigrationAPIMethods { @@ -85,6 +96,7 @@ export enum MigrationAPIMethods { 'ignoreFixupConfigForShowStyleBase' = 'migration.ignoreFixupConfigForShowStyleBase', 'validateConfigForShowStyleBase' = 'migration.validateConfigForShowStyleBase', 'runUpgradeForShowStyleBase' = 'migration.runUpgradeForShowStyleBase', + 'runUpgradeForCoreSystem' = 'migration.runUpgradeForCoreSystem', } export interface GetMigrationStatusResult { diff --git a/packages/meteor-lib/src/api/upgradeStatus.ts b/packages/meteor-lib/src/api/upgradeStatus.ts index d50fdd81e1f..2f6d025d3f9 100644 --- a/packages/meteor-lib/src/api/upgradeStatus.ts +++ b/packages/meteor-lib/src/api/upgradeStatus.ts @@ -1,16 +1,19 @@ import { ITranslatableMessage } from '@sofie-automation/blueprints-integration' -import { 
StudioId, ShowStyleBaseId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { StudioId, ShowStyleBaseId, CoreSystemId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' export type UIBlueprintUpgradeStatusId = ProtectedString<'UIBlueprintUpgradeStatus'> -export type UIBlueprintUpgradeStatus = UIBlueprintUpgradeStatusStudio | UIBlueprintUpgradeStatusShowStyle +export type UIBlueprintUpgradeStatus = + | UIBlueprintUpgradeStatusCoreSystem + | UIBlueprintUpgradeStatusStudio + | UIBlueprintUpgradeStatusShowStyle export interface UIBlueprintUpgradeStatusBase { _id: UIBlueprintUpgradeStatusId - documentType: 'studio' | 'showStyle' - documentId: StudioId | ShowStyleBaseId + documentType: 'coreSystem' | 'studio' | 'showStyle' + documentId: CoreSystemId | StudioId | ShowStyleBaseId name: string @@ -30,6 +33,11 @@ export interface UIBlueprintUpgradeStatusBase { changes: ITranslatableMessage[] } +export interface UIBlueprintUpgradeStatusCoreSystem extends UIBlueprintUpgradeStatusBase { + documentType: 'coreSystem' + documentId: CoreSystemId +} + export interface UIBlueprintUpgradeStatusStudio extends UIBlueprintUpgradeStatusBase { documentType: 'studio' documentId: StudioId diff --git a/packages/meteor-lib/src/collections/CoreSystem.ts b/packages/meteor-lib/src/collections/CoreSystem.ts index 5a8f58641b2..e710091a162 100644 --- a/packages/meteor-lib/src/collections/CoreSystem.ts +++ b/packages/meteor-lib/src/collections/CoreSystem.ts @@ -1,6 +1,9 @@ +import { LastBlueprintConfig } from '@sofie-automation/corelib/dist/dataModel/Blueprint' import { LogLevel } from '../lib' import { CoreSystemId, BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { ObjectWithOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { ICoreSystemSettings } from 
'@sofie-automation/shared-lib/dist/core/model/CoreSystemSettings' export const SYSTEM_ID: CoreSystemId = protectString('core') @@ -54,22 +57,11 @@ export interface ICoreSystem { /** Id of the blueprint used by this system */ blueprintId?: BlueprintId - /** Support info */ - support?: { - message: string - } - systemInfo?: { message: string enabled: boolean } - evaluations?: { - enabled: boolean - heading: string - message: string - } - /** A user-defined name for the installation */ name?: string @@ -95,18 +87,20 @@ export interface ICoreSystem { } enableMonitorBlockedThread?: boolean - /** Cron jobs running nightly */ - cron?: { - casparCGRestart?: { - enabled: boolean - } - storeRundownSnapshots?: { - enabled: boolean - rundownNames?: string[] - } - } + settingsWithOverrides: ObjectWithOverrides logo?: SofieLogo + + /** Details on the last blueprint used to generate the defaults values for this + * Note: This doesn't currently have any 'config' which it relates to. + * The name is to be consistent with studio/showstyle, and in preparation for their being config/configpresets used here + */ + lastBlueprintConfig: LastBlueprintConfig | undefined + + /** These fields are to have type consistency with the full config driven upgrades flow, but we don't use them yet */ + blueprintConfigPresetId?: undefined + lastBlueprintFixUpHash?: undefined + blueprintConfigWithOverrides?: undefined } /** In the beginning, there was the database, and the database was with Sofie, and the database was Sofie. 
diff --git a/packages/shared-lib/src/core/model/CoreSystemSettings.ts b/packages/shared-lib/src/core/model/CoreSystemSettings.ts new file mode 100644 index 00000000000..e5392915a58 --- /dev/null +++ b/packages/shared-lib/src/core/model/CoreSystemSettings.ts @@ -0,0 +1,23 @@ +export interface ICoreSystemSettings { + /** Cron jobs running nightly */ + cron: { + casparCGRestart: { + enabled: boolean + } + storeRundownSnapshots?: { + enabled: boolean + rundownNames?: string[] + } + } + + /** Support info */ + support: { + message: string + } + + evaluationsMessage: { + enabled: boolean + heading: string + message: string + } +} diff --git a/packages/shared-lib/src/core/model/StudioSettings.ts b/packages/shared-lib/src/core/model/StudioSettings.ts new file mode 100644 index 00000000000..45f88be0a42 --- /dev/null +++ b/packages/shared-lib/src/core/model/StudioSettings.ts @@ -0,0 +1,66 @@ +export enum ForceQuickLoopAutoNext { + /** Parts will auto-next only when explicitly set by the NRCS/blueprints */ + DISABLED = 'disabled', + /** Parts will auto-next when the expected duration is set and within range */ + ENABLED_WHEN_VALID_DURATION = 'enabled_when_valid_duration', + /** All parts will auto-next. 
If expected duration is undefined or low, the default display duration will be used */ + ENABLED_FORCING_MIN_DURATION = 'enabled_forcing_min_duration', +} + +export interface IStudioSettings { + /** The framerate (frames per second) used to convert internal timing information (in milliseconds) + * into timecodes and timecode-like strings and interpret timecode user input + * Default: 25 + */ + frameRate: number + + /** URL to endpoint where media preview are exposed */ + mediaPreviewsUrl: string // (former media_previews_url in config) + /** URLs for slack webhook to send evaluations */ + slackEvaluationUrls?: string // (former slack_evaluation in config) + + /** Media Resolutions supported by the studio for media playback */ + supportedMediaFormats?: string // (former mediaResolutions in config) + /** Audio Stream Formats supported by the studio for media playback */ + supportedAudioStreams?: string // (former audioStreams in config) + + /** Should the play from anywhere feature be enabled in this studio */ + enablePlayFromAnywhere?: boolean + + /** + * If set, forces the multi-playout-gateway mode (aka set "now"-time right away) + * for single playout-gateways setups + */ + forceMultiGatewayMode?: boolean + + /** How much extra delay to add to the Now-time (used for the "multi-playout-gateway" feature). + * A higher value adds delays in playout, but reduces the risk of missed frames. */ + multiGatewayNowSafeLatency?: number + + /** Allow resets while a rundown is on-air */ + allowRundownResetOnAir?: boolean + + /** Preserve unsynced segments position in the rundown, relative to the other segments */ + preserveOrphanedSegmentPositionInRundown?: boolean + + /** + * The minimum amount of time, in milliseconds, that must pass after a take before another take may be performed. 
+ * Default: 1000 + */ + minimumTakeSpan: number + + /** Whether to allow adlib testing mode, before a Part is playing in a Playlist */ + allowAdlibTestingSegment?: boolean + + /** Should QuickLoop context menu options be available to the users. It does not affect Playlist loop enabled by the NRCS. */ + enableQuickLoop?: boolean + + /** If and how to force auto-nexting in a looping Playlist */ + forceQuickLoopAutoNext?: ForceQuickLoopAutoNext + + /** + * The duration to apply on too short Parts Within QuickLoop when ForceQuickLoopAutoNext.ENABLED_FORCING_MIN_DURATION is selected + * Default: 3000 + */ + fallbackPartDuration?: number +} diff --git a/packages/webui/public/images/sofie-logo.svg b/packages/webui/public/images/sofie-logo-default.svg similarity index 100% rename from packages/webui/public/images/sofie-logo.svg rename to packages/webui/public/images/sofie-logo-default.svg diff --git a/packages/webui/src/__mocks__/defaultCollectionObjects.ts b/packages/webui/src/__mocks__/defaultCollectionObjects.ts index af06dffe311..609f774ae79 100644 --- a/packages/webui/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/webui/src/__mocks__/defaultCollectionObjects.ts @@ -101,11 +101,11 @@ export function defaultStudio(_id: StudioId): DBStudio { mappingsWithOverrides: wrapDefaultObject({}), supportedShowStyleBase: [], blueprintConfigWithOverrides: wrapDefaultObject({}), - settings: { + settingsWithOverrides: wrapDefaultObject({ frameRate: 25, mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, - }, + }), _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), diff --git a/packages/webui/src/__mocks__/helpers/database.ts b/packages/webui/src/__mocks__/helpers/database.ts index fbc094fa42a..e5d1b622741 100644 --- a/packages/webui/src/__mocks__/helpers/database.ts +++ b/packages/webui/src/__mocks__/helpers/database.ts @@ -72,6 +72,25 @@ export async function 
setupMockCore(doc?: Partial): Promise { hint?: string item: WrappedOverridableItemNormal itemKey: keyof ReadonlyDeep - opPrefix: string overrideHelper: OverrideOpHelperForItemContents showClearButton?: boolean @@ -32,7 +31,6 @@ export function LabelAndOverrides({ hint, item, itemKey, - opPrefix, overrideHelper, showClearButton, formatDefaultValue, @@ -40,16 +38,16 @@ export function LabelAndOverrides({ const { t } = useTranslation() const clearOverride = useCallback(() => { - overrideHelper().clearItemOverrides(opPrefix, String(itemKey)).commit() - }, [overrideHelper, opPrefix, itemKey]) + overrideHelper().clearItemOverrides(item.id, String(itemKey)).commit() + }, [overrideHelper, item.id, itemKey]) const setValue = useCallback( (newValue: any) => { - overrideHelper().setItemValue(opPrefix, String(itemKey), newValue).commit() + overrideHelper().setItemValue(item.id, String(itemKey), newValue).commit() }, - [overrideHelper, opPrefix, itemKey] + [overrideHelper, item.id, itemKey] ) - const isOverridden = hasOpWithPath(item.overrideOps, opPrefix, String(itemKey)) + const isOverridden = hasOpWithPath(item.overrideOps, item.id, String(itemKey)) let displayValue: JSX.Element | string | null = '""' if (item.defaults) { diff --git a/packages/webui/src/client/lib/Components/MultiLineTextInput.tsx b/packages/webui/src/client/lib/Components/MultiLineTextInput.tsx index 7a371661c6b..b6aa6a0983b 100644 --- a/packages/webui/src/client/lib/Components/MultiLineTextInput.tsx +++ b/packages/webui/src/client/lib/Components/MultiLineTextInput.tsx @@ -1,4 +1,4 @@ -import React, { useCallback, useState } from 'react' +import React, { useCallback, useMemo, useState } from 'react' import ClassNames from 'classnames' export function splitValueIntoLines(v: string | undefined): string[] { @@ -85,3 +85,19 @@ export function MultiLineTextInputControl({ /> ) } + +interface ICombinedMultiLineTextInputControlProps + extends Omit { + value: string + handleUpdate: (value: string) => void +} +export 
function CombinedMultiLineTextInputControl({ + value, + handleUpdate, + ...props +}: Readonly): JSX.Element { + const valueArray = useMemo(() => splitValueIntoLines(value), [value]) + const handleUpdateArray = useCallback((value: string[]) => handleUpdate(joinLines(value)), [handleUpdate]) + + return +} diff --git a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts index 262fe8922c6..89c4b34f05b 100644 --- a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts +++ b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts @@ -1,8 +1,5 @@ -import { - DBRundownPlaylist, - ForceQuickLoopAutoNext, - QuickLoopMarkerType, -} from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { DBRundownPlaylist, QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { ForceQuickLoopAutoNext } from '@sofie-automation/shared-lib/dist/core/model/StudioSettings' import { PartInstance, wrapPartToTemporaryInstance } from '@sofie-automation/meteor-lib/dist/collections/PartInstances' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' diff --git a/packages/webui/src/client/lib/forms/SchemaFormWithOverrides.tsx b/packages/webui/src/client/lib/forms/SchemaFormWithOverrides.tsx index bcd00e9afbb..2e9779a076f 100644 --- a/packages/webui/src/client/lib/forms/SchemaFormWithOverrides.tsx +++ b/packages/webui/src/client/lib/forms/SchemaFormWithOverrides.tsx @@ -45,7 +45,6 @@ interface FormComponentProps { item: WrappedOverridableItemNormal overrideHelper: OverrideOpHelperForItemContents itemKey: string - opPrefix: string /** Whether a clear button should be showed for fields not marked as "required" */ showClearButton: boolean @@ -68,7 +67,6 @@ function useChildPropsForFormComponent(props: Readonly CoreSystem.findOne(), []) + const coreSystemSettings = 
useTracker(() => { + const core = CoreSystem.findOne(SYSTEM_ID, { projection: { settingsWithOverrides: 1 } }) + return core && applyAndValidateOverrides(core.settingsWithOverrides).obj + }, []) - const message = coreSystem?.evaluations?.enabled ? coreSystem.evaluations : undefined + const message = coreSystemSettings?.evaluationsMessage?.enabled ? coreSystemSettings.evaluationsMessage : undefined if (!message) return null return ( diff --git a/packages/webui/src/client/ui/Settings/ShowStyle/BlueprintConfiguration/index.tsx b/packages/webui/src/client/ui/Settings/ShowStyle/BlueprintConfiguration/index.tsx index ef3e341451a..9a0fae510e5 100644 --- a/packages/webui/src/client/ui/Settings/ShowStyle/BlueprintConfiguration/index.tsx +++ b/packages/webui/src/client/ui/Settings/ShowStyle/BlueprintConfiguration/index.tsx @@ -12,6 +12,7 @@ import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { useSubscription, useTracker } from '../../../../lib/ReactMeteorData/ReactMeteorData' import { UIBlueprintUpgradeStatuses } from '../../../Collections' import { getUpgradeStatusMessage, UpgradeStatusButtons } from '../../Upgrades/Components' +import { UIBlueprintUpgradeStatusShowStyle } from '@sofie-automation/meteor-lib/dist/api/upgradeStatus' interface ShowStyleBaseBlueprintConfigurationSettingsProps { showStyleBase: DBShowStyleBase @@ -33,7 +34,7 @@ export function ShowStyleBaseBlueprintConfigurationSettings( UIBlueprintUpgradeStatuses.findOne({ documentId: props.showStyleBase._id, documentType: 'showStyle', - }), + }) as UIBlueprintUpgradeStatusShowStyle | undefined, [props.showStyleBase._id] ) const statusMessage = isStatusReady && status ? getUpgradeStatusMessage(t, status) ?? 
t('OK') : t('Loading...') diff --git a/packages/webui/src/client/ui/Settings/ShowStyle/OutputLayer.tsx b/packages/webui/src/client/ui/Settings/ShowStyle/OutputLayer.tsx index c0c331ab7dd..ad50735d12d 100644 --- a/packages/webui/src/client/ui/Settings/ShowStyle/OutputLayer.tsx +++ b/packages/webui/src/client/ui/Settings/ShowStyle/OutputLayer.tsx @@ -270,13 +270,7 @@ function OutputLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }:
- + {(value, handleUpdate) => ( {(value, handleUpdate) => } @@ -309,7 +302,6 @@ function OutputLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Display Rank')} item={item} itemKey={'_rank'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -325,7 +317,6 @@ function OutputLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Is collapsed by default')} item={item} itemKey={'isDefaultCollapsed'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -334,7 +325,6 @@ function OutputLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Is flattened')} item={item} itemKey={'isFlattened'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } diff --git a/packages/webui/src/client/ui/Settings/ShowStyle/SourceLayer.tsx b/packages/webui/src/client/ui/Settings/ShowStyle/SourceLayer.tsx index a7405ffe7ad..9b31c7bac99 100644 --- a/packages/webui/src/client/ui/Settings/ShowStyle/SourceLayer.tsx +++ b/packages/webui/src/client/ui/Settings/ShowStyle/SourceLayer.tsx @@ -295,13 +295,7 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }:
- + {(value, handleUpdate) => ( {(value, handleUpdate) => ( @@ -341,7 +334,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Source Type')} item={item} itemKey={'type'} - opPrefix={item.id} overrideHelper={overrideHelper} options={getDropdownInputOptions(SourceLayerType)} > @@ -358,7 +350,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Is a Live Remote Input')} item={item} itemKey={'isRemoteInput'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -367,7 +358,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Is a Guest Input')} item={item} itemKey={'isGuestInput'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -376,7 +366,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Is hidden')} item={item} itemKey={'isHidden'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -385,7 +374,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Display Rank')} item={item} itemKey={'_rank'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -401,7 +389,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Treat as Main content')} item={item} itemKey={'onPresenterScreen'} - opPrefix={item.id} overrideHelper={overrideHelper} hint="When set, Pieces on this Source Layer will be used to display summaries, thumbnails etc for the Part in GUIs. 
" > @@ -411,7 +398,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Display in a column in List View')} item={item} itemKey={'onListViewColumn'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -420,7 +406,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Display AdLibs in a column in List View')} item={item} itemKey={'onListViewAdLibColumn'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -429,7 +414,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Pieces on this layer can be cleared')} item={item} itemKey={'isClearable'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -438,7 +422,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Pieces on this layer are sticky')} item={item} itemKey={'isSticky'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -447,7 +430,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Only Pieces present in rundown are sticky')} item={item} itemKey={'stickyOriginalOnly'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -456,7 +438,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('Allow disabling of Pieces')} item={item} itemKey={'allowDisable'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -465,7 +446,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: label={t('AdLibs on this layer can be queued')} item={item} itemKey={'isQueueable'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -474,7 +454,6 @@ function SourceLayerEntry({ item, isExpanded, toggleExpanded, overrideHelper }: 
label={t('Exclusivity group')} item={item} itemKey={'exclusiveGroup'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( diff --git a/packages/webui/src/client/ui/Settings/Studio/BlueprintConfiguration/index.tsx b/packages/webui/src/client/ui/Settings/Studio/BlueprintConfiguration/index.tsx index 9358e540688..78d28da1a07 100644 --- a/packages/webui/src/client/ui/Settings/Studio/BlueprintConfiguration/index.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/BlueprintConfiguration/index.tsx @@ -15,6 +15,7 @@ import { SelectBlueprint } from './SelectBlueprint' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { UIBlueprintUpgradeStatuses } from '../../../Collections' import { getUpgradeStatusMessage, UpgradeStatusButtons } from '../../Upgrades/Components' +import { UIBlueprintUpgradeStatusStudio } from '@sofie-automation/meteor-lib/dist/api/upgradeStatus' interface StudioBlueprintConfigurationSettingsProps { studio: DBStudio @@ -31,7 +32,7 @@ export function StudioBlueprintConfigurationSettings( UIBlueprintUpgradeStatuses.findOne({ documentId: props.studio._id, documentType: 'studio', - }), + }) as UIBlueprintUpgradeStatusStudio | undefined, [props.studio._id] ) const statusMessage = isStatusReady && status ? getUpgradeStatusMessage(t, status) ?? 
t('OK') : t('Loading...') diff --git a/packages/webui/src/client/ui/Settings/Studio/Devices/GenericSubDevices.tsx b/packages/webui/src/client/ui/Settings/Studio/Devices/GenericSubDevices.tsx index b3967bf83e3..ae9b79ae302 100644 --- a/packages/webui/src/client/ui/Settings/Studio/Devices/GenericSubDevices.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/Devices/GenericSubDevices.tsx @@ -285,7 +285,6 @@ function SubDeviceEditRow({ label={t('Peripheral Device ID')} item={item} overrideHelper={overrideHelper} - opPrefix={item.id} itemKey={'peripheralDeviceId'} options={peripheralDeviceOptions} > @@ -379,7 +378,6 @@ function SubDeviceEditForm({ peripheralDevice, item, overrideHelper }: Readonly< label={t('Device Type')} item={item} overrideHelper={overrideHelper} - opPrefix={item.id} itemKey={'options.type'} options={subdeviceTypeOptions} > diff --git a/packages/webui/src/client/ui/Settings/Studio/Generic.tsx b/packages/webui/src/client/ui/Settings/Studio/Generic.tsx index 011672ff341..485c197da95 100644 --- a/packages/webui/src/client/ui/Settings/Studio/Generic.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/Generic.tsx @@ -1,9 +1,8 @@ import * as React from 'react' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { Translated } from '../../../lib/ReactMeteorData/react-meteor-data' +import { DBStudio, IStudioSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' import { FontAwesomeIcon } from '@fortawesome/react-fontawesome' import { faExclamationTriangle } from '@fortawesome/free-solid-svg-icons' -import { withTranslation } from 'react-i18next' +import { useTranslation } from 'react-i18next' import { EditAttribute } from '../../../lib/EditAttribute' import { StudioBaselineStatus } from './Baseline' import { ShowStyleBaseId } from '@sofie-automation/corelib/dist/dataModel/Ids' @@ -11,9 +10,27 @@ import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowSt import { Studios } from 
'../../../collections' import { useHistory } from 'react-router-dom' import { MeteorCall } from '../../../lib/meteorApi' -import { LabelActual } from '../../../lib/Components/LabelAndOverrides' +import { + LabelActual, + LabelAndOverrides, + LabelAndOverridesForCheckbox, + LabelAndOverridesForDropdown, + LabelAndOverridesForInt, +} from '../../../lib/Components/LabelAndOverrides' import { catchError } from '../../../lib/lib' -import { ForceQuickLoopAutoNext } from '@sofie-automation/corelib/src/dataModel/RundownPlaylist' +import { ForceQuickLoopAutoNext } from '@sofie-automation/shared-lib/dist/core/model/StudioSettings' +import { + applyAndValidateOverrides, + ObjectWithOverrides, + SomeObjectOverrideOp, +} from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { useOverrideOpHelper, WrappedOverridableItemNormal } from '../util/OverrideOpHelper' +import { IntInputControl } from '../../../lib/Components/IntInput' +import { literal } from '@sofie-automation/corelib/dist/lib' +import { useMemo } from 'react' +import { CheckboxControl } from '../../../lib/Components/Checkbox' +import { TextInputControl } from '../../../lib/Components/TextInput' +import { DropdownInputControl, DropdownInputOption } from '../../../lib/Components/DropdownInput' interface IStudioGenericPropertiesProps { studio: DBStudio @@ -23,285 +40,76 @@ interface IStudioGenericPropertiesProps { showStyleBase: DBShowStyleBase }> } -interface IStudioGenericPropertiesState {} -export const StudioGenericProperties = withTranslation()( - class StudioGenericProperties extends React.Component< - Translated, - IStudioGenericPropertiesState - > { - constructor(props: Translated) { - super(props) - } - renderShowStyleEditButtons() { - const buttons: JSX.Element[] = [] - if (this.props.studio) { - for (const showStyleBaseId of this.props.studio.supportedShowStyleBase) { - const showStyleBase = this.props.availableShowStyleBases.find( - (base) => base.showStyleBase._id === showStyleBaseId - ) 
- if (showStyleBase) { - buttons.push( - - ) - } - } - } - return buttons - } +export function StudioGenericProperties({ + studio, + availableShowStyleBases, +}: IStudioGenericPropertiesProps): JSX.Element { + const { t } = useTranslation() - render(): JSX.Element { - const { t } = this.props - return ( -
-

{t('Generic Properties')}

- -
- {t('Select Compatible Show Styles')} -
- - {this.renderShowStyleEditButtons()} - -
- {!this.props.studio.supportedShowStyleBase.length ? ( -
- {t('Show style not set')} -
- ) : null} -
- - - - - - - - - - - - - - - - - - - - - -
+ const showStyleEditButtons: JSX.Element[] = [] + for (const showStyleBaseId of studio.supportedShowStyleBase) { + const showStyleBase = availableShowStyleBases.find((base) => base.showStyleBase._id === showStyleBaseId) + if (showStyleBase) { + showStyleEditButtons.push( + ) } } -) + + return ( +
+

{t('Generic Properties')}

+ +
+ {t('Select Compatible Show Styles')} +
+ + {showStyleEditButtons} + +
+ {!studio.supportedShowStyleBase.length ? ( +
+ {t('Show style not set')} +
+ ) : null} +
+ + + + +
+ ) +} const NewShowStyleButton = React.memo(function NewShowStyleButton() { const history = useHistory() @@ -336,3 +144,273 @@ const RedirectToShowStyleButton = React.memo(function RedirectToShowStyleButton( ) }) + +function StudioSettings({ studio }: { studio: DBStudio }): JSX.Element { + const { t } = useTranslation() + + const saveOverrides = React.useCallback( + (newOps: SomeObjectOverrideOp[]) => { + Studios.update(studio._id, { + $set: { + 'settingsWithOverrides.overrides': newOps.map((op) => ({ + ...op, + path: op.path.startsWith('0.') ? op.path.slice(2) : op.path, + })), + }, + }) + }, + [studio._id] + ) + + const [wrappedItem, wrappedConfigObject] = useMemo(() => { + const prefixedOps = studio.settingsWithOverrides.overrides.map((op) => ({ + ...op, + // TODO: can we avoid doing this hack? + path: `0.${op.path}`, + })) + + const computedValue = applyAndValidateOverrides(studio.settingsWithOverrides).obj + + const wrappedItem = literal>({ + type: 'normal', + id: '0', + computed: computedValue, + defaults: studio.settingsWithOverrides.defaults, + overrideOps: prefixedOps, + }) + + const wrappedConfigObject: ObjectWithOverrides = { + defaults: studio.settingsWithOverrides.defaults, + overrides: prefixedOps, + } + + return [wrappedItem, wrappedConfigObject] + }, [studio.settingsWithOverrides]) + + const overrideHelper = useOverrideOpHelper(saveOverrides, wrappedConfigObject) + + const autoNextOptions: DropdownInputOption[] = useMemo( + () => [ + { + name: t('Disabled'), + value: ForceQuickLoopAutoNext.DISABLED, + i: 0, + }, + { + name: t('Enabled, but skipping parts with undefined or 0 duration'), + value: ForceQuickLoopAutoNext.ENABLED_WHEN_VALID_DURATION, + i: 1, + }, + { + name: t('Enabled on all Parts, applying QuickLoop Fallback Part Duration if needed'), + value: ForceQuickLoopAutoNext.ENABLED_FORCING_MIN_DURATION, + i: 2, + }, + ], + [t] + ) + + return ( + <> + + {(value, handleUpdate) => ( + + )} + + + + {(value, handleUpdate) => ( + + )} + + + + 
{(value, handleUpdate) => } + + + + {(value, handleUpdate) => ( + + )} + + + + {(value, handleUpdate) => ( + + )} + + + + {(value, handleUpdate) => ( + + )} + + + + {(value, handleUpdate) => ( + + )} + + + + {(value, handleUpdate) => } + + + + {(value, handleUpdate) => ( + + )} + + + + {(value, handleUpdate) => } + + + + {(value, handleUpdate) => } + + + + {(value, handleUpdate) => } + + + + {(value, handleUpdate) => } + + + + {(value, handleUpdate, options) => ( + + )} + + + + {(value, handleUpdate) => ( + + )} + + + ) +} diff --git a/packages/webui/src/client/ui/Settings/Studio/Mappings.tsx b/packages/webui/src/client/ui/Settings/Studio/Mappings.tsx index f47a0bc4fcf..5807d916e3e 100644 --- a/packages/webui/src/client/ui/Settings/Studio/Mappings.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/Mappings.tsx @@ -432,7 +432,6 @@ function StudioMappingsEntry({ hint={t('Human-readable name of the layer')} item={item} itemKey={'layerName'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -450,7 +449,6 @@ function StudioMappingsEntry({ hint={t('The type of device to use for the output')} item={item} itemKey={'device'} - opPrefix={item.id} overrideHelper={overrideHelper} options={deviceTypeOptions} > @@ -469,7 +467,6 @@ function StudioMappingsEntry({ hint={t('ID of the device (corresponds to the device ID in the peripheralDevice settings)')} item={item} itemKey={'deviceId'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -486,7 +483,6 @@ function StudioMappingsEntry({ label={t('Lookahead Mode')} item={item} itemKey={'lookahead'} - opPrefix={item.id} overrideHelper={overrideHelper} options={getDropdownInputOptions(LookaheadMode)} > @@ -504,7 +500,6 @@ function StudioMappingsEntry({ label={t('Lookahead Target Objects (Undefined = 1)')} item={item} itemKey={'lookaheadDepth'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -523,7 +518,6 @@ function 
StudioMappingsEntry({ })} item={item} itemKey={'lookaheadMaxSearchDistance'} - opPrefix={item.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -543,7 +537,6 @@ function StudioMappingsEntry({ hint={t('The type of mapping to use')} item={item} itemKey={'options.mappingType'} - opPrefix={item.id} overrideHelper={overrideHelper} options={mappingTypeOptions} > diff --git a/packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTableRow.tsx b/packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTableRow.tsx index eee92146cdd..780a8afb121 100644 --- a/packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTableRow.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/PackageManager/AccessorTableRow.tsx @@ -133,7 +133,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.label`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -150,7 +149,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.type`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} options={getDropdownInputOptions(Accessor.AccessType)} > @@ -173,7 +171,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.folderPath`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -191,7 +188,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.resourceId`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -212,7 +208,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep 
itemKey={`container.accessors.${accessorId}.baseUrl`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -230,7 +225,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.isImmutable`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -249,7 +243,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.useGETinsteadOfHEAD`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -269,7 +262,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.networkId`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -290,7 +282,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.baseUrl`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -310,7 +301,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.networkId`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -331,7 +321,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.folderPath`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -349,7 +338,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.userName`} - 
opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -367,7 +355,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.password`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -385,7 +372,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.networkId`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -406,7 +392,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.quantelGatewayUrl`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -424,7 +409,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.ISAUrls`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -442,7 +426,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.zoneId`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -462,7 +445,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.serverId`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -480,7 +462,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.transformerURL`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) 
=> ( @@ -498,7 +479,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.fileflowURL`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -516,7 +496,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.fileflowProfile`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -536,7 +515,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.allowRead`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -546,7 +524,6 @@ export function AccessorTableRow({ item={packageContainer} //@ts-expect-error can't be 4 levels deep itemKey={`container.accessors.${accessorId}.allowWrite`} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } diff --git a/packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainers.tsx b/packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainers.tsx index d92e3e31c82..ac915bfbabc 100644 --- a/packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainers.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/PackageManager/PackageContainers.tsx @@ -285,7 +285,6 @@ function PackageContainerRow({ item={packageContainer} //@ts-expect-error can't be 2 levels deep itemKey={'container.label'} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -302,7 +301,6 @@ function PackageContainerRow({ hint={t('Select which playout devices are using this package container')} item={packageContainer} itemKey={'deviceIds'} - opPrefix={packageContainer.id} overrideHelper={overrideHelper} 
options={availablePlayoutDevicesOptions} > diff --git a/packages/webui/src/client/ui/Settings/Studio/Routings/ExclusivityGroups.tsx b/packages/webui/src/client/ui/Settings/Studio/Routings/ExclusivityGroups.tsx index 7b3a167749f..ec442fd0cc0 100644 --- a/packages/webui/src/client/ui/Settings/Studio/Routings/ExclusivityGroups.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/Routings/ExclusivityGroups.tsx @@ -223,7 +223,6 @@ function ExclusivityGroupRow({ label={t('Exclusivity Group Name')} item={exclusivityGroup} itemKey={'name'} - opPrefix={exclusivityGroup.id} overrideHelper={exclusivityOverrideHelper} > {(value, handleUpdate) => ( diff --git a/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx b/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx index f6dc204d505..83ec047d7ae 100644 --- a/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSetAbPlayers.tsx @@ -118,7 +118,6 @@ function AbPlayerRow({ label={t('Pool name')} item={player} itemKey={'poolName'} - opPrefix={player.id} overrideHelper={tableOverrideHelper} > {(value, handleUpdate) => ( @@ -134,7 +133,6 @@ function AbPlayerRow({ label={t('Pool PlayerId')} item={player} itemKey={'playerId'} - opPrefix={player.id} overrideHelper={tableOverrideHelper} > {(value, handleUpdate) => ( diff --git a/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSets.tsx b/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSets.tsx index cf555b46111..fe82a556471 100644 --- a/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSets.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/Routings/RouteSets.tsx @@ -306,7 +306,6 @@ function RouteSetRow({ hint={t('he default state of this Route Set')} item={routeSet} itemKey={'defaultActive'} - opPrefix={routeSet.id} overrideHelper={overrideHelper} options={getDropdownInputOptions(DEFAULT_ACTIVE_OPTIONS)} > @@ -323,7 
+322,6 @@ function RouteSetRow({ label={t('Active')} item={routeSet} itemKey={'active'} - opPrefix={routeSet.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => } @@ -332,7 +330,6 @@ function RouteSetRow({ label={t('Route Set Name')} item={routeSet} itemKey={'name'} - opPrefix={routeSet.id} overrideHelper={overrideHelper} > {(value, handleUpdate) => ( @@ -350,7 +347,6 @@ function RouteSetRow({ hint={t('If set, only one Route Set will be active per exclusivity group')} item={routeSet} itemKey={'exclusivityGroup'} - opPrefix={routeSet.id} overrideHelper={overrideHelper} options={exclusivityGroupOptions} > @@ -369,7 +365,6 @@ function RouteSetRow({ hint={t('The way this Route Set should behave towards the user')} item={routeSet} itemKey={'behavior'} - opPrefix={routeSet.id} overrideHelper={overrideHelper} options={getDropdownInputOptions(StudioRouteBehavior)} > @@ -601,7 +596,6 @@ function RenderRoutesRow({ label={t('Original Layer')} item={route} itemKey={'mappedLayer'} - opPrefix={route.id} overrideHelper={tableOverrideHelper} options={getDropdownInputOptions(Object.keys(studioMappings))} > @@ -619,7 +613,6 @@ function RenderRoutesRow({ label={t('New Layer')} item={route} itemKey={'outputMappedLayer'} - opPrefix={route.id} overrideHelper={tableOverrideHelper} > {(value, handleUpdate) => ( @@ -636,7 +629,6 @@ function RenderRoutesRow({ label={t('Route Type')} item={route} itemKey={'routeType'} - opPrefix={route.id} overrideHelper={tableOverrideHelper} options={getDropdownInputOptions(StudioRouteType)} > @@ -660,7 +652,6 @@ function RenderRoutesRow({ label={t('Device Type')} item={route} itemKey={'deviceType'} - opPrefix={route.id} overrideHelper={tableOverrideHelper} options={getDropdownInputOptions(TSR.DeviceType)} > @@ -689,7 +680,6 @@ function RenderRoutesRow({ label={t('Mapping Type')} item={route} itemKey={'remapping.options.mappingType'} - opPrefix={route.id} overrideHelper={tableOverrideHelper} options={mappingTypeOptions} > @@ -710,7 +700,6 @@ 
function RenderRoutesRow({ label={t('Device ID')} item={route} itemKey={'remapping.deviceId'} - opPrefix={route.id} overrideHelper={tableOverrideHelper} showClearButton={true} > diff --git a/packages/webui/src/client/ui/Settings/SystemManagement.tsx b/packages/webui/src/client/ui/Settings/SystemManagement.tsx index 742e04e5b24..b15eef16225 100644 --- a/packages/webui/src/client/ui/Settings/SystemManagement.tsx +++ b/packages/webui/src/client/ui/Settings/SystemManagement.tsx @@ -9,14 +9,30 @@ import { languageAnd } from '../../lib/language' import { TriggeredActionsEditor } from './components/triggeredActions/TriggeredActionsEditor' import { TFunction, useTranslation } from 'react-i18next' import { Meteor } from 'meteor/meteor' -import { LogLevel } from '../../lib/tempLib' +import { literal, LogLevel } from '../../lib/tempLib' import { CoreSystem } from '../../collections' import { CollectionCleanupResult } from '@sofie-automation/meteor-lib/dist/api/system' -import { LabelActual } from '../../lib/Components/LabelAndOverrides' +import { + LabelActual, + LabelAndOverrides, + LabelAndOverridesForCheckbox, + LabelAndOverridesForMultiLineText, +} from '../../lib/Components/LabelAndOverrides' import { catchError } from '../../lib/lib' +import { SystemManagementBlueprint } from './SystemManagement/Blueprint' +import { + applyAndValidateOverrides, + ObjectWithOverrides, + SomeObjectOverrideOp, +} from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { ICoreSystemSettings } from '@sofie-automation/blueprints-integration' +import { WrappedOverridableItemNormal, useOverrideOpHelper } from './util/OverrideOpHelper' +import { CheckboxControl } from '../../lib/Components/Checkbox' +import { CombinedMultiLineTextInputControl, MultiLineTextInputControl } from '../../lib/Components/MultiLineTextInput' +import { TextInputControl } from '../../lib/Components/TextInput' interface WithCoreSystemProps { - coreSystem: ICoreSystem | undefined + coreSystem: ICoreSystem 
} export default function SystemManagement(): JSX.Element | null { @@ -30,6 +46,8 @@ export default function SystemManagement(): JSX.Element | null {
+ + @@ -156,25 +174,29 @@ function SystemManagementNotificationMessage({ coreSystem }: Readonly) { const { t } = useTranslation() + const { wrappedItem, overrideHelper } = useCoreSystemSettingsWithOverrides(coreSystem) + return ( <>

{t('Support Panel')}

- + )} +
) @@ -183,50 +205,56 @@ function SystemManagementSupportPanel({ coreSystem }: Readonly) { const { t } = useTranslation() + const { wrappedItem, overrideHelper } = useCoreSystemSettingsWithOverrides(coreSystem) + return ( <>

{t('Evaluations')}

- - - + )} +
) @@ -304,55 +332,49 @@ function SystemManagementMonitoring({ coreSystem }: Readonly) { const { t } = useTranslation() + const { wrappedItem, overrideHelper } = useCoreSystemSettingsWithOverrides(coreSystem) + return ( <>

{t('Cron jobs')}

- - - + )} +
) @@ -571,3 +593,51 @@ function SystemManagementHeapSnapshot() { ) } + +function useCoreSystemSettingsWithOverrides(coreSystem: ICoreSystem) { + const saveOverrides = useCallback( + (newOps: SomeObjectOverrideOp[]) => { + CoreSystem.update(coreSystem._id, { + $set: { + 'settingsWithOverrides.overrides': newOps.map((op) => ({ + ...op, + path: op.path.startsWith('0.') ? op.path.slice(2) : op.path, + })), + }, + }) + }, + [coreSystem._id] + ) + + const [wrappedItem, wrappedConfigObject] = useMemo(() => { + const prefixedOps = coreSystem.settingsWithOverrides.overrides.map((op) => ({ + ...op, + // TODO: can we avoid doing this hack? + path: `0.${op.path}`, + })) + + const computedValue = applyAndValidateOverrides(coreSystem.settingsWithOverrides).obj + + const wrappedItem = literal>({ + type: 'normal', + id: '0', + computed: computedValue, + defaults: coreSystem.settingsWithOverrides.defaults, + overrideOps: prefixedOps, + }) + + const wrappedConfigObject: ObjectWithOverrides = { + defaults: coreSystem.settingsWithOverrides.defaults, + overrides: prefixedOps, + } + + return [wrappedItem, wrappedConfigObject] + }, [coreSystem.settingsWithOverrides]) + + const overrideHelper = useOverrideOpHelper(saveOverrides, wrappedConfigObject) + + return { + wrappedItem, + overrideHelper, + } +} diff --git a/packages/webui/src/client/ui/Settings/SystemManagement/Blueprint.tsx b/packages/webui/src/client/ui/Settings/SystemManagement/Blueprint.tsx new file mode 100644 index 00000000000..ce87ee092e3 --- /dev/null +++ b/packages/webui/src/client/ui/Settings/SystemManagement/Blueprint.tsx @@ -0,0 +1,99 @@ +import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' +import { UIBlueprintUpgradeStatusCoreSystem } from '@sofie-automation/meteor-lib/dist/api/upgradeStatus' +import { useTranslation } from 'react-i18next' +import { useSubscription, useTracker } from '../../../lib/ReactMeteorData/ReactMeteorData' +import { UIBlueprintUpgradeStatuses } from '../../Collections' 
+import { getUpgradeStatusMessage, SystemUpgradeStatusButtons } from '../Upgrades/Components' +import { ICoreSystem } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' +import { Blueprints, CoreSystem } from '../../../collections' +import { BlueprintManifestType } from '@sofie-automation/blueprints-integration' +import { faExclamationTriangle } from '@fortawesome/free-solid-svg-icons' +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome' +import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' +import { useMemo } from 'react' +import { LabelActual } from '../../../lib/Components/LabelAndOverrides' +import { EditAttribute } from '../../../lib/EditAttribute' +import { RedirectToBlueprintButton } from '../../../lib/SettingsNavigation' + +interface SystemManagementBlueprintProps { + coreSystem: ICoreSystem | undefined +} +export function SystemManagementBlueprint({ coreSystem }: Readonly): JSX.Element { + const { t } = useTranslation() + + const isStatusReady = useSubscription(MeteorPubSub.uiBlueprintUpgradeStatuses) + const status = useTracker( + () => + coreSystem && + (UIBlueprintUpgradeStatuses.findOne({ + documentId: coreSystem._id, + documentType: 'coreSystem', + }) as UIBlueprintUpgradeStatusCoreSystem | undefined), + [coreSystem?._id] + ) + const statusMessage = isStatusReady && status ? getUpgradeStatusMessage(t, status) ?? t('OK') : t('Loading...') + + return ( +
+
+ + +

+ {t('Upgrade Status')}: {statusMessage} + {status && } +

+
+
+ ) +} + +interface SelectBlueprintProps { + coreSystem: ICoreSystem | undefined +} + +function SelectBlueprint({ coreSystem }: Readonly): JSX.Element { + const { t } = useTranslation() + + const allSystemBlueprints = useTracker(() => { + return Blueprints.find({ + blueprintType: BlueprintManifestType.SYSTEM, + }).fetch() + }, []) + const blueprintOptions: { name: string; value: BlueprintId | null }[] = useMemo(() => { + if (allSystemBlueprints) { + return allSystemBlueprints.map((blueprint) => { + return { + name: blueprint.name ? `${blueprint.name} (${blueprint._id})` : unprotectString(blueprint._id), + value: blueprint._id, + } + }) + } else { + return [] + } + }, [allSystemBlueprints]) + + return ( +
+ +
+ ) +} diff --git a/packages/webui/src/client/ui/Settings/Upgrades/Components.tsx b/packages/webui/src/client/ui/Settings/Upgrades/Components.tsx index 19e7fc17785..1034a6a91a9 100644 --- a/packages/webui/src/client/ui/Settings/Upgrades/Components.tsx +++ b/packages/webui/src/client/ui/Settings/Upgrades/Components.tsx @@ -10,6 +10,7 @@ import { NoteSeverity } from '@sofie-automation/blueprints-integration' import { NotificationCenter, NoticeLevel, Notification } from '../../../lib/notifications/notifications' import { UIBlueprintUpgradeStatusBase, + UIBlueprintUpgradeStatusCoreSystem, UIBlueprintUpgradeStatusShowStyle, UIBlueprintUpgradeStatusStudio, } from '@sofie-automation/meteor-lib/dist/api/upgradeStatus' @@ -288,3 +289,76 @@ export function UpgradeStatusButtons({ upgradeResult }: Readonly ) } + +interface SystemUpgradeStatusButtonsProps { + upgradeResult: UIBlueprintUpgradeStatusCoreSystem +} +export function SystemUpgradeStatusButtons({ upgradeResult }: Readonly): JSX.Element { + const { t } = useTranslation() + + const applyConfig = useCallback( + async () => MeteorCall.migration.runUpgradeForCoreSystem(upgradeResult.documentId), + [upgradeResult.documentId, upgradeResult.documentType] + ) + + const clickApply = useCallback(() => { + applyConfig() + .then(() => { + NotificationCenter.push( + new Notification( + undefined, + NoticeLevel.NOTIFICATION, + t('Config for {{name}} upgraded successfully', { name: upgradeResult.name }), + 'UpgradesView' + ) + ) + }) + .catch((e) => { + catchError('Upgrade applyConfig')(e) + NotificationCenter.push( + new Notification( + undefined, + NoticeLevel.WARNING, + t('Config for {{name}} upgraded failed', { name: upgradeResult.name }), + 'UpgradesView' + ) + ) + }) + }, [upgradeResult, applyConfig]) + + const clickShowChanges = useCallback(() => { + doModalDialog({ + title: t('Upgrade config for {{name}}', { name: upgradeResult.name }), + message: ( +
+ {upgradeResult.changes.length === 0 &&

{t('No changes')}

} + {upgradeResult.changes.map((msg, i) => ( +

{translateMessage(msg, i18nTranslator)}

+ ))} +
+ ), + acceptOnly: true, + yes: t('Dismiss'), + onAccept: () => { + // Do nothing + }, + }) + }, [upgradeResult]) + + return ( +
+ + +
+ ) +} diff --git a/packages/webui/src/client/ui/Settings/Upgrades/View.tsx b/packages/webui/src/client/ui/Settings/Upgrades/View.tsx index 2e6f2bbfd72..7302d2acd08 100644 --- a/packages/webui/src/client/ui/Settings/Upgrades/View.tsx +++ b/packages/webui/src/client/ui/Settings/Upgrades/View.tsx @@ -4,11 +4,8 @@ import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { useSubscription, useTracker } from '../../../lib/ReactMeteorData/ReactMeteorData' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { UIBlueprintUpgradeStatuses } from '../../Collections' -import { - UIBlueprintUpgradeStatusShowStyle, - UIBlueprintUpgradeStatusStudio, -} from '@sofie-automation/meteor-lib/dist/api/upgradeStatus' -import { getUpgradeStatusMessage, UpgradeStatusButtons } from './Components' +import { UIBlueprintUpgradeStatus } from '@sofie-automation/meteor-lib/dist/api/upgradeStatus' +import { getUpgradeStatusMessage, SystemUpgradeStatusButtons, UpgradeStatusButtons } from './Components' export function UpgradesView(): JSX.Element { const { t } = useTranslation() @@ -39,6 +36,17 @@ export function UpgradesView(): JSX.Element { )} + {statuses?.map( + (document) => + document.documentType === 'coreSystem' && ( + + ) + )} + {statuses?.map( (document) => document.documentType === 'studio' && ( @@ -69,7 +77,7 @@ export function UpgradesView(): JSX.Element { interface ShowUpgradesRowProps { resourceName: string - upgradeResult: UIBlueprintUpgradeStatusStudio | UIBlueprintUpgradeStatusShowStyle + upgradeResult: UIBlueprintUpgradeStatus } function ShowUpgradesRow({ resourceName, upgradeResult }: Readonly) { const { t } = useTranslation() @@ -83,7 +91,11 @@ function ShowUpgradesRow({ resourceName, upgradeResult }: Readonly{getUpgradeStatusMessage(t, upgradeResult)} - + {upgradeResult.documentType === 'coreSystem' ? 
( + + ) : ( + + )} ) diff --git a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx index 8a623bc7730..d313b89a5c9 100644 --- a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx +++ b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx @@ -437,7 +437,7 @@ export const TriggeredActionsEditor: React.FC = function TriggeredAction {showStyleBaseId !== null ? ( <>
- {(systemTriggeredActionIds?.length ?? 0) > 0 && !parsedTriggerFilter ? ( + {!parsedTriggerFilter ? (

setSystemWideCollapsed(!systemWideCollapsed)} @@ -470,13 +470,19 @@ export const TriggeredActionsEditor: React.FC = function TriggeredAction /> )) : null} + + {!systemWideCollapsed && !parsedTriggerFilter && systemTriggeredActionIds?.length === 0 && ( +

{t('No Action Triggers set up.')}

+ )}

) : null}
- + + + { - const core = CoreSystem.findOne() + const core = CoreSystem.findOne(SYSTEM_ID, { projection: { settingsWithOverrides: 1 } }) + const coreSettings = core && applyAndValidateOverrides(core.settingsWithOverrides).obj return { - supportMessage: core?.support?.message ?? '', + supportMessage: coreSettings?.support?.message ?? '', } }, [], From 77e101ed784a4e19af335331b5b8cf20d6e5fad4 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 20 Nov 2024 17:02:41 +0000 Subject: [PATCH 63/81] feat: option to disable HOLD & direct play (#32) --- meteor/__mocks__/defaultCollectionObjects.ts | 2 + meteor/server/api/rest/v1/typeConversion.ts | 14 +++++-- meteor/server/api/studio/api.ts | 2 + meteor/server/lib/rest/v1/studios.ts | 3 ++ meteor/server/migration/0_1_0.ts | 2 + meteor/server/migration/X_X_X.ts | 37 +++++++++++++++++++ .../migration/__tests__/migrations.test.ts | 6 +++ .../__tests__/checkPieceContentStatus.test.ts | 4 +- .../checkPieceContentStatus.ts | 4 +- .../src/__mocks__/defaultCollectionObjects.ts | 2 + .../src/blueprints/__tests__/config.test.ts | 4 ++ packages/job-worker/src/playout/adlibJobs.ts | 6 +++ packages/job-worker/src/playout/holdJobs.ts | 9 +++++ packages/job-worker/src/playout/upgrade.ts | 2 + packages/openapi/api/definitions/studios.yaml | 9 +++++ .../src/core/model/StudioSettings.ts | 13 +++++++ .../src/__mocks__/defaultCollectionObjects.ts | 2 + packages/webui/src/client/ui/RundownView.tsx | 5 ++- .../src/client/ui/Settings/Studio/Generic.tsx | 20 ++++++++++ 19 files changed, 137 insertions(+), 9 deletions(-) diff --git a/meteor/__mocks__/defaultCollectionObjects.ts b/meteor/__mocks__/defaultCollectionObjects.ts index 62420caaac4..ae3cc5aa1f2 100644 --- a/meteor/__mocks__/defaultCollectionObjects.ts +++ b/meteor/__mocks__/defaultCollectionObjects.ts @@ -110,6 +110,8 @@ export function defaultStudio(_id: StudioId): DBStudio { mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, fallbackPartDuration: 
DEFAULT_FALLBACK_PART_DURATION, + allowHold: false, + allowPieceDirectPlay: false, }), _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), diff --git a/meteor/server/api/rest/v1/typeConversion.ts b/meteor/server/api/rest/v1/typeConversion.ts index 7b442546878..afd3f7ccd3f 100644 --- a/meteor/server/api/rest/v1/typeConversion.ts +++ b/meteor/server/api/rest/v1/typeConversion.ts @@ -10,7 +10,7 @@ import { PeripheralDevice, PeripheralDeviceType } from '@sofie-automation/coreli import { Blueprint } from '@sofie-automation/corelib/dist/dataModel/Blueprint' import { BucketId, ShowStyleBaseId, ShowStyleVariantId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBStudio, IStudioSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { assertNever, getRandomId, literal } from '@sofie-automation/corelib/dist/lib' +import { assertNever, Complete, getRandomId, literal } from '@sofie-automation/corelib/dist/lib' import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { applyAndValidateOverrides, @@ -296,7 +296,7 @@ export async function studioFrom(apiStudio: APIStudio, existingId?: StudioId): P } } -export function APIStudioFrom(studio: DBStudio): APIStudio { +export function APIStudioFrom(studio: DBStudio): Complete { const studioSettings = APIStudioSettingsFrom(applyAndValidateOverrides(studio.settingsWithOverrides).obj) return { @@ -309,7 +309,7 @@ export function APIStudioFrom(studio: DBStudio): APIStudio { } } -export function studioSettingsFrom(apiStudioSettings: APIStudioSettings): IStudioSettings { +export function studioSettingsFrom(apiStudioSettings: APIStudioSettings): Complete { return { frameRate: apiStudioSettings.frameRate, mediaPreviewsUrl: apiStudioSettings.mediaPreviewsUrl, @@ -325,10 +325,13 @@ export function studioSettingsFrom(apiStudioSettings: APIStudioSettings): IStudi enableQuickLoop: apiStudioSettings.enableQuickLoop, forceQuickLoopAutoNext: 
forceQuickLoopAutoNextFrom(apiStudioSettings.forceQuickLoopAutoNext), fallbackPartDuration: apiStudioSettings.fallbackPartDuration ?? DEFAULT_FALLBACK_PART_DURATION, + allowAdlibTestingSegment: apiStudioSettings.allowAdlibTestingSegment, + allowHold: apiStudioSettings.allowHold ?? true, // Backwards compatible + allowPieceDirectPlay: apiStudioSettings.allowPieceDirectPlay ?? true, // Backwards compatible } } -export function APIStudioSettingsFrom(settings: IStudioSettings): APIStudioSettings { +export function APIStudioSettingsFrom(settings: IStudioSettings): Complete { return { frameRate: settings.frameRate, mediaPreviewsUrl: settings.mediaPreviewsUrl, @@ -344,6 +347,9 @@ export function APIStudioSettingsFrom(settings: IStudioSettings): APIStudioSetti enableQuickLoop: settings.enableQuickLoop, forceQuickLoopAutoNext: APIForceQuickLoopAutoNextFrom(settings.forceQuickLoopAutoNext), fallbackPartDuration: settings.fallbackPartDuration, + allowAdlibTestingSegment: settings.allowAdlibTestingSegment, + allowHold: settings.allowHold, + allowPieceDirectPlay: settings.allowPieceDirectPlay, } } diff --git a/meteor/server/api/studio/api.ts b/meteor/server/api/studio/api.ts index fa5d7c1c372..bbcfcbb93a8 100644 --- a/meteor/server/api/studio/api.ts +++ b/meteor/server/api/studio/api.ts @@ -49,6 +49,8 @@ export async function insertStudioInner(organizationId: OrganizationId | null, n frameRate: 25, mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: false, + allowPieceDirectPlay: false, }), _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), diff --git a/meteor/server/lib/rest/v1/studios.ts b/meteor/server/lib/rest/v1/studios.ts index 211ea2c34e4..2d043e97bef 100644 --- a/meteor/server/lib/rest/v1/studios.ts +++ b/meteor/server/lib/rest/v1/studios.ts @@ -186,4 +186,7 @@ export interface APIStudioSettings { forceQuickLoopAutoNext?: 'disabled' | 'enabled_when_valid_duration' | 'enabled_forcing_min_duration' minimumTakeSpan?: number 
fallbackPartDuration?: number + allowAdlibTestingSegment?: boolean + allowHold?: boolean + allowPieceDirectPlay?: boolean } diff --git a/meteor/server/migration/0_1_0.ts b/meteor/server/migration/0_1_0.ts index f4a6abf7ad3..bd6ed719ab2 100644 --- a/meteor/server/migration/0_1_0.ts +++ b/meteor/server/migration/0_1_0.ts @@ -33,6 +33,8 @@ export const addSteps = addMigrationSteps('0.1.0', [ frameRate: 25, mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: false, + allowPieceDirectPlay: false, }), mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), diff --git a/meteor/server/migration/X_X_X.ts b/meteor/server/migration/X_X_X.ts index 62abd9a8f84..13451de2b97 100644 --- a/meteor/server/migration/X_X_X.ts +++ b/meteor/server/migration/X_X_X.ts @@ -310,6 +310,43 @@ export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [ } }, }, + + { + id: `add studio settings allowHold & allowPieceDirectPlay`, + canBeRunAutomatically: true, + validate: async () => { + const studios = await Studios.findFetchAsync({ + $or: [ + { 'settings.allowHold': { $exists: false } }, + { 'settings.allowPieceDirectPlay': { $exists: false } }, + ], + }) + + if (studios.length > 0) { + return 'studios must have settings.allowHold and settings.allowPieceDirectPlay defined' + } + + return false + }, + migrate: async () => { + const studios = await Studios.findFetchAsync({ + $or: [ + { 'settings.allowHold': { $exists: false } }, + { 'settings.allowPieceDirectPlay': { $exists: false } }, + ], + }) + + for (const studio of studios) { + // Populate the settings to be backwards compatible + await Studios.updateAsync(studio._id, { + $set: { + 'settings.allowHold': true, + 'settings.allowPieceDirectPlay': true, + }, + }) + } + }, + }, ]) interface PartialOldICoreSystem { diff --git a/meteor/server/migration/__tests__/migrations.test.ts b/meteor/server/migration/__tests__/migrations.test.ts index 62967260fe8..8094b84390f 100644 
--- a/meteor/server/migration/__tests__/migrations.test.ts +++ b/meteor/server/migration/__tests__/migrations.test.ts @@ -125,6 +125,8 @@ describe('Migrations', () => { mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: true, + allowPieceDirectPlay: true, }), mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), @@ -163,6 +165,8 @@ describe('Migrations', () => { mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: true, + allowPieceDirectPlay: true, }), mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), @@ -201,6 +205,8 @@ describe('Migrations', () => { mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: true, + allowPieceDirectPlay: true, }), mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), diff --git a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts index 4e3604807d4..5d7e3ab300c 100644 --- a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts +++ b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts @@ -166,9 +166,7 @@ describe('lib/mediaObjects', () => { test('getAcceptedFormats', () => { const acceptedFormats = getAcceptedFormats({ supportedMediaFormats: '1920x1080i5000, 1280x720, i5000, i5000tff', - mediaPreviewsUrl: '', frameRate: 25, - minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, }) expect(acceptedFormats).toEqual([ ['1920', '1080', 'i', '5000', undefined], @@ -251,6 +249,8 @@ describe('lib/mediaObjects', () => { supportedAudioStreams: '4', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: false, + allowPieceDirectPlay: false, } const mockDefaultStudio = 
defaultStudio(protectString('studio0')) diff --git a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts index 159830fbc5c..dbead8658e1 100644 --- a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts +++ b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts @@ -147,7 +147,9 @@ export function acceptFormat(format: string, formats: Array>): boo * [undefined, undefined, i, 5000, tff] * ] */ -export function getAcceptedFormats(settings: IStudioSettings | undefined): Array> { +export function getAcceptedFormats( + settings: Pick | undefined +): Array> { const formatsConfigField = settings ? settings.supportedMediaFormats : '' const formatsString: string = (formatsConfigField && formatsConfigField !== '' ? formatsConfigField : '1920x1080i5000') + '' diff --git a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts index 752b8f9eb6a..19633fe0173 100644 --- a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts @@ -107,6 +107,8 @@ export function defaultStudio(_id: StudioId): DBStudio { mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, allowAdlibTestingSegment: true, + allowHold: true, + allowPieceDirectPlay: true, }), routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), diff --git a/packages/job-worker/src/blueprints/__tests__/config.test.ts b/packages/job-worker/src/blueprints/__tests__/config.test.ts index 5e142a6bec3..1794afc6f58 100644 --- a/packages/job-worker/src/blueprints/__tests__/config.test.ts +++ b/packages/job-worker/src/blueprints/__tests__/config.test.ts @@ -15,6 +15,8 @@ describe('Test blueprint config', () => { mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + 
allowHold: true, + allowPieceDirectPlay: true, }), blueprintConfigWithOverrides: wrapDefaultObject({ sdfsdf: 'one', another: 5 }), }) @@ -38,6 +40,8 @@ describe('Test blueprint config', () => { mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: true, + allowPieceDirectPlay: true, }), blueprintConfigWithOverrides: wrapDefaultObject({ sdfsdf: 'one', another: 5 }), }) diff --git a/packages/job-worker/src/playout/adlibJobs.ts b/packages/job-worker/src/playout/adlibJobs.ts index 31eeb8382c7..4c64fa05dd6 100644 --- a/packages/job-worker/src/playout/adlibJobs.ts +++ b/packages/job-worker/src/playout/adlibJobs.ts @@ -35,6 +35,12 @@ import { PlayoutPieceInstanceModel } from './model/PlayoutPieceInstanceModel' * Play an existing Piece in the Rundown as an AdLib */ export async function handleTakePieceAsAdlibNow(context: JobContext, data: TakePieceAsAdlibNowProps): Promise { + if (!context.studio.settings.allowPieceDirectPlay) { + // Piece direct play isn't allowed, making this a noop + logger.debug(`Piece direct play isn't allowed, skipping`) + return + } + return runJobWithPlayoutModel( context, data, diff --git a/packages/job-worker/src/playout/holdJobs.ts b/packages/job-worker/src/playout/holdJobs.ts index ab00c417382..3c88aca6a63 100644 --- a/packages/job-worker/src/playout/holdJobs.ts +++ b/packages/job-worker/src/playout/holdJobs.ts @@ -5,11 +5,18 @@ import { ActivateHoldProps, DeactivateHoldProps } from '@sofie-automation/coreli import { JobContext } from '../jobs' import { runJobWithPlayoutModel } from './lock' import { updateTimeline } from './timeline/generate' +import { logger } from '../logging' /** * Activate Hold */ export async function handleActivateHold(context: JobContext, data: ActivateHoldProps): Promise { + if (!context.studio.settings.allowHold) { + // Hold isn't allowed, making this a noop + logger.debug(`Hold isn't allowed, skipping`) + return + } + return runJobWithPlayoutModel( context, data, @@ -59,6 +66,8 
@@ export async function handleActivateHold(context: JobContext, data: ActivateHold * Deactivate Hold */ export async function handleDeactivateHold(context: JobContext, data: DeactivateHoldProps): Promise { + // This should be possible even when hold is not allowed, as it is a way to get out of a stuck state + return runJobWithPlayoutModel( context, data, diff --git a/packages/job-worker/src/playout/upgrade.ts b/packages/job-worker/src/playout/upgrade.ts index 4e2cf3dece3..54b1c123de0 100644 --- a/packages/job-worker/src/playout/upgrade.ts +++ b/packages/job-worker/src/playout/upgrade.ts @@ -115,6 +115,8 @@ export async function handleBlueprintUpgradeForStudio(context: JobContext, _data frameRate: 25, mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: true, + allowPieceDirectPlay: true, } await context.directCollections.Studios.update(context.studioId, { diff --git a/packages/openapi/api/definitions/studios.yaml b/packages/openapi/api/definitions/studios.yaml index 8b70c2e7e59..314e7c4bfc3 100644 --- a/packages/openapi/api/definitions/studios.yaml +++ b/packages/openapi/api/definitions/studios.yaml @@ -476,6 +476,15 @@ components: fallbackPartDuration: type: number description: The duration to apply on too short Parts Within QuickLoop when forceQuickLoopAutoNext is set to `enabled_forcing_min_duration` + allowAdlibTestingSegment: + type: boolean + description: Whether to allow adlib testing mode, before a Part is playing in a Playlist + allowHold: + type: boolean + description: Whether to allow hold operations for Rundowns in this Studio + allowPieceDirectPlay: + type: boolean + description: Whether to allow direct playing of a piece in the rundown required: - frameRate diff --git a/packages/shared-lib/src/core/model/StudioSettings.ts b/packages/shared-lib/src/core/model/StudioSettings.ts index 45f88be0a42..08f38a597d0 100644 --- a/packages/shared-lib/src/core/model/StudioSettings.ts +++ 
b/packages/shared-lib/src/core/model/StudioSettings.ts @@ -63,4 +63,17 @@ export interface IStudioSettings { * Default: 3000 */ fallbackPartDuration?: number + + /** + * Whether to allow hold operations for Rundowns in this Studio + * When disabled, any action-triggers that would normally trigger a hold operation will be silently ignored + * This should only block entering hold, to ensure Sofie doesn't get stuck if it somehow gets into hold + */ + allowHold: boolean + + /** + * Whether to allow direct playing of a piece in the rundown + * This behaviour is usally triggered by double-clicking on a piece in the GUI + */ + allowPieceDirectPlay: boolean } diff --git a/packages/webui/src/__mocks__/defaultCollectionObjects.ts b/packages/webui/src/__mocks__/defaultCollectionObjects.ts index 609f774ae79..bef95149b48 100644 --- a/packages/webui/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/webui/src/__mocks__/defaultCollectionObjects.ts @@ -105,6 +105,8 @@ export function defaultStudio(_id: StudioId): DBStudio { frameRate: 25, mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: true, + allowPieceDirectPlay: true, }), _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index 021c09e9f69..21048d356b7 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -1050,7 +1050,7 @@ const RundownHeader = withTranslation()( {this.props.playlist.activationId ? ( this.take(e)}>{t('Take')} ) : null} - {this.props.playlist.activationId ? ( + {this.props.studio.settings.allowHold && this.props.playlist.activationId ? ( this.hold(e)}>{t('Hold')} ) : null} {this.props.playlist.activationId && canClearQuickLoop ? 
( @@ -2252,7 +2252,8 @@ const RundownViewContent = translateWithTracker )} + + + {(value, handleUpdate) => } + + + + {(value, handleUpdate) => } + ) } From 826d602284eb6f311fcc290d92f656ffcff73768 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 20 Nov 2024 17:12:59 +0000 Subject: [PATCH 64/81] chore: remove unnecessary migration --- meteor/server/migration/X_X_X.ts | 37 -------------------------------- 1 file changed, 37 deletions(-) diff --git a/meteor/server/migration/X_X_X.ts b/meteor/server/migration/X_X_X.ts index 13451de2b97..62abd9a8f84 100644 --- a/meteor/server/migration/X_X_X.ts +++ b/meteor/server/migration/X_X_X.ts @@ -310,43 +310,6 @@ export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [ } }, }, - - { - id: `add studio settings allowHold & allowPieceDirectPlay`, - canBeRunAutomatically: true, - validate: async () => { - const studios = await Studios.findFetchAsync({ - $or: [ - { 'settings.allowHold': { $exists: false } }, - { 'settings.allowPieceDirectPlay': { $exists: false } }, - ], - }) - - if (studios.length > 0) { - return 'studios must have settings.allowHold and settings.allowPieceDirectPlay defined' - } - - return false - }, - migrate: async () => { - const studios = await Studios.findFetchAsync({ - $or: [ - { 'settings.allowHold': { $exists: false } }, - { 'settings.allowPieceDirectPlay': { $exists: false } }, - ], - }) - - for (const studio of studios) { - // Populate the settings to be backwards compatible - await Studios.updateAsync(studio._id, { - $set: { - 'settings.allowHold': true, - 'settings.allowPieceDirectPlay': true, - }, - }) - } - }, - }, ]) interface PartialOldICoreSystem { From 8dcf8c746fcc773ae5c17d13d7efc0500534034b Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 20 Nov 2024 17:37:02 +0000 Subject: [PATCH 65/81] chore: fix failing test --- meteor/server/__tests__/cronjobs.test.ts | 38 +++++++++++++----------- 1 file changed, 20 insertions(+), 18 deletions(-) diff --git 
a/meteor/server/__tests__/cronjobs.test.ts b/meteor/server/__tests__/cronjobs.test.ts index 65bd80d24cd..92a203f1b57 100644 --- a/meteor/server/__tests__/cronjobs.test.ts +++ b/meteor/server/__tests__/cronjobs.test.ts @@ -2,7 +2,7 @@ import '../../__mocks__/_extendJest' import { runAllTimers, waitUntil } from '../../__mocks__/helpers/jest' import { MeteorMock } from '../../__mocks__/meteor' import { logger } from '../logging' -import { getRandomId, getRandomString, protectString } from '../lib/tempLib' +import { getRandomId, getRandomString, literal, protectString } from '../lib/tempLib' import { SnapshotType } from '@sofie-automation/meteor-lib/dist/collections/Snapshots' import { IBlueprintPieceType, PieceLifespan, StatusCode, TSR } from '@sofie-automation/blueprints-integration' import { @@ -64,26 +64,36 @@ import { import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { Settings } from '../Settings' import { SofieIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' +import { ObjectOverrideSetOp } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' describe('cronjobs', () => { let env: DefaultEnvironment let rundownId: RundownId - beforeAll(async () => { - env = await setupDefaultStudioEnvironment() - - const o = await setupDefaultRundownPlaylist(env) - rundownId = o.rundownId - + async function setCasparCGCronEnabled(enabled: boolean) { await CoreSystem.updateAsync( {}, { - $set: { - 'cron.casparCGRestart.enabled': true, + // This is a little bit of a hack, as it will result in duplicate ops, but it's fine for unit tests + $push: { + 'settingsWithOverrides.overrides': literal({ + op: 'set', + path: 'cron.casparCGRestart.enabled', + value: enabled, + }), }, }, { multi: true } ) + } + + beforeAll(async () => { + env = await setupDefaultStudioEnvironment() + + const o = await setupDefaultRundownPlaylist(env) + rundownId = o.rundownId + + await setCasparCGCronEnabled(true) 
jest.useFakeTimers() // set time to 2020/07/19 00:00 Local Time @@ -591,15 +601,7 @@ describe('cronjobs', () => { }) test('Does not attempt to restart CasparCG when job is disabled', async () => { await createMockPlayoutGatewayAndDevices(Date.now()) // Some time after the threshold - await CoreSystem.updateAsync( - {}, - { - $set: { - 'cron.casparCGRestart.enabled': false, - }, - }, - { multi: true } - ) + await setCasparCGCronEnabled(false) ;(logger.info as jest.Mock).mockClear() // set time to 2020/07/{date} 04:05 Local Time, should be more than 24 hours after 2020/07/19 00:00 UTC mockCurrentTime = new Date(2020, 6, date++, 4, 5, 0).getTime() From 53672195a78c2d76eb8ce54791666c8259cc8bd2 Mon Sep 17 00:00:00 2001 From: Kasper Olsson Hans Date: Wed, 20 Nov 2024 18:38:44 +0100 Subject: [PATCH 66/81] feat: option to disable/enable buckets (#36) --- meteor/__mocks__/defaultCollectionObjects.ts | 1 + meteor/server/api/rest/v1/typeConversion.ts | 2 ++ meteor/server/api/studio/api.ts | 1 + meteor/server/lib/rest/v1/studios.ts | 1 + meteor/server/migration/0_1_0.ts | 1 + meteor/server/migration/__tests__/migrations.test.ts | 3 +++ .../__tests__/checkPieceContentStatus.test.ts | 1 + .../src/__mocks__/defaultCollectionObjects.ts | 1 + .../job-worker/src/blueprints/__tests__/config.test.ts | 2 ++ packages/job-worker/src/playout/upgrade.ts | 1 + packages/shared-lib/src/core/model/StudioSettings.ts | 5 +++++ .../webui/src/__mocks__/defaultCollectionObjects.ts | 1 + packages/webui/src/client/ui/RundownView.tsx | 3 ++- .../webui/src/client/ui/Settings/Studio/Generic.tsx | 10 ++++++++++ 14 files changed, 32 insertions(+), 1 deletion(-) diff --git a/meteor/__mocks__/defaultCollectionObjects.ts b/meteor/__mocks__/defaultCollectionObjects.ts index ae3cc5aa1f2..052ede4a903 100644 --- a/meteor/__mocks__/defaultCollectionObjects.ts +++ b/meteor/__mocks__/defaultCollectionObjects.ts @@ -112,6 +112,7 @@ export function defaultStudio(_id: StudioId): DBStudio { fallbackPartDuration: 
DEFAULT_FALLBACK_PART_DURATION, allowHold: false, allowPieceDirectPlay: false, + enableBuckets: false, }), _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), diff --git a/meteor/server/api/rest/v1/typeConversion.ts b/meteor/server/api/rest/v1/typeConversion.ts index afd3f7ccd3f..d6e95340039 100644 --- a/meteor/server/api/rest/v1/typeConversion.ts +++ b/meteor/server/api/rest/v1/typeConversion.ts @@ -328,6 +328,7 @@ export function studioSettingsFrom(apiStudioSettings: APIStudioSettings): Comple allowAdlibTestingSegment: apiStudioSettings.allowAdlibTestingSegment, allowHold: apiStudioSettings.allowHold ?? true, // Backwards compatible allowPieceDirectPlay: apiStudioSettings.allowPieceDirectPlay ?? true, // Backwards compatible + enableBuckets: apiStudioSettings.enableBuckets ?? true, // Backwards compatible } } @@ -350,6 +351,7 @@ export function APIStudioSettingsFrom(settings: IStudioSettings): Complete { minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, allowHold: true, allowPieceDirectPlay: true, + enableBuckets: true, }), mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), @@ -167,6 +168,7 @@ describe('Migrations', () => { minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, allowHold: true, allowPieceDirectPlay: true, + enableBuckets: true, }), mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), @@ -207,6 +209,7 @@ describe('Migrations', () => { minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, allowHold: true, allowPieceDirectPlay: true, + enableBuckets: true, }), mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), diff --git a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts index 5d7e3ab300c..c4196d32f22 100644 --- 
a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts +++ b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts @@ -251,6 +251,7 @@ describe('lib/mediaObjects', () => { minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, allowHold: false, allowPieceDirectPlay: false, + enableBuckets: false, } const mockDefaultStudio = defaultStudio(protectString('studio0')) diff --git a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts index 19633fe0173..ebd99423359 100644 --- a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts @@ -109,6 +109,7 @@ export function defaultStudio(_id: StudioId): DBStudio { allowAdlibTestingSegment: true, allowHold: true, allowPieceDirectPlay: true, + enableBuckets: true, }), routeSetsWithOverrides: wrapDefaultObject({}), routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), diff --git a/packages/job-worker/src/blueprints/__tests__/config.test.ts b/packages/job-worker/src/blueprints/__tests__/config.test.ts index 1794afc6f58..3e7e8bef503 100644 --- a/packages/job-worker/src/blueprints/__tests__/config.test.ts +++ b/packages/job-worker/src/blueprints/__tests__/config.test.ts @@ -17,6 +17,7 @@ describe('Test blueprint config', () => { minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, allowHold: true, allowPieceDirectPlay: true, + enableBuckets: true, }), blueprintConfigWithOverrides: wrapDefaultObject({ sdfsdf: 'one', another: 5 }), }) @@ -42,6 +43,7 @@ describe('Test blueprint config', () => { minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, allowHold: true, allowPieceDirectPlay: true, + enableBuckets: true, }), blueprintConfigWithOverrides: wrapDefaultObject({ sdfsdf: 'one', another: 5 }), }) diff --git a/packages/job-worker/src/playout/upgrade.ts b/packages/job-worker/src/playout/upgrade.ts index 54b1c123de0..fda503f079a 100644 --- 
a/packages/job-worker/src/playout/upgrade.ts +++ b/packages/job-worker/src/playout/upgrade.ts @@ -117,6 +117,7 @@ export async function handleBlueprintUpgradeForStudio(context: JobContext, _data minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, allowHold: true, allowPieceDirectPlay: true, + enableBuckets: true, } await context.directCollections.Studios.update(context.studioId, { diff --git a/packages/shared-lib/src/core/model/StudioSettings.ts b/packages/shared-lib/src/core/model/StudioSettings.ts index 08f38a597d0..f964362679e 100644 --- a/packages/shared-lib/src/core/model/StudioSettings.ts +++ b/packages/shared-lib/src/core/model/StudioSettings.ts @@ -76,4 +76,9 @@ export interface IStudioSettings { * This behaviour is usally triggered by double-clicking on a piece in the GUI */ allowPieceDirectPlay: boolean + + /** + * Enable buckets - the default behavior is to have buckets. + */ + enableBuckets: boolean } diff --git a/packages/webui/src/__mocks__/defaultCollectionObjects.ts b/packages/webui/src/__mocks__/defaultCollectionObjects.ts index bef95149b48..edf6ddf181a 100644 --- a/packages/webui/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/webui/src/__mocks__/defaultCollectionObjects.ts @@ -107,6 +107,7 @@ export function defaultStudio(_id: StudioId): DBStudio { minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, allowHold: true, allowPieceDirectPlay: true, + enableBuckets: true, }), _rundownVersionHash: '', routeSetsWithOverrides: wrapDefaultObject({}), diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index 21048d356b7..bb807da0f90 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -1475,7 +1475,8 @@ const RundownViewContent = translateWithTracker } + + {(value, handleUpdate) => } + + Date: Thu, 21 Nov 2024 09:11:45 +0000 Subject: [PATCH 67/81] chore: fix gateway docker image start commands --- packages/live-status-gateway/Dockerfile | 2 +- 
packages/live-status-gateway/Dockerfile.circle | 2 +- packages/mos-gateway/Dockerfile.circle | 2 +- packages/playout-gateway/Dockerfile.circle | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/live-status-gateway/Dockerfile b/packages/live-status-gateway/Dockerfile index d6a46474ce2..42e11dafa6c 100644 --- a/packages/live-status-gateway/Dockerfile +++ b/packages/live-status-gateway/Dockerfile @@ -27,4 +27,4 @@ COPY --from=0 /opt/shared-lib /opt/shared-lib COPY --from=0 /opt/corelib /opt/corelib WORKDIR /opt/live-status-gateway -CMD ["yarn", "start"] +CMD ["node", "dist/index.js"] diff --git a/packages/live-status-gateway/Dockerfile.circle b/packages/live-status-gateway/Dockerfile.circle index 9fbbf474e7d..637941030a0 100644 --- a/packages/live-status-gateway/Dockerfile.circle +++ b/packages/live-status-gateway/Dockerfile.circle @@ -10,4 +10,4 @@ COPY shared-lib /opt/shared-lib COPY corelib /opt/corelib WORKDIR /opt/live-status-gateway -CMD ["yarn", "start"] +CMD ["node", "dist/index.js"] diff --git a/packages/mos-gateway/Dockerfile.circle b/packages/mos-gateway/Dockerfile.circle index 0d89e15f17a..10373c3df9b 100644 --- a/packages/mos-gateway/Dockerfile.circle +++ b/packages/mos-gateway/Dockerfile.circle @@ -8,4 +8,4 @@ COPY server-core-integration /opt/server-core-integration COPY shared-lib /opt/shared-lib WORKDIR /opt/mos-gateway -CMD ["yarn", "start"] +CMD ["node", "dist/index.js"] diff --git a/packages/playout-gateway/Dockerfile.circle b/packages/playout-gateway/Dockerfile.circle index 8ceaeb4c3a7..1d2821e54d6 100644 --- a/packages/playout-gateway/Dockerfile.circle +++ b/packages/playout-gateway/Dockerfile.circle @@ -8,4 +8,4 @@ COPY server-core-integration /opt/server-core-integration COPY shared-lib /opt/shared-lib WORKDIR /opt/playout-gateway -CMD ["yarn", "start"] +CMD ["node", "dist/index.js"] From 20bec45dd21c8ae55359da94b9db05c920a8ae33 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Mon, 25 Nov 2024 12:18:21 +0000 
Subject: [PATCH 68/81] feat: update meteor to 3.1 and node to 22 --- .github/actions/setup-meteor/action.yaml | 2 +- .github/workflows/node.yaml | 16 ++++---- .github/workflows/prerelease-libs.yml | 2 +- .node-version | 2 +- meteor/.meteor/release | 2 +- meteor/.meteor/versions | 31 +++++++-------- meteor/Dockerfile | 8 ++-- meteor/Dockerfile.circle | 2 +- meteor/package.json | 4 +- meteor/yarn.lock | 39 +++++-------------- package.json | 2 +- packages/blueprints-integration/package.json | 2 +- packages/corelib/package.json | 2 +- packages/documentation/package.json | 2 +- packages/job-worker/package.json | 2 +- packages/live-status-gateway/Dockerfile | 4 +- .../live-status-gateway/Dockerfile.circle | 2 +- packages/live-status-gateway/package.json | 2 +- packages/meteor-lib/package.json | 2 +- packages/mos-gateway/Dockerfile | 4 +- packages/mos-gateway/Dockerfile.circle | 2 +- packages/mos-gateway/package.json | 2 +- packages/openapi/package.json | 2 +- packages/package.json | 2 +- packages/playout-gateway/Dockerfile | 4 +- packages/playout-gateway/Dockerfile.circle | 2 +- packages/playout-gateway/package.json | 2 +- packages/server-core-integration/package.json | 2 +- packages/shared-lib/package.json | 2 +- packages/webui/package.json | 2 +- packages/yarn.lock | 14 +++---- 31 files changed, 73 insertions(+), 95 deletions(-) diff --git a/.github/actions/setup-meteor/action.yaml b/.github/actions/setup-meteor/action.yaml index b96960585d7..68a7305e4c0 100644 --- a/.github/actions/setup-meteor/action.yaml +++ b/.github/actions/setup-meteor/action.yaml @@ -3,5 +3,5 @@ description: "Setup Meteor" runs: using: "composite" steps: - - run: curl "https://install.meteor.com/?release=3.0.4" | sh + - run: curl "https://install.meteor.com/?release=3.1" | sh shell: bash diff --git a/.github/workflows/node.yaml b/.github/workflows/node.yaml index 2573f57bc15..a8ad4c0f8fc 100644 --- a/.github/workflows/node.yaml +++ b/.github/workflows/node.yaml @@ -481,27 +481,27 @@ jobs: - 
server-core-integration - shared-lib - openapi - node-version: [20.x, 22.x] + node-version: [22.x] include: # include additional configs, to run certain packages only for a certain version of node - - node-version: 20.x + - node-version: 22.x package-name: corelib send-coverage: true - - node-version: 20.x + - node-version: 22.x package-name: job-worker send-coverage: true # No tests for the gateways yet - # - node-version: 20.x + # - node-version: 22.x # package-name: playout-gateway - # - node-version: 20.x + # - node-version: 22.x # package-name: mos-gateway - - node-version: 20.x + - node-version: 22.x package-name: live-status-gateway send-coverage: true - - node-version: 20.x + - node-version: 22.x package-name: webui # manual meteor-lib as it only needs a couple of versions - - node-version: 20.x + - node-version: 22.x package-name: meteor-lib send-coverage: true diff --git a/.github/workflows/prerelease-libs.yml b/.github/workflows/prerelease-libs.yml index 7ca1a31f2a0..cfbb129dda3 100644 --- a/.github/workflows/prerelease-libs.yml +++ b/.github/workflows/prerelease-libs.yml @@ -53,7 +53,7 @@ jobs: - blueprints-integration - server-core-integration - shared-lib - node-version: [20.x, 22.x] + node-version: [22.x] steps: - uses: actions/checkout@v4 diff --git a/.node-version b/.node-version index 10fef252a9f..8b84b727be4 100644 --- a/.node-version +++ b/.node-version @@ -1 +1 @@ -20.18 +22.11 diff --git a/meteor/.meteor/release b/meteor/.meteor/release index b1e86a359f7..8d20e1a2d3a 100644 --- a/meteor/.meteor/release +++ b/meteor/.meteor/release @@ -1 +1 @@ -METEOR@3.0.4 +METEOR@3.1 diff --git a/meteor/.meteor/versions b/meteor/.meteor/versions index 6048cd78971..b49eda45ce0 100644 --- a/meteor/.meteor/versions +++ b/meteor/.meteor/versions @@ -1,7 +1,7 @@ accounts-base@3.0.3 -accounts-password@3.0.2 +accounts-password@3.0.3 allow-deny@2.0.0 -babel-compiler@7.11.1 +babel-compiler@7.11.2 babel-runtime@1.5.2 base64@1.0.13 binary-heap@1.0.12 @@ -10,18 +10,18 @@ 
callback-hook@1.6.0 check@1.4.4 core-runtime@1.0.0 ddp@1.4.2 -ddp-client@3.0.2 +ddp-client@3.0.3 ddp-common@1.4.4 ddp-rate-limiter@1.2.2 -ddp-server@3.0.2 +ddp-server@3.0.3 diff-sequence@1.1.3 dynamic-import@0.7.4 -ecmascript@0.16.9 +ecmascript@0.16.10 ecmascript-runtime@0.8.3 ecmascript-runtime-client@0.12.2 ecmascript-runtime-server@0.11.1 ejson@1.1.4 -email@3.1.0 +email@3.1.1 facts-base@1.0.2 fetch@0.1.5 geojson-utils@1.0.12 @@ -29,16 +29,16 @@ id-map@1.2.0 inter-process-messaging@0.1.2 localstorage@1.2.1 logging@1.3.5 -meteor@2.0.1 -minimongo@2.0.1 +meteor@2.0.2 +minimongo@2.0.2 modern-browsers@0.1.11 -modules@0.20.2 +modules@0.20.3 modules-runtime@0.13.2 -mongo@2.0.2 -mongo-decimal@0.1.4 +mongo@2.0.3 +mongo-decimal@0.2.0 mongo-dev-server@1.1.1 mongo-id@1.0.9 -npm-mongo@4.17.4 +npm-mongo@6.10.0 ordered-dict@1.2.0 promise@1.0.0 random@1.2.2 @@ -51,9 +51,8 @@ routepolicy@1.1.2 sha@1.0.10 socket-stream-client@0.5.3 tracker@1.3.4 -typescript@5.4.3 -underscore@1.6.4 -url@1.3.4 -webapp@2.0.3 +typescript@5.6.3 +url@1.3.5 +webapp@2.0.4 webapp-hashing@1.1.2 zodern:types@1.0.13 diff --git a/meteor/Dockerfile b/meteor/Dockerfile index cee205aede8..13c52fa2951 100644 --- a/meteor/Dockerfile +++ b/meteor/Dockerfile @@ -1,7 +1,7 @@ # syntax=docker/dockerfile:experimental # BUILD WEBUI -FROM node:20 +FROM node:22 COPY packages /opt/core/packages WORKDIR /opt/core/packages @@ -14,8 +14,8 @@ RUN yarn install && yarn build # RUN yarn workspaces focus --production @sofie-automation/job-worker @sofie-automation/corelib # BUILD IMAGE -FROM node:20 -RUN curl "https://install.meteor.com/?release=3.0.4" | sh +FROM node:22 +RUN curl "https://install.meteor.com/?release=3.1" | sh # Temporary change the NODE_ENV env variable, so that all libraries are installed: ENV NODE_ENV_TMP $NODE_ENV @@ -50,7 +50,7 @@ RUN npm install RUN mv /opt/bundle/programs/web.browser/assets /opt/bundle/programs/web.browser/app/assets || true # DEPLOY IMAGE -FROM node:20-alpine +FROM node:22-alpine RUN apk add 
--no-cache tzdata diff --git a/meteor/Dockerfile.circle b/meteor/Dockerfile.circle index 1e39e80f817..8cb4d3971f8 100644 --- a/meteor/Dockerfile.circle +++ b/meteor/Dockerfile.circle @@ -1,5 +1,5 @@ # DEPLOY IMAGE -FROM node:20-alpine +FROM node:22-alpine RUN apk add --no-cache tzdata diff --git a/meteor/package.json b/meteor/package.json index c8ed1b51280..6b63793d080 100644 --- a/meteor/package.json +++ b/meteor/package.json @@ -3,7 +3,7 @@ "version": "1.52.0-in-development", "private": true, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "scripts": { "preinstall": "node -v", @@ -88,7 +88,7 @@ "@types/koa-static": "^4.0.4", "@types/koa__cors": "^5.0.0", "@types/koa__router": "^12.0.4", - "@types/node": "^20.17.6", + "@types/node": "^22.9.3", "@types/request": "^2.48.12", "@types/semver": "^7.5.6", "@types/underscore": "^1.11.15", diff --git a/meteor/yarn.lock b/meteor/yarn.lock index a01cc3dc27a..34729e9fbe4 100644 --- a/meteor/yarn.lock +++ b/meteor/yarn.lock @@ -1557,19 +1557,12 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:*, @types/node@npm:>=12.0.0": - version: 20.6.3 - resolution: "@types/node@npm:20.6.3" - checksum: 444a6f1f41cfa8d3e20ce0108e6e43960fb2ae0e481f233bb1c14d6252aa63a92e021de561cd317d9fdb411688f871065f40175a1f18763282dee2613a08f8a3 - languageName: node - linkType: hard - -"@types/node@npm:^20.17.6": - version: 20.17.6 - resolution: "@types/node@npm:20.17.6" +"@types/node@npm:*, @types/node@npm:>=12.0.0, @types/node@npm:^22.9.3": + version: 22.9.3 + resolution: "@types/node@npm:22.9.3" dependencies: - undici-types: ~6.19.2 - checksum: d51dbb9881c94d0310b32b5fd8013e3261595c61bc888fa27258469c93c3dc0b3c4d20a9f28f3f5f79562f6737e28e7f3dd04940dc8b4d966d34aaf318f7f69b + undici-types: ~6.19.8 + checksum: 274cced37a8a11cd89827c551de73980a174e00bef0768c10c1fb7d3887a26b4fade25f870e3fd870432b93546e092cdbe0979e65110c0839982dc2b5938aabc languageName: node linkType: hard @@ -2297,7 +2290,7 @@ __metadata: "@types/koa-static": ^4.0.4 
"@types/koa__cors": ^5.0.0 "@types/koa__router": ^12.0.4 - "@types/node": ^20.17.6 + "@types/node": ^22.9.3 "@types/request": ^2.48.12 "@types/semver": ^7.5.6 "@types/underscore": ^1.11.15 @@ -7065,7 +7058,7 @@ __metadata: languageName: node linkType: hard -"lru-cache@npm:10.2.0": +"lru-cache@npm:10.2.0, lru-cache@npm:^9.1.1 || ^10.0.0": version: 10.2.0 resolution: "lru-cache@npm:10.2.0" checksum: eee7ddda4a7475deac51ac81d7dd78709095c6fa46e8350dc2d22462559a1faa3b81ed931d5464b13d48cbd7e08b46100b6f768c76833912bc444b99c37e25db @@ -7097,13 +7090,6 @@ __metadata: languageName: node linkType: hard -"lru-cache@npm:^9.1.1 || ^10.0.0": - version: 10.0.1 - resolution: "lru-cache@npm:10.0.1" - checksum: 06f8d0e1ceabd76bb6f644a26dbb0b4c471b79c7b514c13c6856113879b3bf369eb7b497dad4ff2b7e2636db202412394865b33c332100876d838ad1372f0181 - languageName: node - linkType: hard - "make-dir@npm:^3.1.0": version: 3.1.0 resolution: "make-dir@npm:3.1.0" @@ -10348,14 +10334,7 @@ __metadata: languageName: node linkType: hard -"tslib@npm:^2.5.3, tslib@npm:^2.6.0, tslib@npm:^2.6.2": - version: 2.6.2 - resolution: "tslib@npm:2.6.2" - checksum: 329ea56123005922f39642318e3d1f0f8265d1e7fcb92c633e0809521da75eeaca28d2cf96d7248229deb40e5c19adf408259f4b9640afd20d13aecc1430f3ad - languageName: node - linkType: hard - -"tslib@npm:^2.6.3": +"tslib@npm:^2.5.3, tslib@npm:^2.6.0, tslib@npm:^2.6.2, tslib@npm:^2.6.3": version: 2.7.0 resolution: "tslib@npm:2.7.0" checksum: 1606d5c89f88d466889def78653f3aab0f88692e80bb2066d090ca6112ae250ec1cfa9dbfaab0d17b60da15a4186e8ec4d893801c67896b277c17374e36e1d28 @@ -10571,7 +10550,7 @@ __metadata: languageName: node linkType: hard -"undici-types@npm:~6.19.2": +"undici-types@npm:~6.19.8": version: 6.19.8 resolution: "undici-types@npm:6.19.8" checksum: de51f1b447d22571cf155dfe14ff6d12c5bdaec237c765085b439c38ca8518fc360e88c70f99469162bf2e14188a7b0bcb06e1ed2dc031042b984b0bb9544017 diff --git a/package.json b/package.json index 8a279813ee3..95a4ef24bc2 100644 --- a/package.json 
+++ b/package.json @@ -4,7 +4,7 @@ "license": "MIT", "private": true, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "scripts": { "postinstall": "run install:packages && run install:meteor", diff --git a/packages/blueprints-integration/package.json b/packages/blueprints-integration/package.json index d32f58ee438..39a9a5eafc2 100644 --- a/packages/blueprints-integration/package.json +++ b/packages/blueprints-integration/package.json @@ -29,7 +29,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "files": [ "/dist", diff --git a/packages/corelib/package.json b/packages/corelib/package.json index 52533e7332a..dd45f266e67 100644 --- a/packages/corelib/package.json +++ b/packages/corelib/package.json @@ -30,7 +30,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "files": [ "/dist", diff --git a/packages/documentation/package.json b/packages/documentation/package.json index 7fabd6dc65e..50a06e31440 100644 --- a/packages/documentation/package.json +++ b/packages/documentation/package.json @@ -15,7 +15,7 @@ "write-heading-ids": "docusaurus write-heading-ids" }, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "devDependencies": { "@docusaurus/core": "3.2.1", diff --git a/packages/job-worker/package.json b/packages/job-worker/package.json index 8a7e17f85b3..605e132301e 100644 --- a/packages/job-worker/package.json +++ b/packages/job-worker/package.json @@ -31,7 +31,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "files": [ "/dist", diff --git a/packages/live-status-gateway/Dockerfile b/packages/live-status-gateway/Dockerfile index 42e11dafa6c..00617c036ae 100644 --- a/packages/live-status-gateway/Dockerfile +++ b/packages/live-status-gateway/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:experimental # BUILD IMAGE -FROM node:20 +FROM node:22 WORKDIR /opt COPY 
package.json lerna.json yarn.lock tsconfig.json ./ @@ -15,7 +15,7 @@ RUN yarn build RUN yarn install --check-files --frozen-lockfile --production --force --ignore-scripts # purge dev-dependencies # DEPLOY IMAGE -FROM node:20-alpine +FROM node:22-alpine RUN apk add --no-cache tzdata COPY --from=0 /opt/package.json /opt/package.json diff --git a/packages/live-status-gateway/Dockerfile.circle b/packages/live-status-gateway/Dockerfile.circle index 637941030a0..cbbb3440499 100644 --- a/packages/live-status-gateway/Dockerfile.circle +++ b/packages/live-status-gateway/Dockerfile.circle @@ -1,4 +1,4 @@ -FROM node:20-alpine +FROM node:22-alpine RUN apk add --no-cache tzdata COPY package.json /opt/ diff --git a/packages/live-status-gateway/package.json b/packages/live-status-gateway/package.json index bf768b847ee..f3e8b76c791 100644 --- a/packages/live-status-gateway/package.json +++ b/packages/live-status-gateway/package.json @@ -37,7 +37,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "keywords": [ "broadcast", diff --git a/packages/meteor-lib/package.json b/packages/meteor-lib/package.json index 0abbc9d4799..f02c35e95a4 100644 --- a/packages/meteor-lib/package.json +++ b/packages/meteor-lib/package.json @@ -30,7 +30,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "files": [ "/dist", diff --git a/packages/mos-gateway/Dockerfile b/packages/mos-gateway/Dockerfile index fe28949d7a2..52e575565e9 100644 --- a/packages/mos-gateway/Dockerfile +++ b/packages/mos-gateway/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:experimental # BUILD IMAGE -FROM node:20 +FROM node:22 WORKDIR /opt COPY . . 
@@ -13,7 +13,7 @@ RUN yarn plugin import workspace-tools RUN yarn workspaces focus mos-gateway --production # purge dev-dependencies # DEPLOY IMAGE -FROM node:20-alpine +FROM node:22-alpine RUN apk add --no-cache tzdata COPY --from=0 /opt/package.json /opt/package.json diff --git a/packages/mos-gateway/Dockerfile.circle b/packages/mos-gateway/Dockerfile.circle index 10373c3df9b..a648ccbb25d 100644 --- a/packages/mos-gateway/Dockerfile.circle +++ b/packages/mos-gateway/Dockerfile.circle @@ -1,4 +1,4 @@ -FROM node:20-alpine +FROM node:22-alpine RUN apk add --no-cache tzdata COPY package.json /opt/ diff --git a/packages/mos-gateway/package.json b/packages/mos-gateway/package.json index 3b93188b065..084e6869eec 100644 --- a/packages/mos-gateway/package.json +++ b/packages/mos-gateway/package.json @@ -48,7 +48,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "keywords": [ "mos", diff --git a/packages/openapi/package.json b/packages/openapi/package.json index 78e975d6f48..06d6429b40f 100644 --- a/packages/openapi/package.json +++ b/packages/openapi/package.json @@ -28,7 +28,7 @@ }, "prettier": "@sofie-automation/code-standard-preset/.prettierrc.json", "engines": { - "node": ">=20.18" + "node": ">=18.18" }, "files": [ "/api", diff --git a/packages/package.json b/packages/package.json index fbe2eeb83be..9320553c244 100644 --- a/packages/package.json +++ b/packages/package.json @@ -45,7 +45,7 @@ "@types/ejson": "^2.2.2", "@types/got": "^9.6.12", "@types/jest": "^29.5.11", - "@types/node": "^20.17.6", + "@types/node": "^22.9.3", "@types/node-fetch": "^2.6.11", "@types/object-path": "^0.11.4", "@types/underscore": "^1.11.15", diff --git a/packages/playout-gateway/Dockerfile b/packages/playout-gateway/Dockerfile index 604a3587484..8b83b08e24b 100644 --- a/packages/playout-gateway/Dockerfile +++ b/packages/playout-gateway/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:experimental # BUILD IMAGE -FROM node:20 
+FROM node:22 WORKDIR /opt COPY . . @@ -13,7 +13,7 @@ RUN yarn plugin import workspace-tools RUN yarn workspaces focus playout-gateway --production # purge dev-dependencies # DEPLOY IMAGE -FROM node:20-alpine +FROM node:22-alpine RUN apk add --no-cache tzdata COPY --from=0 /opt/package.json /opt/package.json diff --git a/packages/playout-gateway/Dockerfile.circle b/packages/playout-gateway/Dockerfile.circle index 1d2821e54d6..f9007eab3d1 100644 --- a/packages/playout-gateway/Dockerfile.circle +++ b/packages/playout-gateway/Dockerfile.circle @@ -1,4 +1,4 @@ -FROM node:20-alpine +FROM node:22-alpine RUN apk add --no-cache tzdata COPY package.json /opt/ diff --git a/packages/playout-gateway/package.json b/packages/playout-gateway/package.json index a4864865f42..591e2d1d34c 100644 --- a/packages/playout-gateway/package.json +++ b/packages/playout-gateway/package.json @@ -40,7 +40,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "keywords": [ "broadcast", diff --git a/packages/server-core-integration/package.json b/packages/server-core-integration/package.json index 9b2a3cc289f..48e8f816600 100644 --- a/packages/server-core-integration/package.json +++ b/packages/server-core-integration/package.json @@ -48,7 +48,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "files": [ "/dist", diff --git a/packages/shared-lib/package.json b/packages/shared-lib/package.json index ca62e23353c..8b28c643c33 100644 --- a/packages/shared-lib/package.json +++ b/packages/shared-lib/package.json @@ -29,7 +29,7 @@ "license-validate": "run -T sofie-licensecheck" }, "engines": { - "node": ">=20.18" + "node": ">=22.11" }, "files": [ "/dist", diff --git a/packages/webui/package.json b/packages/webui/package.json index 90458dd48e9..7bb1ce4a499 100644 --- a/packages/webui/package.json +++ b/packages/webui/package.json @@ -108,6 +108,6 @@ "xml2js": "^0.6.2" }, "engines": { - 
"node": ">=20.18" + "node": ">=22.11" } } diff --git a/packages/yarn.lock b/packages/yarn.lock index b831a95ad1b..07cf34d6481 100644 --- a/packages/yarn.lock +++ b/packages/yarn.lock @@ -6465,12 +6465,12 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:*, @types/node@npm:>=12.0.0, @types/node@npm:>=13.7.0, @types/node@npm:^20.17.6": - version: 20.17.6 - resolution: "@types/node@npm:20.17.6" +"@types/node@npm:*, @types/node@npm:>=12.0.0, @types/node@npm:>=13.7.0, @types/node@npm:^22.9.3": + version: 22.9.3 + resolution: "@types/node@npm:22.9.3" dependencies: - undici-types: ~6.19.2 - checksum: d51dbb9881c94d0310b32b5fd8013e3261595c61bc888fa27258469c93c3dc0b3c4d20a9f28f3f5f79562f6737e28e7f3dd04940dc8b4d966d34aaf318f7f69b + undici-types: ~6.19.8 + checksum: 274cced37a8a11cd89827c551de73980a174e00bef0768c10c1fb7d3887a26b4fade25f870e3fd870432b93546e092cdbe0979e65110c0839982dc2b5938aabc languageName: node linkType: hard @@ -21026,7 +21026,7 @@ asn1@evs-broadcast/node-asn1: "@types/ejson": ^2.2.2 "@types/got": ^9.6.12 "@types/jest": ^29.5.11 - "@types/node": ^20.17.6 + "@types/node": ^22.9.3 "@types/node-fetch": ^2.6.11 "@types/object-path": ^0.11.4 "@types/underscore": ^1.11.15 @@ -26834,7 +26834,7 @@ asn1@evs-broadcast/node-asn1: languageName: node linkType: hard -"undici-types@npm:~6.19.2": +"undici-types@npm:~6.19.8": version: 6.19.8 resolution: "undici-types@npm:6.19.8" checksum: de51f1b447d22571cf155dfe14ff6d12c5bdaec237c765085b439c38ca8518fc360e88c70f99469162bf2e14188a7b0bcb06e1ed2dc031042b984b0bb9544017 From 8943fa01b5a1f3141cd1bc30208624999cb0320c Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 26 Nov 2024 10:14:24 +0000 Subject: [PATCH 69/81] fix: `queuedSegmentId` ignored when taking last part of the rundown --- .../src/playout/__tests__/selectNextPart.test.ts | 7 +++++++ packages/job-worker/src/playout/selectNextPart.ts | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git 
a/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts b/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts index 387f21b6e8e..f4efb9d410b 100644 --- a/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts +++ b/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts @@ -418,4 +418,11 @@ describe('selectNextPart', () => { expect(nextPart).toEqual({ index: 6, part: defaultParts[6], consumesQueuedSegmentId: false }) } }) + + test('on last part, with queued segment', () => { + // On the last part in the rundown, with a queuedSegment id set to earlier + defaultPlaylist.queuedSegmentId = segment2 + const nextPart = selectNextPart2(defaultParts[8].toPartInstance(), defaultParts[8].toPartInstance()) + expect(nextPart).toEqual({ index: 4, part: defaultParts[4], consumesQueuedSegmentId: true }) + }) }) diff --git a/packages/job-worker/src/playout/selectNextPart.ts b/packages/job-worker/src/playout/selectNextPart.ts index 48d495d85c7..892c88aecf2 100644 --- a/packages/job-worker/src/playout/selectNextPart.ts +++ b/packages/job-worker/src/playout/selectNextPart.ts @@ -173,7 +173,7 @@ export function selectNextPart( if (rundownPlaylist.queuedSegmentId) { // No previous part, or segment has changed - if (!previousPartInstance || (nextPart && previousPartInstance.segmentId !== nextPart.part.segmentId)) { + if (!previousPartInstance || !nextPart || previousPartInstance.segmentId !== nextPart.part.segmentId) { // Find first in segment const newSegmentPart = findFirstPlayablePart( 0, From 4bd6d3b0903575c35d11795436bcad7fbf9da6f4 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 19 Nov 2024 12:46:47 +0000 Subject: [PATCH 70/81] chore: improve when webui is built --- packages/webui/package.json | 1 + scripts/install-and-build.mjs | 11 ++++++----- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/webui/package.json b/packages/webui/package.json index 7bb1ce4a499..0a1ee41445b 100644 --- 
a/packages/webui/package.json +++ b/packages/webui/package.json @@ -16,6 +16,7 @@ "scripts": { "dev": "vite --port=3005", "build": "tsc -b && vite build", + "build:main": "tsc -p tsconfig.app.json --noEmit", "check-types": "tsc -p tsconfig.app.json --noEmit", "watch-types": "run check-types --watch", "preview": "vite preview", diff --git a/scripts/install-and-build.mjs b/scripts/install-and-build.mjs index 72d1e5993b4..5b1fa8124f7 100644 --- a/scripts/install-and-build.mjs +++ b/scripts/install-and-build.mjs @@ -41,14 +41,15 @@ try { console.log(" 🪛 Build packages..."); console.log(hr()); + const buildArgs = ['--ignore @sofie-automation/webui'] + if (config.uiOnly) { + buildArgs.push(...EXTRA_PACKAGES.map((pkg) => `--ignore ${pkg}`)) + } + await concurrently( [ { - command: config.uiOnly - ? `yarn build:try ${EXTRA_PACKAGES.map( - (pkg) => `--ignore ${pkg}` - ).join(" ")}` - : "yarn build:try", + command: `yarn build:try ${buildArgs.join(" ")}`, cwd: "packages", name: "PACKAGES-BUILD", prefixColor: "yellow", From 761ff589c702558c747a27e1d8fd8f04c61e83fa Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 26 Nov 2024 13:10:22 +0000 Subject: [PATCH 71/81] feat: Sofie Core Groups with Trusted header SOFIE-95 --- .gitignore | 1 + meteor/.meteor/packages | 2 - meteor/.meteor/versions | 9 - meteor/__mocks__/_setupMocks.ts | 1 - meteor/__mocks__/accounts-base.ts | 81 ---- meteor/__mocks__/meteor.ts | 36 +- meteor/__mocks__/mongo.ts | 2 - meteor/server/Connections.ts | 20 + meteor/server/__tests__/cronjobs.test.ts | 13 +- meteor/server/api/ExternalMessageQueue.ts | 31 +- .../api/__tests__/peripheralDevice.test.ts | 2 +- .../userActions/mediaManager.test.ts | 28 +- .../api/blueprints/__tests__/api.test.ts | 54 +-- .../api/blueprints/__tests__/http.test.ts | 2 +- meteor/server/api/blueprints/api.ts | 46 +-- meteor/server/api/blueprints/http.ts | 27 +- meteor/server/api/buckets.ts | 146 +++---- meteor/server/api/cleanup.ts | 4 - meteor/server/api/client.ts | 74 ++-- 
meteor/server/api/deviceTriggers/observer.ts | 2 +- meteor/server/api/evaluations.ts | 11 +- meteor/server/api/heapSnapshot.ts | 25 +- meteor/server/api/ingest/actions.ts | 6 +- meteor/server/api/ingest/lib.ts | 23 -- .../api/ingest/mosDevice/mosIntegration.ts | 8 +- meteor/server/api/ingest/rundownInput.ts | 8 +- .../api/integration/expectedPackages.ts | 2 +- .../server/api/integration/media-scanner.ts | 2 +- .../server/api/integration/mediaWorkFlows.ts | 2 +- meteor/server/api/lib.ts | 67 ---- meteor/server/api/mediaManager.ts | 125 +++--- meteor/server/api/methodContext.ts | 13 +- meteor/server/api/organizations.ts | 22 +- meteor/server/api/packageManager.ts | 56 ++- meteor/server/api/peripheralDevice.ts | 28 +- meteor/server/api/playout/api.ts | 17 +- meteor/server/api/playout/playout.ts | 11 +- .../server/api/rest/v0/__tests__/rest.test.ts | 17 - meteor/server/api/rest/v1/buckets.ts | 43 +-- meteor/server/api/rest/v1/index.ts | 12 +- meteor/server/api/rest/v1/playlists.ts | 2 +- meteor/server/api/rest/v1/studios.ts | 10 +- meteor/server/api/rest/v1/types.ts | 2 - meteor/server/api/rundown.ts | 33 +- meteor/server/api/rundownLayouts.ts | 16 +- meteor/server/api/showStyles.ts | 52 +-- meteor/server/api/singleUseTokens.ts | 2 +- meteor/server/api/snapshot.ts | 158 +++----- meteor/server/api/studio/api.ts | 18 +- meteor/server/api/system.ts | 22 +- meteor/server/api/triggeredActions.ts | 20 +- meteor/server/api/user.ts | 132 +------ meteor/server/api/userActions.ts | 193 ++++++---- meteor/server/collections/collection.ts | 37 +- meteor/server/collections/index.ts | 87 ++--- meteor/server/email.ts | 12 - .../__tests__/optimizedObserver.test.ts | 4 - .../server/lib/customPublication/publish.ts | 5 - meteor/server/main.ts | 3 +- meteor/server/methods.ts | 21 +- meteor/server/migration/api.ts | 35 +- .../blueprintUpgradeStatus/publication.ts | 12 +- meteor/server/publications/buckets.ts | 97 +++-- .../publications/deviceTriggersPreview.ts | 10 +- 
meteor/server/publications/lib/lib.ts | 105 +---- meteor/server/publications/mountedTriggers.ts | 61 ++- meteor/server/publications/organization.ts | 87 ++--- .../expectedPackages/publication.ts | 53 ++- .../packageManager/packageContainers.ts | 49 +-- .../packageManager/playoutContext.ts | 49 +-- .../partInstancesUI/publication.ts | 38 +- .../publications/partsUI/publication.ts | 39 +- .../server/publications/peripheralDevice.ts | 121 +++--- .../publications/peripheralDeviceForDevice.ts | 41 +- .../bucket/publication.ts | 44 +-- .../rundown/publication.ts | 46 +-- meteor/server/publications/rundown.ts | 349 +++++++---------- meteor/server/publications/rundownPlaylist.ts | 35 +- .../segmentPartNotesUI/publication.ts | 46 +-- meteor/server/publications/showStyle.ts | 73 ++-- meteor/server/publications/showStyleUI.ts | 46 +-- meteor/server/publications/studio.ts | 106 +++--- meteor/server/publications/studioUI.ts | 25 +- meteor/server/publications/system.ts | 90 +---- meteor/server/publications/timeline.ts | 54 ++- .../publications/translationsBundles.ts | 18 +- .../server/publications/triggeredActionsUI.ts | 41 +- meteor/server/security/README.md | 53 --- .../security/__tests__/security.test.ts | 358 ------------------ meteor/server/security/_security.ts | 11 - .../security/{lib/lib.ts => allowDeny.ts} | 7 +- meteor/server/security/auth.ts | 87 +++++ meteor/server/security/buckets.ts | 80 ---- meteor/server/security/check.ts | 104 +++++ meteor/server/security/lib/access.ts | 64 ---- meteor/server/security/lib/credentials.ts | 171 --------- meteor/server/security/lib/security.ts | 349 ----------------- meteor/server/security/noSecurity.ts | 12 - meteor/server/security/organization.ts | 165 -------- meteor/server/security/peripheralDevice.ts | 180 --------- meteor/server/security/rundown.ts | 126 ------ meteor/server/security/rundownPlaylist.ts | 126 ------ .../security/{lib => }/securityVerify.ts | 4 +- meteor/server/security/showStyle.ts | 154 -------- 
meteor/server/security/studio.ts | 155 -------- meteor/server/security/system.ts | 65 ---- meteor/server/security/translationsBundles.ts | 8 - meteor/server/systemStatus/api.ts | 60 ++- meteor/server/systemStatus/systemStatus.ts | 40 +- meteor/server/worker/worker.ts | 2 + packages/corelib/src/dataModel/Collections.ts | 1 - packages/meteor-lib/src/Settings.ts | 6 +- packages/meteor-lib/src/api/pubsub.ts | 4 - packages/meteor-lib/src/api/user.ts | 31 +- packages/meteor-lib/src/api/userActions.ts | 3 + packages/meteor-lib/src/collections/Users.ts | 29 -- packages/meteor-lib/src/userPermissions.ts | 58 +++ .../src/peripheralDevice/methodsAPI.ts | 2 +- packages/webui/src/__mocks__/meteor.ts | 31 +- packages/webui/src/__mocks__/mongo.ts | 2 - packages/webui/src/client/ui/App.tsx | 2 +- .../src/client/ui/Status/MediaManager.tsx | 6 +- .../ui/Status/SystemStatus/SystemStatus.tsx | 19 +- .../webui/src/client/ui/UserPermissions.tsx | 79 +++- scripts/run.mjs | 31 +- 125 files changed, 1779 insertions(+), 4584 deletions(-) delete mode 100644 meteor/__mocks__/accounts-base.ts delete mode 100644 meteor/server/api/lib.ts delete mode 100644 meteor/server/email.ts delete mode 100644 meteor/server/security/README.md delete mode 100644 meteor/server/security/__tests__/security.test.ts delete mode 100644 meteor/server/security/_security.ts rename meteor/server/security/{lib/lib.ts => allowDeny.ts} (84%) create mode 100644 meteor/server/security/auth.ts delete mode 100644 meteor/server/security/buckets.ts create mode 100644 meteor/server/security/check.ts delete mode 100644 meteor/server/security/lib/access.ts delete mode 100644 meteor/server/security/lib/credentials.ts delete mode 100644 meteor/server/security/lib/security.ts delete mode 100644 meteor/server/security/noSecurity.ts delete mode 100644 meteor/server/security/organization.ts delete mode 100644 meteor/server/security/peripheralDevice.ts delete mode 100644 meteor/server/security/rundown.ts delete mode 100644 
meteor/server/security/rundownPlaylist.ts rename meteor/server/security/{lib => }/securityVerify.ts (99%) delete mode 100644 meteor/server/security/showStyle.ts delete mode 100644 meteor/server/security/studio.ts delete mode 100644 meteor/server/security/system.ts delete mode 100644 meteor/server/security/translationsBundles.ts delete mode 100644 packages/meteor-lib/src/collections/Users.ts create mode 100644 packages/meteor-lib/src/userPermissions.ts diff --git a/.gitignore b/.gitignore index 5892e69d3ec..6d86bbd0707 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ meteor/.coverage/ node_modules **/yarn-error.log scratch/ +meteor-settings.json # Exclude JetBrains IDE specific files .idea diff --git a/meteor/.meteor/packages b/meteor/.meteor/packages index 8d1724b1db7..34ab0cf5f53 100644 --- a/meteor/.meteor/packages +++ b/meteor/.meteor/packages @@ -19,6 +19,4 @@ typescript@5.4.3 # Enable TypeScript syntax in .ts and .tsx modules tracker@1.3.4 # Meteor's client-side reactive programming library -accounts-password@3.0.2 - zodern:types diff --git a/meteor/.meteor/versions b/meteor/.meteor/versions index b49eda45ce0..93c057c752e 100644 --- a/meteor/.meteor/versions +++ b/meteor/.meteor/versions @@ -1,5 +1,3 @@ -accounts-base@3.0.3 -accounts-password@3.0.3 allow-deny@2.0.0 babel-compiler@7.11.2 babel-runtime@1.5.2 @@ -12,7 +10,6 @@ core-runtime@1.0.0 ddp@1.4.2 ddp-client@3.0.3 ddp-common@1.4.4 -ddp-rate-limiter@1.2.2 ddp-server@3.0.3 diff-sequence@1.1.3 dynamic-import@0.7.4 @@ -21,13 +18,11 @@ ecmascript-runtime@0.8.3 ecmascript-runtime-client@0.12.2 ecmascript-runtime-server@0.11.1 ejson@1.1.4 -email@3.1.1 facts-base@1.0.2 fetch@0.1.5 geojson-utils@1.0.12 id-map@1.2.0 inter-process-messaging@0.1.2 -localstorage@1.2.1 logging@1.3.5 meteor@2.0.2 minimongo@2.0.2 @@ -42,17 +37,13 @@ npm-mongo@6.10.0 ordered-dict@1.2.0 promise@1.0.0 random@1.2.2 -rate-limit@1.1.2 react-fast-refresh@0.2.9 -reactive-var@1.0.13 reload@1.3.2 retry@1.1.1 routepolicy@1.1.2 -sha@1.0.10 
socket-stream-client@0.5.3 tracker@1.3.4 typescript@5.6.3 -url@1.3.5 webapp@2.0.4 webapp-hashing@1.1.2 zodern:types@1.0.13 diff --git a/meteor/__mocks__/_setupMocks.ts b/meteor/__mocks__/_setupMocks.ts index b4508a82bb1..b9e7936792a 100644 --- a/meteor/__mocks__/_setupMocks.ts +++ b/meteor/__mocks__/_setupMocks.ts @@ -14,7 +14,6 @@ jest.mock('meteor/meteor', (...args) => require('./meteor').setup(args), { virtu jest.mock('meteor/random', (...args) => require('./random').setup(args), { virtual: true }) jest.mock('meteor/check', (...args) => require('./check').setup(args), { virtual: true }) jest.mock('meteor/tracker', (...args) => require('./tracker').setup(args), { virtual: true }) -jest.mock('meteor/accounts-base', (...args) => require('./accounts-base').setup(args), { virtual: true }) jest.mock('meteor/ejson', (...args) => require('./ejson').setup(args), { virtual: true }) jest.mock('meteor/mdg:validated-method', (...args) => require('./validated-method').setup(args), { virtual: true }) diff --git a/meteor/__mocks__/accounts-base.ts b/meteor/__mocks__/accounts-base.ts deleted file mode 100644 index 468f6e8d78d..00000000000 --- a/meteor/__mocks__/accounts-base.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { RandomMock } from './random' -import { MeteorMock } from './meteor' -import { Accounts } from 'meteor/accounts-base' - -export class AccountsBaseMock { - static mockUsers: any = {} - - // From https://docs.meteor.com/api/passwords.html - - static createUser( - options: Parameters[0], - cb: (err: any | undefined, result?: any) => void - ): void { - const user = { - _id: RandomMock.id(), - ...options, - } - AccountsBaseMock.mockUsers[user._id] = user - MeteorMock.setTimeout(() => { - cb(undefined, user._id) - }, 1) - throw new Error('Mocked function not implemented') - } - static setUsername(userId: string, newUsername: string): void { - AccountsBaseMock.mockUsers[userId].username = newUsername - throw new Error('Mocked function not implemented') - } - static 
removeEmail(_userId: string, _email: string): void { - throw new Error('Mocked function not implemented') - } - static verifyEmail(_token: string, _cb: (err: any | undefined, result?: any) => void): void { - throw new Error('Mocked function not implemented') - } - static findUserByUsername(_username: string): void { - throw new Error('Mocked function not implemented') - } - static findUserByEmail(_email: string): void { - throw new Error('Mocked function not implemented') - } - static changePassword( - _oldPassword: string, - _newPassword: string, - _cb: (err: any | undefined, result?: any) => void - ): void { - throw new Error('Mocked function not implemented') - } - static forgotPassword( - _options: { email?: string | undefined }, - _cb: (err: any | undefined, result?: any) => void - ): void { - throw new Error('Mocked function not implemented') - } - static resetPassword( - _token: string, - _newPassword: string, - _cb: (err: any | undefined, result?: any) => void - ): void { - throw new Error('Mocked function not implemented') - } - static setPassword(_userId: string, _newPassword: string, _options?: { logout?: Object | undefined }): void { - throw new Error('Mocked function not implemented') - } - static sendResetPasswordEmail(_userId: string, _email: string): void { - throw new Error('Mocked function not implemented') - } - static sendEnrollmentEmail(_userId: string, _email: string): void { - throw new Error('Mocked function not implemented') - } - static sendVerificationEmail(_userId: string, _email: string): void { - throw new Error('Mocked function not implemented') - } - static onResetPasswordLink?: () => void - static onEnrollmentLink?: () => void - static onEmailVerificationLink?: () => void - static emailTemplates?: () => void -} -export function setup(): any { - return { - Accounts: AccountsBaseMock, - } -} diff --git a/meteor/__mocks__/meteor.ts b/meteor/__mocks__/meteor.ts index a63b6181f43..593eab34c42 100644 --- a/meteor/__mocks__/meteor.ts +++ 
b/meteor/__mocks__/meteor.ts @@ -1,4 +1,4 @@ -import { MongoMock } from './mongo' +import { USER_PERMISSIONS_HEADER } from '@sofie-automation/meteor-lib/dist/userPermissions' let controllableDefer = false @@ -9,7 +9,7 @@ export function useNextTickDefer(): void { controllableDefer = false } -namespace Meteor { +export namespace Meteor { export interface Settings { public: { [id: string]: any @@ -17,19 +17,6 @@ namespace Meteor { [id: string]: any } - export interface UserEmail { - address: string - verified: boolean - } - export interface User { - _id?: string - username?: string - emails?: UserEmail[] - createdAt?: number - profile?: any - services?: any - } - export interface ErrorStatic { new (error: string | number, reason?: string, details?: string): Error } @@ -103,22 +90,18 @@ export namespace MeteorMock { export const settings: any = {} export const mockMethods: { [name: string]: Function } = {} - export let mockUser: Meteor.User | undefined = undefined export const mockStartupFunctions: Function[] = [] export const absolutePath = process.cwd() - export function user(): Meteor.User | undefined { - return mockUser - } - export function userId(): string | undefined { - return mockUser ? mockUser._id : undefined - } function getMethodContext() { return { - userId: mockUser ? mockUser._id : undefined, connection: { clientAddress: '1.1.1.1', + httpHeaders: { + // Default to full permissions for tests + [USER_PERMISSIONS_HEADER]: 'admin', + }, }, unblock: () => { // noop @@ -253,7 +236,6 @@ export namespace MeteorMock { return fcn(...args) } } - export let users: MongoMock.Collection | undefined = undefined // -- Mock functions: -------------------------- /** @@ -266,12 +248,6 @@ export namespace MeteorMock { await waitTimeNoFakeTimers(10) // So that any observers or defers has had time to run. 
} - export function mockLoginUser(newUser: Meteor.User): void { - mockUser = newUser - } - export function mockSetUsersCollection(usersCollection: MongoMock.Collection): void { - users = usersCollection - } export function mockSetClientEnvironment(): void { mockIsClient = true } diff --git a/meteor/__mocks__/mongo.ts b/meteor/__mocks__/mongo.ts index d39e071ef09..fdd2074222a 100644 --- a/meteor/__mocks__/mongo.ts +++ b/meteor/__mocks__/mongo.ts @@ -453,5 +453,3 @@ export function setup(): any { Mongo: MongoMock, } } - -MeteorMock.mockSetUsersCollection(new MongoMock.Collection('Meteor.users')) diff --git a/meteor/server/Connections.ts b/meteor/server/Connections.ts index d97d44d5fa8..e0d094199d0 100644 --- a/meteor/server/Connections.ts +++ b/meteor/server/Connections.ts @@ -4,6 +4,8 @@ import { logger } from './logging' import { sendTrace } from './api/integration/influx' import { PeripheralDevices } from './collections' import { MetricsGauge } from '@sofie-automation/corelib/dist/prometheus' +import { parseUserPermissions, USER_PERMISSIONS_HEADER } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { Settings } from './Settings' const connections = new Set() const connectionsGauge = new MetricsGauge({ @@ -14,6 +16,24 @@ const connectionsGauge = new MetricsGauge({ Meteor.onConnection((conn: Meteor.Connection) => { // This is called whenever a new ddp-connection is opened (ie a web-client or a peripheral-device) + if (Settings.enableHeaderAuth) { + const userLevel = parseUserPermissions(conn.httpHeaders[USER_PERMISSIONS_HEADER]) + + // HACK: force the userId of the connection before it can be used. + // This ensures we know the permissions of the connection before it can try to do anything + // This could probably be safely done inside a meteor method, as we only need it when directly modifying a collection in the client, + // but that will cause all the publications to restart when changing the userId. 
+ const connSession = (Meteor as any).server.sessions.get(conn.id) + if (!connSession) { + logger.error(`Failed to find session for ddp connection! "${conn.id}"`) + // Close the connection, it won't be secure + conn.close() + return + } else { + connSession.userId = JSON.stringify(userLevel) + } + } + const connectionId: string = conn.id // var clientAddress = conn.clientAddress; // ip-adress diff --git a/meteor/server/__tests__/cronjobs.test.ts b/meteor/server/__tests__/cronjobs.test.ts index 92a203f1b57..3d6cc33c433 100644 --- a/meteor/server/__tests__/cronjobs.test.ts +++ b/meteor/server/__tests__/cronjobs.test.ts @@ -476,7 +476,8 @@ describe('cronjobs', () => { expect(await Snapshots.findOneAsync(snapshot1)).toBeUndefined() }) async function insertPlayoutDevice( - props: Pick + props: Pick & + Partial> ): Promise { const deviceId = protectString(getRandomString()) await PeripheralDevices.insertAsync({ @@ -505,29 +506,35 @@ describe('cronjobs', () => { } async function createMockPlayoutGatewayAndDevices(lastSeen: number): Promise<{ + deviceToken: string mockPlayoutGw: PeripheralDeviceId mockCasparCg: PeripheralDeviceId mockAtem: PeripheralDeviceId }> { + const deviceToken = 'token1' const mockPlayoutGw = await insertPlayoutDevice({ deviceName: 'Playout Gateway', lastSeen: lastSeen, subType: PERIPHERAL_SUBTYPE_PROCESS, + token: deviceToken, }) const mockCasparCg = await insertPlayoutDevice({ deviceName: 'CasparCG', lastSeen: lastSeen, subType: TSR.DeviceType.CASPARCG, parentDeviceId: mockPlayoutGw, + token: deviceToken, }) const mockAtem = await insertPlayoutDevice({ deviceName: 'ATEM', lastSeen: lastSeen, subType: TSR.DeviceType.ATEM, parentDeviceId: mockPlayoutGw, + token: deviceToken, }) return { + deviceToken, mockPlayoutGw, mockCasparCg, mockAtem, @@ -535,7 +542,7 @@ describe('cronjobs', () => { } test('Attempts to restart CasparCG when job is enabled', async () => { - const { mockCasparCg } = await createMockPlayoutGatewayAndDevices(Date.now()) // Some time 
after the threshold + const { mockCasparCg, deviceToken } = await createMockPlayoutGatewayAndDevices(Date.now()) // Some time after the threshold ;(logger.info as jest.Mock).mockClear() // set time to 2020/07/{date} 04:05 Local Time, should be more than 24 hours after 2020/07/19 00:00 UTC @@ -558,7 +565,7 @@ describe('cronjobs', () => { Meteor.callAsync( 'peripheralDevice.functionReply', cmd.deviceId, // deviceId - '', // deviceToken + deviceToken, // deviceToken cmd._id, // commandId null, // err null // result diff --git a/meteor/server/api/ExternalMessageQueue.ts b/meteor/server/api/ExternalMessageQueue.ts index 5d90abb7e3a..0a5fdf7414c 100644 --- a/meteor/server/api/ExternalMessageQueue.ts +++ b/meteor/server/api/ExternalMessageQueue.ts @@ -9,11 +9,14 @@ import { } from '@sofie-automation/meteor-lib/dist/api/ExternalMessageQueue' import { StatusObject, setSystemStatus } from '../systemStatus/systemStatus' import { MethodContextAPI, MethodContext } from './methodContext' -import { StudioContentWriteAccess } from '../security/studio' import { ExternalMessageQueueObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ExternalMessageQueue } from '../collections' import { ExternalMessageQueueObj } from '@sofie-automation/corelib/dist/dataModel/ExternalMessageQueue' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { assertConnectionHasOneOfPermissions } from '../security/auth' + +const USER_PERMISSIONS_FOR_EXTERNAL_MESSAGES: Array = ['configure', 'studio', 'service'] let updateExternalMessageQueueStatusTimeout = 0 function updateExternalMessageQueueStatus(): void { @@ -69,28 +72,33 @@ Meteor.startup(async () => { async function removeExternalMessage(context: MethodContext, messageId: ExternalMessageQueueObjId): Promise { check(messageId, String) - await StudioContentWriteAccess.externalMessage(context, messageId) + + 
assertConnectionHasOneOfPermissions(context.connection, ...USER_PERMISSIONS_FOR_EXTERNAL_MESSAGES) // TODO - is this safe? what if it is in the middle of execution? await ExternalMessageQueue.removeAsync(messageId) } async function toggleHold(context: MethodContext, messageId: ExternalMessageQueueObjId): Promise { check(messageId, String) - const access = await StudioContentWriteAccess.externalMessage(context, messageId) - const m = access.message - if (!m) throw new Meteor.Error(404, `ExternalMessage "${messageId}" not found!`) + + assertConnectionHasOneOfPermissions(context.connection, ...USER_PERMISSIONS_FOR_EXTERNAL_MESSAGES) + + const existingMessage = await ExternalMessageQueue.findOneAsync(messageId) + if (!existingMessage) throw new Meteor.Error(404, `ExternalMessage "${messageId}" not found!`) await ExternalMessageQueue.updateAsync(messageId, { $set: { - hold: !m.hold, + hold: !existingMessage.hold, }, }) } async function retry(context: MethodContext, messageId: ExternalMessageQueueObjId): Promise { check(messageId, String) - const access = await StudioContentWriteAccess.externalMessage(context, messageId) - const m = access.message - if (!m) throw new Meteor.Error(404, `ExternalMessage "${messageId}" not found!`) + + assertConnectionHasOneOfPermissions(context.connection, ...USER_PERMISSIONS_FOR_EXTERNAL_MESSAGES) + + const existingMessage = await ExternalMessageQueue.findOneAsync(messageId) + if (!existingMessage) throw new Meteor.Error(404, `ExternalMessage "${messageId}" not found!`) const tryGap = getCurrentTime() - 1 * 60 * 1000 await ExternalMessageQueue.updateAsync(messageId, { @@ -98,7 +106,10 @@ async function retry(context: MethodContext, messageId: ExternalMessageQueueObjI manualRetry: true, hold: false, errorFatal: false, - lastTry: m.lastTry !== undefined && m.lastTry > tryGap ? tryGap : m.lastTry, + lastTry: + existingMessage.lastTry !== undefined && existingMessage.lastTry > tryGap + ? 
tryGap + : existingMessage.lastTry, }, }) // triggerdoMessageQueue(1000) diff --git a/meteor/server/api/__tests__/peripheralDevice.test.ts b/meteor/server/api/__tests__/peripheralDevice.test.ts index 6efe7a15969..4a3b69fe5a3 100644 --- a/meteor/server/api/__tests__/peripheralDevice.test.ts +++ b/meteor/server/api/__tests__/peripheralDevice.test.ts @@ -618,7 +618,7 @@ describe('test peripheralDevice general API methods', () => { const deviceObj = await PeripheralDevices.findOneAsync(device?._id) expect(deviceObj).toBeDefined() - await MeteorCall.peripheralDevice.removePeripheralDevice(device?._id) + await MeteorCall.peripheralDevice.removePeripheralDevice(device._id, device.token) } { diff --git a/meteor/server/api/__tests__/userActions/mediaManager.test.ts b/meteor/server/api/__tests__/userActions/mediaManager.test.ts index 3680cffde2b..bf58417d6f7 100644 --- a/meteor/server/api/__tests__/userActions/mediaManager.test.ts +++ b/meteor/server/api/__tests__/userActions/mediaManager.test.ts @@ -47,11 +47,16 @@ describe('User Actions - Media Manager', () => { jest.resetAllMocks() }) test('Restart workflow', async () => { - const { workFlowId } = await setupMockWorkFlow() + const { workFlowId, workFlow } = await setupMockWorkFlow() // should fail if the workflow doesn't exist await expect( - MeteorCall.userAction.mediaRestartWorkflow('', getCurrentTime(), protectString('FAKE_ID')) + MeteorCall.userAction.mediaRestartWorkflow( + '', + getCurrentTime(), + workFlow.deviceId, + protectString('FAKE_ID') + ) ).resolves.toMatchUserRawError(/not found/gi) { @@ -72,16 +77,16 @@ describe('User Actions - Media Manager', () => { }) }, MAX_WAIT_TIME) - await MeteorCall.userAction.mediaRestartWorkflow('', getCurrentTime(), workFlowId) + await MeteorCall.userAction.mediaRestartWorkflow('', getCurrentTime(), workFlow.deviceId, workFlowId) await p } }) test('Abort worfklow', async () => { - const { workFlowId } = await setupMockWorkFlow() + const { workFlowId, workFlow } = await 
setupMockWorkFlow() // should fail if the workflow doesn't exist await expect( - MeteorCall.userAction.mediaAbortWorkflow('', getCurrentTime(), protectString('FAKE_ID')) + MeteorCall.userAction.mediaAbortWorkflow('', getCurrentTime(), workFlow.deviceId, protectString('FAKE_ID')) ).resolves.toMatchUserRawError(/not found/gi) { @@ -103,16 +108,21 @@ describe('User Actions - Media Manager', () => { }) }, MAX_WAIT_TIME) - await MeteorCall.userAction.mediaAbortWorkflow('', getCurrentTime(), workFlowId) + await MeteorCall.userAction.mediaAbortWorkflow('', getCurrentTime(), workFlow.deviceId, workFlowId) await p } }) test('Prioritize workflow', async () => { - const { workFlowId } = await setupMockWorkFlow() + const { workFlowId, workFlow } = await setupMockWorkFlow() // should fail if the workflow doesn't exist await expect( - MeteorCall.userAction.mediaPrioritizeWorkflow('', getCurrentTime(), protectString('FAKE_ID')) + MeteorCall.userAction.mediaPrioritizeWorkflow( + '', + getCurrentTime(), + workFlow.deviceId, + protectString('FAKE_ID') + ) ).resolves.toMatchUserRawError(/not found/gi) { @@ -134,7 +144,7 @@ describe('User Actions - Media Manager', () => { }) }, MAX_WAIT_TIME) - await MeteorCall.userAction.mediaPrioritizeWorkflow('', getCurrentTime(), workFlowId) + await MeteorCall.userAction.mediaPrioritizeWorkflow('', getCurrentTime(), workFlow.deviceId, workFlowId) await p } }) diff --git a/meteor/server/api/blueprints/__tests__/api.test.ts b/meteor/server/api/blueprints/__tests__/api.test.ts index b92bf0a0ac2..b2c60d3d4ab 100644 --- a/meteor/server/api/blueprints/__tests__/api.test.ts +++ b/meteor/server/api/blueprints/__tests__/api.test.ts @@ -6,29 +6,23 @@ import { BlueprintManifestType } from '@sofie-automation/blueprints-integration' import { SYSTEM_ID, ICoreSystem } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' import { insertBlueprint, uploadBlueprint } from '../api' import { MeteorCall } from '../../methods' -import { MethodContext } from 
'../../methodContext' import '../../../../__mocks__/_extendJest' import { Blueprints, CoreSystem } from '../../../collections' import { SupressLogMessages } from '../../../../__mocks__/suppressLogging' import { JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' +import { Meteor } from 'meteor/meteor' // we don't want the deviceTriggers observer to start up at this time jest.mock('../../deviceTriggers/observer') require('../../peripheralDevice.ts') // include in order to create the Meteor methods needed -const DEFAULT_CONTEXT: MethodContext = { - userId: null, - isSimulation: false, - connection: { - id: 'mockConnectionId', - close: () => undefined, - onClose: () => undefined, - clientAddress: '127.0.0.1', - httpHeaders: {}, - }, - setUserId: () => undefined, - unblock: () => undefined, +const DEFAULT_CONNECTION: Meteor.Connection = { + id: 'mockConnectionId', + close: () => undefined, + onClose: () => undefined, + clientAddress: '127.0.0.1', + httpHeaders: {}, } describe('Test blueprint management api', () => { @@ -195,7 +189,7 @@ describe('Test blueprint management api', () => { }) test('with name', async () => { const rawName = 'some_fake_name' - const newId = await insertBlueprint(DEFAULT_CONTEXT, undefined, rawName) + const newId = await insertBlueprint(DEFAULT_CONNECTION, undefined, rawName) expect(newId).toBeTruthy() // Check some props @@ -206,7 +200,7 @@ describe('Test blueprint management api', () => { }) test('with type', async () => { const type = BlueprintManifestType.STUDIO - const newId = await insertBlueprint(DEFAULT_CONTEXT, type) + const newId = await insertBlueprint(DEFAULT_CONNECTION, type) expect(newId).toBeTruthy() // Check some props @@ -219,20 +213,20 @@ describe('Test blueprint management api', () => { describe('uploadBlueprint', () => { test('empty id', async () => { - await expect(uploadBlueprint(DEFAULT_CONTEXT, protectString(''), '0')).rejects.toThrowMeteor( + await expect(uploadBlueprint(DEFAULT_CONNECTION, 
protectString(''), '0')).rejects.toThrowMeteor( 400, 'Blueprint id "" is not valid' ) }) test('empty body', async () => { - await expect(uploadBlueprint(DEFAULT_CONTEXT, protectString('blueprint99'), '')).rejects.toThrowMeteor( + await expect(uploadBlueprint(DEFAULT_CONNECTION, protectString('blueprint99'), '')).rejects.toThrowMeteor( 400, 'Blueprint blueprint99 failed to parse' ) }) test('body not a manifest', async () => { await expect( - uploadBlueprint(DEFAULT_CONTEXT, protectString('blueprint99'), `({default: (() => 5)()})`) + uploadBlueprint(DEFAULT_CONNECTION, protectString('blueprint99'), `({default: (() => 5)()})`) ).rejects.toThrowMeteor(400, 'Blueprint blueprint99 returned a manifest of type number') }) test('manifest missing blueprintType', async () => { @@ -254,7 +248,7 @@ describe('Test blueprint management api', () => { } }) await expect( - uploadBlueprint(DEFAULT_CONTEXT, protectString('blueprint99'), blueprintStr) + uploadBlueprint(DEFAULT_CONNECTION, protectString('blueprint99'), blueprintStr) ).rejects.toThrowMeteor( 400, `Blueprint blueprint99 returned a manifest of unknown blueprintType "undefined"` @@ -281,7 +275,9 @@ describe('Test blueprint management api', () => { })) as Blueprint expect(existingBlueprint).toBeTruthy() - await expect(uploadBlueprint(DEFAULT_CONTEXT, existingBlueprint._id, blueprintStr)).rejects.toThrowMeteor( + await expect( + uploadBlueprint(DEFAULT_CONNECTION, existingBlueprint._id, blueprintStr) + ).rejects.toThrowMeteor( 400, `Cannot replace old blueprint (of type "showstyle") with new blueprint of type "studio"` ) @@ -305,7 +301,7 @@ describe('Test blueprint management api', () => { } ) - const blueprint = await uploadBlueprint(DEFAULT_CONTEXT, protectString('tmp_showstyle'), blueprintStr) + const blueprint = await uploadBlueprint(DEFAULT_CONNECTION, protectString('tmp_showstyle'), blueprintStr) expect(blueprint).toBeTruthy() expect(blueprint).toMatchObject( literal>({ @@ -344,7 +340,7 @@ describe('Test blueprint 
management api', () => { ) const blueprint = await uploadBlueprint( - DEFAULT_CONTEXT, + DEFAULT_CONNECTION, protectString('tmp_studio'), blueprintStr, 'tmp name' @@ -388,7 +384,7 @@ describe('Test blueprint management api', () => { ) const blueprint = await uploadBlueprint( - DEFAULT_CONTEXT, + DEFAULT_CONNECTION, protectString('tmp_system'), blueprintStr, 'tmp name' @@ -436,7 +432,7 @@ describe('Test blueprint management api', () => { expect(existingBlueprint).toBeTruthy() expect(existingBlueprint.blueprintId).toBeFalsy() - const blueprint = await uploadBlueprint(DEFAULT_CONTEXT, existingBlueprint._id, blueprintStr) + const blueprint = await uploadBlueprint(DEFAULT_CONNECTION, existingBlueprint._id, blueprintStr) expect(blueprint).toBeTruthy() expect(blueprint).toMatchObject( literal>({ @@ -482,7 +478,7 @@ describe('Test blueprint management api', () => { expect(existingBlueprint).toBeTruthy() expect(existingBlueprint.blueprintId).toBeTruthy() - const blueprint = await uploadBlueprint(DEFAULT_CONTEXT, existingBlueprint._id, blueprintStr) + const blueprint = await uploadBlueprint(DEFAULT_CONNECTION, existingBlueprint._id, blueprintStr) expect(blueprint).toBeTruthy() expect(blueprint).toMatchObject( literal>({ @@ -528,7 +524,9 @@ describe('Test blueprint management api', () => { expect(existingBlueprint).toBeTruthy() expect(existingBlueprint.blueprintId).toBeTruthy() - await expect(uploadBlueprint(DEFAULT_CONTEXT, existingBlueprint._id, blueprintStr)).rejects.toThrowMeteor( + await expect( + uploadBlueprint(DEFAULT_CONNECTION, existingBlueprint._id, blueprintStr) + ).rejects.toThrowMeteor( 422, `Cannot replace old blueprint "${existingBlueprint._id}" ("ss1") with new blueprint "show2"` ) @@ -558,7 +556,9 @@ describe('Test blueprint management api', () => { expect(existingBlueprint).toBeTruthy() expect(existingBlueprint.blueprintId).toBeTruthy() - await expect(uploadBlueprint(DEFAULT_CONTEXT, existingBlueprint._id, blueprintStr)).rejects.toThrowMeteor( + await 
expect( + uploadBlueprint(DEFAULT_CONNECTION, existingBlueprint._id, blueprintStr) + ).rejects.toThrowMeteor( 422, `Cannot replace old blueprint "${existingBlueprint._id}" ("ss1") with new blueprint ""` ) diff --git a/meteor/server/api/blueprints/__tests__/http.test.ts b/meteor/server/api/blueprints/__tests__/http.test.ts index 17d493b0e30..887b7d61d88 100644 --- a/meteor/server/api/blueprints/__tests__/http.test.ts +++ b/meteor/server/api/blueprints/__tests__/http.test.ts @@ -8,7 +8,7 @@ jest.mock('../../deviceTriggers/observer') import * as api from '../api' jest.mock('../api.ts') -const DEFAULT_CONTEXT = { userId: '' } +const DEFAULT_CONTEXT = expect.objectContaining({ req: expect.any(Object), res: expect.any(Object) }) require('../http.ts') // include in order to create the Meteor methods needed diff --git a/meteor/server/api/blueprints/api.ts b/meteor/server/api/blueprints/api.ts index e2f2ec7bc5b..8a294bdfecf 100644 --- a/meteor/server/api/blueprints/api.ts +++ b/meteor/server/api/blueprints/api.ts @@ -20,10 +20,6 @@ import { parseVersion } from '../../systemStatus/semverUtils' import { evalBlueprint } from './cache' import { removeSystemStatus } from '../../systemStatus/systemStatus' import { MethodContext, MethodContextAPI } from '../methodContext' -import { OrganizationContentWriteAccess, OrganizationReadAccess } from '../../security/organization' -import { SystemWriteAccess } from '../../security/system' -import { Credentials, isResolvedCredentials } from '../../security/lib/credentials' -import { Settings } from '../../Settings' import { generateTranslationBundleOriginId, upsertBundles } from '../translationsBundles' import { BlueprintId, OrganizationId, ShowStyleBaseId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { Blueprints, CoreSystem, ShowStyleBases, ShowStyleVariants, Studios } from '../../collections' @@ -32,21 +28,21 @@ import { getSystemStorePath } from '../../coreSystem' import { DBShowStyleBase } from 
'@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { assertConnectionHasOneOfPermissions, RequestCredentials } from '../../security/auth' + +const PERMISSIONS_FOR_MANAGE_BLUEPRINTS: Array = ['configure'] export async function insertBlueprint( - methodContext: MethodContext, + cred: RequestCredentials | null, type?: BlueprintManifestType, name?: string ): Promise { - const { organizationId, cred } = await OrganizationContentWriteAccess.blueprint(methodContext) - if (Settings.enableUserAccounts && isResolvedCredentials(cred)) { - if (!cred.user || !cred.user.superAdmin) { - throw new Meteor.Error(401, 'Only super admins can create new blueprints') - } - } + assertConnectionHasOneOfPermissions(cred, ...PERMISSIONS_FOR_MANAGE_BLUEPRINTS) + return Blueprints.insertAsync({ _id: getRandomId(), - organizationId: organizationId, + organizationId: null, name: name || 'New Blueprint', hasCode: false, code: '', @@ -72,7 +68,9 @@ export async function insertBlueprint( } export async function removeBlueprint(methodContext: MethodContext, blueprintId: BlueprintId): Promise { check(blueprintId, String) - await OrganizationContentWriteAccess.blueprint(methodContext, blueprintId, true) + + assertConnectionHasOneOfPermissions(methodContext.connection, ...PERMISSIONS_FOR_MANAGE_BLUEPRINTS) + if (!blueprintId) throw new Meteor.Error(404, `Blueprint id "${blueprintId}" was not found`) await Blueprints.removeAsync(blueprintId) @@ -80,7 +78,7 @@ export async function removeBlueprint(methodContext: MethodContext, blueprintId: } export async function uploadBlueprint( - context: Credentials, + cred: RequestCredentials, blueprintId: BlueprintId, body: string, blueprintName?: string, @@ -90,19 +88,21 @@ export async function 
uploadBlueprint( check(body, String) check(blueprintName, Match.Maybe(String)) - // TODO: add access control here - const { organizationId } = await OrganizationContentWriteAccess.blueprint(context, blueprintId, true) + assertConnectionHasOneOfPermissions(cred, ...PERMISSIONS_FOR_MANAGE_BLUEPRINTS) + if (!Meteor.isTest) logger.info(`Got blueprint '${blueprintId}'. ${body.length} bytes`) if (!blueprintId) throw new Meteor.Error(400, `Blueprint id "${blueprintId}" is not valid`) const blueprint = await fetchBlueprintLight(blueprintId) - return innerUploadBlueprint(organizationId, blueprint, blueprintId, body, blueprintName, ignoreIdChange) + return innerUploadBlueprint(null, blueprint, blueprintId, body, blueprintName, ignoreIdChange) } -export async function uploadBlueprintAsset(_context: Credentials, fileId: string, body: string): Promise { +export async function uploadBlueprintAsset(cred: RequestCredentials, fileId: string, body: string): Promise { check(fileId, String) check(body, String) + assertConnectionHasOneOfPermissions(cred, ...PERMISSIONS_FOR_MANAGE_BLUEPRINTS) + const storePath = getSystemStorePath() // TODO: add access control here @@ -115,12 +115,11 @@ export async function uploadBlueprintAsset(_context: Credentials, fileId: string await fsp.mkdir(path.join(storePath, parsedPath.dir), { recursive: true }) await fsp.writeFile(path.join(storePath, fileId), data) } -export function retrieveBlueprintAsset(_context: Credentials, fileId: string): ReadStream { +export function retrieveBlueprintAsset(_cred: RequestCredentials, fileId: string): ReadStream { check(fileId, String) const storePath = getSystemStorePath() - // TODO: add access control here return createReadStream(path.join(storePath, fileId)) } /** Only to be called from internal functions */ @@ -363,7 +362,7 @@ async function syncConfigPresetsToStudios(blueprint: Blueprint): Promise { } async function assignSystemBlueprint(methodContext: MethodContext, blueprintId: BlueprintId | null): Promise { - 
await SystemWriteAccess.coreSystem(methodContext) + assertConnectionHasOneOfPermissions(methodContext.connection, ...PERMISSIONS_FOR_MANAGE_BLUEPRINTS) if (blueprintId !== undefined && blueprintId !== null) { check(blueprintId, String) @@ -371,9 +370,6 @@ async function assignSystemBlueprint(methodContext: MethodContext, blueprintId: const blueprint = await fetchBlueprintLight(blueprintId) if (!blueprint) throw new Meteor.Error(404, 'Blueprint not found') - if (blueprint.organizationId) - await OrganizationReadAccess.organizationContent(blueprint.organizationId, { userId: methodContext.userId }) - if (blueprint.blueprintType !== BlueprintManifestType.SYSTEM) throw new Meteor.Error(404, 'Blueprint not of type SYSTEM') @@ -393,7 +389,7 @@ async function assignSystemBlueprint(methodContext: MethodContext, blueprintId: class ServerBlueprintAPI extends MethodContextAPI implements ReplaceOptionalWithNullInMethodArguments { async insertBlueprint() { - return insertBlueprint(this) + return insertBlueprint(this.connection) } async removeBlueprint(blueprintId: BlueprintId) { return removeBlueprint(this, blueprintId) diff --git a/meteor/server/api/blueprints/http.ts b/meteor/server/api/blueprints/http.ts index 70a0bb520f4..ae364dd81f5 100644 --- a/meteor/server/api/blueprints/http.ts +++ b/meteor/server/api/blueprints/http.ts @@ -38,20 +38,12 @@ blueprintsRouter.post( check(blueprintId, String) check(blueprintName, Match.Maybe(String)) - const userId = ctx.headers.authorization ? 
ctx.headers.authorization.split(' ')[1] : '' - const body = ctx.request.body || ctx.req.body if (!body) throw new Meteor.Error(400, 'Restore Blueprint: Missing request body') if (typeof body !== 'string' || body.length < 10) throw new Meteor.Error(400, 'Restore Blueprint: Invalid request body') - await uploadBlueprint( - { userId: protectString(userId) }, - protectString(blueprintId), - body, - blueprintName, - force - ) + await uploadBlueprint(ctx, protectString(blueprintId), body, blueprintName, force) ctx.response.status = 200 ctx.body = '' @@ -89,13 +81,7 @@ blueprintsRouter.post( const errors: any[] = [] for (const id of _.keys(collection.blueprints)) { try { - const userId = ctx.headers.authorization ? ctx.headers.authorization.split(' ')[1] : '' - await uploadBlueprint( - { userId: protectString(userId) }, - protectString(id), - collection.blueprints[id], - id - ) + await uploadBlueprint(ctx, protectString(id), collection.blueprints[id], id) } catch (e) { logger.error('Blueprint restore failed: ' + e) errors.push(e) @@ -104,8 +90,7 @@ blueprintsRouter.post( if (collection.assets) { for (const id of _.keys(collection.assets)) { try { - const userId = ctx.headers.authorization ? ctx.headers.authorization.split(' ')[1] : '' - await uploadBlueprintAsset({ userId: protectString(userId) }, id, collection.assets[id]) + await uploadBlueprintAsset(ctx, id, collection.assets[id]) } catch (e) { logger.error('Blueprint assets upload failed: ' + e) errors.push(e) @@ -157,8 +142,7 @@ blueprintsRouter.post( const errors: any[] = [] for (const id of _.keys(collection)) { try { - const userId = ctx.headers.authorization ? 
ctx.headers.authorization.split(' ')[1] : '' - await uploadBlueprintAsset({ userId: protectString(userId) }, id, collection[id]) + await uploadBlueprintAsset(ctx, id, collection[id]) } catch (e) { logger.error('Blueprint assets upload failed: ' + e) errors.push(e) @@ -192,9 +176,8 @@ blueprintsRouter.get('/assets/(.*)', async (ctx) => { const filePath = ctx.params[0] if (filePath.match(/\.(png|svg|gif)?$/)) { - const userId = ctx.headers.authorization ? ctx.headers.authorization.split(' ')[1] : '' try { - const dataStream = retrieveBlueprintAsset({ userId: protectString(userId) }, filePath) + const dataStream = retrieveBlueprintAsset(ctx, filePath) const extension = path.extname(filePath) if (extension === '.svg') { ctx.response.type = 'image/svg+xml' diff --git a/meteor/server/api/buckets.ts b/meteor/server/api/buckets.ts index ac2ad69cbe2..109ae82ba94 100644 --- a/meteor/server/api/buckets.ts +++ b/meteor/server/api/buckets.ts @@ -2,18 +2,23 @@ import * as _ from 'underscore' import { Meteor } from 'meteor/meteor' import { Bucket } from '@sofie-automation/meteor-lib/dist/collections/Buckets' import { getRandomId, getRandomString, literal } from '../lib/tempLib' -import { BucketSecurity } from '../security/buckets' import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { AdLibAction, AdLibActionCommon } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibAction' -import { BucketAdLibActions, Buckets, Rundowns, ShowStyleVariants, Studios } from '../collections' +import { BucketAdLibActions, BucketAdLibs, Buckets, Rundowns, ShowStyleVariants, Studios } from '../collections' import { runIngestOperation } from './ingest/lib' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' -import { StudioContentAccess } from '../security/studio' -import { Settings } from '../Settings' import { IngestAdlib } from 
'@sofie-automation/blueprints-integration' import { getShowStyleCompound } from './showStyles' -import { ShowStyleBaseId, ShowStyleVariantId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + BucketAdLibActionId, + BucketAdLibId, + BucketId, + ShowStyleBaseId, + ShowStyleVariantId, + StudioId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' +import { fetchStudioLight } from '../optimizations' const DEFAULT_BUCKET_WIDTH = undefined @@ -25,18 +30,28 @@ function isBucketAdLibAction(action: AdLibActionCommon | BucketAdLibAction): act } export namespace BucketsAPI { - export async function removeBucketAdLib(access: BucketSecurity.BucketAdlibPieceContentAccess): Promise { - const adlib = access.adlib + export async function removeBucketAdLib(adLibId: BucketAdLibId): Promise { + const adlib = (await BucketAdLibs.findOneAsync(adLibId, { + projection: { + _id: 1, + studioId: 1, + }, + })) as Pick | undefined + if (!adlib) throw new Meteor.Error(404, `BucketAdLib "${adLibId}" not found`) await runIngestOperation(adlib.studioId, IngestJobs.BucketRemoveAdlibPiece, { pieceId: adlib._id, }) } - export async function removeBucketAdLibAction( - access: BucketSecurity.BucketAdlibActionContentAccess - ): Promise { - const adlib = access.action + export async function removeBucketAdLibAction(adLibActionId: BucketAdLibActionId): Promise { + const adlib = (await BucketAdLibActions.findOneAsync(adLibActionId, { + projection: { + _id: 1, + studioId: 1, + }, + })) as Pick | undefined + if (!adlib) throw new Meteor.Error(404, `BucketAdLibAction "${adLibActionId}" not found`) await runIngestOperation(adlib.studioId, IngestJobs.BucketRemoveAdlibAction, { actionId: adlib._id, @@ -44,22 +59,26 @@ export namespace BucketsAPI { } export async function modifyBucket( - access: BucketSecurity.BucketContentAccess, + bucketId: BucketId, bucketProps: Partial> ): Promise { - await Buckets.updateAsync(access.bucket._id, { + await Buckets.updateAsync(bucketId, { $set: 
_.omit(bucketProps, ['_id', 'studioId']), }) } - export async function emptyBucket(access: BucketSecurity.BucketContentAccess): Promise { - await runIngestOperation(access.studioId, IngestJobs.BucketEmpty, { - bucketId: access.bucket._id, + export async function emptyBucket(bucketId: BucketId): Promise { + const bucket = await Buckets.findOneAsync(bucketId) + if (!bucket) throw new Meteor.Error(404, `Bucket "${bucketId}" not found`) + + await runIngestOperation(bucket.studioId, IngestJobs.BucketEmpty, { + bucketId: bucket._id, }) } - export async function createNewBucket(access: StudioContentAccess, name: string): Promise { - const { studio } = access + export async function createNewBucket(studioId: StudioId, name: string): Promise { + const studio = await fetchStudioLight(studioId) + if (!studio) throw new Meteor.Error(404, `Studio "${studioId}" not found`) const heaviestBucket = ( await Buckets.findFetchAsync( @@ -99,28 +118,20 @@ export namespace BucketsAPI { } export async function modifyBucketAdLibAction( - access: BucketSecurity.BucketAdlibActionContentAccess, + adLibActionId: BucketAdLibActionId, actionProps: Partial> ): Promise { - const oldAction = access.action + const oldAction = await BucketAdLibActions.findOneAsync(adLibActionId) + if (!oldAction) throw new Meteor.Error(404, `BucketAdLibAction "${adLibActionId}" not found`) - let moveIntoBucket: Bucket | undefined if (actionProps.bucketId && actionProps.bucketId !== oldAction.bucketId) { - moveIntoBucket = await Buckets.findOneAsync(actionProps.bucketId) - if (!moveIntoBucket) throw new Meteor.Error(`Could not find bucket: "${actionProps.bucketId}"`) + const moveIntoBucket = await Buckets.countDocuments(actionProps.bucketId) + if (moveIntoBucket === 0) throw new Meteor.Error(`Could not find bucket: "${actionProps.bucketId}"`) } - // Check we are allowed to move into the new bucket - if (Settings.enableUserAccounts && moveIntoBucket) { - // Shouldn't be moved across orgs - const newBucketStudio = await 
Studios.findOneAsync(moveIntoBucket.studioId, { - fields: { organizationId: 1 }, - }) - if (!newBucketStudio) throw new Meteor.Error(`Could not find studio: "${moveIntoBucket.studioId}"`) - - if (newBucketStudio.organizationId !== access.studio.organizationId) { - throw new Meteor.Error(403, 'Access denied') - } + if (actionProps.studioId && actionProps.studioId !== oldAction.studioId) { + const newStudioCount = await Studios.countDocuments(actionProps.studioId) + if (newStudioCount === 0) throw new Meteor.Error(`Could not find studio: "${actionProps.studioId}"`) } await runIngestOperation(oldAction.studioId, IngestJobs.BucketActionModify, { @@ -130,25 +141,30 @@ export namespace BucketsAPI { } export async function saveAdLibActionIntoBucket( - access: BucketSecurity.BucketContentAccess, + bucketId: BucketId, action: AdLibActionCommon | BucketAdLibAction ): Promise { + const targetBucket = (await Buckets.findOneAsync(bucketId, { projection: { _id: 1, studioId: 1 } })) as + | Pick + | undefined + if (!targetBucket) throw new Meteor.Error(404, `Bucket "${bucketId}" not found`) + let adLibAction: BucketAdLibAction if (isBucketAdLibAction(action)) { if (action.showStyleVariantId && !(await ShowStyleVariants.findOneAsync(action.showStyleVariantId))) { throw new Meteor.Error(`Could not find show style variant: "${action.showStyleVariantId}"`) } - if (access.studioId !== action.studioId) { + if (targetBucket.studioId !== action.studioId) { throw new Meteor.Error( - `studioId is different than Action's studioId: "${access.studioId}" - "${action.studioId}"` + `studioId is different than Action's studioId: "${targetBucket.studioId}" - "${action.studioId}"` ) } adLibAction = { ...action, _id: getRandomId(), - bucketId: access.bucket._id, + bucketId: targetBucket._id, } } else { const rundown = await Rundowns.findOneAsync(action.rundownId) @@ -156,9 +172,9 @@ export namespace BucketsAPI { throw new Meteor.Error(`Could not find rundown: "${action.rundownId}"`) } - if 
(access.studioId !== rundown.studioId) { + if (targetBucket.studioId !== rundown.studioId) { throw new Meteor.Error( - `studioId is different than Rundown's studioId: "${access.studioId}" - "${rundown.studioId}"` + `studioId is different than Rundown's studioId: "${targetBucket.studioId}" - "${rundown.studioId}"` ) } @@ -166,8 +182,8 @@ export namespace BucketsAPI { ...(_.omit(action, ['partId', 'rundownId']) as Omit), _id: getRandomId(), externalId: getRandomString(), // This needs to be something unique, so that the regenerate logic doesn't get it mixed up with something else - bucketId: access.bucket._id, - studioId: access.studioId, + bucketId: targetBucket._id, + studioId: targetBucket.studioId, showStyleBaseId: rundown.showStyleBaseId, showStyleVariantId: action.allVariants ? null : rundown.showStyleVariantId, importVersions: rundown.importVersions, @@ -178,7 +194,7 @@ export namespace BucketsAPI { // We can insert it here, as it is a creation with a new id, so the only race risk we have is the bucket being deleted await BucketAdLibActions.insertAsync(adLibAction) - await runIngestOperation(access.studioId, IngestJobs.BucketActionRegenerateExpectedPackages, { + await runIngestOperation(targetBucket.studioId, IngestJobs.BucketActionRegenerateExpectedPackages, { actionId: adLibAction._id, }) @@ -186,28 +202,20 @@ export namespace BucketsAPI { } export async function modifyBucketAdLib( - access: BucketSecurity.BucketAdlibPieceContentAccess, + adLibId: BucketAdLibId, adlibProps: Partial> ): Promise { - const oldAdLib = access.adlib + const oldAdLib = await BucketAdLibs.findOneAsync(adLibId) + if (!oldAdLib) throw new Meteor.Error(404, `BucketAdLib "${adLibId}" not found`) - let moveIntoBucket: Bucket | undefined if (adlibProps.bucketId && adlibProps.bucketId !== oldAdLib.bucketId) { - moveIntoBucket = await Buckets.findOneAsync(adlibProps.bucketId) - if (!moveIntoBucket) throw new Meteor.Error(`Could not find bucket: "${adlibProps.bucketId}"`) + const 
moveIntoBucket = await Buckets.countDocuments(adlibProps.bucketId) + if (moveIntoBucket === 0) throw new Meteor.Error(`Could not find bucket: "${adlibProps.bucketId}"`) } - // Check we are allowed to move into the new bucket - if (Settings.enableUserAccounts && moveIntoBucket) { - // Shouldn't be moved across orgs - const newBucketStudio = await Studios.findOneAsync(moveIntoBucket.studioId, { - fields: { organizationId: 1 }, - }) - if (!newBucketStudio) throw new Meteor.Error(`Could not find studio: "${moveIntoBucket.studioId}"`) - - if (newBucketStudio.organizationId !== access.studio.organizationId) { - throw new Meteor.Error(403, 'Access denied') - } + if (adlibProps.studioId && adlibProps.studioId !== oldAdLib.studioId) { + const newStudioCount = await Studios.countDocuments(adlibProps.studioId) + if (newStudioCount === 0) throw new Meteor.Error(`Could not find studio: "${adlibProps.studioId}"`) } await runIngestOperation(oldAdLib.studioId, IngestJobs.BucketPieceModify, { @@ -216,8 +224,10 @@ export namespace BucketsAPI { }) } - export async function removeBucket(access: BucketSecurity.BucketContentAccess): Promise { - const bucket = access.bucket + export async function removeBucket(bucketId: BucketId): Promise { + const bucket = await Buckets.findOneAsync(bucketId) + if (!bucket) throw new Meteor.Error(404, `Bucket "${bucketId}" not found`) + await Promise.all([ Buckets.removeAsync(bucket._id), await runIngestOperation(bucket.studioId, IngestJobs.BucketEmpty, { @@ -227,13 +237,17 @@ export namespace BucketsAPI { } export async function importAdlibToBucket( - access: BucketSecurity.BucketContentAccess, + bucketId: BucketId, showStyleBaseId: ShowStyleBaseId, /** Optional: if set, only create adlib for this variant (otherwise: for all variants in ShowStyleBase)*/ showStyleVariantId: ShowStyleVariantId | undefined, ingestItem: IngestAdlib ): Promise { - const studioLight = access.studio + const bucket = await Buckets.findOneAsync(bucketId) + if (!bucket) throw 
new Meteor.Error(404, `Bucket "${bucketId}" not found`) + + const studioLight = await fetchStudioLight(bucket.studioId) + if (!studioLight) throw new Meteor.Error(404, `Studio "${bucket.studioId}" not found`) if (showStyleVariantId) { const showStyleCompound = await getShowStyleCompound(showStyleVariantId) @@ -249,12 +263,12 @@ export namespace BucketsAPI { if (studioLight.supportedShowStyleBase.indexOf(showStyleBaseId) === -1) { throw new Meteor.Error( 500, - `ShowStyle base "${showStyleBaseId}" not supported by studio "${access.studioId}"` + `ShowStyle base "${showStyleBaseId}" not supported by studio "${bucket.studioId}"` ) } - await runIngestOperation(access.studioId, IngestJobs.BucketItemImport, { - bucketId: access.bucket._id, + await runIngestOperation(bucket.studioId, IngestJobs.BucketItemImport, { + bucketId: bucket._id, showStyleBaseId: showStyleBaseId, showStyleVariantIds: showStyleVariantId ? [showStyleVariantId] : undefined, payload: ingestItem, diff --git a/meteor/server/api/cleanup.ts b/meteor/server/api/cleanup.ts index 2f733c9b8e0..5324208bcf8 100644 --- a/meteor/server/api/cleanup.ts +++ b/meteor/server/api/cleanup.ts @@ -127,10 +127,6 @@ export async function cleanupOldDataInner(actuallyCleanup = false): Promise { + async (userActionMetadata) => { checkArgs() - const access = await checkAccessToPlaylist(context, playlistId) - return runStudioJob(access.playlist.studioId, jobName, jobArguments, userActionMetadata) + const playlist = await checkAccessToPlaylist(context.connection, playlistId) + return runStudioJob(playlist.studioId, jobName, jobArguments, userActionMetadata) } ) } @@ -92,11 +87,11 @@ export namespace ServerClientAPI { eventTime, `worker.${jobName}`, jobArguments as any, - async (_credentials, userActionMetadata) => { + async (userActionMetadata) => { checkArgs() - const access = await checkAccessToRundown(context, rundownId) - return runStudioJob(access.rundown.studioId, jobName, jobArguments, userActionMetadata) + const rundown = 
await checkAccessToRundown(context.connection, rundownId) + return runStudioJob(rundown.studioId, jobName, jobArguments, userActionMetadata) } ) } @@ -112,13 +107,13 @@ export namespace ServerClientAPI { checkArgs: () => void, methodName: string, args: Record, - fcn: (access: VerifiedRundownPlaylistContentAccess) => Promise + fcn: (playlist: VerifiedRundownPlaylistForUserAction) => Promise ): Promise> { return runUserActionInLog(context, userEvent, eventTime, methodName, args, async () => { checkArgs() - const access = await checkAccessToPlaylist(context, playlistId) - return fcn(access) + const playlist = await checkAccessToPlaylist(context.connection, playlistId) + return fcn(playlist) }) } @@ -133,13 +128,13 @@ export namespace ServerClientAPI { checkArgs: () => void, methodName: string, args: Record, - fcn: (access: VerifiedRundownContentAccess) => Promise + fcn: (rundown: VerifiedRundownForUserAction) => Promise ): Promise> { return runUserActionInLog(context, userEvent, eventTime, methodName, args, async () => { checkArgs() - const access = await checkAccessToRundown(context, rundownId) - return fcn(access) + const rundown = await checkAccessToRundown(context.connection, rundownId) + return fcn(rundown) }) } @@ -185,11 +180,11 @@ export namespace ServerClientAPI { eventTime: Time, methodName: string, methodArgs: Record, - fcn: (credentials: BasicAccessContext, userActionMetadata: UserActionMetadata) => Promise + fcn: (userActionMetadata: UserActionMetadata) => Promise ): Promise> { // If we are in the test write auth check mode, then bypass all special logic to ensure errors dont get mangled if (isInTestWrite()) { - const result = await fcn({ organizationId: null, userId: null }, {}) + const result = await fcn({}) return ClientAPI.responseSuccess(result) } @@ -203,23 +198,21 @@ export namespace ServerClientAPI { // Called internally from server-side. 
// Just run and return right away: try { - const result = await fcn({ organizationId: null, userId: null }, {}) + const result = await fcn({}) return ClientAPI.responseSuccess(result) } catch (e) { return rewrapError(methodName, e) } } else { - const credentials = await getLoggedInCredentials(context) - // Start the db entry, but don't wait for it const actionId: UserActionsLogItemId = getRandomId() const pInitialInsert = UserActionsLog.insertAsync( literal({ _id: actionId, clientAddress: context.connection.clientAddress, - organizationId: credentials.organizationId, - userId: credentials.userId, + organizationId: null, + userId: null, context: userEvent, method: methodName, args: JSON.stringify(methodArgs), @@ -233,7 +226,7 @@ export namespace ServerClientAPI { const userActionMetadata: UserActionMetadata = {} try { - const result = await fcn(credentials, userActionMetadata) + const result = await fcn(userActionMetadata) const completeTime = Date.now() pInitialInsert @@ -325,14 +318,15 @@ export namespace ServerClientAPI { }) } - const access = await PeripheralDeviceContentWriteAccess.executeFunction(methodContext, deviceId) + // TODO - check this. This probably needs to be moved out of this method, with the client using more targetted methods + assertConnectionHasOneOfPermissions(methodContext.connection, 'studio', 'configure', 'service') await UserActionsLog.insertAsync( literal({ _id: actionId, clientAddress: methodContext.connection ? methodContext.connection.clientAddress : '', - organizationId: access.organizationId, - userId: access.userId, + organizationId: null, + userId: null, context: context, method: `${deviceId}: ${method}`, args: JSON.stringify(args), @@ -395,7 +389,8 @@ export namespace ServerClientAPI { }) } - await PeripheralDeviceContentWriteAccess.executeFunction(methodContext, deviceId) + // TODO - check this. 
This probably needs to be moved out of this method, with the client using more targetted methods + assertConnectionHasOneOfPermissions(methodContext.connection, 'studio', 'configure', 'service') return executePeripheralDeviceFunctionWithCustomTimeout(deviceId, timeoutTime, { functionName, @@ -407,17 +402,6 @@ export namespace ServerClientAPI { return Promise.reject(err) }) } - - async function getLoggedInCredentials(methodContext: MethodContext): Promise { - let userId: UserId | null = null - let organizationId: OrganizationId | null = null - if (Settings.enableUserAccounts) { - const cred = await resolveCredentials({ userId: methodContext.userId }) - if (cred.user) userId = cred.user._id - organizationId = cred.organizationId - } - return { userId, organizationId } - } } class ServerClientAPIClass extends MethodContextAPI implements NewClientAPI { diff --git a/meteor/server/api/deviceTriggers/observer.ts b/meteor/server/api/deviceTriggers/observer.ts index aa6bc4bc861..c155bcb6004 100644 --- a/meteor/server/api/deviceTriggers/observer.ts +++ b/meteor/server/api/deviceTriggers/observer.ts @@ -10,7 +10,7 @@ import { PreviewWrappedAdLib, } from '@sofie-automation/meteor-lib/dist/api/MountedTriggers' import { logger } from '../../logging' -import { checkAccessAndGetPeripheralDevice } from '../ingest/lib' +import { checkAccessAndGetPeripheralDevice } from '../../security/check' import { StudioActionManagers } from './StudioActionManagers' import { JobQueueWithClasses } from '@sofie-automation/shared-lib/dist/lib/JobQueueWithClasses' import { StudioDeviceTriggerManager } from './StudioDeviceTriggerManager' diff --git a/meteor/server/api/evaluations.ts b/meteor/server/api/evaluations.ts index 3be9d578182..cfa30748dbb 100644 --- a/meteor/server/api/evaluations.ts +++ b/meteor/server/api/evaluations.ts @@ -7,23 +7,20 @@ import { Meteor } from 'meteor/meteor' import * as _ from 'underscore' import { fetchStudioLight } from '../optimizations' import { 
sendSlackMessageToWebhook } from './integration/slack' -import { OrganizationId, UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { Evaluations, RundownPlaylists } from '../collections' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { VerifiedRundownPlaylistForUserAction } from '../security/check' export async function saveEvaluation( - credentials: { - userId: UserId | null - organizationId: OrganizationId | null - }, + _playlist: VerifiedRundownPlaylistForUserAction, evaluation: EvaluationBase ): Promise { await Evaluations.insertAsync({ ...evaluation, _id: getRandomId(), - organizationId: credentials.organizationId, - userId: credentials.userId, + organizationId: null, + userId: null, timestamp: getCurrentTime(), }) logger.info({ diff --git a/meteor/server/api/heapSnapshot.ts b/meteor/server/api/heapSnapshot.ts index 1b9cb142a2b..876b60be7a0 100644 --- a/meteor/server/api/heapSnapshot.ts +++ b/meteor/server/api/heapSnapshot.ts @@ -7,14 +7,11 @@ import { fixValidPath } from '../lib/lib' import { sleep } from '../lib/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { logger } from '../logging' -import { Settings } from '../Settings' -import { Credentials } from '../security/lib/credentials' -import { SystemWriteAccess } from '../security/system' +import { assertConnectionHasOneOfPermissions, RequestCredentials } from '../security/auth' + +async function retrieveHeapSnapshot(cred: RequestCredentials): Promise { + assertConnectionHasOneOfPermissions(cred, 'developer') -async function retrieveHeapSnapshot(cred0: Credentials): Promise { - if (Settings.enableUserAccounts) { - await SystemWriteAccess.coreSystem(cred0) - } logger.warn('Taking heap snapshot, expect system to be unresponsive for a few seconds..') await sleep(100) // Allow the logger 
to catch up before continuing.. @@ -51,19 +48,9 @@ async function handleKoaResponse(ctx: Koa.ParameterizedContext, snapshotFcn: () } } -// For backwards compatibility: -if (!Settings.enableUserAccounts) { - // Retrieve heap snapshot: - heapSnapshotPrivateApiRouter.get('/retrieve', async (ctx) => { - return handleKoaResponse(ctx, async () => { - return retrieveHeapSnapshot({ userId: null }) - }) - }) -} - // Retrieve heap snapshot: -heapSnapshotPrivateApiRouter.get('/:token/retrieve', async (ctx) => { +heapSnapshotPrivateApiRouter.get('/retrieve', async (ctx) => { return handleKoaResponse(ctx, async () => { - return retrieveHeapSnapshot({ userId: null, token: ctx.params.token }) + return retrieveHeapSnapshot(ctx) }) }) diff --git a/meteor/server/api/ingest/actions.ts b/meteor/server/api/ingest/actions.ts index dc46d4a3fa8..6a6cf852bf3 100644 --- a/meteor/server/api/ingest/actions.ts +++ b/meteor/server/api/ingest/actions.ts @@ -1,12 +1,12 @@ import { getPeripheralDeviceFromRundown, runIngestOperation } from './lib' import { MOSDeviceActions } from './mosDevice/actions' import { Meteor } from 'meteor/meteor' -import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { TriggerReloadDataResponse } from '@sofie-automation/meteor-lib/dist/api/userActions' import { GenericDeviceActions } from './genericDevice/actions' import { PeripheralDeviceType } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' import { assertNever } from '@sofie-automation/corelib/dist/lib' +import { VerifiedRundownForUserAction } from '../../security/check' /* This file contains actions that can be performed on an ingest-device @@ -15,9 +15,7 @@ export namespace IngestActions { /** * Trigger a reload of a rundown */ - export async function reloadRundown( - rundown: Pick - ): Promise { + export async function reloadRundown(rundown: VerifiedRundownForUserAction): Promise { const 
rundownSourceType = rundown.source.type switch (rundown.source.type) { case 'snapshot': diff --git a/meteor/server/api/ingest/lib.ts b/meteor/server/api/ingest/lib.ts index 15cd36b428e..f3e761b8874 100644 --- a/meteor/server/api/ingest/lib.ts +++ b/meteor/server/api/ingest/lib.ts @@ -5,9 +5,6 @@ import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyE import { PeripheralDevice, PeripheralDeviceCategory } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { Rundown, RundownSourceNrcs } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { logger } from '../../logging' -import { PeripheralDeviceContentWriteAccess } from '../../security/peripheralDevice' -import { MethodContext } from '../methodContext' -import { Credentials } from '../../security/lib/credentials' import { profiler } from '../profiler' import { IngestJobFunc } from '@sofie-automation/corelib/dist/worker/ingest' import { QueueIngestJob } from '../../worker/worker' @@ -64,26 +61,6 @@ export async function runIngestOperation( } } -/** Check Access and return PeripheralDevice, throws otherwise */ -export async function checkAccessAndGetPeripheralDevice( - deviceId: PeripheralDeviceId, - token: string | undefined, - context: Credentials | MethodContext -): Promise { - const span = profiler.startSpan('lib.checkAccessAndGetPeripheralDevice') - - const { device: peripheralDevice } = await PeripheralDeviceContentWriteAccess.peripheralDevice( - { userId: context.userId, token }, - deviceId - ) - if (!peripheralDevice) { - throw new Meteor.Error(404, `PeripheralDevice "${deviceId}" not found`) - } - - span?.end() - return peripheralDevice -} - export function getRundownId(studioId: StudioId, rundownExternalId: string): RundownId { if (!studioId) throw new Meteor.Error(500, 'getRundownId: studio not set!') if (!rundownExternalId) throw new Meteor.Error(401, 'getRundownId: rundownExternalId must be set!') diff --git 
a/meteor/server/api/ingest/mosDevice/mosIntegration.ts b/meteor/server/api/ingest/mosDevice/mosIntegration.ts index ad226d6d844..6969159e89b 100644 --- a/meteor/server/api/ingest/mosDevice/mosIntegration.ts +++ b/meteor/server/api/ingest/mosDevice/mosIntegration.ts @@ -1,16 +1,12 @@ import { MOS } from '@sofie-automation/corelib' import { logger } from '../../../logging' -import { - checkAccessAndGetPeripheralDevice, - fetchStudioIdFromDevice, - generateRundownSource, - runIngestOperation, -} from '../lib' +import { fetchStudioIdFromDevice, generateRundownSource, runIngestOperation } from '../lib' import { parseMosString } from './lib' import { MethodContext } from '../../methodContext' import { profiler } from '../../profiler' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' import { PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { checkAccessAndGetPeripheralDevice } from '../../../security/check' const apmNamespace = 'mosIntegration' diff --git a/meteor/server/api/ingest/rundownInput.ts b/meteor/server/api/ingest/rundownInput.ts index 8b05552eba0..4013b465efe 100644 --- a/meteor/server/api/ingest/rundownInput.ts +++ b/meteor/server/api/ingest/rundownInput.ts @@ -7,18 +7,14 @@ import { lazyIgnore } from '../../lib/lib' import { IngestRundown, IngestSegment, IngestPart, IngestPlaylist } from '@sofie-automation/blueprints-integration' import { logger } from '../../logging' import { RundownIngestDataCache } from './ingestCache' -import { - checkAccessAndGetPeripheralDevice, - fetchStudioIdFromDevice, - generateRundownSource, - runIngestOperation, -} from './lib' +import { fetchStudioIdFromDevice, generateRundownSource, runIngestOperation } from './lib' import { MethodContext } from '../methodContext' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' import { MediaObject } from '@sofie-automation/shared-lib/dist/core/model/MediaObjects' import { PeripheralDeviceId, RundownId, SegmentId, 
StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { checkAccessAndGetPeripheralDevice } from '../../security/check' export namespace RundownInput { export async function dataPlaylistGet( diff --git a/meteor/server/api/integration/expectedPackages.ts b/meteor/server/api/integration/expectedPackages.ts index 1c3d489b9a7..8e823b968fb 100644 --- a/meteor/server/api/integration/expectedPackages.ts +++ b/meteor/server/api/integration/expectedPackages.ts @@ -1,7 +1,7 @@ import { check } from '../../lib/check' import { Meteor } from 'meteor/meteor' import { MethodContext } from '../methodContext' -import { checkAccessAndGetPeripheralDevice } from '../ingest/lib' +import { checkAccessAndGetPeripheralDevice } from '../../security/check' import { ExpectedPackageStatusAPI, PackageInfo } from '@sofie-automation/blueprints-integration' import { ExpectedPackageWorkStatus } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackageWorkStatuses' import { assertNever, literal, protectString } from '../../lib/tempLib' diff --git a/meteor/server/api/integration/media-scanner.ts b/meteor/server/api/integration/media-scanner.ts index 4af257f01ad..cda12b162e2 100644 --- a/meteor/server/api/integration/media-scanner.ts +++ b/meteor/server/api/integration/media-scanner.ts @@ -1,6 +1,6 @@ import { Meteor } from 'meteor/meteor' import { protectString } from '../../lib/tempLib' -import { checkAccessAndGetPeripheralDevice } from '../ingest/lib' +import { checkAccessAndGetPeripheralDevice } from '../../security/check' import { MethodContext } from '../methodContext' import { MediaObject } from '@sofie-automation/shared-lib/dist/core/model/MediaObjects' import { MediaObjId, PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' diff --git 
a/meteor/server/api/integration/mediaWorkFlows.ts b/meteor/server/api/integration/mediaWorkFlows.ts index 20c1a65bf66..36fb3e2461d 100644 --- a/meteor/server/api/integration/mediaWorkFlows.ts +++ b/meteor/server/api/integration/mediaWorkFlows.ts @@ -9,7 +9,7 @@ import { } from '@sofie-automation/shared-lib/dist/peripheralDevice/mediaManager' import { PeripheralDeviceType } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { MethodContext } from '../methodContext' -import { checkAccessAndGetPeripheralDevice } from '../ingest/lib' +import { checkAccessAndGetPeripheralDevice } from '../../security/check' import { MediaWorkFlowId, MediaWorkFlowStepId, PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { MediaWorkFlows, MediaWorkFlowSteps } from '../../collections' diff --git a/meteor/server/api/lib.ts b/meteor/server/api/lib.ts deleted file mode 100644 index e0d4ed7dea4..00000000000 --- a/meteor/server/api/lib.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { RundownId, RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { Meteor } from 'meteor/meteor' -import { MethodContext } from './methodContext' -import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' -import { - RundownContentAccess, - RundownPlaylistContentAccess, - RundownPlaylistContentWriteAccess, -} from '../security/rundownPlaylist' - -/** - * This is returned from a check of access to a playlist, when access is granted. - * Fields will be populated about the user. - * It is identical to RundownPlaylistContentAccess, except for confirming access is allowed - */ -export interface VerifiedRundownPlaylistContentAccess extends RundownPlaylistContentAccess { - playlist: DBRundownPlaylist - studioId: StudioId -} -/** - * This is returned from a check of access to a rundown, when access is granted. 
- * Fields will be populated about the user. - * It is identical to RundownContentAccess, except for confirming access is allowed - */ -export interface VerifiedRundownContentAccess extends RundownContentAccess { - rundown: Rundown - studioId: StudioId -} - -/** - * Check that the current user has write access to the specified playlist, and ensure that the playlist exists - * @param context - * @param playlistId Id of the playlist - */ -export async function checkAccessToPlaylist( - context: MethodContext, - playlistId: RundownPlaylistId -): Promise { - const access = await RundownPlaylistContentWriteAccess.playout(context, playlistId) - const playlist = access.playlist - if (!playlist) throw new Meteor.Error(404, `Rundown Playlist "${playlistId}" not found!`) - return { - ...access, - playlist, - studioId: playlist.studioId, - } -} - -/** - * Check that the current user has write access to the specified rundown, and ensure that the rundown exists - * @param context - * @param rundownId Id of the rundown - */ -export async function checkAccessToRundown( - context: MethodContext, - rundownId: RundownId -): Promise { - const access = await RundownPlaylistContentWriteAccess.rundown(context, rundownId) - const rundown = access.rundown - if (!rundown) throw new Meteor.Error(404, `Rundown "${rundownId}" not found!`) - return { - ...access, - rundown, - studioId: rundown.studioId, - } -} diff --git a/meteor/server/api/mediaManager.ts b/meteor/server/api/mediaManager.ts index 3370b85944c..ea9caa81139 100644 --- a/meteor/server/api/mediaManager.ts +++ b/meteor/server/api/mediaManager.ts @@ -1,74 +1,77 @@ import { MediaWorkFlow } from '@sofie-automation/shared-lib/dist/core/model/MediaWorkFlows' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { MediaWorkFlowContentAccess } from '../security/peripheralDevice' -import { BasicAccessContext } from '../security/organization' import { MediaWorkFlows, PeripheralDevices } from 
'../collections' import { executePeripheralDeviceFunction } from './peripheralDevice/executeFunction' +import { MediaWorkFlowId, OrganizationId, PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' -export namespace MediaManagerAPI { - export async function restartAllWorkflows(access: BasicAccessContext): Promise { - const devices: Array> = await PeripheralDevices.findFetchAsync( - access.organizationId ? { organizationId: access.organizationId } : {}, - { - fields: { - _id: 1, - }, - } - ) - const workflows: Array> = await MediaWorkFlows.findFetchAsync( - { - deviceId: { $in: devices.map((d) => d._id) }, +export async function restartAllWorkflows(organizationId: OrganizationId | null): Promise { + const devices: Array> = await PeripheralDevices.findFetchAsync( + organizationId ? { organizationId: organizationId } : {}, + { + fields: { + _id: 1, }, - { - fields: { - deviceId: 1, - }, - } - ) + } + ) + const workflows: Array> = await MediaWorkFlows.findFetchAsync( + { + deviceId: { $in: devices.map((d) => d._id) }, + }, + { + fields: { + deviceId: 1, + }, + } + ) - const deviceIds = Array.from(new Set(workflows.map((w) => w.deviceId))) + const deviceIds = Array.from(new Set(workflows.map((w) => w.deviceId))) - await Promise.all( - deviceIds.map(async (deviceId) => executePeripheralDeviceFunction(deviceId, 'restartAllWorkflows')) - ) - } - export async function abortAllWorkflows(access: BasicAccessContext): Promise { - const devices: Array> = await PeripheralDevices.findFetchAsync( - access.organizationId ? 
{ organizationId: access.organizationId } : {}, - { - fields: { - _id: 1, - }, - } - ) - const workflows: Array> = await MediaWorkFlows.findFetchAsync( - { - deviceId: { $in: devices.map((d) => d._id) }, + await Promise.all( + deviceIds.map(async (deviceId) => executePeripheralDeviceFunction(deviceId, 'restartAllWorkflows')) + ) +} +export async function abortAllWorkflows(organizationId: OrganizationId | null): Promise { + const devices: Array> = await PeripheralDevices.findFetchAsync( + organizationId ? { organizationId: organizationId } : {}, + { + fields: { + _id: 1, + }, + } + ) + const workflows: Array> = await MediaWorkFlows.findFetchAsync( + { + deviceId: { $in: devices.map((d) => d._id) }, + }, + { + fields: { + deviceId: 1, }, - { - fields: { - deviceId: 1, - }, - } - ) + } + ) + + const deviceIds = Array.from(new Set(workflows.map((w) => w.deviceId))) + + await Promise.all(deviceIds.map(async (deviceId) => executePeripheralDeviceFunction(deviceId, 'abortAllWorkflows'))) +} - const deviceIds = Array.from(new Set(workflows.map((w) => w.deviceId))) +export async function restartWorkflow(deviceId: PeripheralDeviceId, workflowId: MediaWorkFlowId): Promise { + await ensureWorkflowExists(workflowId) - await Promise.all( - deviceIds.map(async (deviceId) => executePeripheralDeviceFunction(deviceId, 'abortAllWorkflows')) - ) - } + await executePeripheralDeviceFunction(deviceId, 'restartWorkflow', workflowId) +} +export async function abortWorkflow(deviceId: PeripheralDeviceId, workflowId: MediaWorkFlowId): Promise { + await ensureWorkflowExists(workflowId) + + await executePeripheralDeviceFunction(deviceId, 'abortWorkflow', workflowId) +} +export async function prioritizeWorkflow(deviceId: PeripheralDeviceId, workflowId: MediaWorkFlowId): Promise { + await ensureWorkflowExists(workflowId) + + await executePeripheralDeviceFunction(deviceId, 'prioritizeWorkflow', workflowId) +} - export async function restartWorkflow(access: MediaWorkFlowContentAccess): Promise { - 
const workflow = access.mediaWorkFlow - await executePeripheralDeviceFunction(workflow.deviceId, 'restartWorkflow', workflow._id) - } - export async function abortWorkflow(access: MediaWorkFlowContentAccess): Promise { - const workflow = access.mediaWorkFlow - await executePeripheralDeviceFunction(workflow.deviceId, 'abortWorkflow', workflow._id) - } - export async function prioritizeWorkflow(access: MediaWorkFlowContentAccess): Promise { - const workflow = access.mediaWorkFlow - await executePeripheralDeviceFunction(workflow.deviceId, 'prioritizeWorkflow', workflow._id) - } +async function ensureWorkflowExists(workflowId: MediaWorkFlowId): Promise { + const doc = await MediaWorkFlows.findOneAsync(workflowId, { projection: { _id: 1 } }) + if (!doc) throw new Error(`Workflow "${workflowId}" not found`) } diff --git a/meteor/server/api/methodContext.ts b/meteor/server/api/methodContext.ts index cd8e962aae6..4c564f75c42 100644 --- a/meteor/server/api/methodContext.ts +++ b/meteor/server/api/methodContext.ts @@ -1,21 +1,10 @@ import { Meteor } from 'meteor/meteor' -import { UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' -export interface MethodContext extends Omit { - userId: UserId | null -} +export type MethodContext = Omit /** Abstarct class to be used when defining Mehod-classes */ export abstract class MethodContextAPI implements MethodContext { // These properties are added by Meteor to the `this` context when calling methods - public userId!: UserId | null - public isSimulation!: boolean - public setUserId(_userId: string | null): void { - throw new Meteor.Error( - 500, - `This shoulc never be called, there's something wrong in with 'this' in the calling method` - ) - } public unblock(): void { throw new Meteor.Error( 500, diff --git a/meteor/server/api/organizations.ts b/meteor/server/api/organizations.ts index df8e0cedfd0..e56b6151633 100644 --- a/meteor/server/api/organizations.ts +++ b/meteor/server/api/organizations.ts @@ -4,14 +4,15 @@ import 
{ MethodContextAPI, MethodContext } from './methodContext' import { NewOrganizationAPI, OrganizationAPIMethods } from '@sofie-automation/meteor-lib/dist/api/organization' import { registerClassToMeteorMethods } from '../methods' import { DBOrganization, DBOrganizationBase } from '@sofie-automation/meteor-lib/dist/collections/Organization' -import { OrganizationContentWriteAccess } from '../security/organization' -import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/lib/securityVerify' import { insertStudioInner } from './studio/api' import { insertShowStyleBaseInner } from './showStyles' -import { resetCredentials } from '../security/lib/credentials' import { BlueprintId, OrganizationId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { Blueprints, CoreSystem, Organizations, ShowStyleBases, Studios, Users } from '../collections' +import { Blueprints, CoreSystem, Organizations, ShowStyleBases, Studios } from '../collections' import { getCoreSystemAsync } from '../coreSystem/collection' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { assertConnectionHasOneOfPermissions } from '../security/auth' + +const PERMISSIONS_FOR_MANAGE_ORGANIZATIONS: Array = ['configure'] async function createDefaultEnvironmentForOrg(orgId: OrganizationId) { let systemBlueprintId: BlueprintId | undefined @@ -43,8 +44,11 @@ async function createDefaultEnvironmentForOrg(orgId: OrganizationId) { await ShowStyleBases.updateAsync(showStyleId, { $set: { blueprintId: showStyleBlueprintId } }) } -export async function createOrganization(organization: DBOrganizationBase): Promise { - triggerWriteAccessBecauseNoCheckNecessary() +export async function createOrganization( + context: MethodContext, + organization: DBOrganizationBase +): Promise { + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_ORGANIZATIONS) const orgId = await Organizations.insertAsync( literal({ @@ -61,12 +65,8 @@ export 
async function createOrganization(organization: DBOrganizationBase): Prom } async function removeOrganization(context: MethodContext, organizationId: OrganizationId) { - await OrganizationContentWriteAccess.organization(context, organizationId) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_ORGANIZATIONS) - const users = await Users.findFetchAsync({ organizationId }) - users.forEach((user) => { - resetCredentials({ userId: user._id }) - }) await Organizations.removeAsync(organizationId) } diff --git a/meteor/server/api/packageManager.ts b/meteor/server/api/packageManager.ts index 09888263930..c446852f257 100644 --- a/meteor/server/api/packageManager.ts +++ b/meteor/server/api/packageManager.ts @@ -3,43 +3,31 @@ import { PeripheralDeviceType, PERIPHERAL_SUBTYPE_PROCESS, } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { PeripheralDeviceContentWriteAccess } from '../security/peripheralDevice' -import { StudioContentAccess } from '../security/studio' import { PeripheralDevices } from '../collections' import { executePeripheralDeviceFunction } from './peripheralDevice/executeFunction' +import { PeripheralDeviceId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -export namespace PackageManagerAPI { - export async function restartExpectation( - access: PeripheralDeviceContentWriteAccess.ContentAccess, - workId: string - ): Promise { - await executePeripheralDeviceFunction(access.deviceId, 'restartExpectation', workId) - } - export async function abortExpectation( - access: PeripheralDeviceContentWriteAccess.ContentAccess, - workId: string - ): Promise { - await executePeripheralDeviceFunction(access.deviceId, 'abortExpectation', workId) - } +export async function restartExpectation(deviceId: PeripheralDeviceId, workId: string): Promise { + await executePeripheralDeviceFunction(deviceId, 'restartExpectation', workId) +} +export async function abortExpectation(deviceId: PeripheralDeviceId, 
workId: string): Promise { + await executePeripheralDeviceFunction(deviceId, 'abortExpectation', workId) +} - export async function restartAllExpectationsInStudio(access: StudioContentAccess): Promise { - const packageManagerDevices = await PeripheralDevices.findFetchAsync({ - studioId: access.studioId, - category: PeripheralDeviceCategory.PACKAGE_MANAGER, - type: PeripheralDeviceType.PACKAGE_MANAGER, - subType: PERIPHERAL_SUBTYPE_PROCESS, - }) +export async function restartAllExpectationsInStudio(studioId: StudioId): Promise { + const packageManagerDevices = await PeripheralDevices.findFetchAsync({ + studioId: studioId, + category: PeripheralDeviceCategory.PACKAGE_MANAGER, + type: PeripheralDeviceType.PACKAGE_MANAGER, + subType: PERIPHERAL_SUBTYPE_PROCESS, + }) - await Promise.all( - packageManagerDevices.map(async (packageManagerDevice) => { - return executePeripheralDeviceFunction(packageManagerDevice._id, 'restartAllExpectations') - }) - ) - } - export async function restartPackageContainer( - access: PeripheralDeviceContentWriteAccess.ContentAccess, - containerId: string - ): Promise { - await executePeripheralDeviceFunction(access.deviceId, 'restartPackageContainer', containerId) - } + await Promise.all( + packageManagerDevices.map(async (packageManagerDevice) => { + return executePeripheralDeviceFunction(packageManagerDevice._id, 'restartAllExpectations') + }) + ) +} +export async function restartPackageContainer(deviceId: PeripheralDeviceId, containerId: string): Promise { + await executePeripheralDeviceFunction(deviceId, 'restartPackageContainer', containerId) } diff --git a/meteor/server/api/peripheralDevice.ts b/meteor/server/api/peripheralDevice.ts index 0c6bed88572..90604fb61aa 100644 --- a/meteor/server/api/peripheralDevice.ts +++ b/meteor/server/api/peripheralDevice.ts @@ -26,10 +26,9 @@ import { MediaWorkFlowStep } from '@sofie-automation/shared-lib/dist/core/model/ import { MOS } from '@sofie-automation/corelib' import { determineDiffTime } from 
'./systemTime/systemTime' import { getTimeDiff } from './systemTime/api' -import { PeripheralDeviceContentWriteAccess } from '../security/peripheralDevice' import { MethodContextAPI, MethodContext } from './methodContext' -import { triggerWriteAccess, triggerWriteAccessBecauseNoCheckNecessary } from '../security/lib/securityVerify' -import { checkAccessAndGetPeripheralDevice } from './ingest/lib' +import { triggerWriteAccess, triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' +import { checkAccessAndGetPeripheralDevice } from '../security/check' import { UserActionsLogItem } from '@sofie-automation/meteor-lib/dist/collections/UserActionsLog' import { PackageManagerIntegration } from './integration/expectedPackages' import { profiler } from './profiler' @@ -81,7 +80,9 @@ export namespace ServerPeripheralDeviceAPI { check(deviceId, String) const existingDevice = await PeripheralDevices.findOneAsync(deviceId) if (existingDevice) { - await PeripheralDeviceContentWriteAccess.peripheralDevice({ userId: context.userId, token }, deviceId) + await checkAccessAndGetPeripheralDevice(deviceId, token, context) + } else { + triggerWriteAccessBecauseNoCheckNecessary() } check(token, String) @@ -356,12 +357,12 @@ export namespace ServerPeripheralDeviceAPI { return false } export async function disableSubDevice( - access: PeripheralDeviceContentWriteAccess.ContentAccess, + deviceId: PeripheralDeviceId, subDeviceId: string, disable: boolean ): Promise { - const peripheralDevice = access.device - const deviceId = access.deviceId + const peripheralDevice = await PeripheralDevices.findOneAsync(deviceId) + if (!peripheralDevice) throw new Meteor.Error(404, `PeripheralDevice "${deviceId}" not found`) // check that the peripheralDevice has subDevices if (peripheralDevice.type !== PeripheralDeviceType.PLAYOUT) @@ -432,18 +433,21 @@ export namespace ServerPeripheralDeviceAPI { }) } } - export async function getDebugStates(access: 
PeripheralDeviceContentWriteAccess.ContentAccess): Promise { + export async function getDebugStates(peripheralDeviceId: PeripheralDeviceId): Promise { + const peripheralDevice = await PeripheralDevices.findOneAsync(peripheralDeviceId) + if (!peripheralDevice) return {} + if ( // Debug states are only valid for Playout devices and must be enabled with the `debugState` option - access.device.type !== PeripheralDeviceType.PLAYOUT || - !access.device.settings || - !(access.device.settings as any)['debugState'] + peripheralDevice.type !== PeripheralDeviceType.PLAYOUT || + !peripheralDevice.settings || + !(peripheralDevice.settings as any)['debugState'] ) { return {} } try { - return await executePeripheralDeviceFunction(access.deviceId, 'getDebugStates') + return await executePeripheralDeviceFunction(peripheralDevice._id, 'getDebugStates') } catch (e) { logger.error(e) return {} diff --git a/meteor/server/api/playout/api.ts b/meteor/server/api/playout/api.ts index d290abc6d7d..8b18fed8aa9 100644 --- a/meteor/server/api/playout/api.ts +++ b/meteor/server/api/playout/api.ts @@ -6,20 +6,29 @@ import { logger } from '../../logging' import { MethodContextAPI } from '../methodContext' import { QueueStudioJob } from '../../worker/worker' import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio' -import { StudioContentWriteAccess } from '../../security/studio' + import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { assertConnectionHasOneOfPermissions } from '../../security/auth' +import { Studios } from '../../collections' +import { Meteor } from 'meteor/meteor' + +const PERMISSIONS_FOR_STUDIO_BASELINE: Array = ['configure', 'studio'] class ServerPlayoutAPIClass extends MethodContextAPI implements NewPlayoutAPI { async updateStudioBaseline(studioId: StudioId): Promise { - await StudioContentWriteAccess.baseline(this, studioId) + 
assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_STUDIO_BASELINE) const res = await QueueStudioJob(StudioJobs.UpdateStudioBaseline, studioId, undefined) return res.complete } async shouldUpdateStudioBaseline(studioId: StudioId) { - const access = await StudioContentWriteAccess.baseline(this, studioId) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_STUDIO_BASELINE) + + const studio = await Studios.findOneAsync(studioId) + if (!studio) throw new Meteor.Error(404, `Studio "${studioId}" not found`) - return ServerPlayoutAPI.shouldUpdateStudioBaseline(access) + return ServerPlayoutAPI.shouldUpdateStudioBaseline(studio) } } registerClassToMeteorMethods(PlayoutAPIMethods, ServerPlayoutAPIClass, false) diff --git a/meteor/server/api/playout/playout.ts b/meteor/server/api/playout/playout.ts index 20fb5e40c31..a9d36df0a9e 100644 --- a/meteor/server/api/playout/playout.ts +++ b/meteor/server/api/playout/playout.ts @@ -1,15 +1,14 @@ /* tslint:disable:no-use-before-declare */ import { PackageInfo } from '../../coreSystem' -import { StudioContentAccess } from '../../security/studio' import { shouldUpdateStudioBaselineInner } from '@sofie-automation/corelib/dist/studio/baseline' import { Blueprints, RundownPlaylists, Timeline } from '../../collections' import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio' import { QueueStudioJob } from '../../worker/worker' +import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' export namespace ServerPlayoutAPI { - export async function shouldUpdateStudioBaseline(access: StudioContentAccess): Promise { - const { studio } = access - + export async function shouldUpdateStudioBaseline(studio: DBStudio): Promise { // This is intentionally not in a lock/queue, as doing so will cause it to block playout performance, and being wrong is harmless if (studio) { @@ -34,11 +33,11 @@ export 
namespace ServerPlayoutAPI { } export async function switchRouteSet( - access: StudioContentAccess, + studioId: StudioId, routeSetId: string, state: boolean | 'toggle' ): Promise { - const queuedJob = await QueueStudioJob(StudioJobs.SwitchRouteSet, access.studioId, { + const queuedJob = await QueueStudioJob(StudioJobs.SwitchRouteSet, studioId, { routeSetId, state, }) diff --git a/meteor/server/api/rest/v0/__tests__/rest.test.ts b/meteor/server/api/rest/v0/__tests__/rest.test.ts index 41d9e876c7a..10d3ed72047 100644 --- a/meteor/server/api/rest/v0/__tests__/rest.test.ts +++ b/meteor/server/api/rest/v0/__tests__/rest.test.ts @@ -20,23 +20,6 @@ describe('REST API', () => { const legacyApiRouter = createLegacyApiRouter() - test('registers endpoints for all UserActionAPI methods', async () => { - for (const [methodName, methodValue] of Object.entries(UserActionAPIMethods)) { - const signature = MeteorMethodSignatures[methodValue] - - let resource = `/action/${methodName}` - for (const paramName of signature || []) { - resource += `/${paramName}` - } - - const ctx = await callKoaRoute(legacyApiRouter, { - method: 'POST', - url: resource, - }) - expect(ctx.response.status).not.toBe(404) - } - }) - test('calls the UserActionAPI methods, when doing a POST to the endpoint', async () => { for (const [methodName, methodValue] of Object.entries(UserActionAPIMethods)) { const signature = MeteorMethodSignatures[methodValue] diff --git a/meteor/server/api/rest/v1/buckets.ts b/meteor/server/api/rest/v1/buckets.ts index 5a4b764267f..8086ea5159f 100644 --- a/meteor/server/api/rest/v1/buckets.ts +++ b/meteor/server/api/rest/v1/buckets.ts @@ -8,13 +8,15 @@ import { ServerClientAPI } from '../../client' import { protectString } from '@sofie-automation/shared-lib/dist/lib/protectedString' import { getCurrentTime } from '../../../lib/lib' import { check } from 'meteor/check' -import { StudioContentWriteAccess } from '../../../security/studio' import { BucketsAPI } from '../../buckets' 
-import { BucketSecurity } from '../../../security/buckets' import { APIFactory, APIRegisterHook, ServerAPIContext } from './types' import { logger } from '../../../logging' import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' import { IngestAdlib } from '@sofie-automation/blueprints-integration' +import { assertConnectionHasOneOfPermissions } from '../../../security/auth' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' + +const PERMISSIONS_FOR_BUCKET_MODIFICATION: Array = ['studio'] export class BucketsServerAPI implements BucketsRestAPI { constructor(private context: ServerAPIContext) {} @@ -57,11 +59,9 @@ export class BucketsServerAPI implements BucketsRestAPI { check(bucket.studioId, String) check(bucket.name, String) - const access = await StudioContentWriteAccess.bucket( - this.context.getCredentials(), - protectString(bucket.studioId) - ) - return BucketsAPI.createNewBucket(access, bucket.name) + assertConnectionHasOneOfPermissions(connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + + return BucketsAPI.createNewBucket(protectString(bucket.studioId), bucket.name) } ) if (ClientAPI.isClientResponseSuccess(createdBucketResponse)) { @@ -84,8 +84,9 @@ export class BucketsServerAPI implements BucketsRestAPI { async () => { check(bucketId, String) - const access = await BucketSecurity.allowWriteAccess(this.context.getCredentials(), bucketId) - return BucketsAPI.removeBucket(access) + assertConnectionHasOneOfPermissions(connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + + return BucketsAPI.removeBucket(bucketId) } ) } @@ -104,8 +105,9 @@ export class BucketsServerAPI implements BucketsRestAPI { async () => { check(bucketId, String) - const access = await BucketSecurity.allowWriteAccess(this.context.getCredentials(), bucketId) - return BucketsAPI.emptyBucket(access) + assertConnectionHasOneOfPermissions(connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + + return 
BucketsAPI.emptyBucket(bucketId) } ) } @@ -122,6 +124,8 @@ export class BucketsServerAPI implements BucketsRestAPI { 'bucketsRemoveBucketAdLib', { externalId }, async () => { + assertConnectionHasOneOfPermissions(connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + const bucketAdLibPiecePromise = BucketAdLibs.findOneAsync( { externalId }, { @@ -139,17 +143,9 @@ export class BucketsServerAPI implements BucketsRestAPI { bucketAdLibActionPromise, ]) if (bucketAdLibPiece) { - const access = await BucketSecurity.allowWriteAccessPiece( - this.context.getCredentials(), - bucketAdLibPiece._id - ) - return BucketsAPI.removeBucketAdLib(access) + return BucketsAPI.removeBucketAdLib(bucketAdLibPiece._id) } else if (bucketAdLibAction) { - const access = await BucketSecurity.allowWriteAccessAction( - this.context.getCredentials(), - bucketAdLibAction._id - ) - return BucketsAPI.removeBucketAdLibAction(access) + return BucketsAPI.removeBucketAdLibAction(bucketAdLibAction._id) } } ) @@ -173,8 +169,9 @@ export class BucketsServerAPI implements BucketsRestAPI { check(showStyleBaseId, String) check(ingestItem, Object) - const access = await BucketSecurity.allowWriteAccess(this.context.getCredentials(), bucketId) - return BucketsAPI.importAdlibToBucket(access, showStyleBaseId, undefined, ingestItem) + assertConnectionHasOneOfPermissions(connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + + return BucketsAPI.importAdlibToBucket(bucketId, showStyleBaseId, undefined, ingestItem) } ) } diff --git a/meteor/server/api/rest/v1/index.ts b/meteor/server/api/rest/v1/index.ts index c935f83fa4a..c36553562d8 100644 --- a/meteor/server/api/rest/v1/index.ts +++ b/meteor/server/api/rest/v1/index.ts @@ -9,8 +9,7 @@ import { ClientAPI } from '@sofie-automation/meteor-lib/dist/api/client' import { MethodContextAPI } from '../../methodContext' import { logger } from '../../../logging' import { CURRENT_SYSTEM_VERSION } from '../../../migration/currentSystemVersion' -import { Credentials } from 
'../../../security/lib/credentials' -import { triggerWriteAccess } from '../../../security/lib/securityVerify' +import { triggerWriteAccess } from '../../../security/securityVerify' import { makeMeteorConnectionFromKoa } from '../koa' import { registerRoutes as registerBlueprintsRoutes } from './blueprints' import { registerRoutes as registerDevicesRoutes } from './devices' @@ -35,21 +34,12 @@ function restAPIUserEvent( class APIContext implements ServerAPIContext { public getMethodContext(connection: Meteor.Connection): MethodContextAPI { return { - userId: null, connection, - isSimulation: false, - setUserId: () => { - /* no-op */ - }, unblock: () => { /* no-op */ }, } } - - public getCredentials(): Credentials { - return { userId: null } - } } export const koaRouter = new KoaRouter() diff --git a/meteor/server/api/rest/v1/playlists.ts b/meteor/server/api/rest/v1/playlists.ts index 30513751c4e..0e3193ad6e9 100644 --- a/meteor/server/api/rest/v1/playlists.ts +++ b/meteor/server/api/rest/v1/playlists.ts @@ -33,7 +33,7 @@ import { QueueNextSegmentResult, StudioJobs } from '@sofie-automation/corelib/di import { getCurrentTime } from '../../../lib/lib' import { TriggerReloadDataResponse } from '@sofie-automation/meteor-lib/dist/api/userActions' import { ServerRundownAPI } from '../../rundown' -import { triggerWriteAccess } from '../../../security/lib/securityVerify' +import { triggerWriteAccess } from '../../../security/securityVerify' class PlaylistsServerAPI implements PlaylistsRestAPI { constructor(private context: ServerAPIContext) {} diff --git a/meteor/server/api/rest/v1/studios.ts b/meteor/server/api/rest/v1/studios.ts index 30e57b14ba3..aaabedc6933 100644 --- a/meteor/server/api/rest/v1/studios.ts +++ b/meteor/server/api/rest/v1/studios.ts @@ -17,9 +17,12 @@ import { getCurrentTime } from '../../../lib/lib' import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import 
{ PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { StudioContentWriteAccess } from '../../../security/studio' import { ServerPlayoutAPI } from '../../playout/playout' import { checkValidation } from '.' +import { assertConnectionHasOneOfPermissions } from '../../../security/auth' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' + +const PERMISSIONS_FOR_PLAYOUT_USERACTION: Array = ['studio'] class StudiosServerAPI implements StudiosRestAPI { constructor(private context: ServerAPIContext) {} @@ -215,8 +218,9 @@ class StudiosServerAPI implements StudiosRestAPI { check(routeSetId, String) check(state, Boolean) - const access = await StudioContentWriteAccess.routeSet(this.context.getCredentials(), studioId) - return ServerPlayoutAPI.switchRouteSet(access, routeSetId, state) + assertConnectionHasOneOfPermissions(connection, ...PERMISSIONS_FOR_PLAYOUT_USERACTION) + + return ServerPlayoutAPI.switchRouteSet(studioId, routeSetId, state) } ) } diff --git a/meteor/server/api/rest/v1/types.ts b/meteor/server/api/rest/v1/types.ts index f7bb0e7a439..a5a3a6316de 100644 --- a/meteor/server/api/rest/v1/types.ts +++ b/meteor/server/api/rest/v1/types.ts @@ -1,7 +1,6 @@ import { UserErrorMessage } from '@sofie-automation/corelib/dist/error' import { Meteor } from 'meteor/meteor' import { ClientAPI } from '@sofie-automation/meteor-lib/dist/api/client' -import { Credentials } from '../../../security/lib/credentials' import { MethodContextAPI } from '../../methodContext' export type APIRegisterHook = ( @@ -24,5 +23,4 @@ export interface APIFactory { export interface ServerAPIContext { getMethodContext(connection: Meteor.Connection): MethodContextAPI - getCredentials(): Credentials } diff --git a/meteor/server/api/rundown.ts b/meteor/server/api/rundown.ts index 51b9ec5a9de..b3dfb757349 100644 --- a/meteor/server/api/rundown.ts +++ b/meteor/server/api/rundown.ts @@ -12,36 +12,36 @@ import { 
TriggerReloadDataResponse, } from '@sofie-automation/meteor-lib/dist/api/userActions' import { MethodContextAPI, MethodContext } from './methodContext' -import { StudioContentWriteAccess } from '../security/studio' import { runIngestOperation } from './ingest/lib' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' -import { VerifiedRundownContentAccess, VerifiedRundownPlaylistContentAccess } from './lib' +import { VerifiedRundownForUserAction, VerifiedRundownPlaylistForUserAction } from '../security/check' import { Blueprint } from '@sofie-automation/corelib/dist/dataModel/Blueprint' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { Blueprints, Rundowns, ShowStyleBases, ShowStyleVariants, Studios } from '../collections' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' export namespace ServerRundownAPI { /** Remove an individual rundown */ - export async function removeRundown(access: VerifiedRundownContentAccess): Promise { - await runIngestOperation(access.rundown.studioId, IngestJobs.UserRemoveRundown, { - rundownId: access.rundown._id, + export async function removeRundown(rundown: VerifiedRundownForUserAction): Promise { + await runIngestOperation(rundown.studioId, IngestJobs.UserRemoveRundown, { + rundownId: rundown._id, force: true, }) } - export async function unsyncRundown(access: VerifiedRundownContentAccess): Promise { - await runIngestOperation(access.rundown.studioId, IngestJobs.UserUnsyncRundown, { - rundownId: access.rundown._id, + export async function unsyncRundown(rundown: VerifiedRundownForUserAction): Promise { + await runIngestOperation(rundown.studioId, IngestJobs.UserUnsyncRundown, { + rundownId: rundown._id, }) } /** Resync all rundowns in a rundownPlaylist */ export async function resyncRundownPlaylist( - access: VerifiedRundownPlaylistContentAccess + playlist: 
VerifiedRundownPlaylistForUserAction ): Promise { - logger.info('resyncRundownPlaylist ' + access.playlist._id) + logger.info('resyncRundownPlaylist ' + playlist._id) - const rundowns = await Rundowns.findFetchAsync({ playlistId: access.playlist._id }) + const rundowns = await Rundowns.findFetchAsync({ playlistId: playlist._id }) const responses = await Promise.all( rundowns.map(async (rundown) => { return { @@ -56,23 +56,22 @@ export namespace ServerRundownAPI { } } - export async function resyncRundown(access: VerifiedRundownContentAccess): Promise { - return IngestActions.reloadRundown(access.rundown) + export async function resyncRundown(rundown: VerifiedRundownForUserAction): Promise { + return IngestActions.reloadRundown(rundown) } } export namespace ClientRundownAPI { export async function rundownPlaylistNeedsResync( - context: MethodContext, + _context: MethodContext, playlistId: RundownPlaylistId ): Promise { check(playlistId, String) - const access = await StudioContentWriteAccess.rundownPlaylist(context, playlistId) - const playlist = access.playlist + triggerWriteAccessBecauseNoCheckNecessary() const rundowns = await Rundowns.findFetchAsync( { - playlistId: playlist._id, + playlistId: playlistId, }, { sort: { _id: 1 }, diff --git a/meteor/server/api/rundownLayouts.ts b/meteor/server/api/rundownLayouts.ts index db9eb108e02..62f45967620 100644 --- a/meteor/server/api/rundownLayouts.ts +++ b/meteor/server/api/rundownLayouts.ts @@ -10,12 +10,15 @@ import { import { literal, getRandomId, protectString } from '../lib/tempLib' import { logger } from '../logging' import { MethodContext, MethodContextAPI } from './methodContext' -import { ShowStyleContentWriteAccess } from '../security/showStyle' import { fetchShowStyleBaseLight } from '../optimizations' import { BlueprintId, RundownLayoutId, ShowStyleBaseId, UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { RundownLayouts } from '../collections' import KoaRouter from '@koa/router' import 
bodyParser from 'koa-bodyparser' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { assertConnectionHasOneOfPermissions } from '../security/auth' + +const PERMISSIONS_FOR_MANAGE_RUNDOWN_LAYOUTS: Array = ['configure'] export async function createRundownLayout( name: string, @@ -57,6 +60,8 @@ shelfLayoutsRouter.post( async (ctx) => { ctx.response.type = 'text/plain' + assertConnectionHasOneOfPermissions(ctx, ...PERMISSIONS_FOR_MANAGE_RUNDOWN_LAYOUTS) + const showStyleBaseId: ShowStyleBaseId = protectString(ctx.params.showStyleBaseId) check(showStyleBaseId, String) @@ -129,15 +134,16 @@ async function apiCreateRundownLayout( check(showStyleBaseId, String) check(regionId, String) - const access = await ShowStyleContentWriteAccess.anyContent(context, showStyleBaseId) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_RUNDOWN_LAYOUTS) - return createRundownLayout(name, type, showStyleBaseId, regionId, undefined, access.userId || undefined) + return createRundownLayout(name, type, showStyleBaseId, regionId, undefined, undefined) } async function apiRemoveRundownLayout(context: MethodContext, id: RundownLayoutId) { check(id, String) - const access = await ShowStyleContentWriteAccess.rundownLayout(context, id) - const rundownLayout = access.rundownLayout + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_RUNDOWN_LAYOUTS) + + const rundownLayout = await RundownLayouts.findOneAsync(id) if (!rundownLayout) throw new Meteor.Error(404, `RundownLayout "${id}" not found`) await removeRundownLayout(id) diff --git a/meteor/server/api/showStyles.ts b/meteor/server/api/showStyles.ts index 8275c818367..fdf42e6e5ad 100644 --- a/meteor/server/api/showStyles.ts +++ b/meteor/server/api/showStyles.ts @@ -10,9 +10,6 @@ import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowSt import { DBShowStyleVariant } from 
'@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' import { protectString, getRandomId, omit } from '../lib/tempLib' import { MethodContextAPI, MethodContext } from './methodContext' -import { OrganizationContentWriteAccess } from '../security/organization' -import { ShowStyleContentWriteAccess } from '../security/showStyle' -import { Credentials } from '../security/lib/credentials' import deepmerge from 'deepmerge' import { applyAndValidateOverrides, @@ -23,6 +20,10 @@ import { IBlueprintConfig } from '@sofie-automation/blueprints-integration' import { OrganizationId, ShowStyleBaseId, ShowStyleVariantId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { RundownLayouts, ShowStyleBases, ShowStyleVariants, Studios } from '../collections' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { assertConnectionHasOneOfPermissions } from '../security/auth' + +const PERMISSIONS_FOR_MANAGE_SHOWSTYLES: Array = ['configure'] export interface ShowStyleCompound extends Omit { showStyleVariantId: ShowStyleVariantId @@ -74,9 +75,10 @@ export function createShowStyleCompound( } } -export async function insertShowStyleBase(context: MethodContext | Credentials): Promise { - const access = await OrganizationContentWriteAccess.showStyleBase(context) - return insertShowStyleBaseInner(access.organizationId) +export async function insertShowStyleBase(context: MethodContext): Promise { + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_SHOWSTYLES) + + return insertShowStyleBaseInner(null) } export async function insertShowStyleBaseInner(organizationId: OrganizationId | null): Promise { @@ -97,20 +99,14 @@ export async function insertShowStyleBaseInner(organizationId: OrganizationId | await insertShowStyleVariantInner(showStyleBase._id, 'Default') return showStyleBase._id } -async function 
assertShowStyleBaseAccess(context: MethodContext | Credentials, showStyleBaseId: ShowStyleBaseId) { - check(showStyleBaseId, String) - - const access = await ShowStyleContentWriteAccess.anyContent(context, showStyleBaseId) - const showStyleBase = access.showStyleBase - if (!showStyleBase) throw new Meteor.Error(404, `showStyleBase "${showStyleBaseId}" not found`) -} export async function insertShowStyleVariant( - context: MethodContext | Credentials, + context: MethodContext, showStyleBaseId: ShowStyleBaseId, name?: string ): Promise { - await assertShowStyleBaseAccess(context, showStyleBaseId) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_SHOWSTYLES) + return insertShowStyleVariantInner(showStyleBaseId, name) } @@ -150,19 +146,19 @@ async function insertShowStyleVariantInner( } export async function importShowStyleVariant( - context: MethodContext | Credentials, + context: MethodContext, showStyleVariant: DBShowStyleVariant ): Promise { - await assertShowStyleBaseAccess(context, showStyleVariant.showStyleBaseId) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_SHOWSTYLES) return ShowStyleVariants.insertAsync(showStyleVariant) } export async function importShowStyleVariantAsNew( - context: MethodContext | Credentials, + context: MethodContext, showStyleVariant: Omit ): Promise { - await assertShowStyleBaseAccess(context, showStyleVariant.showStyleBaseId) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_SHOWSTYLES) const newShowStyleVariant: DBShowStyleVariant = { ...showStyleVariant, @@ -173,7 +169,7 @@ export async function importShowStyleVariantAsNew( } export async function removeShowStyleBase(context: MethodContext, showStyleBaseId: ShowStyleBaseId): Promise { - await assertShowStyleBaseAccess(context, showStyleBaseId) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_SHOWSTYLES) await Promise.allSettled([ 
ShowStyleBases.removeAsync(showStyleBaseId), @@ -192,8 +188,9 @@ export async function removeShowStyleVariant( ): Promise { check(showStyleVariantId, String) - const access = await ShowStyleContentWriteAccess.showStyleVariant(context, showStyleVariantId) - const showStyleVariant = access.showStyleVariant + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_SHOWSTYLES) + + const showStyleVariant = await ShowStyleVariants.findOneAsync(showStyleVariantId) if (!showStyleVariant) throw new Meteor.Error(404, `showStyleVariant "${showStyleVariantId}" not found`) await ShowStyleVariants.removeAsync(showStyleVariant._id) @@ -207,8 +204,9 @@ export async function reorderShowStyleVariant( check(showStyleVariantId, String) check(rank, Number) - const access = await ShowStyleContentWriteAccess.showStyleVariant(context, showStyleVariantId) - const showStyleVariant = access.showStyleVariant + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_SHOWSTYLES) + + const showStyleVariant = await ShowStyleVariants.findOneAsync(showStyleVariantId) if (!showStyleVariant) throw new Meteor.Error(404, `showStyleVariant "${showStyleVariantId}" not found`) await ShowStyleVariants.updateAsync(showStyleVariantId, { @@ -218,7 +216,9 @@ export async function reorderShowStyleVariant( }) } -async function getCreateAdlibTestingRundownOptions(): Promise { +async function getCreateAdlibTestingRundownOptions(context: MethodContext): Promise { + assertConnectionHasOneOfPermissions(context.connection, 'studio') + const [studios, showStyleBases, showStyleVariants] = await Promise.all([ Studios.findFetchAsync( {}, @@ -306,7 +306,7 @@ class ServerShowStylesAPI extends MethodContextAPI implements NewShowStylesAPI { } async getCreateAdlibTestingRundownOptions() { - return getCreateAdlibTestingRundownOptions() + return getCreateAdlibTestingRundownOptions(this) } } registerClassToMeteorMethods(ShowStylesAPIMethods, ServerShowStylesAPI, false) diff 
--git a/meteor/server/api/singleUseTokens.ts b/meteor/server/api/singleUseTokens.ts index d775a5e7606..88fc57a45fe 100644 --- a/meteor/server/api/singleUseTokens.ts +++ b/meteor/server/api/singleUseTokens.ts @@ -3,7 +3,7 @@ import { Time } from '@sofie-automation/blueprints-integration' import { getHash } from '@sofie-automation/corelib/dist/hash' import { getCurrentTime } from '../lib/lib' import { SINGLE_USE_TOKEN_SALT } from '@sofie-automation/meteor-lib/dist/api/userActions' -import { isInTestWrite } from '../security/lib/securityVerify' +import { isInTestWrite } from '../security/securityVerify' // The following code is taken from an NPM pacakage called "@sunknudsen/totp", but copied here, instead // of used as a dependency so that it's not vulnerable to a supply chain attack diff --git a/meteor/server/api/snapshot.ts b/meteor/server/api/snapshot.ts index a07dbdb7d98..3ca719a2d4c 100644 --- a/meteor/server/api/snapshot.ts +++ b/meteor/server/api/snapshot.ts @@ -39,12 +39,7 @@ import { importIngestRundown } from './ingest/http' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { RundownLayoutBase } from '@sofie-automation/meteor-lib/dist/collections/RundownLayouts' import { DBTriggeredActions } from '@sofie-automation/meteor-lib/dist/collections/TriggeredActions' -import { Settings } from '../Settings' import { MethodContext, MethodContextAPI } from './methodContext' -import { Credentials, isResolvedCredentials } from '../security/lib/credentials' -import { OrganizationContentWriteAccess } from '../security/organization' -import { StudioContentWriteAccess } from '../security/studio' -import { SystemWriteAccess } from '../security/system' import { saveIntoDb, sumChanges } from '../lib/database' import * as fs from 'fs' import { ExpectedPackageWorkStatus } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackageWorkStatuses' @@ -57,8 +52,7 @@ import { checkStudioExists } from '../optimizations' import { 
CoreRundownPlaylistSnapshot } from '@sofie-automation/corelib/dist/snapshots' import { QueueStudioJob } from '../worker/worker' import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio' -import { ReadonlyDeep } from 'type-fest' -import { checkAccessToPlaylist, VerifiedRundownPlaylistContentAccess } from './lib' +import { checkAccessToPlaylist, VerifiedRundownPlaylistForUserAction } from '../security/check' import { getSystemStorePath, PackageInfo } from '../coreSystem' import { JSONBlobParse, JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' import { @@ -93,6 +87,10 @@ import { import { getCoreSystemAsync } from '../coreSystem/collection' import { executePeripheralDeviceFunction } from './peripheralDevice/executeFunction' import { verifyHashedToken } from './singleUseTokens' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { assertConnectionHasOneOfPermissions, RequestCredentials } from '../security/auth' + +const PERMISSIONS_FOR_SNAPSHOT_MANAGEMENT: Array = ['configure'] interface RundownPlaylistSnapshot extends CoreRundownPlaylistSnapshot { versionExtended: string | undefined @@ -155,9 +153,6 @@ async function createSystemSnapshot( const coreSystem = await getCoreSystemAsync() if (!coreSystem) throw new Meteor.Error(500, `coreSystem not set up`) - if (Settings.enableUserAccounts && !organizationId) - throw new Meteor.Error(500, 'Not able to create a systemSnaphost without organizationId') - let queryStudio: MongoQuery = {} let queryShowStyleBases: MongoQuery = {} let queryShowStyleVariants: MongoQuery = {} @@ -320,7 +315,7 @@ function getPiecesMediaObjects(pieces: PieceGeneric[]): string[] { } async function createRundownPlaylistSnapshot( - playlist: ReadonlyDeep, + playlist: VerifiedRundownPlaylistForUserAction, full = false ): Promise { /** Max count of one type of items to include in the snapshot */ @@ -452,24 +447,12 @@ async function storeSnaphot( return id } -async function 
retreiveSnapshot(snapshotId: SnapshotId, cred0: Credentials): Promise { +async function retreiveSnapshot(snapshotId: SnapshotId, cred: RequestCredentials | null): Promise { + assertConnectionHasOneOfPermissions(cred, ...PERMISSIONS_FOR_SNAPSHOT_MANAGEMENT) + const snapshot = await Snapshots.findOneAsync(snapshotId) if (!snapshot) throw new Meteor.Error(404, `Snapshot not found!`) - if (Settings.enableUserAccounts) { - if (snapshot.type === SnapshotType.RUNDOWNPLAYLIST) { - if (!snapshot.studioId) - throw new Meteor.Error(500, `Snapshot is of type "${snapshot.type}" but hase no studioId`) - await StudioContentWriteAccess.dataFromSnapshot(cred0, snapshot.studioId) - } else if (snapshot.type === SnapshotType.SYSTEM) { - if (!snapshot.organizationId) - throw new Meteor.Error(500, `Snapshot is of type "${snapshot.type}" but has no organizationId`) - await OrganizationContentWriteAccess.dataFromSnapshot(cred0, snapshot.organizationId) - } else { - await SystemWriteAccess.coreSystem(cred0) - } - } - const storePath = getSystemStorePath() const filePath = Path.join(storePath, snapshot.fileName) @@ -663,11 +646,9 @@ export async function storeSystemSnapshot( throw new Meteor.Error(401, `Restart token is invalid or has expired`) } - const { organizationId, cred } = await OrganizationContentWriteAccess.snapshot(context) - if (Settings.enableUserAccounts && isResolvedCredentials(cred)) { - if (cred.user && !cred.user.superAdmin) throw new Meteor.Error(401, 'Only Super Admins can store Snapshots') - } - return internalStoreSystemSnapshot(organizationId, studioId, reason) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_SNAPSHOT_MANAGEMENT) + + return internalStoreSystemSnapshot(null, studioId, reason) } /** Take and store a system snapshot. For internal use only, performs no access control. 
*/ export async function internalStoreSystemSnapshot( @@ -681,7 +662,7 @@ export async function internalStoreSystemSnapshot( return storeSnaphot(s, organizationId, reason) } export async function storeRundownPlaylistSnapshot( - access: VerifiedRundownPlaylistContentAccess, + playlist: VerifiedRundownPlaylistForUserAction, hashedToken: string, reason: string, full?: boolean @@ -691,8 +672,8 @@ export async function storeRundownPlaylistSnapshot( throw new Meteor.Error(401, `Restart token is invalid or has expired`) } - const s = await createRundownPlaylistSnapshot(access.playlist, full) - return storeSnaphot(s, access.organizationId, reason) + const s = await createRundownPlaylistSnapshot(playlist, full) + return storeSnaphot(s, playlist.organizationId ?? null, reason) } export async function internalStoreRundownPlaylistSnapshot( playlist: DBRundownPlaylist, @@ -714,12 +695,10 @@ export async function storeDebugSnapshot( throw new Meteor.Error(401, `Restart token is invalid or has expired`) } - const { organizationId, cred } = await OrganizationContentWriteAccess.snapshot(context) - if (Settings.enableUserAccounts && isResolvedCredentials(cred)) { - if (cred.user && !cred.user.superAdmin) throw new Meteor.Error(401, 'Only Super Admins can store Snapshots') - } - const s = await createDebugSnapshot(studioId, organizationId) - return storeSnaphot(s, organizationId, reason) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_SNAPSHOT_MANAGEMENT) + + const s = await createDebugSnapshot(studioId, null) + return storeSnaphot(s, null, reason) } export async function restoreSnapshot( context: MethodContext, @@ -727,21 +706,18 @@ export async function restoreSnapshot( restoreDebugData: boolean ): Promise { check(snapshotId, String) - const { cred } = await OrganizationContentWriteAccess.snapshot(context) - if (Settings.enableUserAccounts && isResolvedCredentials(cred)) { - if (cred.user && !cred.user.superAdmin) throw new Meteor.Error(401, 'Only Super 
Admins can store Snapshots') - } - const snapshot = await retreiveSnapshot(snapshotId, context) + + const snapshot = await retreiveSnapshot(snapshotId, context.connection) return restoreFromSnapshot(snapshot, restoreDebugData) } export async function removeSnapshot(context: MethodContext, snapshotId: SnapshotId): Promise { check(snapshotId, String) - const { snapshot, cred } = await OrganizationContentWriteAccess.snapshot(context, snapshotId) - if (Settings.enableUserAccounts && isResolvedCredentials(cred)) { - if (cred.user && !cred.user.superAdmin) throw new Meteor.Error(401, 'Only Super Admins can store Snapshots') - } + + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_SNAPSHOT_MANAGEMENT) + logger.info(`Removing snapshot ${snapshotId}`) + const snapshot = await Snapshots.findOneAsync(snapshotId) if (!snapshot) throw new Meteor.Error(404, `Snapshot "${snapshotId}" not found!`) if (snapshot.fileName) { @@ -789,58 +765,48 @@ async function handleKoaResponse( } } -// For backwards compatibility: -if (!Settings.enableUserAccounts) { - snapshotPrivateApiRouter.post( - '/restore', - bodyParser({ - jsonLimit: '200mb', // Arbitrary limit - }), - async (ctx) => { - const content = 'ok' - try { - ctx.response.type = 'text/plain' - - if (ctx.request.type !== 'application/json') - throw new Meteor.Error(400, 'Restore Snapshot: Invalid content-type') - - const snapshot = ctx.request.body as any - if (!snapshot) throw new Meteor.Error(400, 'Restore Snapshot: Missing request body') - - const restoreDebugData = ctx.headers['restore-debug-data'] === '1' - - await restoreFromSnapshot(snapshot, restoreDebugData) - - ctx.response.status = 200 - ctx.response.body = content - } catch (e) { - ctx.response.type = 'text/plain' - ctx.response.status = e instanceof Meteor.Error && typeof e.error === 'number' ? 
e.error : 500 - ctx.response.body = 'Error: ' + stringifyError(e) - - if (ctx.response.status !== 404) { - logger.error(stringifyError(e)) - } +snapshotPrivateApiRouter.post( + '/restore', + bodyParser({ + jsonLimit: '200mb', // Arbitrary limit + }), + async (ctx) => { + assertConnectionHasOneOfPermissions(ctx, ...PERMISSIONS_FOR_SNAPSHOT_MANAGEMENT) + + const content = 'ok' + try { + ctx.response.type = 'text/plain' + + if (ctx.request.type !== 'application/json') + throw new Meteor.Error(400, 'Restore Snapshot: Invalid content-type') + + const snapshot = ctx.request.body as any + if (!snapshot) throw new Meteor.Error(400, 'Restore Snapshot: Missing request body') + + const restoreDebugData = ctx.headers['restore-debug-data'] === '1' + + await restoreFromSnapshot(snapshot, restoreDebugData) + + ctx.response.status = 200 + ctx.response.body = content + } catch (e) { + ctx.response.type = 'text/plain' + ctx.response.status = e instanceof Meteor.Error && typeof e.error === 'number' ? e.error : 500 + ctx.response.body = 'Error: ' + stringifyError(e) + + if (ctx.response.status !== 404) { + logger.error(stringifyError(e)) } } - ) - - // Retrieve snapshot: - snapshotPrivateApiRouter.get('/retrieve/:snapshotId', async (ctx) => { - return handleKoaResponse(ctx, async () => { - const snapshotId = ctx.params.snapshotId - check(snapshotId, String) - return retreiveSnapshot(protectString(snapshotId), { userId: null }) - }) - }) -} + } +) // Retrieve snapshot: -snapshotPrivateApiRouter.get('/:token/retrieve/:snapshotId', async (ctx) => { +snapshotPrivateApiRouter.get('/retrieve/:snapshotId', async (ctx) => { return handleKoaResponse(ctx, async () => { const snapshotId = ctx.params.snapshotId check(snapshotId, String) - return retreiveSnapshot(protectString(snapshotId), { userId: null, token: ctx.params.token }) + return retreiveSnapshot(protectString(snapshotId), ctx) }) }) @@ -850,8 +816,8 @@ class ServerSnapshotAPI extends MethodContextAPI implements NewSnapshotAPI { } async 
storeRundownPlaylist(hashedToken: string, playlistId: RundownPlaylistId, reason: string) { check(playlistId, String) - const access = await checkAccessToPlaylist(this, playlistId) - return storeRundownPlaylistSnapshot(access, hashedToken, reason) + const playlist = await checkAccessToPlaylist(this.connection, playlistId) + return storeRundownPlaylistSnapshot(playlist, hashedToken, reason) } async storeDebugSnapshot(hashedToken: string, studioId: StudioId, reason: string) { return storeDebugSnapshot(this, hashedToken, studioId, reason) diff --git a/meteor/server/api/studio/api.ts b/meteor/server/api/studio/api.ts index 6f10e43b4ee..c992f5c78e3 100644 --- a/meteor/server/api/studio/api.ts +++ b/meteor/server/api/studio/api.ts @@ -21,18 +21,21 @@ import { Timeline, } from '../../collections' import { MethodContextAPI, MethodContext } from '../methodContext' -import { OrganizationContentWriteAccess } from '../../security/organization' -import { Credentials } from '../../security/lib/credentials' import { wrapDefaultObject } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { OrganizationId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { logger } from '../../logging' import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/core/constants' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { assertConnectionHasOneOfPermissions } from '../../security/auth' -async function insertStudio(context: MethodContext | Credentials, newId?: StudioId): Promise { +const PERMISSIONS_FOR_MANAGE_STUDIOS: Array = ['configure'] + +async function insertStudio(context: MethodContext, newId?: StudioId): Promise { if (newId) check(newId, String) - const access = await OrganizationContentWriteAccess.studio(context) - return insertStudioInner(access.organizationId, newId) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_STUDIOS) + + return 
insertStudioInner(null, newId) } export async function insertStudioInner(organizationId: OrganizationId | null, newId?: StudioId): Promise { return Studios.insertAsync( @@ -72,8 +75,9 @@ export async function insertStudioInner(organizationId: OrganizationId | null, n async function removeStudio(context: MethodContext, studioId: StudioId): Promise { check(studioId, String) - const access = await OrganizationContentWriteAccess.studio(context, studioId) - const studio = access.studio + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_MANAGE_STUDIOS) + + const studio = await Studios.findOneAsync(studioId) if (!studio) throw new Meteor.Error(404, `Studio "${studioId}" not found`) // allowed to remove? diff --git a/meteor/server/api/system.ts b/meteor/server/api/system.ts index 1668f76720f..3c8bd9c4b83 100644 --- a/meteor/server/api/system.ts +++ b/meteor/server/api/system.ts @@ -13,12 +13,10 @@ import { import { CollectionIndexes, getTargetRegisteredIndexes } from '../collections/indices' import { Meteor } from 'meteor/meteor' import { logger } from '../logging' -import { SystemWriteAccess } from '../security/system' import { check } from '../lib/check' import { IndexSpecifier } from '@sofie-automation/meteor-lib/dist/collections/lib' import { getBundle as getTranslationBundleInner } from './translationsBundles' import { TranslationsBundle } from '@sofie-automation/meteor-lib/dist/collections/TranslationsBundles' -import { OrganizationContentWriteAccess } from '../security/organization' import { ClientAPI } from '@sofie-automation/meteor-lib/dist/api/client' import { cleanupOldDataInner } from './cleanup' import { IndexSpecification } from 'mongodb' @@ -26,8 +24,12 @@ import { nightlyCronjobInner } from '../cronjobs' import { TranslationsBundleId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { createAsyncOnlyMongoCollection, AsyncOnlyMongoCollection } from '../collections/collection' import { generateToken } from 
'./singleUseTokens' -import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/lib/securityVerify' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { assertConnectionHasOneOfPermissions } from '../security/auth' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' + +const PERMISSIONS_FOR_SYSTEM_CLEANUP: Array = ['configure'] async function setupIndexes(removeOldIndexes = false): Promise> { // Note: This function should NOT run on Meteor.startup, due to getCollectionIndexes failing if run before indexes have been created. @@ -95,7 +97,7 @@ async function cleanupIndexes( actuallyRemoveOldIndexes: boolean ): Promise> { check(actuallyRemoveOldIndexes, Boolean) - await SystemWriteAccess.coreSystem(context) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_SYSTEM_CLEANUP) return setupIndexes(actuallyRemoveOldIndexes) } @@ -104,12 +106,13 @@ async function cleanupOldData( actuallyRemoveOldData: boolean ): Promise { check(actuallyRemoveOldData, Boolean) - await SystemWriteAccess.coreSystem(context) + + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_SYSTEM_CLEANUP) return cleanupOldDataInner(actuallyRemoveOldData) } async function runCronjob(context: MethodContext): Promise { - await SystemWriteAccess.coreSystem(context) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_SYSTEM_CLEANUP) return nightlyCronjobInner() } @@ -293,7 +296,7 @@ async function doSystemBenchmarkInner() { return result } async function doSystemBenchmark(context: MethodContext, runCount = 1): Promise { - await SystemWriteAccess.coreSystem(context) + assertConnectionHasOneOfPermissions(context.connection, 'developer') if (runCount < 1) throw new Error(`runCount must be >= 1`) @@ -361,10 +364,11 @@ CPU JSON stringifying: ${avg.cpuStringifying} ms 
(${comparison.cpuStringif } } -async function getTranslationBundle(context: MethodContext, bundleId: TranslationsBundleId) { +async function getTranslationBundle(_context: MethodContext, bundleId: TranslationsBundleId) { check(bundleId, String) - await OrganizationContentWriteAccess.translationBundle(context) + triggerWriteAccessBecauseNoCheckNecessary() + return ClientAPI.responseSuccess(await getTranslationBundleInner(bundleId)) } diff --git a/meteor/server/api/triggeredActions.ts b/meteor/server/api/triggeredActions.ts index f172786207d..0f29b780d03 100644 --- a/meteor/server/api/triggeredActions.ts +++ b/meteor/server/api/triggeredActions.ts @@ -4,14 +4,12 @@ import { registerClassToMeteorMethods, ReplaceOptionalWithNullInMethodArguments import { literal, getRandomId, protectString, Complete } from '../lib/tempLib' import { logger } from '../logging' import { MethodContext, MethodContextAPI } from './methodContext' -import { ShowStyleContentWriteAccess } from '../security/showStyle' import { DBTriggeredActions, TriggeredActionsObj } from '@sofie-automation/meteor-lib/dist/collections/TriggeredActions' import { CreateTriggeredActionsContent, NewTriggeredActionsAPI, TriggeredActionsAPIMethods, } from '@sofie-automation/meteor-lib/dist/api/triggeredActions' -import { SystemWriteAccess } from '../security/system' import { fetchShowStyleBaseLight } from '../optimizations' import { convertObjectIntoOverrides, @@ -21,6 +19,10 @@ import { ShowStyleBaseId, TriggeredActionId } from '@sofie-automation/corelib/di import { TriggeredActions } from '../collections' import KoaRouter from '@koa/router' import bodyParser from 'koa-bodyparser' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { assertConnectionHasOneOfPermissions } from '../security/auth' + +const PERMISSIONS_FOR_TRIGGERED_ACTIONS: Array = ['configure'] export async function createTriggeredActions( showStyleBaseId: ShowStyleBaseId | null, @@ -58,6 +60,8 @@ 
actionTriggersRouter.post( async (ctx) => { ctx.response.type = 'text/plain' + assertConnectionHasOneOfPermissions(ctx, ...PERMISSIONS_FOR_TRIGGERED_ACTIONS) + const showStyleBaseId: ShowStyleBaseId | undefined = protectString(ctx.params.showStyleBaseId) check(showStyleBaseId, Match.Optional(String)) @@ -161,22 +165,14 @@ async function apiCreateTriggeredActions( check(showStyleBaseId, Match.Maybe(String)) check(base, Match.Maybe(Object)) - if (!showStyleBaseId) { - const access = await SystemWriteAccess.coreSystem(context) - if (!access) throw new Meteor.Error(403, `Core System settings not writable`) - } else { - const access = await ShowStyleContentWriteAccess.anyContent(context, showStyleBaseId) - if (!access) throw new Meteor.Error(404, `ShowStyleBase "${showStyleBaseId}" not found`) - } + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_TRIGGERED_ACTIONS) return createTriggeredActions(showStyleBaseId, base || undefined) } async function apiRemoveTriggeredActions(context: MethodContext, id: TriggeredActionId) { check(id, String) - const access = await ShowStyleContentWriteAccess.triggeredActions(context, id) - const triggeredActions = typeof access === 'boolean' ? 
access : access.triggeredActions - if (!triggeredActions) throw new Meteor.Error(404, `Action Trigger "${id}" not found`) + assertConnectionHasOneOfPermissions(context.connection, ...PERMISSIONS_FOR_TRIGGERED_ACTIONS) await removeTriggeredActions(id) } diff --git a/meteor/server/api/user.ts b/meteor/server/api/user.ts index 9b3649abdd7..e626071dd3f 100644 --- a/meteor/server/api/user.ts +++ b/meteor/server/api/user.ts @@ -1,132 +1,14 @@ -import { Meteor } from 'meteor/meteor' -import { Accounts } from 'meteor/accounts-base' -import { unprotectString, protectString } from '../lib/tempLib' -import { sleep, deferAsync } from '../lib/lib' -import { MethodContextAPI, MethodContext } from './methodContext' -import { NewUserAPI, UserAPIMethods, createUser, CreateNewUserData } from '@sofie-automation/meteor-lib/dist/api/user' +import { MethodContextAPI } from './methodContext' +import { NewUserAPI, UserAPIMethods } from '@sofie-automation/meteor-lib/dist/api/user' import { registerClassToMeteorMethods } from '../methods' -import { SystemWriteAccess } from '../security/system' -import { triggerWriteAccess, triggerWriteAccessBecauseNoCheckNecessary } from '../security/lib/securityVerify' -import { logNotAllowed } from '../../server/security/lib/lib' -import { User } from '@sofie-automation/meteor-lib/dist/collections/Users' -import { createOrganization } from './organizations' -import { DBOrganizationBase } from '@sofie-automation/meteor-lib/dist/collections/Organization' -import { resetCredentials } from '../security/lib/credentials' -import { OrganizationId, UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { Organizations, Users } from '../collections' -import { logger } from '../logging' - -async function enrollUser(email: string, name: string): Promise { - triggerWriteAccessBecauseNoCheckNecessary() - - const id = await createUser({ - email: email, - profile: { name: name }, - }) - try { - await Accounts.sendEnrollmentEmail(unprotectString(id), email) 
- } catch (error) { - logger.error('Accounts.sendEnrollmentEmail') - logger.error(error) - } - - return id -} - -async function afterCreateNewUser(userId: UserId, organization: DBOrganizationBase): Promise { - triggerWriteAccessBecauseNoCheckNecessary() - - await sendVerificationEmail(userId) - - // Create an organization for the user: - const orgId = await createOrganization(organization) - // Add user to organization: - await Users.updateAsync(userId, { $set: { organizationId: orgId } }) - await Organizations.updateAsync(orgId, { - $set: { - userRoles: { - [unprotectString(userId)]: { - admin: true, - studio: true, - configurator: true, - }, - }, - }, - }) - - resetCredentials({ userId }) - - return orgId -} -async function sendVerificationEmail(userId: UserId) { - const user = await Users.findOneAsync(userId) - if (!user) throw new Meteor.Error(404, `User "${userId}" not found!`) - try { - await Promise.all( - user.emails.map(async (email) => { - if (!email.verified) { - await Accounts.sendVerificationEmail(unprotectString(user._id), email.address) - } - }) - ) - } catch (error) { - logger.error('ERROR sending email verification') - logger.error(error) - } -} - -async function requestResetPassword(email: string): Promise { - triggerWriteAccessBecauseNoCheckNecessary() - const meteorUser = Accounts.findUserByEmail(email) as unknown - const user = meteorUser as User - if (!user) return false - await Accounts.sendResetPasswordEmail(unprotectString(user._id)) - return true -} - -async function removeUser(context: MethodContext) { - triggerWriteAccess() - if (!context.userId) throw new Meteor.Error(403, `Not logged in`) - const access = await SystemWriteAccess.currentUser(context.userId, context) - if (!access) return logNotAllowed('Current user', 'Invalid user id or permissions') - await Users.removeAsync(context.userId) - return true -} +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' +import { parseUserPermissions, 
USER_PERMISSIONS_HEADER } from '@sofie-automation/meteor-lib/dist/userPermissions' class ServerUserAPI extends MethodContextAPI implements NewUserAPI { - async enrollUser(email: string, name: string) { - return enrollUser(email, name) - } - async requestPasswordReset(email: string) { - return requestResetPassword(email) - } - async removeUser() { - return removeUser(this) + async getUserPermissions() { + triggerWriteAccessBecauseNoCheckNecessary() + return parseUserPermissions(this.connection?.httpHeaders?.[USER_PERMISSIONS_HEADER]) } } registerClassToMeteorMethods(UserAPIMethods, ServerUserAPI, false) - -Accounts.onCreateUser((options0, user) => { - const options = options0 as Partial - user.profile = options.profile - - const createOrganization = options.createOrganization - if (createOrganization) { - deferAsync(async () => { - // To be run after the user has been inserted: - for (let t = 10; t < 200; t *= 1.5) { - const dbUser = await Users.findOneAsync(protectString(user._id)) - if (dbUser) { - await afterCreateNewUser(dbUser._id, createOrganization) - return - } else { - // User has not been inserted into db (yet), wait - await sleep(t) - } - } - }) - } - // The user to-be-inserted: - return user -}) diff --git a/meteor/server/api/userActions.ts b/meteor/server/api/userActions.ts index 1f4935625ed..a15aaeb5caa 100644 --- a/meteor/server/api/userActions.ts +++ b/meteor/server/api/userActions.ts @@ -10,25 +10,20 @@ import { storeRundownPlaylistSnapshot } from './snapshot' import { registerClassToMeteorMethods, ReplaceOptionalWithNullInMethodArguments } from '../methods' import { ServerRundownAPI } from './rundown' import { saveEvaluation } from './evaluations' -import { MediaManagerAPI } from './mediaManager' +import * as MediaManagerAPI from './mediaManager' import { MOSDeviceActions } from './ingest/mosDevice/actions' import { MethodContextAPI } from './methodContext' import { ServerClientAPI } from './client' -import { OrganizationContentWriteAccess } from 
'../security/organization' -import { SystemWriteAccess } from '../security/system' -import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/lib/securityVerify' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' import { Bucket } from '@sofie-automation/meteor-lib/dist/collections/Buckets' import { BucketsAPI } from './buckets' import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { AdLibActionCommon } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibAction' -import { VerifiedRundownPlaylistContentAccess } from './lib' -import { PackageManagerAPI } from './packageManager' +import { checkAccessToRundown } from '../security/check' +import * as PackageManagerAPI from './packageManager' import { ServerPeripheralDeviceAPI } from './peripheralDevice' import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio' -import { PeripheralDeviceContentWriteAccess } from '../security/peripheralDevice' -import { StudioContentWriteAccess } from '../security/studio' -import { BucketSecurity } from '../security/buckets' import { AdLibActionId, BucketId, @@ -51,11 +46,17 @@ import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/Nr import { verifyHashedToken } from './singleUseTokens' import { QuickLoopMarker } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { runIngestOperation } from './ingest/lib' -import { RundownPlaylistContentWriteAccess } from '../security/rundownPlaylist' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { assertConnectionHasOneOfPermissions } from '../security/auth' + +const PERMISSIONS_FOR_PLAYOUT_USERACTION: Array = ['studio'] +const PERMISSIONS_FOR_BUCKET_MODIFICATION: Array = ['studio'] +const 
PERMISSIONS_FOR_MEDIA_MANAGEMENT: Array = ['studio', 'service', 'configure'] +const PERMISSIONS_FOR_SYSTEM_ACTION: Array = ['service', 'configure'] async function pieceSetInOutPoints( - access: VerifiedRundownPlaylistContentAccess, + playlistId: RundownPlaylistId, partId: PartId, pieceId: PieceId, inPoint: number, @@ -66,7 +67,7 @@ async function pieceSetInOutPoints( const rundown = await Rundowns.findOneAsync({ _id: part.rundownId, - playlistId: access.playlist._id, + playlistId: playlistId, }) if (!rundown) throw new Meteor.Error(501, `Rundown "${part.rundownId}" not found!`) @@ -387,8 +388,8 @@ class ServerUserActionAPI }, 'pieceSetInOutPoints', { rundownPlaylistId, partId, pieceId, inPoint, duration }, - async (access) => { - return pieceSetInOutPoints(access, partId, pieceId, inPoint, duration) + async (playlist) => { + return pieceSetInOutPoints(playlist._id, partId, pieceId, inPoint, duration) } ) } @@ -546,8 +547,8 @@ class ServerUserActionAPI check(showStyleBaseId, String) check(ingestItem, Object) - const access = await BucketSecurity.allowWriteAccess(this, bucketId) - return BucketsAPI.importAdlibToBucket(access, showStyleBaseId, undefined, ingestItem) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + return BucketsAPI.importAdlibToBucket(bucketId, showStyleBaseId, undefined, ingestItem) } ) } @@ -622,8 +623,8 @@ class ServerUserActionAPI }, 'saveEvaluation', { evaluation }, - async (access) => { - return saveEvaluation(access, evaluation) + async (playlist) => { + return saveEvaluation(playlist, evaluation) } ) } @@ -646,8 +647,8 @@ class ServerUserActionAPI }, 'storeRundownSnapshot', { playlistId, reason, full }, - async (access) => { - return storeRundownPlaylistSnapshot(access, hashedToken, reason, full) + async (playlist) => { + return storeRundownPlaylistSnapshot(playlist, hashedToken, reason, full) } ) } @@ -695,8 +696,8 @@ class ServerUserActionAPI }, 'resyncRundownPlaylist', { playlistId }, - async 
(access) => { - return ServerRundownAPI.resyncRundownPlaylist(access) + async (playlist) => { + return ServerRundownAPI.resyncRundownPlaylist(playlist) } ) } @@ -711,8 +712,8 @@ class ServerUserActionAPI }, 'unsyncRundown', { rundownId }, - async (access) => { - return ServerRundownAPI.unsyncRundown(access) + async (rundown) => { + return ServerRundownAPI.unsyncRundown(rundown) } ) } @@ -727,8 +728,8 @@ class ServerUserActionAPI }, 'removeRundown', { rundownId }, - async (access) => { - return ServerRundownAPI.removeRundown(access) + async (rundown) => { + return ServerRundownAPI.removeRundown(rundown) } ) } @@ -743,53 +744,71 @@ class ServerUserActionAPI }, 'resyncRundown', { rundownId }, - async (access) => { - return ServerRundownAPI.resyncRundown(access) + async (rundown) => { + return ServerRundownAPI.resyncRundown(rundown) } ) } - async mediaRestartWorkflow(userEvent: string, eventTime: Time, workflowId: MediaWorkFlowId) { + async mediaRestartWorkflow( + userEvent: string, + eventTime: Time, + deviceId: PeripheralDeviceId, + workflowId: MediaWorkFlowId + ) { return ServerClientAPI.runUserActionInLog( this, userEvent, eventTime, 'mediaRestartWorkflow', - { workflowId }, + { deviceId, workflowId }, async () => { check(workflowId, String) - const access = await PeripheralDeviceContentWriteAccess.mediaWorkFlow(this, workflowId) - return MediaManagerAPI.restartWorkflow(access) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MEDIA_MANAGEMENT) + + return MediaManagerAPI.restartWorkflow(deviceId, workflowId) } ) } - async mediaAbortWorkflow(userEvent: string, eventTime: Time, workflowId: MediaWorkFlowId) { + async mediaAbortWorkflow( + userEvent: string, + eventTime: Time, + deviceId: PeripheralDeviceId, + workflowId: MediaWorkFlowId + ) { return ServerClientAPI.runUserActionInLog( this, userEvent, eventTime, 'mediaAbortWorkflow', - { workflowId }, + { deviceId, workflowId }, async () => { check(workflowId, String) - const access = await 
PeripheralDeviceContentWriteAccess.mediaWorkFlow(this, workflowId) - return MediaManagerAPI.abortWorkflow(access) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MEDIA_MANAGEMENT) + + return MediaManagerAPI.abortWorkflow(deviceId, workflowId) } ) } - async mediaPrioritizeWorkflow(userEvent: string, eventTime: Time, workflowId: MediaWorkFlowId) { + async mediaPrioritizeWorkflow( + userEvent: string, + eventTime: Time, + deviceId: PeripheralDeviceId, + workflowId: MediaWorkFlowId + ) { return ServerClientAPI.runUserActionInLog( this, userEvent, eventTime, 'mediaPrioritizeWorkflow', - { workflowId }, + { deviceId, workflowId }, async () => { check(workflowId, String) - const access = await PeripheralDeviceContentWriteAccess.mediaWorkFlow(this, workflowId) - return MediaManagerAPI.prioritizeWorkflow(access) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MEDIA_MANAGEMENT) + + return MediaManagerAPI.prioritizeWorkflow(deviceId, workflowId) } ) } @@ -801,8 +820,9 @@ class ServerUserActionAPI 'mediaRestartAllWorkflows', {}, async () => { - const access = await OrganizationContentWriteAccess.mediaWorkFlows(this) - return MediaManagerAPI.restartAllWorkflows(access) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MEDIA_MANAGEMENT) + + return MediaManagerAPI.restartAllWorkflows(null) } ) } @@ -814,8 +834,9 @@ class ServerUserActionAPI 'mediaAbortAllWorkflows', {}, async () => { - const access = await OrganizationContentWriteAccess.mediaWorkFlows(this) - return MediaManagerAPI.abortAllWorkflows(access) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MEDIA_MANAGEMENT) + + return MediaManagerAPI.abortAllWorkflows(null) } ) } @@ -835,8 +856,9 @@ class ServerUserActionAPI check(deviceId, String) check(workId, String) - const access = await PeripheralDeviceContentWriteAccess.executeFunction(this, deviceId) - return PackageManagerAPI.restartExpectation(access, workId) + 
assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MEDIA_MANAGEMENT) + + return PackageManagerAPI.restartExpectation(deviceId, workId) } ) } @@ -850,8 +872,9 @@ class ServerUserActionAPI async () => { check(studioId, String) - const access = await StudioContentWriteAccess.executeFunction(this, studioId) - return PackageManagerAPI.restartAllExpectationsInStudio(access) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MEDIA_MANAGEMENT) + + return PackageManagerAPI.restartAllExpectationsInStudio(studioId) } ) } @@ -871,8 +894,9 @@ class ServerUserActionAPI check(deviceId, String) check(workId, String) - const access = await PeripheralDeviceContentWriteAccess.executeFunction(this, deviceId) - return PackageManagerAPI.abortExpectation(access, workId) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MEDIA_MANAGEMENT) + + return PackageManagerAPI.abortExpectation(deviceId, workId) } ) } @@ -892,8 +916,9 @@ class ServerUserActionAPI check(deviceId, String) check(containerId, String) - const access = await PeripheralDeviceContentWriteAccess.executeFunction(this, deviceId) - return PackageManagerAPI.restartPackageContainer(access, containerId) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MEDIA_MANAGEMENT) + + return PackageManagerAPI.restartPackageContainer(deviceId, containerId) } ) } @@ -922,7 +947,7 @@ class ServerUserActionAPI async () => { check(hashedToken, String) - await SystemWriteAccess.systemActions(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_SYSTEM_ACTION) if (!verifyHashedToken(hashedToken)) { throw new Meteor.Error(401, `Restart token is invalid or has expired`) @@ -958,8 +983,8 @@ class ServerUserActionAPI async () => { check(bucketId, String) - const access = await BucketSecurity.allowWriteAccess(this, bucketId) - return BucketsAPI.removeBucket(access) + assertConnectionHasOneOfPermissions(this.connection, 
...PERMISSIONS_FOR_BUCKET_MODIFICATION) + return BucketsAPI.removeBucket(bucketId) } ) } @@ -979,8 +1004,8 @@ class ServerUserActionAPI check(bucketId, String) check(bucketProps, Object) - const access = await BucketSecurity.allowWriteAccess(this, bucketId) - return BucketsAPI.modifyBucket(access, bucketProps) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + return BucketsAPI.modifyBucket(bucketId, bucketProps) } ) } @@ -994,8 +1019,8 @@ class ServerUserActionAPI async () => { check(bucketId, String) - const access = await BucketSecurity.allowWriteAccess(this, bucketId) - return BucketsAPI.emptyBucket(access) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + return BucketsAPI.emptyBucket(bucketId) } ) } @@ -1010,8 +1035,8 @@ class ServerUserActionAPI check(studioId, String) check(name, String) - const access = await StudioContentWriteAccess.bucket(this, studioId) - return BucketsAPI.createNewBucket(access, name) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + return BucketsAPI.createNewBucket(studioId, name) } ) } @@ -1025,8 +1050,8 @@ class ServerUserActionAPI 'bucketsRemoveBucketAdLib', { adlibId }, async () => { - const access = await BucketSecurity.allowWriteAccessPiece(this, adlibId) - return BucketsAPI.removeBucketAdLib(access) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + return BucketsAPI.removeBucketAdLib(adlibId) } ) } @@ -1040,8 +1065,8 @@ class ServerUserActionAPI async () => { check(actionId, String) - const access = await BucketSecurity.allowWriteAccessAction(this, actionId) - return BucketsAPI.removeBucketAdLibAction(access) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + return BucketsAPI.removeBucketAdLibAction(actionId) } ) } @@ -1061,8 +1086,8 @@ class ServerUserActionAPI check(adlibId, String) 
check(adlibProps, Object) - const access = await BucketSecurity.allowWriteAccessPiece(this, adlibId) - return BucketsAPI.modifyBucketAdLib(access, adlibProps) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + return BucketsAPI.modifyBucketAdLib(adlibId, adlibProps) } ) } @@ -1082,8 +1107,8 @@ class ServerUserActionAPI check(actionId, String) check(actionProps, Object) - const access = await BucketSecurity.allowWriteAccessAction(this, actionId) - return BucketsAPI.modifyBucketAdLibAction(access, actionProps) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + return BucketsAPI.modifyBucketAdLibAction(actionId, actionProps) } ) } @@ -1105,8 +1130,8 @@ class ServerUserActionAPI check(bucketId, String) check(action, Object) - const access = await BucketSecurity.allowWriteAccess(this, bucketId) - return BucketsAPI.saveAdLibActionIntoBucket(access, action) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_BUCKET_MODIFICATION) + return BucketsAPI.saveAdLibActionIntoBucket(bucketId, action) } ) } @@ -1121,15 +1146,16 @@ class ServerUserActionAPI this, userEvent, eventTime, - 'packageManagerRestartAllExpectations', + 'switchRouteSet', { studioId, routeSetId, state }, async () => { check(studioId, String) check(routeSetId, String) check(state, Match.OneOf('toggle', Boolean)) - const access = await StudioContentWriteAccess.routeSet(this, studioId) - return ServerPlayoutAPI.switchRouteSet(access, routeSetId, state) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_PLAYOUT_USERACTION) + + return ServerPlayoutAPI.switchRouteSet(studioId, routeSetId, state) } ) } @@ -1195,8 +1221,13 @@ class ServerUserActionAPI check(subDeviceId, String) check(disable, Boolean) - const access = await PeripheralDeviceContentWriteAccess.peripheralDevice(this, peripheralDeviceId) - return ServerPeripheralDeviceAPI.disableSubDevice(access, subDeviceId, disable) + 
assertConnectionHasOneOfPermissions( + this.connection, + ...PERMISSIONS_FOR_PLAYOUT_USERACTION, + ...PERMISSIONS_FOR_SYSTEM_ACTION + ) + + return ServerPeripheralDeviceAPI.disableSubDevice(peripheralDeviceId, subDeviceId, disable) } ) } @@ -1304,11 +1335,10 @@ class ServerUserActionAPI 'executeUserChangeOperation', { operationTarget, operation }, async () => { - const access = await RundownPlaylistContentWriteAccess.rundown(this, rundownId) - if (!access.rundown) throw new Error(`Rundown "${rundownId}" not found`) + const rundown = await checkAccessToRundown(this.connection, rundownId) - await runIngestOperation(access.rundown.studioId, IngestJobs.UserExecuteChangeOperation, { - rundownExternalId: access.rundown.externalId, + await runIngestOperation(rundown.studioId, IngestJobs.UserExecuteChangeOperation, { + rundownExternalId: rundown.externalId, operationTarget, operation, }) @@ -1333,7 +1363,8 @@ class ServerUserActionAPI check(studioId, String) check(showStyleVariantId, String) - // TODO - checkAccessToStudio? 
+ assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_PLAYOUT_USERACTION) + return runIngestOperation(studioId, IngestJobs.CreateAdlibTestingRundownForShowStyleVariant, { showStyleVariantId, }) diff --git a/meteor/server/collections/collection.ts b/meteor/server/collections/collection.ts index 5a81d597c2e..700ceda53d5 100644 --- a/meteor/server/collections/collection.ts +++ b/meteor/server/collections/collection.ts @@ -22,14 +22,14 @@ import { import { MinimalMongoCursor } from './implementations/asyncCollection' export interface MongoAllowRules { - insert?: (userId: UserId | null, doc: DBInterface) => Promise | boolean + // insert?: (userId: UserId | null, doc: DBInterface) => Promise | boolean update?: ( userId: UserId | null, doc: DBInterface, fieldNames: FieldNames, modifier: MongoModifier ) => Promise | boolean - remove?: (userId: UserId | null, doc: DBInterface) => Promise | boolean + // remove?: (userId: UserId | null, doc: DBInterface) => Promise | boolean } /** @@ -48,29 +48,6 @@ export function getOrCreateMongoCollection(name: string): Mongo.Collection return newCollection } -/** - * Wrap an existing Mongo.Collection to have async methods. Primarily to convert the built-in Users collection - * @param collection Collection to wrap - * @param name Name of the collection - * @param allowRules The 'allow' rules for publications. 
Set to `false` to make readonly - */ -export function wrapMongoCollection }>( - collection: Mongo.Collection, - name: CollectionName, - allowRules: MongoAllowRules | false -): AsyncOnlyMongoCollection { - if (collectionsCache.has(name)) throw new Meteor.Error(500, `Collection "${name}" has already been created`) - collectionsCache.set(name, collection) - - setupCollectionAllowRules(collection, allowRules) - - const wrapped = new WrappedAsyncMongoCollection(collection, name) - - registerCollection(name, wrapped as WrappedAsyncMongoCollection) - - return wrapped -} - /** * Create a fully featured MongoCollection * @param name Name of the collection in mongodb @@ -133,24 +110,16 @@ function setupCollectionAllowRules['allow']>[0]*/ = { - insert: () => false, - insertAsync: origInsert - ? (userId: string | null, doc: DBInterface) => origInsert(protectString(userId), doc) as any - : () => false, update: () => false, updateAsync: origUpdate ? (userId: string | null, doc: DBInterface, fieldNames: string[], modifier: any) => origUpdate(protectString(userId), doc, fieldNames as any, modifier) as any : () => false, - remove: () => false, - removeAsync: origRemove - ? 
(userId: string | null, doc: DBInterface) => origRemove(protectString(userId), doc) as any - : () => false, } collection.allow(options) diff --git a/meteor/server/collections/index.ts b/meteor/server/collections/index.ts index 9b574870c93..ee4a0280387 100644 --- a/meteor/server/collections/index.ts +++ b/meteor/server/collections/index.ts @@ -24,35 +24,25 @@ import { DBTimelineDatastoreEntry } from '@sofie-automation/corelib/dist/dataMod import { TranslationsBundle } from '@sofie-automation/meteor-lib/dist/collections/TranslationsBundles' import { DBTriggeredActions } from '@sofie-automation/meteor-lib/dist/collections/TriggeredActions' import { UserActionsLogItem } from '@sofie-automation/meteor-lib/dist/collections/UserActionsLog' -import { DBUser } from '@sofie-automation/meteor-lib/dist/collections/Users' import { WorkerStatus } from '@sofie-automation/meteor-lib/dist/collections/Workers' import { registerIndex } from './indices' import { getCurrentTime } from '../lib/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { - createAsyncOnlyMongoCollection, - createAsyncOnlyReadOnlyMongoCollection, - wrapMongoCollection, -} from './collection' +import { createAsyncOnlyMongoCollection, createAsyncOnlyReadOnlyMongoCollection } from './collection' import { ObserveChangesForHash } from './lib' import { logger } from '../logging' -import { resolveCredentials } from '../security/lib/credentials' -import { logNotAllowed, allowOnlyFields, rejectFields } from '../security/lib/lib' -import { - allowAccessToCoreSystem, - allowAccessToOrganization, - allowAccessToShowStyleBase, - allowAccessToStudio, -} from '../security/lib/security' -import { SystemWriteAccess } from '../security/system' +import { allowOnlyFields, rejectFields } from '../security/allowDeny' import type { DBNotificationObj } from '@sofie-automation/corelib/dist/dataModel/Notifications' +import { checkUserIdHasOneOfPermissions } from '../security/auth' export * 
from './bucket' export * from './packages-media' export * from './rundown' export const Blueprints = createAsyncOnlyMongoCollection(CollectionName.Blueprints, { - update(_userId, doc, fields, _modifier) { + update(userId, doc, fields, _modifier) { + if (!checkUserIdHasOneOfPermissions(userId, CollectionName.Blueprints, 'configure')) return false + return allowOnlyFields(doc, fields, ['name', 'disableVersionChecks']) }, }) @@ -62,9 +52,7 @@ registerIndex(Blueprints, { export const CoreSystem = createAsyncOnlyMongoCollection(CollectionName.CoreSystem, { async update(userId, doc, fields, _modifier) { - const cred = await resolveCredentials({ userId: userId }) - const access = await allowAccessToCoreSystem(cred) - if (!access.update) return logNotAllowed('CoreSystem', access.reason) + if (!checkUserIdHasOneOfPermissions(userId, CollectionName.CoreSystem, 'configure')) return false return allowOnlyFields(doc, fields, [ 'systemInfo', @@ -123,8 +111,8 @@ registerIndex(Notifications, { export const Organizations = createAsyncOnlyMongoCollection(CollectionName.Organizations, { async update(userId, doc, fields, _modifier) { - const access = await allowAccessToOrganization({ userId: userId }, doc._id) - if (!access.update) return logNotAllowed('Organization', access.reason) + if (!checkUserIdHasOneOfPermissions(userId, CollectionName.Organizations, 'configure')) return false + return allowOnlyFields(doc, fields, ['userRoles']) }, }) @@ -138,7 +126,9 @@ registerIndex(PeripheralDeviceCommands, { }) export const PeripheralDevices = createAsyncOnlyMongoCollection(CollectionName.PeripheralDevices, { - update(_userId, doc, fields, _modifier) { + update(userId, doc, fields, _modifier) { + if (!checkUserIdHasOneOfPermissions(userId, CollectionName.PeripheralDevices, 'configure')) return false + return rejectFields(doc, fields, [ 'type', 'parentDeviceId', @@ -167,8 +157,8 @@ registerIndex(PeripheralDevices, { export const RundownLayouts = 
createAsyncOnlyMongoCollection(CollectionName.RundownLayouts, { async update(userId, doc, fields) { - const access = await allowAccessToShowStyleBase({ userId: userId }, doc.showStyleBaseId) - if (!access.update) return logNotAllowed('ShowStyleBase', access.reason) + if (!checkUserIdHasOneOfPermissions(userId, CollectionName.RundownLayouts, 'configure')) return false + return rejectFields(doc, fields, ['_id', 'showStyleBaseId']) }, }) @@ -184,8 +174,8 @@ registerIndex(RundownLayouts, { export const ShowStyleBases = createAsyncOnlyMongoCollection(CollectionName.ShowStyleBases, { async update(userId, doc, fields) { - const access = await allowAccessToShowStyleBase({ userId: userId }, doc._id) - if (!access.update) return logNotAllowed('ShowStyleBase', access.reason) + if (!checkUserIdHasOneOfPermissions(userId, CollectionName.ShowStyleBases, 'configure')) return false + return rejectFields(doc, fields, ['_id']) }, }) @@ -195,8 +185,7 @@ registerIndex(ShowStyleBases, { export const ShowStyleVariants = createAsyncOnlyMongoCollection(CollectionName.ShowStyleVariants, { async update(userId, doc, fields) { - const access = await allowAccessToShowStyleBase({ userId: userId }, doc.showStyleBaseId) - if (!access.update) return logNotAllowed('ShowStyleBase', access.reason) + if (!checkUserIdHasOneOfPermissions(userId, CollectionName.ShowStyleVariants, 'configure')) return false return rejectFields(doc, fields, ['showStyleBaseId']) }, @@ -207,7 +196,9 @@ registerIndex(ShowStyleVariants, { }) export const Snapshots = createAsyncOnlyMongoCollection(CollectionName.Snapshots, { - update(_userId, doc, fields, _modifier) { + update(userId, doc, fields, _modifier) { + if (!checkUserIdHasOneOfPermissions(userId, CollectionName.Snapshots, 'configure')) return false + return allowOnlyFields(doc, fields, ['comment']) }, }) @@ -220,8 +211,8 @@ registerIndex(Snapshots, { export const Studios = createAsyncOnlyMongoCollection(CollectionName.Studios, { async update(userId, doc, fields, 
_modifier) { - const access = await allowAccessToStudio({ userId: userId }, doc._id) - if (!access.update) return logNotAllowed('Studio', access.reason) + if (!checkUserIdHasOneOfPermissions(userId, CollectionName.Studios, 'configure')) return false + return rejectFields(doc, fields, ['_id']) }, }) @@ -249,17 +240,9 @@ export const TranslationsBundles = createAsyncOnlyMongoCollection(CollectionName.TriggeredActions, { async update(userId, doc, fields) { - const cred = await resolveCredentials({ userId: userId }) - - if (doc.showStyleBaseId) { - const access = await allowAccessToShowStyleBase(cred, doc.showStyleBaseId) - if (!access.update) return logNotAllowed('ShowStyleBase', access.reason) - return rejectFields(doc, fields, ['_id']) - } else { - const access = await allowAccessToCoreSystem(cred) - if (!access.update) return logNotAllowed('CoreSystem', access.reason) - return rejectFields(doc, fields, ['_id']) - } + if (!checkUserIdHasOneOfPermissions(userId, CollectionName.TriggeredActions, 'configure')) return false + + return rejectFields(doc, fields, ['_id']) }, }) registerIndex(TriggeredActions, { @@ -275,26 +258,6 @@ registerIndex(UserActionsLog, { timelineHash: 1, }) -// This is a somewhat special collection, as it draws from the Meteor.users collection from the Accounts package -export const Users = wrapMongoCollection(Meteor.users as any, CollectionName.Users, { - async update(userId, doc, fields, _modifier) { - const access = await SystemWriteAccess.currentUser(userId, { userId }) - if (!access) return logNotAllowed('CurrentUser', '') - return rejectFields(doc, fields, [ - '_id', - 'createdAt', - 'services', - 'emails', - 'profile', - 'organizationId', - 'superAdmin', - ]) - }, -}) -registerIndex(Users, { - organizationId: 1, -}) - export const Workers = createAsyncOnlyMongoCollection(CollectionName.Workers, false) export const WorkerThreadStatuses = createAsyncOnlyMongoCollection( diff --git a/meteor/server/email.ts b/meteor/server/email.ts deleted file 
mode 100644 index 0dd0b2d7955..00000000000 --- a/meteor/server/email.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { Accounts } from 'meteor/accounts-base' - -Meteor.startup(function () { - process.env.MAIL_URL = Meteor.settings.MAIL_URL - Accounts.urls.verifyEmail = function (token) { - return Meteor.absoluteUrl('login/verify-email/' + token) - } - Accounts.urls.resetPassword = function (token) { - return Meteor.absoluteUrl('reset/' + token) - } -}) diff --git a/meteor/server/lib/customPublication/__tests__/optimizedObserver.test.ts b/meteor/server/lib/customPublication/__tests__/optimizedObserver.test.ts index c77442a391c..e2c4cafb1f2 100644 --- a/meteor/server/lib/customPublication/__tests__/optimizedObserver.test.ts +++ b/meteor/server/lib/customPublication/__tests__/optimizedObserver.test.ts @@ -1,4 +1,3 @@ -import { UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { createManualPromise } from '@sofie-automation/corelib/dist/lib' import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' import { optimizedObserverCountSubscribers, setUpOptimizedObserverInner, TriggerUpdate } from '../optimizedObserverBase' @@ -20,9 +19,6 @@ class CustomPublishMock }> get isReady(): boolean { return false } - get userId(): UserId | null { - return null - } stop?: () => void diff --git a/meteor/server/lib/customPublication/publish.ts b/meteor/server/lib/customPublication/publish.ts index b9ac5fc402b..fb8622b5169 100644 --- a/meteor/server/lib/customPublication/publish.ts +++ b/meteor/server/lib/customPublication/publish.ts @@ -1,4 +1,3 @@ -import { UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { Meteor } from 'meteor/meteor' import { AllPubSubTypes } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { ProtectedString, unprotectString } from '../tempLib' @@ -43,10 +42,6 @@ export class CustomPublishMeteor }> { return this.#isReady } - get userId(): UserId | null { - return 
this._meteorSubscription.userId - } - /** * Register a function to be called when the subscriber unsubscribes */ diff --git a/meteor/server/main.ts b/meteor/server/main.ts index 30c7678f5c4..06cb00b0265 100644 --- a/meteor/server/main.ts +++ b/meteor/server/main.ts @@ -46,7 +46,6 @@ import './api/rest/api' import './Connections' import './coreSystem' import './cronjobs' -import './email' import './prometheus' import './api/deviceTriggers/observer' import './logo' @@ -55,4 +54,4 @@ import './systemTime' // Setup publications and security: import './publications/_publications' -import './security/_security' +import './security/securityVerify' diff --git a/meteor/server/methods.ts b/meteor/server/methods.ts index 49bee70b1af..3a3c1da46bf 100644 --- a/meteor/server/methods.ts +++ b/meteor/server/methods.ts @@ -4,8 +4,8 @@ import { logger } from './logging' import { extractFunctionSignature } from './lib' import { MethodContext, MethodContextAPI } from './api/methodContext' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { Settings } from './Settings' import { isPromise } from '@sofie-automation/shared-lib/dist/lib/lib' +import { assertConnectionHasOneOfPermissions } from './security/auth' type MeteorMethod = (this: MethodContext, ...args: any[]) => any @@ -142,25 +142,24 @@ function setMeteorMethods(orgMethods: MethodsInner, secret?: boolean): void { AllMeteorMethods.push(methodName) } }) - // @ts-expect-error: incompatible due to userId Meteor.methods(methods) } export type MeteorDebugMethod = (this: Meteor.MethodThisType, ...args: any[]) => Promise | any export function MeteorDebugMethods(methods: { [key: string]: MeteorDebugMethod }): void { - if (!Settings.enableUserAccounts) { - const fiberMethods: { [key: string]: (this: Meteor.MethodThisType, ...args: any[]) => any } = {} + const fiberMethods: { [key: string]: (this: Meteor.MethodThisType, ...args: any[]) => any } = {} - for (const [key, fn] of 
Object.entries(methods)) { - if (key && !!fn) { - fiberMethods[key] = function (this: Meteor.MethodThisType, ...args: any[]) { - return fn.call(this, ...args) - } + for (const [key, fn] of Object.entries(methods)) { + if (key && !!fn) { + fiberMethods[key] = function (this: Meteor.MethodThisType, ...args: any[]) { + assertConnectionHasOneOfPermissions(this.connection, 'developer') + + return fn.call(this, ...args) } } - - Meteor.methods(fiberMethods) } + + Meteor.methods(fiberMethods) } export function getRunningMethods(): RunningMethods { diff --git a/meteor/server/migration/api.ts b/meteor/server/migration/api.ts index 23a1169759f..05574802788 100644 --- a/meteor/server/migration/api.ts +++ b/meteor/server/migration/api.ts @@ -9,7 +9,6 @@ import { import * as Migrations from './databaseMigration' import { MigrationStepInputResult } from '@sofie-automation/blueprints-integration' import { MethodContextAPI } from '../api/methodContext' -import { SystemWriteAccess } from '../security/system' import { fixupConfigForShowStyleBase, fixupConfigForStudio, @@ -23,10 +22,15 @@ import { import { CoreSystemId, ShowStyleBaseId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { BlueprintValidateConfigForStudioResult } from '@sofie-automation/corelib/dist/worker/studio' import { runUpgradeForCoreSystem } from './upgrades/system' +import { assertConnectionHasOneOfPermissions } from '../security/auth' +import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' + +const PERMISSIONS_FOR_MIGRATIONS: Array = ['configure'] class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { async getMigrationStatus() { - await SystemWriteAccess.migrations(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) + return Migrations.getMigrationStatus() } @@ -41,20 +45,21 @@ class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { check(inputResults, Array) 
check(isFirstOfPartialMigrations, Match.Maybe(Boolean)) - await SystemWriteAccess.migrations(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return Migrations.runMigration(chunks, hash, inputResults, isFirstOfPartialMigrations || false) } async forceMigration(chunks: Array) { check(chunks, Array) - await SystemWriteAccess.migrations(this) + + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return Migrations.forceMigration(chunks) } async resetDatabaseVersions() { - await SystemWriteAccess.migrations(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return Migrations.resetDatabaseVersions() } @@ -62,7 +67,7 @@ class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { async fixupConfigForStudio(studioId: StudioId): Promise { check(studioId, String) - await SystemWriteAccess.migrations(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return fixupConfigForStudio(studioId) } @@ -70,7 +75,7 @@ class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { async ignoreFixupConfigForStudio(studioId: StudioId): Promise { check(studioId, String) - await SystemWriteAccess.migrations(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return ignoreFixupConfigForStudio(studioId) } @@ -78,7 +83,7 @@ class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { async validateConfigForStudio(studioId: StudioId): Promise { check(studioId, String) - await SystemWriteAccess.migrations(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return validateConfigForStudio(studioId) } @@ -86,7 +91,7 @@ class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { async runUpgradeForStudio(studioId: StudioId): Promise { check(studioId, String) - await 
SystemWriteAccess.migrations(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return runUpgradeForStudio(studioId) } @@ -94,7 +99,7 @@ class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { async fixupConfigForShowStyleBase(showStyleBaseId: ShowStyleBaseId): Promise { check(showStyleBaseId, String) - await SystemWriteAccess.migrations(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return fixupConfigForShowStyleBase(showStyleBaseId) } @@ -102,7 +107,7 @@ class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { async ignoreFixupConfigForShowStyleBase(showStyleBaseId: ShowStyleBaseId): Promise { check(showStyleBaseId, String) - await SystemWriteAccess.migrations(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return ignoreFixupConfigForShowStyleBase(showStyleBaseId) } @@ -112,7 +117,7 @@ class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { ): Promise { check(showStyleBaseId, String) - await SystemWriteAccess.migrations(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return validateConfigForShowStyleBase(showStyleBaseId) } @@ -120,13 +125,15 @@ class ServerMigrationAPI extends MethodContextAPI implements NewMigrationAPI { async runUpgradeForShowStyleBase(showStyleBaseId: ShowStyleBaseId): Promise { check(showStyleBaseId, String) - await SystemWriteAccess.migrations(this) + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return runUpgradeForShowStyleBase(showStyleBaseId) } async runUpgradeForCoreSystem(coreSystemId: CoreSystemId): Promise { - await SystemWriteAccess.migrations(this) + check(coreSystemId, String) + + assertConnectionHasOneOfPermissions(this.connection, ...PERMISSIONS_FOR_MIGRATIONS) return runUpgradeForCoreSystem(coreSystemId) } diff --git 
a/meteor/server/publications/blueprintUpgradeStatus/publication.ts b/meteor/server/publications/blueprintUpgradeStatus/publication.ts index 9ea8d72fe5d..00df57b8386 100644 --- a/meteor/server/publications/blueprintUpgradeStatus/publication.ts +++ b/meteor/server/publications/blueprintUpgradeStatus/publication.ts @@ -10,9 +10,6 @@ import { SetupObserversResult, TriggerUpdate, } from '../../lib/customPublication' -import { logger } from '../../logging' -import { resolveCredentials } from '../../security/lib/credentials' -import { NoSecurityReadAccess } from '../../security/noSecurity' import { ContentCache, CoreSystemFields, @@ -30,6 +27,7 @@ import { UIBlueprintUpgradeStatusId, } from '@sofie-automation/meteor-lib/dist/api/upgradeStatus' import { ICoreSystem } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' +import { assertConnectionHasOneOfPermissions } from '../../security/auth' type BlueprintUpgradeStatusArgs = Record @@ -290,12 +288,8 @@ meteorCustomPublish( MeteorPubSub.uiBlueprintUpgradeStatuses, CustomCollectionName.UIBlueprintUpgradeStatuses, async function (pub) { - const cred = await resolveCredentials({ userId: this.userId, token: undefined }) + assertConnectionHasOneOfPermissions(this.connection, 'configure', 'service') - if (!cred || NoSecurityReadAccess.any()) { - await createBlueprintUpgradeStatusSubscriptionHandle(pub) - } else { - logger.warn(`Pub.${CustomCollectionName.UIBlueprintUpgradeStatuses}: Not allowed`) - } + await createBlueprintUpgradeStatusSubscriptionHandle(pub) } ) diff --git a/meteor/server/publications/buckets.ts b/meteor/server/publications/buckets.ts index 3801f8b4671..589f63d3bbe 100644 --- a/meteor/server/publications/buckets.ts +++ b/meteor/server/publications/buckets.ts @@ -1,14 +1,12 @@ import { FindOptions } from '@sofie-automation/meteor-lib/dist/collections/lib' -import { BucketSecurity } from '../security/buckets' import { meteorPublish } from './lib/lib' import { MeteorPubSub } from 
'@sofie-automation/meteor-lib/dist/api/pubsub' import { Bucket } from '@sofie-automation/meteor-lib/dist/collections/Buckets' -import { StudioReadAccess } from '../security/studio' -import { isProtectedString } from '@sofie-automation/corelib/dist/protectedString' import { BucketAdLibActions, BucketAdLibs, Buckets } from '../collections' import { check, Match } from 'meteor/check' import { StudioId, BucketId, ShowStyleVariantId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' meteorPublish( MeteorPubSub.buckets, @@ -16,26 +14,23 @@ meteorPublish( check(studioId, String) check(bucketId, Match.Maybe(String)) + triggerWriteAccessBecauseNoCheckNecessary() + const modifier: FindOptions = { fields: {}, } - if ( - (await StudioReadAccess.studioContent(studioId, this)) || - (isProtectedString(bucketId) && bucketId && (await BucketSecurity.allowReadAccess(this, bucketId))) - ) { - return Buckets.findWithCursor( - bucketId - ? { - _id: bucketId, - studioId, - } - : { - studioId, - }, - modifier - ) - } - return null + + return Buckets.findWithCursor( + bucketId + ? 
{ + _id: bucketId, + studioId, + } + : { + studioId, + }, + modifier + ) } ) @@ -46,23 +41,22 @@ meteorPublish( check(bucketId, String) check(showStyleVariantIds, Array) - if (isProtectedString(bucketId) && (await BucketSecurity.allowReadAccess(this, bucketId))) { - return BucketAdLibs.findWithCursor( - { - studioId: studioId, - bucketId: bucketId, - showStyleVariantId: { - $in: [null, ...showStyleVariantIds], // null = valid for all variants - }, + triggerWriteAccessBecauseNoCheckNecessary() + + return BucketAdLibs.findWithCursor( + { + studioId: studioId, + bucketId: bucketId, + showStyleVariantId: { + $in: [null, ...showStyleVariantIds], // null = valid for all variants }, - { - fields: { - ingestInfo: 0, // This is a large blob, and is not of interest to the UI - }, - } - ) - } - return null + }, + { + fields: { + ingestInfo: 0, // This is a large blob, and is not of interest to the UI + }, + } + ) } ) @@ -73,22 +67,21 @@ meteorPublish( check(bucketId, String) check(showStyleVariantIds, Array) - if (isProtectedString(bucketId) && (await BucketSecurity.allowReadAccess(this, bucketId))) { - return BucketAdLibActions.findWithCursor( - { - studioId: studioId, - bucketId: bucketId, - showStyleVariantId: { - $in: [null, ...showStyleVariantIds], // null = valid for all variants - }, + triggerWriteAccessBecauseNoCheckNecessary() + + return BucketAdLibActions.findWithCursor( + { + studioId: studioId, + bucketId: bucketId, + showStyleVariantId: { + $in: [null, ...showStyleVariantIds], // null = valid for all variants }, - { - fields: { - ingestInfo: 0, // This is a large blob, and is not of interest to the UI - }, - } - ) - } - return null + }, + { + fields: { + ingestInfo: 0, // This is a large blob, and is not of interest to the UI + }, + } + ) } ) diff --git a/meteor/server/publications/deviceTriggersPreview.ts b/meteor/server/publications/deviceTriggersPreview.ts index 67e9edbf039..c8352ba51fa 100644 --- a/meteor/server/publications/deviceTriggersPreview.ts +++ 
b/meteor/server/publications/deviceTriggersPreview.ts @@ -9,8 +9,8 @@ import { DeviceTriggerArguments, UIDeviceTriggerPreview } from '@sofie-automatio import { getCurrentTime } from '../lib/lib' import { SetupObserversResult, setUpOptimizedObserverArray, TriggerUpdate } from '../lib/customPublication' import { CustomPublish, meteorCustomPublish } from '../lib/customPublication/publish' -import { StudioReadAccess } from '../security/studio' import { PeripheralDevices } from '../collections' +import { assertConnectionHasOneOfPermissions } from '../security/auth' /** IDEA: This could potentially be a Capped Collection, thus enabling scaling Core horizontally: * https://www.mongodb.com/docs/manual/core/capped-collections/ */ @@ -19,14 +19,12 @@ const lastTriggers: Record { - /** - * The id of the logged-in user, or `null` if no user is logged in. - * This is constant. However, if the logged-in user changes, the publish function - * is rerun with the new value, assuming it didn’t throw an error at the previous run. 
- */ - userId: UserId | null -} +export type SubscriptionContext = Omit /** * Unsafe wrapper around Meteor.publish @@ -82,90 +63,6 @@ export function meteorPublish( meteorPublishUnsafe(name, callback) } -export namespace AutoFillSelector { - /** Autofill an empty selector {} with organizationId of the current user */ - export async function organizationId( - userId: UserId | null, - selector: MongoQuery, - token: string | undefined - ): Promise<{ - cred: ResolvedCredentials | null - selector: MongoQuery - }> { - if (!selector) throw new Meteor.Error(400, 'selector argument missing') - - let cred: ResolvedCredentials | null = null - if (Settings.enableUserAccounts) { - if (!selector.organizationId) { - cred = await resolveCredentials({ userId: userId, token }) - if (cred.organizationId) selector.organizationId = cred.organizationId as any - // TODO - should this block all access if cred.organizationId is not set - } - } - return { cred, selector } - } - /** Autofill an empty selector {} with deviceId of the current user's peripheralDevices */ - export async function deviceId( - userId: UserId | null, - selector: MongoQuery, - token: string | undefined - ): Promise<{ - cred: ResolvedCredentials | null - selector: MongoQuery - }> { - if (!selector) throw new Meteor.Error(400, 'selector argument missing') - - let cred: ResolvedCredentials | null = null - if (Settings.enableUserAccounts) { - if (!selector.deviceId) { - cred = await resolveCredentials({ userId: userId, token }) - if (cred.organizationId) { - const devices = (await PeripheralDevices.findFetchAsync( - { - organizationId: cred.organizationId, - }, - { projection: { _id: 1 } } - )) as Array> - - selector.deviceId = { $in: devices.map((d) => d._id) } as any - } - // TODO - should this block all access if cred.organizationId is not set - } - } - return { cred, selector } - } - /** Autofill an empty selector {} with showStyleBaseId of the current user's showStyleBases */ - export async function showStyleBaseId( 
- userId: UserId | null, - selector: MongoQuery, - token: string | undefined - ): Promise<{ - cred: ResolvedCredentials | null - selector: MongoQuery - }> { - if (!selector) throw new Meteor.Error(400, 'selector argument missing') - - let cred: ResolvedCredentials | null = null - if (Settings.enableUserAccounts) { - if (!selector.showStyleBaseId) { - cred = await resolveCredentials({ userId: userId, token }) - if (cred.organizationId) { - const showStyleBases = (await ShowStyleBases.findFetchAsync( - { - organizationId: cred.organizationId, - }, - { projection: { _id: 1 } } - )) as Array> - - selector.showStyleBaseId = { $in: showStyleBases.map((d) => d._id) } as any - } - // TODO - should this block all access if cred.organizationId is not set - } - } - return { cred, selector } - } -} - /** * Await each observer, and return the handles * If an observer throws, this will make sure to stop all the ones that were successfully started, to avoid leaking memory diff --git a/meteor/server/publications/mountedTriggers.ts b/meteor/server/publications/mountedTriggers.ts index 9976677de6b..13c520221b8 100644 --- a/meteor/server/publications/mountedTriggers.ts +++ b/meteor/server/publications/mountedTriggers.ts @@ -1,19 +1,18 @@ import { Meteor } from 'meteor/meteor' import { CustomPublish, meteorCustomPublish } from '../lib/customPublication' import { PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' import { logger } from '../logging' import { DeviceTriggerMountedActionAdlibsPreview, DeviceTriggerMountedActions } from '../api/deviceTriggers/observer' import { Mongo } from 'meteor/mongo' import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' import _ from 'underscore' -import { PeripheralDevices } from '../collections' import { check } from 'meteor/check' import { PeripheralDevicePubSub, PeripheralDevicePubSubCollectionsNames, } from 
'@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { checkAccessAndGetPeripheralDevice } from '../security/check' const PUBLICATION_DEBOUNCE = 20 @@ -24,26 +23,20 @@ meteorCustomPublish( check(deviceId, String) check(deviceIds, [String]) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - const peripheralDevice = await PeripheralDevices.findOneAsync(deviceId) - - if (!peripheralDevice) throw new Meteor.Error(404, `PeripheralDevice "${deviceId}" not found`) - - const studioId = peripheralDevice.studioId - if (!studioId) throw new Meteor.Error(400, `Peripheral Device "${deviceId}" not attached to a studio`) - - cursorCustomPublish( - pub, - DeviceTriggerMountedActions.find({ - studioId, - deviceId: { - $in: deviceIds, - }, - }) - ) - } else { - logger.warn(`Pub.mountedTriggersForDevice: Not allowed: "${deviceId}"`) - } + const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) + + const studioId = peripheralDevice.studioId + if (!studioId) throw new Meteor.Error(400, `Peripheral Device "${deviceId}" not attached to a studio`) + + cursorCustomPublish( + pub, + DeviceTriggerMountedActions.find({ + studioId, + deviceId: { + $in: deviceIds, + }, + }) + ) } ) @@ -53,23 +46,17 @@ meteorCustomPublish( async function (pub, deviceId: PeripheralDeviceId, token: string | undefined) { check(deviceId, String) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - const peripheralDevice = await PeripheralDevices.findOneAsync(deviceId) + const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - if (!peripheralDevice) throw new Meteor.Error(404, `PeripheralDevice "${deviceId}" not found`) + const studioId = peripheralDevice.studioId + if (!studioId) throw new Meteor.Error(400, `Peripheral Device "${deviceId}" 
not attached to a studio`) - const studioId = peripheralDevice.studioId - if (!studioId) throw new Meteor.Error(400, `Peripheral Device "${deviceId}" not attached to a studio`) - - cursorCustomPublish( - pub, - DeviceTriggerMountedActionAdlibsPreview.find({ - studioId, - }) - ) - } else { - logger.warn(`Pub.mountedTriggersForDevicePreview: Not allowed: "${deviceId}"`) - } + cursorCustomPublish( + pub, + DeviceTriggerMountedActionAdlibsPreview.find({ + studioId, + }) + ) } ) diff --git a/meteor/server/publications/organization.ts b/meteor/server/publications/organization.ts index f596d8b3c6f..489f3edc46f 100644 --- a/meteor/server/publications/organization.ts +++ b/meteor/server/publications/organization.ts @@ -1,26 +1,29 @@ -import { meteorPublish, AutoFillSelector } from './lib/lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { Blueprint } from '@sofie-automation/corelib/dist/dataModel/Blueprint' import { Evaluation } from '@sofie-automation/meteor-lib/dist/collections/Evaluations' import { SnapshotItem } from '@sofie-automation/meteor-lib/dist/collections/Snapshots' import { UserActionsLogItem } from '@sofie-automation/meteor-lib/dist/collections/UserActionsLog' -import { OrganizationReadAccess } from '../security/organization' import { FindOptions } from '@sofie-automation/meteor-lib/dist/collections/lib' import { DBOrganization } from '@sofie-automation/meteor-lib/dist/collections/Organization' -import { isProtectedString } from '@sofie-automation/corelib/dist/protectedString' import { Blueprints, Evaluations, Organizations, Snapshots, UserActionsLog } from '../collections' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { BlueprintId, OrganizationId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' import { check, Match } from '../lib/check' import { getCurrentTime } from 
'../lib/lib' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' +import { assertConnectionHasOneOfPermissions } from '../security/auth' meteorPublish( MeteorPubSub.organization, - async function (organizationId: OrganizationId | null, token: string | undefined) { + async function (organizationId: OrganizationId | null, _token: string | undefined) { + triggerWriteAccessBecauseNoCheckNecessary() + if (!organizationId) return null - const { cred, selector } = await AutoFillSelector.organizationId(this.userId, { _id: organizationId }, token) + const selector: MongoQuery = { _id: organizationId } + const modifier: FindOptions = { fields: { name: 1, @@ -29,83 +32,69 @@ meteorPublish( userRoles: 1, // to not expose too much information consider [`userRoles.${this.userId}`]: 1, and a method/publication for getting all the roles, or limiting the returned roles based on requesting user's role }, } - if ( - isProtectedString(selector.organizationId) && - (!cred || (await OrganizationReadAccess.organizationContent(selector.organizationId, cred))) - ) { - return Organizations.findWithCursor({ _id: selector.organizationId }, modifier) - } - return null + + return Organizations.findWithCursor({ _id: selector.organizationId }, modifier) } ) -meteorPublish(CorelibPubSub.blueprints, async function (blueprintIds: BlueprintId[] | null, token: string | undefined) { - check(blueprintIds, Match.Maybe(Array)) +meteorPublish( + CorelibPubSub.blueprints, + async function (blueprintIds: BlueprintId[] | null, _token: string | undefined) { + assertConnectionHasOneOfPermissions(this.connection, 'configure') - // If values were provided, they must have values - if (blueprintIds && blueprintIds.length === 0) return null + check(blueprintIds, Match.Maybe(Array)) - const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) + // If values were provided, they must have values + if (blueprintIds && blueprintIds.length === 0) return 
null - // Add the requested filter - if (blueprintIds) selector._id = { $in: blueprintIds } + // Add the requested filter + const selector: MongoQuery = {} + if (blueprintIds) selector._id = { $in: blueprintIds } - if (!cred || (await OrganizationReadAccess.organizationContent(selector.organizationId, cred))) { return Blueprints.findWithCursor(selector, { fields: { code: 0, }, }) } - return null -}) -meteorPublish(MeteorPubSub.evaluations, async function (dateFrom: number, dateTo: number, token: string | undefined) { - const selector0: MongoQuery = { +) +meteorPublish(MeteorPubSub.evaluations, async function (dateFrom: number, dateTo: number, _token: string | undefined) { + triggerWriteAccessBecauseNoCheckNecessary() + + const selector: MongoQuery = { timestamp: { $gte: dateFrom, $lt: dateTo, }, } - const { cred, selector } = await AutoFillSelector.organizationId(this.userId, selector0, token) - if (!cred || (await OrganizationReadAccess.organizationContent(selector.organizationId, cred))) { - return Evaluations.findWithCursor(selector) - } - return null + return Evaluations.findWithCursor(selector) }) -meteorPublish(MeteorPubSub.snapshots, async function (token: string | undefined) { - const selector0: MongoQuery = { +meteorPublish(MeteorPubSub.snapshots, async function (_token: string | undefined) { + assertConnectionHasOneOfPermissions(this.connection, 'configure') + + const selector: MongoQuery = { created: { $gt: getCurrentTime() - 30 * 24 * 3600 * 1000, // last 30 days }, } - const { cred, selector } = await AutoFillSelector.organizationId(this.userId, selector0, token) - if (!cred || (await OrganizationReadAccess.organizationContent(selector.organizationId, cred))) { - return Snapshots.findWithCursor(selector) - } - return null + return Snapshots.findWithCursor(selector) }) meteorPublish( MeteorPubSub.userActionsLog, - async function (dateFrom: number, dateTo: number, token: string | undefined) { - const selector0: MongoQuery = { + async function (dateFrom: 
number, dateTo: number, _token: string | undefined) { + triggerWriteAccessBecauseNoCheckNecessary() + + const selector: MongoQuery = { timestamp: { $gte: dateFrom, $lt: dateTo, }, } - const { cred, selector } = await AutoFillSelector.organizationId( - this.userId, - selector0, - token - ) - if (!cred || (await OrganizationReadAccess.organizationContent(selector.organizationId, cred))) { - return UserActionsLog.findWithCursor(selector, { - limit: 10_000, // this is to prevent having a publication that produces a very large array - }) - } - return null + return UserActionsLog.findWithCursor(selector, { + limit: 10_000, // this is to prevent having a publication that produces a very large array + }) } ) diff --git a/meteor/server/publications/packageManager/expectedPackages/publication.ts b/meteor/server/publications/packageManager/expectedPackages/publication.ts index 1952fb7057c..66ee316ae7f 100644 --- a/meteor/server/publications/packageManager/expectedPackages/publication.ts +++ b/meteor/server/publications/packageManager/expectedPackages/publication.ts @@ -1,5 +1,3 @@ -import { Meteor } from 'meteor/meteor' -import { PeripheralDeviceReadAccess } from '../../../security/peripheralDevice' import { DBStudio, StudioPackageContainer } from '@sofie-automation/corelib/dist/dataModel/Studio' import { TriggerUpdate, @@ -19,7 +17,7 @@ import { PieceInstanceId, StudioId, } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PeripheralDevices, Studios } from '../../../collections' +import { Studios } from '../../../collections' import { check, Match } from 'meteor/check' import { PackageManagerExpectedPackage } from '@sofie-automation/shared-lib/dist/package-manager/publications' import { ExpectedPackagesContentObserver } from './contentObserver' @@ -30,6 +28,7 @@ import { PeripheralDevicePubSub, PeripheralDevicePubSubCollectionsNames, } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { checkAccessAndGetPeripheralDevice } from 
'../../../security/check' interface ExpectedPackagesPublicationArgs { readonly studioId: StudioId @@ -206,34 +205,28 @@ meteorCustomPublish( check(deviceId, String) check(filterPlayoutDeviceIds, Match.Maybe([String])) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - const peripheralDevice = await PeripheralDevices.findOneAsync(deviceId) + const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - if (!peripheralDevice) throw new Meteor.Error('PeripheralDevice "' + deviceId + '" not found') - - const studioId = peripheralDevice.studioId - if (!studioId) { - logger.warn(`Pub.packageManagerExpectedPackages: device "${peripheralDevice._id}" has no studioId`) - return this.ready() - } - - await setUpCollectionOptimizedObserver< - PackageManagerExpectedPackage, - ExpectedPackagesPublicationArgs, - ExpectedPackagesPublicationState, - ExpectedPackagesPublicationUpdateProps - >( - `${PeripheralDevicePubSub.packageManagerExpectedPackages}_${studioId}_${deviceId}_${JSON.stringify( - (filterPlayoutDeviceIds || []).sort() - )}`, - { studioId, deviceId, filterPlayoutDeviceIds }, - setupExpectedPackagesPublicationObservers, - manipulateExpectedPackagesPublicationData, - pub, - 500 // ms, wait this time before sending an update - ) - } else { - logger.warn(`Pub.packageManagerExpectedPackages: Not allowed: "${deviceId}"`) + const studioId = peripheralDevice.studioId + if (!studioId) { + logger.warn(`Pub.packageManagerExpectedPackages: device "${peripheralDevice._id}" has no studioId`) + return this.ready() } + + await setUpCollectionOptimizedObserver< + PackageManagerExpectedPackage, + ExpectedPackagesPublicationArgs, + ExpectedPackagesPublicationState, + ExpectedPackagesPublicationUpdateProps + >( + `${PeripheralDevicePubSub.packageManagerExpectedPackages}_${studioId}_${deviceId}_${JSON.stringify( + (filterPlayoutDeviceIds || []).sort() + )}`, + { studioId, deviceId, filterPlayoutDeviceIds 
}, + setupExpectedPackagesPublicationObservers, + manipulateExpectedPackagesPublicationData, + pub, + 500 // ms, wait this time before sending an update + ) } ) diff --git a/meteor/server/publications/packageManager/packageContainers.ts b/meteor/server/publications/packageManager/packageContainers.ts index 0accf66181d..133569a882d 100644 --- a/meteor/server/publications/packageManager/packageContainers.ts +++ b/meteor/server/publications/packageManager/packageContainers.ts @@ -5,9 +5,8 @@ import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mo import { PackageContainer } from '@sofie-automation/shared-lib/dist/package-manager/package' import { PackageManagerPackageContainers } from '@sofie-automation/shared-lib/dist/package-manager/publications' import { check } from 'meteor/check' -import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' -import { PeripheralDevices, Studios } from '../../collections' +import { Studios } from '../../collections' import { meteorCustomPublish, SetupObserversResult, @@ -15,12 +14,12 @@ import { TriggerUpdate, } from '../../lib/customPublication' import { logger } from '../../logging' -import { PeripheralDeviceReadAccess } from '../../security/peripheralDevice' import { PeripheralDevicePubSub, PeripheralDevicePubSubCollectionsNames, } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { checkAccessAndGetPeripheralDevice } from '../../security/check' type StudioFields = '_id' | 'packageContainersWithOverrides' const studioFieldSpecifier = literal>>({ @@ -96,32 +95,26 @@ meteorCustomPublish( async function (pub, deviceId: PeripheralDeviceId, token: string | undefined) { check(deviceId, String) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - const peripheralDevice = await 
PeripheralDevices.findOneAsync(deviceId) + const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - if (!peripheralDevice) throw new Meteor.Error('PeripheralDevice "' + deviceId + '" not found') - - const studioId = peripheralDevice.studioId - if (!studioId) { - logger.warn(`Pub.packageManagerPackageContainers: device "${peripheralDevice._id}" has no studioId`) - return this.ready() - } - - await setUpOptimizedObserverArray< - PackageManagerPackageContainers, - PackageManagerPackageContainersArgs, - PackageManagerPackageContainersState, - PackageManagerPackageContainersUpdateProps - >( - `${PeripheralDevicePubSub.packageManagerPackageContainers}_${studioId}_${deviceId}`, - { studioId, deviceId }, - setupExpectedPackagesPublicationObservers, - manipulateExpectedPackagesPublicationData, - pub, - 500 // ms, wait this time before sending an update - ) - } else { - logger.warn(`Pub.packageManagerPackageContainers: Not allowed: "${deviceId}"`) + const studioId = peripheralDevice.studioId + if (!studioId) { + logger.warn(`Pub.packageManagerPackageContainers: device "${peripheralDevice._id}" has no studioId`) + return this.ready() } + + await setUpOptimizedObserverArray< + PackageManagerPackageContainers, + PackageManagerPackageContainersArgs, + PackageManagerPackageContainersState, + PackageManagerPackageContainersUpdateProps + >( + `${PeripheralDevicePubSub.packageManagerPackageContainers}_${studioId}_${deviceId}`, + { studioId, deviceId }, + setupExpectedPackagesPublicationObservers, + manipulateExpectedPackagesPublicationData, + pub, + 500 // ms, wait this time before sending an update + ) } ) diff --git a/meteor/server/publications/packageManager/playoutContext.ts b/meteor/server/publications/packageManager/playoutContext.ts index 08c881fafe5..70b55955ca1 100644 --- a/meteor/server/publications/packageManager/playoutContext.ts +++ b/meteor/server/publications/packageManager/playoutContext.ts @@ -5,9 +5,8 @@ import { literal } from 
'@sofie-automation/corelib/dist/lib' import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mongo' import { PackageManagerPlayoutContext } from '@sofie-automation/shared-lib/dist/package-manager/publications' import { check } from 'meteor/check' -import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' -import { PeripheralDevices, RundownPlaylists, Rundowns } from '../../collections' +import { RundownPlaylists, Rundowns } from '../../collections' import { meteorCustomPublish, SetupObserversResult, @@ -15,11 +14,11 @@ import { TriggerUpdate, } from '../../lib/customPublication' import { logger } from '../../logging' -import { PeripheralDeviceReadAccess } from '../../security/peripheralDevice' import { PeripheralDevicePubSub, PeripheralDevicePubSubCollectionsNames, } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { checkAccessAndGetPeripheralDevice } from '../../security/check' export type RundownPlaylistCompact = Pick const rundownPlaylistFieldSpecifier = literal>({ @@ -114,32 +113,26 @@ meteorCustomPublish( async function (pub, deviceId: PeripheralDeviceId, token: string | undefined) { check(deviceId, String) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - const peripheralDevice = await PeripheralDevices.findOneAsync(deviceId) + const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - if (!peripheralDevice) throw new Meteor.Error('PeripheralDevice "' + deviceId + '" not found') - - const studioId = peripheralDevice.studioId - if (!studioId) { - logger.warn(`Pub.packageManagerPlayoutContext: device "${peripheralDevice._id}" has no studioId`) - return this.ready() - } - - await setUpOptimizedObserverArray< - PackageManagerPlayoutContext, - PackageManagerPlayoutContextArgs, - PackageManagerPlayoutContextState, - PackageManagerPlayoutContextUpdateProps - >( - 
`${PeripheralDevicePubSub.packageManagerPlayoutContext}_${studioId}_${deviceId}`, - { studioId, deviceId }, - setupExpectedPackagesPublicationObservers, - manipulateExpectedPackagesPublicationData, - pub, - 500 // ms, wait this time before sending an update - ) - } else { - logger.warn(`Pub.packageManagerPlayoutContext: Not allowed: "${deviceId}"`) + const studioId = peripheralDevice.studioId + if (!studioId) { + logger.warn(`Pub.packageManagerPlayoutContext: device "${peripheralDevice._id}" has no studioId`) + return this.ready() } + + await setUpOptimizedObserverArray< + PackageManagerPlayoutContext, + PackageManagerPlayoutContextArgs, + PackageManagerPlayoutContextState, + PackageManagerPlayoutContextUpdateProps + >( + `${PeripheralDevicePubSub.packageManagerPlayoutContext}_${studioId}_${deviceId}`, + { studioId, deviceId }, + setupExpectedPackagesPublicationObservers, + manipulateExpectedPackagesPublicationData, + pub, + 500 // ms, wait this time before sending an update + ) } ) diff --git a/meteor/server/publications/partInstancesUI/publication.ts b/meteor/server/publications/partInstancesUI/publication.ts index 11e017ae164..543e58bd33e 100644 --- a/meteor/server/publications/partInstancesUI/publication.ts +++ b/meteor/server/publications/partInstancesUI/publication.ts @@ -9,8 +9,6 @@ import { } from '../../lib/customPublication' import { logger } from '../../logging' import { CustomCollectionName, MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' -import { resolveCredentials } from '../../security/lib/credentials' -import { NoSecurityReadAccess } from '../../security/noSecurity' import { ContentCache, PartInstanceOmitedFields, createReactiveContentCache } from './reactiveContentCache' import { ReadonlyDeep } from 'type-fest' import { RundownPlaylists } from '../../collections' @@ -28,6 +26,7 @@ import { modifyPartInstanceForQuickLoop, stringsToIndexLookup, } from '../lib/quickLoop' +import { triggerWriteAccessBecauseNoCheckNecessary } from 
'../../security/securityVerify' interface UIPartInstancesArgs { readonly playlistActivationId: RundownPlaylistActivationId @@ -206,23 +205,24 @@ meteorCustomPublish( async function (pub, playlistActivationId: RundownPlaylistActivationId | null) { check(playlistActivationId, Match.Maybe(String)) - const credentials = await resolveCredentials({ userId: this.userId, token: undefined }) - - if (playlistActivationId && (!credentials || NoSecurityReadAccess.any())) { - await setUpCollectionOptimizedObserver< - Omit, - UIPartInstancesArgs, - UIPartInstancesState, - UIPartInstancesUpdateProps - >( - `pub_${MeteorPubSub.uiPartInstances}_${playlistActivationId}`, - { playlistActivationId }, - setupUIPartInstancesPublicationObservers, - manipulateUIPartInstancesPublicationData, - pub - ) - } else { - logger.warn(`Pub.uiPartInstances: Not allowed:"${playlistActivationId}"`) + triggerWriteAccessBecauseNoCheckNecessary() + + if (!playlistActivationId) { + logger.info(`Pub.${CustomCollectionName.UISegmentPartNotes}: Not playlistActivationId`) + return } + + await setUpCollectionOptimizedObserver< + Omit, + UIPartInstancesArgs, + UIPartInstancesState, + UIPartInstancesUpdateProps + >( + `pub_${MeteorPubSub.uiPartInstances}_${playlistActivationId}`, + { playlistActivationId }, + setupUIPartInstancesPublicationObservers, + manipulateUIPartInstancesPublicationData, + pub + ) } ) diff --git a/meteor/server/publications/partsUI/publication.ts b/meteor/server/publications/partsUI/publication.ts index 6e5b0515536..24460ab13c3 100644 --- a/meteor/server/publications/partsUI/publication.ts +++ b/meteor/server/publications/partsUI/publication.ts @@ -9,9 +9,6 @@ import { } from '../../lib/customPublication' import { logger } from '../../logging' import { CustomCollectionName, MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' -import { RundownPlaylistReadAccess } from '../../security/rundownPlaylist' -import { resolveCredentials } from '../../security/lib/credentials' -import 
{ NoSecurityReadAccess } from '../../security/noSecurity' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { ContentCache, PartOmitedFields, createReactiveContentCache } from './reactiveContentCache' import { ReadonlyDeep } from 'type-fest' @@ -23,6 +20,7 @@ import { RundownsObserver } from '../lib/rundownsObserver' import { RundownContentObserver } from './rundownContentObserver' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { extractRanks, findMarkerPosition, modifyPartForQuickLoop, stringsToIndexLookup } from '../lib/quickLoop' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../../security/securityVerify' interface UIPartsArgs { readonly playlistId: RundownPlaylistId @@ -193,27 +191,24 @@ meteorCustomPublish( async function (pub, playlistId: RundownPlaylistId | null) { check(playlistId, String) - const credentials = await resolveCredentials({ userId: this.userId, token: undefined }) + triggerWriteAccessBecauseNoCheckNecessary() - if ( - !credentials || - NoSecurityReadAccess.any() || - (playlistId && (await RundownPlaylistReadAccess.rundownPlaylistContent(playlistId, credentials))) - ) { - await setUpCollectionOptimizedObserver< - Omit, - UIPartsArgs, - UIPartsState, - UIPartsUpdateProps - >( - `pub_${MeteorPubSub.uiParts}_${playlistId}`, - { playlistId }, - setupUIPartsPublicationObservers, - manipulateUIPartsPublicationData, - pub - ) - } else { + if (!playlistId) { logger.warn(`Pub.uiParts: Not allowed: "${playlistId}"`) + return } + + await setUpCollectionOptimizedObserver< + Omit, + UIPartsArgs, + UIPartsState, + UIPartsUpdateProps + >( + `pub_${MeteorPubSub.uiParts}_${playlistId}`, + { playlistId }, + setupUIPartsPublicationObservers, + manipulateUIPartsPublicationData, + pub + ) } ) diff --git a/meteor/server/publications/peripheralDevice.ts b/meteor/server/publications/peripheralDevice.ts index 1ead8e0e6d3..a6add93fdc4 100644 --- 
a/meteor/server/publications/peripheralDevice.ts +++ b/meteor/server/publications/peripheralDevice.ts @@ -1,38 +1,20 @@ -import { Meteor } from 'meteor/meteor' import { check, Match } from '../lib/check' -import { meteorPublish, AutoFillSelector } from './lib/lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' -import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { OrganizationReadAccess } from '../security/organization' -import { StudioReadAccess } from '../security/studio' import { MongoFieldSpecifierZeroes, MongoQuery } from '@sofie-automation/corelib/dist/mongo' -import { Credentials, ResolvedCredentials } from '../security/lib/credentials' -import { NoSecurityReadAccess } from '../security/noSecurity' import { PeripheralDeviceId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { MediaWorkFlows, MediaWorkFlowSteps, PeripheralDeviceCommands, PeripheralDevices } from '../collections' -import { MediaWorkFlow } from '@sofie-automation/shared-lib/dist/core/model/MediaWorkFlows' -import { MediaWorkFlowStep } from '@sofie-automation/shared-lib/dist/core/model/MediaWorkFlowSteps' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' import { PeripheralDevicePubSub } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' import { clone } from '@sofie-automation/corelib/dist/lib' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' +import { checkAccessAndGetPeripheralDevice } from '../security/check' /* * This file contains publications for the peripheralDevices, such as playout-gateway, mos-gateway and package-manager */ -async function checkAccess(cred: Credentials | ResolvedCredentials | null, selector: MongoQuery) { - if (!selector) throw new Meteor.Error(400, 'selector argument missing') - 
return ( - !cred || - NoSecurityReadAccess.any() || - (selector._id && (await PeripheralDeviceReadAccess.peripheralDevice(selector._id, cred))) || - (selector.organizationId && - (await OrganizationReadAccess.organizationContent(selector.organizationId, cred))) || - (selector.studioId && (await StudioReadAccess.studioContent(selector.studioId, cred))) - ) -} - const peripheralDeviceFields: MongoFieldSpecifierZeroes = { token: 0, secretSettings: 0, @@ -43,78 +25,67 @@ meteorPublish( async function (peripheralDeviceIds: PeripheralDeviceId[] | null, token: string | undefined) { check(peripheralDeviceIds, Match.Maybe(Array)) + triggerWriteAccessBecauseNoCheckNecessary() + // If values were provided, they must have values if (peripheralDeviceIds && peripheralDeviceIds.length === 0) return null - const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) - // Add the requested filter + const selector: MongoQuery = {} if (peripheralDeviceIds) selector._id = { $in: peripheralDeviceIds } - if (await checkAccess(cred, selector)) { - const fields = clone(peripheralDeviceFields) - if (selector._id && token) { - // in this case, send the secretSettings: - delete fields.secretSettings - } - return PeripheralDevices.findWithCursor(selector, { - fields, - }) + const fields = clone(peripheralDeviceFields) + if (selector._id && token) { + // in this case, send the secretSettings: + delete fields.secretSettings } - return null + return PeripheralDevices.findWithCursor(selector, { + fields, + }) } ) meteorPublish(CorelibPubSub.peripheralDevicesAndSubDevices, async function (studioId: StudioId) { - const { cred, selector } = await AutoFillSelector.organizationId( - this.userId, - { studioId }, - undefined - ) - if (await checkAccess(cred, selector)) { - // TODO - this is not correctly reactive when changing the `studioId` property of a parent device - const parents = (await PeripheralDevices.findFetchAsync(selector, { projection: { _id: 1 } })) as Array< - 
Pick - > + triggerWriteAccessBecauseNoCheckNecessary() - return PeripheralDevices.findWithCursor( - { - $or: [ - { - parentDeviceId: { $in: parents.map((i) => i._id) }, - }, - selector, - ], - }, - { - fields: peripheralDeviceFields, - } - ) + const selector: MongoQuery = { + studioId, } - return null + + // TODO - this is not correctly reactive when changing the `studioId` property of a parent device + const parents = (await PeripheralDevices.findFetchAsync(selector, { projection: { _id: 1 } })) as Array< + Pick + > + + return PeripheralDevices.findWithCursor( + { + $or: [ + { + parentDeviceId: { $in: parents.map((i) => i._id) }, + }, + selector, + ], + }, + { + fields: peripheralDeviceFields, + } + ) }) meteorPublish( PeripheralDevicePubSub.peripheralDeviceCommands, async function (deviceId: PeripheralDeviceId, token: string | undefined) { - if (!deviceId) throw new Meteor.Error(400, 'deviceId argument missing') - check(deviceId, String) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - return PeripheralDeviceCommands.findWithCursor({ deviceId: deviceId }) - } - return null + await checkAccessAndGetPeripheralDevice(deviceId, token, this) + + return PeripheralDeviceCommands.findWithCursor({ deviceId: deviceId }) } ) -meteorPublish(MeteorPubSub.mediaWorkFlows, async function (token: string | undefined) { - const { cred, selector } = await AutoFillSelector.deviceId(this.userId, {}, token) - if (!cred || (await PeripheralDeviceReadAccess.peripheralDeviceContent(selector.deviceId, cred))) { - return MediaWorkFlows.findWithCursor(selector) - } - return null +meteorPublish(MeteorPubSub.mediaWorkFlows, async function (_token: string | undefined) { + triggerWriteAccessBecauseNoCheckNecessary() + + return MediaWorkFlows.findWithCursor({}) }) -meteorPublish(MeteorPubSub.mediaWorkFlowSteps, async function (token: string | undefined) { - const { cred, selector } = await AutoFillSelector.deviceId(this.userId, {}, 
token) - if (!cred || (await PeripheralDeviceReadAccess.peripheralDeviceContent(selector.deviceId, cred))) { - return MediaWorkFlowSteps.findWithCursor(selector) - } - return null +meteorPublish(MeteorPubSub.mediaWorkFlowSteps, async function (_token: string | undefined) { + triggerWriteAccessBecauseNoCheckNecessary() + + return MediaWorkFlowSteps.findWithCursor({}) }) diff --git a/meteor/server/publications/peripheralDeviceForDevice.ts b/meteor/server/publications/peripheralDeviceForDevice.ts index f98b37e6ffd..cb45ec57ee0 100644 --- a/meteor/server/publications/peripheralDeviceForDevice.ts +++ b/meteor/server/publications/peripheralDeviceForDevice.ts @@ -1,5 +1,3 @@ -import { Meteor } from 'meteor/meteor' -import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' import { PeripheralDevice, PeripheralDeviceCategory } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { PeripheralDevices, Studios } from '../collections' @@ -26,6 +24,7 @@ import { PeripheralDevicePubSub, PeripheralDevicePubSubCollectionsNames, } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { checkAccessAndGetPeripheralDevice } from '../security/check' interface PeripheralDeviceForDeviceArgs { readonly deviceId: PeripheralDeviceId @@ -207,26 +206,22 @@ meteorCustomPublish( async function (pub, deviceId: PeripheralDeviceId, token: string | undefined) { check(deviceId, String) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - const peripheralDevice = await PeripheralDevices.findOneAsync(deviceId) - - if (!peripheralDevice) throw new Meteor.Error('PeripheralDevice "' + deviceId + '" not found') - - const studioId = peripheralDevice.studioId - if (!studioId) return - - await setUpOptimizedObserverArray< - PeripheralDeviceForDevice, - PeripheralDeviceForDeviceArgs, - PeripheralDeviceForDeviceState, - 
PeripheralDeviceForDeviceUpdateProps - >( - `${PeripheralDevicePubSubCollectionsNames.peripheralDeviceForDevice}_${deviceId}`, - { deviceId }, - setupPeripheralDevicePublicationObservers, - manipulatePeripheralDevicePublicationData, - pub - ) - } + const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) + + const studioId = peripheralDevice.studioId + if (!studioId) return + + await setUpOptimizedObserverArray< + PeripheralDeviceForDevice, + PeripheralDeviceForDeviceArgs, + PeripheralDeviceForDeviceState, + PeripheralDeviceForDeviceUpdateProps + >( + `${PeripheralDevicePubSubCollectionsNames.peripheralDeviceForDevice}_${deviceId}`, + { deviceId }, + setupPeripheralDevicePublicationObservers, + manipulatePeripheralDevicePublicationData, + pub + ) } ) diff --git a/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts b/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts index 8661244883f..8942d5f75d7 100644 --- a/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts +++ b/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts @@ -20,11 +20,7 @@ import { TriggerUpdate, SetupObserversResult, } from '../../../lib/customPublication' -import { logger } from '../../../logging' -import { resolveCredentials } from '../../../security/lib/credentials' -import { NoSecurityReadAccess } from '../../../security/noSecurity' import { BucketContentCache, createReactiveContentCache } from './bucketContentCache' -import { StudioReadAccess } from '../../../security/studio' import { Bucket } from '@sofie-automation/meteor-lib/dist/collections/Buckets' import { addItemsWithDependenciesChangesToChangedSet, @@ -39,8 +35,8 @@ import { import { BucketContentObserver } from './bucketContentObserver' import { regenerateForBucketActionIds, regenerateForBucketAdLibIds } from './regenerateForItem' import { PieceContentStatusStudio } from '../checkPieceContentStatus' -import { BucketSecurity } from 
'../../../security/buckets' import { check } from 'meteor/check' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../../../security/securityVerify' interface UIBucketContentStatusesArgs { readonly studioId: StudioId @@ -250,30 +246,20 @@ meteorCustomPublish( check(studioId, String) check(bucketId, String) - const cred = await resolveCredentials({ userId: this.userId, token: undefined }) + triggerWriteAccessBecauseNoCheckNecessary() - if ( - NoSecurityReadAccess.any() || - (studioId && - bucketId && - (await StudioReadAccess.studioContent(studioId, cred)) && - (await BucketSecurity.allowReadAccess(cred, bucketId))) - ) { - await setUpCollectionOptimizedObserver< - UIBucketContentStatus, - UIBucketContentStatusesArgs, - UIBucketContentStatusesState, - UIBucketContentStatusesUpdateProps - >( - `pub_${MeteorPubSub.uiBucketContentStatuses}_${studioId}_${bucketId}`, - { studioId, bucketId }, - setupUIBucketContentStatusesPublicationObservers, - manipulateUIBucketContentStatusesPublicationData, - pub, - 100 - ) - } else { - logger.warn(`Pub.${CustomCollectionName.UIBucketContentStatuses}: Not allowed: "${studioId}" "${bucketId}"`) - } + await setUpCollectionOptimizedObserver< + UIBucketContentStatus, + UIBucketContentStatusesArgs, + UIBucketContentStatusesState, + UIBucketContentStatusesUpdateProps + >( + `pub_${MeteorPubSub.uiBucketContentStatuses}_${studioId}_${bucketId}`, + { studioId, bucketId }, + setupUIBucketContentStatusesPublicationObservers, + manipulateUIBucketContentStatusesPublicationData, + pub, + 100 + ) } ) diff --git a/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts b/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts index 1b20d6de6a7..a190378eacc 100644 --- a/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts +++ b/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts @@ -32,9 +32,6 @@ import { TriggerUpdate, } from '../../../lib/customPublication' import { logger 
} from '../../../logging' -import { resolveCredentials } from '../../../security/lib/credentials' -import { NoSecurityReadAccess } from '../../../security/noSecurity' -import { RundownPlaylistReadAccess } from '../../../security/rundownPlaylist' import { ContentCache, PartInstanceFields, createReactiveContentCache } from './reactiveContentCache' import { RundownContentObserver } from './rundownContentObserver' import { RundownsObserver } from '../../lib/rundownsObserver' @@ -59,6 +56,7 @@ import { import { PieceContentStatusStudio } from '../checkPieceContentStatus' import { check, Match } from 'meteor/check' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../../../security/securityVerify' interface UIPieceContentStatusesArgs { readonly rundownPlaylistId: RundownPlaylistId @@ -476,29 +474,25 @@ meteorCustomPublish( async function (pub, rundownPlaylistId: RundownPlaylistId | null) { check(rundownPlaylistId, Match.Maybe(String)) - const cred = await resolveCredentials({ userId: this.userId, token: undefined }) - - if ( - rundownPlaylistId && - (!cred || - NoSecurityReadAccess.any() || - (await RundownPlaylistReadAccess.rundownPlaylistContent(rundownPlaylistId, cred))) - ) { - await setUpCollectionOptimizedObserver< - UIPieceContentStatus, - UIPieceContentStatusesArgs, - UIPieceContentStatusesState, - UIPieceContentStatusesUpdateProps - >( - `pub_${MeteorPubSub.uiPieceContentStatuses}_${rundownPlaylistId}`, - { rundownPlaylistId }, - setupUIPieceContentStatusesPublicationObservers, - manipulateUIPieceContentStatusesPublicationData, - pub, - 100 - ) - } else { - logger.warn(`Pub.${CustomCollectionName.UIPieceContentStatuses}: Not allowed: "${rundownPlaylistId}"`) + triggerWriteAccessBecauseNoCheckNecessary() + + if (!rundownPlaylistId) { + logger.info(`Pub.${CustomCollectionName.UISegmentPartNotes}: Not playlistId`) + return } + + await setUpCollectionOptimizedObserver< + 
UIPieceContentStatus, + UIPieceContentStatusesArgs, + UIPieceContentStatusesState, + UIPieceContentStatusesUpdateProps + >( + `pub_${MeteorPubSub.uiPieceContentStatuses}_${rundownPlaylistId}`, + { rundownPlaylistId }, + setupUIPieceContentStatusesPublicationObservers, + manipulateUIPieceContentStatusesPublicationData, + pub, + 100 + ) } ) diff --git a/meteor/server/publications/rundown.ts b/meteor/server/publications/rundown.ts index f939a9baffa..e4be7f6dac7 100644 --- a/meteor/server/publications/rundown.ts +++ b/meteor/server/publications/rundown.ts @@ -1,16 +1,12 @@ import { Meteor } from 'meteor/meteor' -import { meteorPublish, AutoFillSelector } from './lib/lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { MongoFieldSpecifierZeroes, MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' -import { RundownReadAccess } from '../security/rundown' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' -import { NoSecurityReadAccess } from '../security/noSecurity' -import { OrganizationReadAccess } from '../security/organization' -import { StudioReadAccess } from '../security/studio' import { check, Match } from 'meteor/check' import { FindOptions } from '@sofie-automation/meteor-lib/dist/collections/lib' import { @@ -20,7 +16,6 @@ import { NrcsIngestDataCache, PartInstances, Parts, - PeripheralDevices, PieceInstances, Pieces, RundownBaselineAdLibActions, @@ -44,58 +39,52 @@ import { import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' import { 
PeripheralDevicePubSub } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' -import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction' import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece' import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { PieceLifespan } from '@sofie-automation/blueprints-integration' -import { resolveCredentials } from '../security/lib/credentials' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' +import { checkAccessAndGetPeripheralDevice } from '../security/check' -meteorPublish(PeripheralDevicePubSub.rundownsForDevice, async function (deviceId, token: string | undefined) { - check(deviceId, String) - check(token, String) - - const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) - - // Future: this should be reactive to studioId changes, but this matches how the other *ForDevice publications behave - - // The above auth check may return nothing when security is disabled, but we need the return value - const resolvedCred = cred?.device ? 
cred : await resolveCredentials({ userId: this.userId, token }) - if (!resolvedCred || !resolvedCred.device) - throw new Meteor.Error(403, 'Publication can only be used by authorized PeripheralDevices') +meteorPublish( + PeripheralDevicePubSub.rundownsForDevice, + async function (deviceId: PeripheralDeviceId, token: string | undefined) { + check(deviceId, String) + check(token, String) - // No studio, then no rundowns - if (!resolvedCred.device.studioId) return null + // Future: this should be reactive to studioId changes, but this matches how the other *ForDevice publications behave - selector.studioId = resolvedCred.device.studioId + const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const modifier: FindOptions = { - fields: { - privateData: 0, - }, - } + // No studio, then no rundowns + if (!peripheralDevice.studioId) return null - if (NoSecurityReadAccess.any() || (await StudioReadAccess.studioContent(selector.studioId, resolvedCred))) { - return Rundowns.findWithCursor(selector, modifier) + return Rundowns.findWithCursor( + { + studioId: peripheralDevice.studioId, + }, + { + fields: { + privateData: 0, + }, + } + ) } - return null -}) +) meteorPublish( CorelibPubSub.rundownsInPlaylists, - async function (playlistIds: RundownPlaylistId[], token: string | undefined) { + async function (playlistIds: RundownPlaylistId[], _token: string | undefined) { check(playlistIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() + // If values were provided, they must have values if (playlistIds.length === 0) return null - const { cred, selector } = await AutoFillSelector.organizationId( - this.userId, - { - playlistId: { $in: playlistIds }, - }, - token - ) + const selector: MongoQuery = { + playlistId: { $in: playlistIds }, + } const modifier: FindOptions = { fields: { @@ -103,33 +92,21 @@ meteorPublish( }, } - if ( - !cred || - NoSecurityReadAccess.any() || - (selector.organizationId && - (await 
OrganizationReadAccess.organizationContent(selector.organizationId, cred))) || - (selector.studioId && (await StudioReadAccess.studioContent(selector.studioId, cred))) || - (selector._id && (await RundownReadAccess.rundown(selector._id, cred))) - ) { - return Rundowns.findWithCursor(selector, modifier) - } - return null + return Rundowns.findWithCursor(selector, modifier) } ) meteorPublish( CorelibPubSub.rundownsWithShowStyleBases, - async function (showStyleBaseIds: ShowStyleBaseId[], token: string | undefined) { + async function (showStyleBaseIds: ShowStyleBaseId[], _token: string | undefined) { check(showStyleBaseIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() + if (showStyleBaseIds.length === 0) return null - const { cred, selector } = await AutoFillSelector.organizationId( - this.userId, - { - showStyleBaseId: { $in: showStyleBaseIds }, - }, - token - ) + const selector: MongoQuery = { + showStyleBaseId: { $in: showStyleBaseIds }, + } const modifier: FindOptions = { fields: { @@ -137,25 +114,17 @@ meteorPublish( }, } - if ( - !cred || - NoSecurityReadAccess.any() || - (selector.organizationId && - (await OrganizationReadAccess.organizationContent(selector.organizationId, cred))) || - (selector.studioId && (await StudioReadAccess.studioContent(selector.studioId, cred))) || - (selector._id && (await RundownReadAccess.rundown(selector._id, cred))) - ) { - return Rundowns.findWithCursor(selector, modifier) - } - return null + return Rundowns.findWithCursor(selector, modifier) } ) meteorPublish( CorelibPubSub.segments, - async function (rundownIds: RundownId[], filter: { omitHidden?: boolean } | undefined, token: string | undefined) { + async function (rundownIds: RundownId[], filter: { omitHidden?: boolean } | undefined, _token: string | undefined) { check(rundownIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() + if (rundownIds.length === 0) return null const selector: MongoQuery = { @@ -163,26 +132,22 @@ meteorPublish( } if (filter?.omitHidden) 
selector.isHidden = { $ne: true } - if ( - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) - ) { - return Segments.findWithCursor(selector, { - fields: { - privateData: 0, - }, - }) - } - return null + return Segments.findWithCursor(selector, { + fields: { + privateData: 0, + }, + }) } ) meteorPublish( CorelibPubSub.parts, - async function (rundownIds: RundownId[], segmentIds: SegmentId[] | null, token: string | undefined) { + async function (rundownIds: RundownId[], segmentIds: SegmentId[] | null, _token: string | undefined) { check(rundownIds, Array) check(segmentIds, Match.Maybe(Array)) + triggerWriteAccessBecauseNoCheckNecessary() + if (rundownIds.length === 0) return null if (segmentIds && segmentIds.length === 0) return null @@ -198,15 +163,7 @@ meteorPublish( } if (segmentIds) selector.segmentId = { $in: segmentIds } - if ( - NoSecurityReadAccess.any() || - (selector.rundownId && - (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token }))) // || - // (selector._id && await RundownReadAccess.pieces(selector._id, { userId: this.userId, token })) // TODO - the types for this did not match - ) { - return Parts.findWithCursor(selector, modifier) - } - return null + return Parts.findWithCursor(selector, modifier) } ) meteorPublish( @@ -214,11 +171,13 @@ meteorPublish( async function ( rundownIds: RundownId[], playlistActivationId: RundownPlaylistActivationId | null, - token: string | undefined + _token: string | undefined ) { check(rundownIds, Array) check(playlistActivationId, Match.Maybe(String)) + triggerWriteAccessBecauseNoCheckNecessary() + if (rundownIds.length === 0 || !playlistActivationId) return null const modifier: FindOptions = { @@ -234,13 +193,7 @@ meteorPublish( } if (playlistActivationId) selector.playlistActivationId = playlistActivationId - if ( - NoSecurityReadAccess.any() || - (await 
RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) - ) { - return PartInstances.findWithCursor(selector, modifier) - } - return null + return PartInstances.findWithCursor(selector, modifier) } ) meteorPublish( @@ -248,10 +201,12 @@ meteorPublish( async function ( rundownIds: RundownId[], playlistActivationId: RundownPlaylistActivationId | null, - token: string | undefined + _token: string | undefined ) { check(rundownIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() + if (rundownIds.length === 0) return null const selector: MongoQuery = { @@ -261,20 +216,14 @@ meteorPublish( } if (playlistActivationId) selector.playlistActivationId = playlistActivationId - if ( - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) - ) { - return PartInstances.findWithCursor(selector, { - fields: literal>({ - // @ts-expect-error Mongo typings aren't clever enough yet - 'part.privateData': 0, - isTaken: 0, - timings: 0, - }), - }) - } - return null + return PartInstances.findWithCursor(selector, { + fields: literal>({ + // @ts-expect-error Mongo typings aren't clever enough yet + 'part.privateData': 0, + isTaken: 0, + timings: 0, + }), + }) } ) @@ -285,10 +234,12 @@ const piecesSubFields: MongoFieldSpecifierZeroes = { meteorPublish( CorelibPubSub.pieces, - async function (rundownIds: RundownId[], partIds: PartId[] | null, token: string | undefined) { + async function (rundownIds: RundownId[], partIds: PartId[] | null, _token: string | undefined) { check(rundownIds, Array) check(partIds, Match.Maybe(Array)) + triggerWriteAccessBecauseNoCheckNecessary() + // If values were provided, they must have values if (partIds && partIds.length === 0) return null @@ -297,15 +248,9 @@ meteorPublish( } if (partIds) selector.startPartId = { $in: partIds } - if ( - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent(selector.startRundownId, { userId: this.userId, 
token })) - ) { - return Pieces.findWithCursor(selector, { - fields: piecesSubFields, - }) - } - return null + return Pieces.findWithCursor(selector, { + fields: piecesSubFields, + }) } ) @@ -317,8 +262,7 @@ meteorPublish( rundownIdsBefore: RundownId[], _token: string | undefined ) { - // TODO - Fix this when security is enabled - if (!NoSecurityReadAccess.any()) return null + triggerWriteAccessBecauseNoCheckNecessary() const selector: MongoQuery = { invalid: { @@ -358,31 +302,26 @@ const adlibPiecesSubFields: MongoFieldSpecifierZeroes = { timelineObjectsString: 0, } -meteorPublish(CorelibPubSub.adLibPieces, async function (rundownIds: RundownId[], token: string | undefined) { +meteorPublish(CorelibPubSub.adLibPieces, async function (rundownIds: RundownId[], _token: string | undefined) { check(rundownIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() + if (rundownIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, } - if ( - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) - ) { - return AdLibPieces.findWithCursor(selector, { - fields: adlibPiecesSubFields, - }) - } - return null + return AdLibPieces.findWithCursor(selector, { + fields: adlibPiecesSubFields, + }) }) meteorPublish(MeteorPubSub.adLibPiecesForPart, async function (partId: PartId, sourceLayerIds: string[]) { - if (!partId) throw new Meteor.Error(400, 'partId argument missing') - if (!sourceLayerIds) throw new Meteor.Error(400, 'sourceLayerIds argument missing') + check(partId, String) + check(sourceLayerIds, Array) - // Future: This needs some thought for a security enabled environment - if (!NoSecurityReadAccess.any()) return null + triggerWriteAccessBecauseNoCheckNecessary() return AdLibPieces.findWithCursor( { @@ -411,11 +350,13 @@ meteorPublish( onlyPlayingAdlibsOrWithTags?: boolean } | undefined, - token: string | undefined + _token: string | undefined ) { check(rundownIds, 
Array) check(partInstanceIds, Match.Maybe(Array)) + triggerWriteAccessBecauseNoCheckNecessary() + // If values were provided, they must have values if (rundownIds.length === 0) return null if (partInstanceIds && partInstanceIds.length === 0) return null @@ -464,15 +405,9 @@ meteorPublish( ] } - if ( - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) - ) { - return PieceInstances.findWithCursor(selector, { - fields: pieceInstanceFields, - }) - } - return null + return PieceInstances.findWithCursor(selector, { + fields: pieceInstanceFields, + }) } ) @@ -481,10 +416,12 @@ meteorPublish( async function ( rundownIds: RundownId[], playlistActivationId: RundownPlaylistActivationId | null, - token: string | undefined + _token: string | undefined ) { check(rundownIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() + if (rundownIds.length === 0) return null const selector: MongoQuery = { @@ -494,81 +431,62 @@ meteorPublish( } if (playlistActivationId) selector.playlistActivationId = playlistActivationId - if ( - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) - ) { - return PieceInstances.findWithCursor(selector, { - fields: literal>({ - ...pieceInstanceFields, - plannedStartedPlayback: 0, - plannedStoppedPlayback: 0, - }), - }) - } - return null + return PieceInstances.findWithCursor(selector, { + fields: literal>({ + ...pieceInstanceFields, + plannedStartedPlayback: 0, + plannedStoppedPlayback: 0, + }), + }) } ) meteorPublish( PeripheralDevicePubSub.expectedPlayoutItemsForDevice, async function (deviceId: PeripheralDeviceId, token: string | undefined) { - if (!deviceId) throw new Meteor.Error(400, 'deviceId argument missing') check(deviceId, String) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - const peripheralDevice = await 
PeripheralDevices.findOneAsync(deviceId) + const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - if (!peripheralDevice) throw new Meteor.Error(`PeripheralDevice "${deviceId}" not found`) + const studioId = peripheralDevice.studioId + if (!studioId) return null - const studioId = peripheralDevice.studioId - if (!studioId) return null - - return ExpectedPlayoutItems.findWithCursor({ studioId }) - } - return null + return ExpectedPlayoutItems.findWithCursor({ studioId }) } ) // Note: this publication is for dev purposes only: meteorPublish( CorelibPubSub.ingestDataCache, - async function (selector: MongoQuery, token: string | undefined) { + async function (selector: MongoQuery, _token: string | undefined) { + triggerWriteAccessBecauseNoCheckNecessary() + if (!selector) throw new Meteor.Error(400, 'selector argument missing') const modifier: FindOptions = { fields: {}, } - if ( - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) - ) { - return NrcsIngestDataCache.findWithCursor(selector, modifier) - } - return null + + return NrcsIngestDataCache.findWithCursor(selector, modifier) } ) meteorPublish( CorelibPubSub.rundownBaselineAdLibPieces, - async function (rundownIds: RundownId[], token: string | undefined) { + async function (rundownIds: RundownId[], _token: string | undefined) { check(rundownIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() + if (rundownIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, } - if ( - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) - ) { - return RundownBaselineAdLibPieces.findWithCursor(selector, { - fields: { - timelineObjectsString: 0, - privateData: 0, - }, - }) - } - return null + return RundownBaselineAdLibPieces.findWithCursor(selector, { + fields: { + timelineObjectsString: 0, + privateData: 0, 
+ }, + }) } ) @@ -576,31 +494,26 @@ const adlibActionSubFields: MongoFieldSpecifierZeroes = { privateData: 0, } -meteorPublish(CorelibPubSub.adLibActions, async function (rundownIds: RundownId[], token: string | undefined) { +meteorPublish(CorelibPubSub.adLibActions, async function (rundownIds: RundownId[], _token: string | undefined) { check(rundownIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() + if (rundownIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, } - if ( - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) - ) { - return AdLibActions.findWithCursor(selector, { - fields: adlibActionSubFields, - }) - } - return null + return AdLibActions.findWithCursor(selector, { + fields: adlibActionSubFields, + }) }) meteorPublish(MeteorPubSub.adLibActionsForPart, async function (partId: PartId, sourceLayerIds: string[]) { - if (!partId) throw new Meteor.Error(400, 'partId argument missing') - if (!sourceLayerIds) throw new Meteor.Error(400, 'sourceLayerIds argument missing') + check(partId, String) + check(sourceLayerIds, Array) - // Future: This needs some thought for a security enabled environment - if (!NoSecurityReadAccess.any()) return null + triggerWriteAccessBecauseNoCheckNecessary() return AdLibActions.findWithCursor( { @@ -615,23 +528,19 @@ meteorPublish(MeteorPubSub.adLibActionsForPart, async function (partId: PartId, meteorPublish( CorelibPubSub.rundownBaselineAdLibActions, - async function (rundownIds: RundownId[], token: string | undefined) { + async function (rundownIds: RundownId[], _token: string | undefined) { check(rundownIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() + if (rundownIds.length === 0) return null const selector: MongoQuery = { rundownId: { $in: rundownIds }, } - if ( - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) - ) { - 
return RundownBaselineAdLibActions.findWithCursor(selector, { - fields: adlibActionSubFields, - }) - } - return null + return RundownBaselineAdLibActions.findWithCursor(selector, { + fields: adlibActionSubFields, + }) } ) diff --git a/meteor/server/publications/rundownPlaylist.ts b/meteor/server/publications/rundownPlaylist.ts index 89378b15875..c52efc85d36 100644 --- a/meteor/server/publications/rundownPlaylist.ts +++ b/meteor/server/publications/rundownPlaylist.ts @@ -1,59 +1,40 @@ -import { RundownPlaylistReadAccess } from '../security/rundownPlaylist' -import { meteorPublish, AutoFillSelector } from './lib/lib' -import { StudioReadAccess } from '../security/studio' -import { OrganizationReadAccess } from '../security/organization' -import { NoSecurityReadAccess } from '../security/noSecurity' -import { isProtectedString } from '@sofie-automation/corelib/dist/protectedString' +import { meteorPublish } from './lib/lib' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { RundownPlaylists } from '../collections' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { resolveCredentials } from '../security/lib/credentials' import { check, Match } from '../lib/check' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' meteorPublish( CorelibPubSub.rundownPlaylists, async function ( rundownPlaylistIds: RundownPlaylistId[] | null, studioIds: StudioId[] | null, - token: string | undefined + _token: string | undefined ) { check(rundownPlaylistIds, Match.Maybe(Array)) check(studioIds, Match.Maybe(Array)) + triggerWriteAccessBecauseNoCheckNecessary() + // If values were provided, they must have values if (rundownPlaylistIds && 
rundownPlaylistIds.length === 0) return null if (studioIds && studioIds.length === 0) return null - const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) - // Add the requested filter + const selector: MongoQuery = {} if (rundownPlaylistIds) selector._id = { $in: rundownPlaylistIds } if (studioIds) selector.studioId = { $in: studioIds } - if ( - !cred || - NoSecurityReadAccess.any() || - (selector.organizationId && - (await OrganizationReadAccess.organizationContent(selector.organizationId, cred))) || - (selector.studioId && (await StudioReadAccess.studioContent(selector.studioId, cred))) || - (isProtectedString(selector._id) && (await RundownPlaylistReadAccess.rundownPlaylist(selector._id, cred))) - ) { - return RundownPlaylists.findWithCursor(selector) - } - return null + return RundownPlaylists.findWithCursor(selector) } ) meteorPublish(MeteorPubSub.rundownPlaylistForStudio, async function (studioId: StudioId, isActive: boolean) { - if (!NoSecurityReadAccess.any()) { - const cred = await resolveCredentials({ userId: this.userId }) - if (!cred) return null - - if (!(await StudioReadAccess.studioContent(studioId, cred))) return null - } + triggerWriteAccessBecauseNoCheckNecessary() const selector: MongoQuery = { studioId, diff --git a/meteor/server/publications/segmentPartNotesUI/publication.ts b/meteor/server/publications/segmentPartNotesUI/publication.ts index 5ab2a86a445..d01a55c66a7 100644 --- a/meteor/server/publications/segmentPartNotesUI/publication.ts +++ b/meteor/server/publications/segmentPartNotesUI/publication.ts @@ -16,9 +16,6 @@ import { TriggerUpdate, } from '../../lib/customPublication' import { logger } from '../../logging' -import { resolveCredentials } from '../../security/lib/credentials' -import { NoSecurityReadAccess } from '../../security/noSecurity' -import { RundownPlaylistReadAccess } from '../../security/rundownPlaylist' import { ContentCache, createReactiveContentCache, @@ -33,6 +30,7 @@ import { 
DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/Rund import { generateNotesForSegment } from './generateNotesForSegment' import { RundownPlaylists } from '../../collections' import { check, Match } from 'meteor/check' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../../security/securityVerify' interface UISegmentPartNotesArgs { readonly playlistId: RundownPlaylistId @@ -215,29 +213,25 @@ meteorCustomPublish( async function (pub, playlistId: RundownPlaylistId | null) { check(playlistId, Match.Maybe(String)) - const cred = await resolveCredentials({ userId: this.userId, token: undefined }) - - if ( - playlistId && - (!cred || - NoSecurityReadAccess.any() || - (await RundownPlaylistReadAccess.rundownPlaylistContent(playlistId, cred))) - ) { - await setUpCollectionOptimizedObserver< - UISegmentPartNote, - UISegmentPartNotesArgs, - UISegmentPartNotesState, - UISegmentPartNotesUpdateProps - >( - `pub_${MeteorPubSub.uiSegmentPartNotes}_${playlistId}`, - { playlistId }, - setupUISegmentPartNotesPublicationObservers, - manipulateUISegmentPartNotesPublicationData, - pub, - 100 - ) - } else { - logger.warn(`Pub.${CustomCollectionName.UISegmentPartNotes}: Not allowed: "${playlistId}"`) + triggerWriteAccessBecauseNoCheckNecessary() + + if (!playlistId) { + logger.info(`Pub.${CustomCollectionName.UISegmentPartNotes}: Not playlistId`) + return } + + await setUpCollectionOptimizedObserver< + UISegmentPartNote, + UISegmentPartNotesArgs, + UISegmentPartNotesState, + UISegmentPartNotesUpdateProps + >( + `pub_${MeteorPubSub.uiSegmentPartNotes}_${playlistId}`, + { playlistId }, + setupUISegmentPartNotesPublicationObservers, + manipulateUISegmentPartNotesPublicationData, + pub, + 100 + ) } ) diff --git a/meteor/server/publications/showStyle.ts b/meteor/server/publications/showStyle.ts index 99b3099e508..ee3cbf08030 100644 --- a/meteor/server/publications/showStyle.ts +++ b/meteor/server/publications/showStyle.ts @@ -1,41 +1,31 @@ -import { meteorPublish, 
AutoFillSelector } from './lib/lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' import { RundownLayoutBase } from '@sofie-automation/meteor-lib/dist/collections/RundownLayouts' -import { ShowStyleReadAccess } from '../security/showStyle' -import { OrganizationReadAccess } from '../security/organization' -import { NoSecurityReadAccess } from '../security/noSecurity' import { RundownLayouts, ShowStyleBases, ShowStyleVariants, TriggeredActions } from '../collections' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { DBTriggeredActions } from '@sofie-automation/meteor-lib/dist/collections/TriggeredActions' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' import { ShowStyleBaseId, ShowStyleVariantId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { check, Match } from '../lib/check' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' meteorPublish( CorelibPubSub.showStyleBases, - async function (showStyleBaseIds: ShowStyleBaseId[] | null, token: string | undefined) { + async function (showStyleBaseIds: ShowStyleBaseId[] | null, _token: string | undefined) { check(showStyleBaseIds, Match.Maybe(Array)) + triggerWriteAccessBecauseNoCheckNecessary() + // If values were provided, they must have values if (showStyleBaseIds && showStyleBaseIds.length === 0) return null - const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) - // Add the requested filter + const selector: MongoQuery = {} if (showStyleBaseIds) selector._id = { $in: showStyleBaseIds } - if ( - !cred || - NoSecurityReadAccess.any() || - (selector.organizationId && - (await 
OrganizationReadAccess.organizationContent(selector.organizationId, cred))) || - (selector._id && (await ShowStyleReadAccess.showStyleBase(selector.id, cred))) - ) { - return ShowStyleBases.findWithCursor(selector) - } - return null + return ShowStyleBases.findWithCursor(selector) } ) @@ -44,59 +34,51 @@ meteorPublish( async function ( showStyleBaseIds: ShowStyleBaseId[] | null, showStyleVariantIds: ShowStyleVariantId[] | null, - token: string | undefined + _token: string | undefined ) { check(showStyleBaseIds, Match.Maybe(Array)) check(showStyleVariantIds, Match.Maybe(Array)) + triggerWriteAccessBecauseNoCheckNecessary() + // If values were provided, they must have values if (showStyleBaseIds && showStyleBaseIds.length === 0) return null if (showStyleVariantIds && showStyleVariantIds.length === 0) return null - const { cred, selector } = await AutoFillSelector.showStyleBaseId(this.userId, {}, token) - // Add the requested filter + const selector: MongoQuery = {} if (showStyleBaseIds) selector.showStyleBaseId = { $in: showStyleBaseIds } if (showStyleVariantIds) selector._id = { $in: showStyleVariantIds } - if ( - !cred || - NoSecurityReadAccess.any() || - (selector.showStyleBaseId && (await ShowStyleReadAccess.showStyleBaseContent(selector, cred))) || - (selector._id && (await ShowStyleReadAccess.showStyleVariant(selector._id, cred))) - ) { - return ShowStyleVariants.findWithCursor(selector) - } - return null + return ShowStyleVariants.findWithCursor(selector) } ) meteorPublish( MeteorPubSub.rundownLayouts, - async function (showStyleBaseIds: ShowStyleBaseId[] | null, token: string | undefined) { + async function (showStyleBaseIds: ShowStyleBaseId[] | null, _token: string | undefined) { check(showStyleBaseIds, Match.Maybe(Array)) + triggerWriteAccessBecauseNoCheckNecessary() + // If values were provided, they must have values if (showStyleBaseIds && showStyleBaseIds.length === 0) return null - const selector0: MongoQuery = {} - if (showStyleBaseIds) 
selector0.showStyleBaseId = { $in: showStyleBaseIds } - - const { cred, selector } = await AutoFillSelector.showStyleBaseId(this.userId, selector0, token) + const selector: MongoQuery = {} + if (showStyleBaseIds) selector.showStyleBaseId = { $in: showStyleBaseIds } - if (!cred || (await ShowStyleReadAccess.showStyleBaseContent(selector, cred))) { - return RundownLayouts.findWithCursor(selector) - } - return null + return RundownLayouts.findWithCursor(selector) } ) meteorPublish( MeteorPubSub.triggeredActions, - async function (showStyleBaseIds: ShowStyleBaseId[] | null, token: string | undefined) { + async function (showStyleBaseIds: ShowStyleBaseId[] | null, _token: string | undefined) { check(showStyleBaseIds, Match.Maybe(Array)) - const selector0: MongoQuery = + triggerWriteAccessBecauseNoCheckNecessary() + + const selector: MongoQuery = showStyleBaseIds && showStyleBaseIds.length > 0 ? { $or: [ @@ -110,15 +92,6 @@ meteorPublish( } : { showStyleBaseId: null } - const { cred, selector } = await AutoFillSelector.showStyleBaseId(this.userId, selector0, token) - - if ( - !cred || - NoSecurityReadAccess.any() || - (selector.showStyleBaseId && (await ShowStyleReadAccess.showStyleBaseContent(selector, cred))) - ) { - return TriggeredActions.findWithCursor(selector) - } - return null + return TriggeredActions.findWithCursor(selector) } ) diff --git a/meteor/server/publications/showStyleUI.ts b/meteor/server/publications/showStyleUI.ts index 68309db7d95..2b6ce26ccc4 100644 --- a/meteor/server/publications/showStyleUI.ts +++ b/meteor/server/publications/showStyleUI.ts @@ -12,13 +12,9 @@ import { setUpOptimizedObserverArray, TriggerUpdate, } from '../lib/customPublication' -import { logger } from '../logging' -import { NoSecurityReadAccess } from '../security/noSecurity' -import { OrganizationReadAccess } from '../security/organization' -import { ShowStyleReadAccess } from '../security/showStyle' import { ShowStyleBases } from '../collections' -import { AutoFillSelector } 
from './lib/lib' import { check } from 'meteor/check' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' interface UIShowStyleBaseArgs { readonly showStyleBaseId: ShowStyleBaseId @@ -92,33 +88,19 @@ meteorCustomPublish( async function (pub, showStyleBaseId: ShowStyleBaseId) { check(showStyleBaseId, String) - const { cred, selector } = await AutoFillSelector.organizationId( - this.userId, - { _id: showStyleBaseId }, - undefined - ) + triggerWriteAccessBecauseNoCheckNecessary() - if ( - !cred || - NoSecurityReadAccess.any() || - (selector.organizationId && - (await OrganizationReadAccess.organizationContent(selector.organizationId, cred))) || - (selector._id && (await ShowStyleReadAccess.showStyleBase(selector._id, cred))) - ) { - await setUpOptimizedObserverArray< - UIShowStyleBase, - UIShowStyleBaseArgs, - UIShowStyleBaseState, - UIShowStyleBaseUpdateProps - >( - `pub_${MeteorPubSub.uiShowStyleBase}_${showStyleBaseId}`, - { showStyleBaseId }, - setupUIShowStyleBasePublicationObservers, - manipulateUIShowStyleBasePublicationData, - pub - ) - } else { - logger.warn(`Pub.${CustomCollectionName.UIShowStyleBase}: Not allowed: "${showStyleBaseId}"`) - } + await setUpOptimizedObserverArray< + UIShowStyleBase, + UIShowStyleBaseArgs, + UIShowStyleBaseState, + UIShowStyleBaseUpdateProps + >( + `pub_${MeteorPubSub.uiShowStyleBase}_${showStyleBaseId}`, + { showStyleBaseId }, + setupUIShowStyleBasePublicationObservers, + manipulateUIShowStyleBasePublicationData, + pub + ) } ) diff --git a/meteor/server/publications/studio.ts b/meteor/server/publications/studio.ts index 08002e6938a..633f2bd3936 100644 --- a/meteor/server/publications/studio.ts +++ b/meteor/server/publications/studio.ts @@ -1,13 +1,9 @@ import { Meteor } from 'meteor/meteor' import { check, Match } from '../lib/check' -import { meteorPublish, AutoFillSelector } from './lib/lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from 
'@sofie-automation/meteor-lib/dist/api/pubsub' import { getActiveRoutes, getRoutedMappings } from '@sofie-automation/meteor-lib/dist/collections/Studios' -import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' import { ExternalMessageQueueObj } from '@sofie-automation/corelib/dist/dataModel/ExternalMessageQueue' -import { StudioReadAccess } from '../security/studio' -import { OrganizationReadAccess } from '../security/organization' -import { NoSecurityReadAccess } from '../security/noSecurity' import { CustomPublish, meteorCustomPublish, @@ -26,7 +22,6 @@ import { ExternalMessageQueue, PackageContainerStatuses, PackageInfos, - PeripheralDevices, Studios, } from '../collections' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' @@ -37,94 +32,85 @@ import { PeripheralDevicePubSub, PeripheralDevicePubSubCollectionsNames, } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' +import { checkAccessAndGetPeripheralDevice } from '../security/check' +import { assertConnectionHasOneOfPermissions } from '../security/auth' -meteorPublish(CorelibPubSub.studios, async function (studioIds: StudioId[] | null, token: string | undefined) { +meteorPublish(CorelibPubSub.studios, async function (studioIds: StudioId[] | null, _token: string | undefined) { check(studioIds, Match.Maybe(Array)) + triggerWriteAccessBecauseNoCheckNecessary() + // If values were provided, they must have values if (studioIds && studioIds.length === 0) return null - const { cred, selector } = await AutoFillSelector.organizationId(this.userId, {}, token) - // Add the requested filter + const selector: MongoQuery = {} if (studioIds) selector._id = { $in: studioIds } - if ( - !cred || - NoSecurityReadAccess.any() || - (selector._id && (await StudioReadAccess.studio(selector._id, cred))) || - (selector.organizationId && (await 
OrganizationReadAccess.organizationContent(selector.organizationId, cred))) - ) { - return Studios.findWithCursor(selector) - } - return null + return Studios.findWithCursor(selector) }) meteorPublish( CorelibPubSub.externalMessageQueue, - async function (selector: MongoQuery, token: string | undefined) { + async function (selector: MongoQuery, _token: string | undefined) { + triggerWriteAccessBecauseNoCheckNecessary() + if (!selector) throw new Meteor.Error(400, 'selector argument missing') const modifier: FindOptions = { fields: {}, } - if (await StudioReadAccess.studioContent(selector.studioId, { userId: this.userId, token })) { - return ExternalMessageQueue.findWithCursor(selector, modifier) - } - return null + + return ExternalMessageQueue.findWithCursor(selector, modifier) } ) -meteorPublish(CorelibPubSub.expectedPackages, async function (studioIds: StudioId[], token: string | undefined) { +meteorPublish(CorelibPubSub.expectedPackages, async function (studioIds: StudioId[], _token: string | undefined) { // Note: This differs from the expected packages sent to the Package Manager, instead @see PubSub.expectedPackagesForDevice check(studioIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() + if (studioIds.length === 0) return null - if (await StudioReadAccess.studioContent(studioIds, { userId: this.userId, token })) { - return ExpectedPackages.findWithCursor({ - studioId: { $in: studioIds }, - }) - } - return null + return ExpectedPackages.findWithCursor({ + studioId: { $in: studioIds }, + }) }) meteorPublish( CorelibPubSub.expectedPackageWorkStatuses, - async function (studioIds: StudioId[], token: string | undefined) { + async function (studioIds: StudioId[], _token: string | undefined) { check(studioIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() if (studioIds.length === 0) return null - if (await StudioReadAccess.studioContent(studioIds, { userId: this.userId, token })) { - return ExpectedPackageWorkStatuses.findWithCursor({ - studioId: { 
$in: studioIds }, - }) - } - return null + return ExpectedPackageWorkStatuses.findWithCursor({ + studioId: { $in: studioIds }, + }) } ) meteorPublish( CorelibPubSub.packageContainerStatuses, - async function (studioIds: StudioId[], token: string | undefined) { + async function (studioIds: StudioId[], _token: string | undefined) { check(studioIds, Array) + triggerWriteAccessBecauseNoCheckNecessary() + if (studioIds.length === 0) return null - if (await StudioReadAccess.studioContent(studioIds, { userId: this.userId, token })) { - return PackageContainerStatuses.findWithCursor({ - studioId: { $in: studioIds }, - }) - } - return null + return PackageContainerStatuses.findWithCursor({ + studioId: { $in: studioIds }, + }) } ) -meteorPublish(CorelibPubSub.packageInfos, async function (deviceId: PeripheralDeviceId, token: string | undefined) { - if (!deviceId) throw new Meteor.Error(400, 'deviceId argument missing') +meteorPublish(CorelibPubSub.packageInfos, async function (deviceId: PeripheralDeviceId, _token: string | undefined) { + check(deviceId, String) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - return PackageInfos.findWithCursor({ deviceId }) - } - return null + triggerWriteAccessBecauseNoCheckNecessary() + + return PackageInfos.findWithCursor({ deviceId }) }) meteorCustomPublish( @@ -133,28 +119,24 @@ meteorCustomPublish( async function (pub, deviceId: PeripheralDeviceId, token: string | undefined) { check(deviceId, String) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - const peripheralDevice = await PeripheralDevices.findOneAsync(deviceId) - - if (!peripheralDevice) throw new Meteor.Error('PeripheralDevice "' + deviceId + '" not found') + const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const studioId = peripheralDevice.studioId - if (!studioId) return + const studioId = peripheralDevice.studioId 
+ if (!studioId) return - await createObserverForMappingsPublication(pub, studioId) - } + await createObserverForMappingsPublication(pub, studioId) } ) meteorCustomPublish( MeteorPubSub.mappingsForStudio, PeripheralDevicePubSubCollectionsNames.studioMappings, - async function (pub, studioId: StudioId, token: string | undefined) { + async function (pub, studioId: StudioId, _token: string | undefined) { check(studioId, String) - if (await StudioReadAccess.studio(studioId, { userId: this.userId, token })) { - await createObserverForMappingsPublication(pub, studioId) - } + assertConnectionHasOneOfPermissions(this.connection, 'testing') + + await createObserverForMappingsPublication(pub, studioId) } ) diff --git a/meteor/server/publications/studioUI.ts b/meteor/server/publications/studioUI.ts index ad6170a167d..4513f13797c 100644 --- a/meteor/server/publications/studioUI.ts +++ b/meteor/server/publications/studioUI.ts @@ -13,12 +13,9 @@ import { SetupObserversResult, TriggerUpdate, } from '../lib/customPublication' -import { logger } from '../logging' -import { resolveCredentials } from '../security/lib/credentials' -import { NoSecurityReadAccess } from '../security/noSecurity' -import { StudioReadAccess } from '../security/studio' import { Studios } from '../collections' import { check, Match } from 'meteor/check' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' interface UIStudioArgs { readonly studioId: StudioId | null @@ -131,18 +128,14 @@ meteorCustomPublish( async function (pub, studioId: StudioId | null) { check(studioId, Match.Maybe(String)) - const cred = await resolveCredentials({ userId: this.userId, token: undefined }) + triggerWriteAccessBecauseNoCheckNecessary() - if (!cred || NoSecurityReadAccess.any() || (studioId && (await StudioReadAccess.studio(studioId, cred)))) { - await setUpCollectionOptimizedObserver( - `pub_${MeteorPubSub.uiStudio}_${studioId}`, - { studioId }, - setupUIStudioPublicationObservers, - 
manipulateUIStudioPublicationData, - pub - ) - } else { - logger.warn(`Pub.${CustomCollectionName.UIStudio}: Not allowed: "${studioId}"`) - } + await setUpCollectionOptimizedObserver( + `pub_${MeteorPubSub.uiStudio}_${studioId}`, + { studioId }, + setupUIStudioPublicationObservers, + manipulateUIStudioPublicationData, + pub + ) } ) diff --git a/meteor/server/publications/system.ts b/meteor/server/publications/system.ts index d1f615a9dfa..8b20ad6f6cf 100644 --- a/meteor/server/publications/system.ts +++ b/meteor/server/publications/system.ts @@ -1,82 +1,31 @@ -import { Meteor } from 'meteor/meteor' import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' -import { SystemReadAccess } from '../security/system' -import { OrganizationReadAccess } from '../security/organization' -import { CoreSystem, Notifications, Users } from '../collections' +import { CoreSystem, Notifications } from '../collections' import { SYSTEM_ID } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' -import { OrganizationId, RundownId, RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/lib/securityVerify' +import { RundownId, RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' import { check } from 'meteor/check' -meteorPublish(MeteorPubSub.coreSystem, async function (token: string | undefined) { - if (await SystemReadAccess.coreSystem({ userId: this.userId, token })) { - return CoreSystem.findWithCursor(SYSTEM_ID, { - fields: { - // Include only specific fields in the result documents: - _id: 1, - systemInfo: 1, - apm: 1, - name: 1, - logLevel: 1, - serviceMessages: 1, - blueprintId: 1, - logo: 1, - settingsWithOverrides: 1, - }, - }) - } - return null -}) - -meteorPublish(MeteorPubSub.loggedInUser, async 
function (token: string | undefined) { - const currentUserId = this.userId +meteorPublish(MeteorPubSub.coreSystem, async function (_token: string | undefined) { + triggerWriteAccessBecauseNoCheckNecessary() - if (!currentUserId) return null - if (await SystemReadAccess.currentUser(currentUserId, { userId: this.userId, token })) { - return Users.findWithCursor( - { - _id: currentUserId, - }, - { - fields: { - _id: 1, - username: 1, - emails: 1, - profile: 1, - organizationId: 1, - superAdmin: 1, - }, - } - ) - } - return null + return CoreSystem.findWithCursor(SYSTEM_ID, { + fields: { + // Include only specific fields in the result documents: + _id: 1, + systemInfo: 1, + apm: 1, + name: 1, + logLevel: 1, + serviceMessages: 1, + blueprintId: 1, + logo: 1, + settingsWithOverrides: 1, + }, + }) }) -meteorPublish( - MeteorPubSub.usersInOrganization, - async function (organizationId: OrganizationId, token: string | undefined) { - if (!organizationId) throw new Meteor.Error(400, 'organizationId argument missing') - if (await OrganizationReadAccess.adminUsers(organizationId, { userId: this.userId, token })) { - return Users.findWithCursor( - { organizationId }, - { - fields: { - _id: 1, - username: 1, - emails: 1, - profile: 1, - organizationId: 1, - superAdmin: 1, - }, - } - ) - } - return null - } -) meteorPublish(MeteorPubSub.notificationsForRundown, async function (studioId: StudioId, rundownId: RundownId) { - // HACK: This should do real auth triggerWriteAccessBecauseNoCheckNecessary() check(studioId, String) @@ -92,7 +41,6 @@ meteorPublish(MeteorPubSub.notificationsForRundown, async function (studioId: St meteorPublish( MeteorPubSub.notificationsForRundownPlaylist, async function (studioId: StudioId, playlistId: RundownPlaylistId) { - // HACK: This should do real auth triggerWriteAccessBecauseNoCheckNecessary() check(studioId, String) diff --git a/meteor/server/publications/timeline.ts b/meteor/server/publications/timeline.ts index 15cf679157e..c32c42b938b 100644 --- 
a/meteor/server/publications/timeline.ts +++ b/meteor/server/publications/timeline.ts @@ -19,8 +19,6 @@ import { TriggerUpdate, } from '../lib/customPublication' import { getActiveRoutes } from '@sofie-automation/meteor-lib/dist/collections/Studios' -import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' -import { StudioReadAccess } from '../security/studio' import { fetchStudioLight } from '../optimizations' import { FastTrackObservers, setupFastTrackObserver } from './fastTrack' import { logger } from '../logging' @@ -29,7 +27,7 @@ import { Time } from '../lib/tempLib' import { ReadonlyDeep } from 'type-fest' import { PeripheralDeviceId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBTimelineDatastoreEntry } from '@sofie-automation/corelib/dist/dataModel/TimelineDatastore' -import { PeripheralDevices, Studios, Timeline, TimelineDatastore } from '../collections' +import { Studios, Timeline, TimelineDatastore } from '../collections' import { check } from 'meteor/check' import { ResultingMappingRoutes, StudioLight } from '@sofie-automation/corelib/dist/dataModel/Studio' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' @@ -38,16 +36,18 @@ import { PeripheralDevicePubSubCollectionsNames, } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { checkAccessAndGetPeripheralDevice } from '../security/check' +import { assertConnectionHasOneOfPermissions } from '../security/auth' + +meteorPublish(CorelibPubSub.timelineDatastore, async function (studioId: StudioId, _token: string | undefined) { + assertConnectionHasOneOfPermissions(this.connection, 'testing') -meteorPublish(CorelibPubSub.timelineDatastore, async function (studioId: StudioId, token: string | undefined) { if (!studioId) throw new Meteor.Error(400, 'selector argument missing') const modifier: FindOptions = { fields: {}, } - 
if (await StudioReadAccess.studioContent(studioId, { userId: this.userId, token })) { - return TimelineDatastore.findWithCursor({ studioId }, modifier) - } - return null + + return TimelineDatastore.findWithCursor({ studioId }, modifier) }) meteorCustomPublish( @@ -56,16 +56,12 @@ meteorCustomPublish( async function (pub, deviceId: PeripheralDeviceId, token: string | undefined) { check(deviceId, String) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - const peripheralDevice = await PeripheralDevices.findOneAsync(deviceId) + const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - if (!peripheralDevice) throw new Meteor.Error('PeripheralDevice "' + deviceId + '" not found') + const studioId = peripheralDevice.studioId + if (!studioId) return - const studioId = peripheralDevice.studioId - if (!studioId) return - - await createObserverForTimelinePublication(pub, studioId) - } + await createObserverForTimelinePublication(pub, studioId) } ) meteorPublish( @@ -73,30 +69,26 @@ meteorPublish( async function (deviceId: PeripheralDeviceId, token: string | undefined) { check(deviceId, String) - if (await PeripheralDeviceReadAccess.peripheralDeviceContent(deviceId, { userId: this.userId, token })) { - const peripheralDevice = await PeripheralDevices.findOneAsync(deviceId) + const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - if (!peripheralDevice) throw new Meteor.Error('PeripheralDevice "' + deviceId + '" not found') + const studioId = peripheralDevice.studioId + if (!studioId) return null - const studioId = peripheralDevice.studioId - if (!studioId) return null - const modifier: FindOptions = { - fields: {}, - } - - return TimelineDatastore.findWithCursor({ studioId }, modifier) + const modifier: FindOptions = { + fields: {}, } - return null + + return TimelineDatastore.findWithCursor({ studioId }, modifier) } ) meteorCustomPublish( 
MeteorPubSub.timelineForStudio, PeripheralDevicePubSubCollectionsNames.studioTimeline, - async function (pub, studioId: StudioId, token: string | undefined) { - if (await StudioReadAccess.studio(studioId, { userId: this.userId, token })) { - await createObserverForTimelinePublication(pub, studioId) - } + async function (pub, studioId: StudioId, _token: string | undefined) { + assertConnectionHasOneOfPermissions(this.connection, 'testing') + + await createObserverForTimelinePublication(pub, studioId) } ) diff --git a/meteor/server/publications/translationsBundles.ts b/meteor/server/publications/translationsBundles.ts index 8173fd3ec56..fbb2d625fd6 100644 --- a/meteor/server/publications/translationsBundles.ts +++ b/meteor/server/publications/translationsBundles.ts @@ -1,20 +1,18 @@ -import { TranslationsBundlesSecurity } from '../security/translationsBundles' import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { TranslationsBundles } from '../collections' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { TranslationsBundle } from '@sofie-automation/meteor-lib/dist/collections/TranslationsBundles' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' -meteorPublish(MeteorPubSub.translationsBundles, async (token: string | undefined) => { +meteorPublish(MeteorPubSub.translationsBundles, async (_token: string | undefined) => { const selector: MongoQuery = {} - if (TranslationsBundlesSecurity.allowReadAccess(selector, token, this)) { - return TranslationsBundles.findWithCursor(selector, { - fields: { - data: 0, - }, - }) - } + triggerWriteAccessBecauseNoCheckNecessary() - return null + return TranslationsBundles.findWithCursor(selector, { + fields: { + data: 0, + }, + }) }) diff --git a/meteor/server/publications/triggeredActionsUI.ts b/meteor/server/publications/triggeredActionsUI.ts index 5a431daf10c..6eaeb0f52ea 100644 --- 
a/meteor/server/publications/triggeredActionsUI.ts +++ b/meteor/server/publications/triggeredActionsUI.ts @@ -14,13 +14,10 @@ import { SetupObserversResult, TriggerUpdate, } from '../lib/customPublication' -import { logger } from '../logging' -import { resolveCredentials } from '../security/lib/credentials' -import { NoSecurityReadAccess } from '../security/noSecurity' -import { ShowStyleReadAccess } from '../security/showStyle' import { TriggeredActions } from '../collections' import { check, Match } from 'meteor/check' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' interface UITriggeredActionsArgs { readonly showStyleBaseId: ShowStyleBaseId | null @@ -114,27 +111,19 @@ meteorCustomPublish( async function (pub, showStyleBaseId: ShowStyleBaseId | null) { check(showStyleBaseId, Match.Maybe(String)) - const cred = await resolveCredentials({ userId: this.userId, token: undefined }) - - if ( - !cred || - NoSecurityReadAccess.any() || - (showStyleBaseId && (await ShowStyleReadAccess.showStyleBase(showStyleBaseId, cred))) - ) { - await setUpCollectionOptimizedObserver< - UITriggeredActionsObj, - UITriggeredActionsArgs, - UITriggeredActionsState, - UITriggeredActionsUpdateProps - >( - `pub_${MeteorPubSub.uiTriggeredActions}_${showStyleBaseId}`, - { showStyleBaseId }, - setupUITriggeredActionsPublicationObservers, - manipulateUITriggeredActionsPublicationData, - pub - ) - } else { - logger.warn(`Pub.${CustomCollectionName.UITriggeredActions}: Not allowed: "${showStyleBaseId}"`) - } + triggerWriteAccessBecauseNoCheckNecessary() + + await setUpCollectionOptimizedObserver< + UITriggeredActionsObj, + UITriggeredActionsArgs, + UITriggeredActionsState, + UITriggeredActionsUpdateProps + >( + `pub_${MeteorPubSub.uiTriggeredActions}_${showStyleBaseId}`, + { showStyleBaseId }, + setupUITriggeredActionsPublicationObservers, + manipulateUITriggeredActionsPublicationData, + pub + 
) } ) diff --git a/meteor/server/security/README.md b/meteor/server/security/README.md deleted file mode 100644 index b66a4adb58b..00000000000 --- a/meteor/server/security/README.md +++ /dev/null @@ -1,53 +0,0 @@ -# Data Ownership: - -## System - -- CoreSystem -- Users -- **Organizations** - -## Organization - -- UserActionsLog -- Evaluations -- Snapshots -- Blueprints -- **Studios** -- **ShowStyleBases** -- **PeripheralDevices** - -## ShowStyleBase - -- ShowStyleVariants -- RundownLayouts - -## Studio - -- ExternalMessageQueue -- RecordedFiles -- MediaObjects -- Timeline -- **RundownPlaylists** - -## RundownPlaylist - -- Rundowns - -## Rundown - -- Segments -- Parts -- PartInstances -- Pieces -- PieceInstances -- AdLibPieces -- RundownBaselineAdLibPieces -- IngestDataCache -- ExpectedMediaItems -- ExpectedPlayoutItems - -## PeripheralDevice - -- PeripheralDeviceCommands -- MediaWorkFlowSteps -- MediaWorkFlows diff --git a/meteor/server/security/__tests__/security.test.ts b/meteor/server/security/__tests__/security.test.ts deleted file mode 100644 index 595791d812f..00000000000 --- a/meteor/server/security/__tests__/security.test.ts +++ /dev/null @@ -1,358 +0,0 @@ -import '../../../__mocks__/_extendJest' - -import { MethodContext } from '../../api/methodContext' -import { DBOrganization } from '@sofie-automation/meteor-lib/dist/collections/Organization' -import { User } from '@sofie-automation/meteor-lib/dist/collections/Users' -import { protectString } from '../../lib/tempLib' -import { Settings } from '../../Settings' -import { DefaultEnvironment, setupDefaultStudioEnvironment } from '../../../__mocks__/helpers/database' -import { BucketsAPI } from '../../api/buckets' -import { storeSystemSnapshot } from '../../api/snapshot' -import { BucketSecurity } from '../buckets' -import { Credentials } from '../lib/credentials' -import { NoSecurityReadAccess } from '../noSecurity' -import { OrganizationContentWriteAccess, OrganizationReadAccess } from '../organization' 
-import { StudioContentWriteAccess } from '../studio' -import { OrganizationId, UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { Organizations, Users } from '../../collections' -import { SupressLogMessages } from '../../../__mocks__/suppressLogging' -import { generateToken } from '../../api/singleUseTokens' -import { hashSingleUseToken } from '../../api/deviceTriggers/triggersContext' - -describe('Security', () => { - function getContext(cred: Credentials): MethodContext { - return { - ...cred, - - isSimulation: false, - connection: null, - setUserId: (_userId: string) => { - // Nothing - }, - unblock: () => { - // Nothing - }, - } - } - function getUser(userId: UserId, orgId: OrganizationId): User { - return { - _id: userId, - organizationId: orgId, - - createdAt: '', - services: { - password: { - bcrypt: 'abc', - }, - }, - username: 'username', - emails: [{ address: 'email.com', verified: false }], - profile: { - name: 'John Doe', - }, - } - } - function getOrg(id: string): DBOrganization { - return { - _id: protectString(id), - name: 'The Company', - - userRoles: { - userA: { - admin: true, - }, - }, - - created: 0, - modified: 0, - - applications: [], - broadcastMediums: [], - } - } - async function changeEnableUserAccounts(fcn: () => Promise) { - try { - Settings.enableUserAccounts = false - await fcn() - Settings.enableUserAccounts = true - await fcn() - } catch (e) { - console.log(`Error happened when Settings.enableUserAccounts = ${Settings.enableUserAccounts}`) - throw e - } - } - - const idCreator: UserId = protectString('userCreator') - const idUserB: UserId = protectString('userB') - const idNonExisting: UserId = protectString('userNonExistant') - const idInWrongOrg: UserId = protectString('userInWrongOrg') - const idSuperAdmin: UserId = protectString('userSuperAdmin') - const idSuperAdminInOtherOrg: UserId = protectString('userSuperAdminOther') - - // Credentials for various users: - const nothing: MethodContext = getContext({ 
userId: null }) - const creator: MethodContext = getContext({ userId: idCreator }) - const userB: MethodContext = getContext({ userId: idUserB }) - const nonExisting: MethodContext = getContext({ userId: idNonExisting }) - const wrongOrg: MethodContext = getContext({ userId: idInWrongOrg }) - const superAdmin: MethodContext = getContext({ userId: idSuperAdmin }) - const otherSuperAdmin: MethodContext = getContext({ userId: idSuperAdminInOtherOrg }) - - const unknownId = protectString('unknown') - - const org0: DBOrganization = getOrg('org0') - const org1: DBOrganization = getOrg('org1') - const org2: DBOrganization = getOrg('org2') - - async function expectReadNotAllowed(fcn: () => Promise) { - if (Settings.enableUserAccounts === false) return expectReadAllowed(fcn) - return expect(fcn()).resolves.toEqual(false) - } - async function expectReadAllowed(fcn: () => Promise) { - return expect(fcn()).resolves.toEqual(true) - } - async function expectNotAllowed(fcn: () => Promise) { - if (Settings.enableUserAccounts === false) return expectAllowed(fcn) - return expect(fcn()).rejects.toBeTruthy() - } - async function expectNotLoggedIn(fcn: () => Promise) { - if (Settings.enableUserAccounts === false) return expectAllowed(fcn) - return expect(fcn()).rejects.toMatchToString(/not logged in/i) - } - async function expectNotFound(fcn: () => Promise) { - // if (Settings.enableUserAccounts === false) return expectAllowed(fcn) - return expect(fcn()).rejects.toMatchToString(/not found/i) - } - async function expectAllowed(fcn: () => Promise) { - return expect(fcn()).resolves.not.toBeUndefined() - } - let env: DefaultEnvironment - beforeAll(async () => { - env = await setupDefaultStudioEnvironment(org0._id) - - await Organizations.insertAsync(org0) - await Organizations.insertAsync(org1) - await Organizations.insertAsync(org2) - - await Users.insertAsync(getUser(idCreator, org0._id)) - await Users.insertAsync(getUser(idUserB, org0._id)) - await 
Users.insertAsync(getUser(idInWrongOrg, org1._id)) - await Users.insertAsync({ ...getUser(idSuperAdmin, org0._id), superAdmin: true }) - await Users.insertAsync({ ...getUser(idSuperAdminInOtherOrg, org2._id), superAdmin: true }) - }) - - // eslint-disable-next-line jest/expect-expect - test('Buckets', async () => { - const access = await StudioContentWriteAccess.bucket(creator, env.studio._id) - const bucket = await BucketsAPI.createNewBucket(access, 'myBucket') - - await changeEnableUserAccounts(async () => { - await expectReadAllowed(async () => BucketSecurity.allowReadAccess(creator, bucket._id)) - await expectAllowed(async () => BucketSecurity.allowWriteAccess(creator, bucket._id)) - // expectAccessAllowed(() => BucketSecurity.allowWriteAccessPiece({ _id: bucket._id }, credUserA)) - - // Unknown bucket: - await expectNotFound(async () => BucketSecurity.allowReadAccess(creator, unknownId)) - await expectNotFound(async () => BucketSecurity.allowWriteAccess(creator, unknownId)) - await expectNotFound(async () => BucketSecurity.allowWriteAccessPiece(creator, unknownId)) - - // Not logged in: - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/No organization in credentials/i) - } - await expectReadNotAllowed(async () => BucketSecurity.allowReadAccess(nothing, bucket._id)) - await expectNotLoggedIn(async () => BucketSecurity.allowWriteAccess(nothing, bucket._id)) - // expectAccessNotLoggedIn(() => BucketSecurity.allowWriteAccessPiece({ _id: bucket._id }, credNothing)) - - // Non existing user: - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/No organization in credentials/i) - } - await expectReadNotAllowed(async () => BucketSecurity.allowReadAccess(nonExisting, bucket._id)) - await expectNotLoggedIn(async () => BucketSecurity.allowWriteAccess(nonExisting, bucket._id)) - // expectAccess(() => BucketSecurity.allowWriteAccessPiece({ _id: bucket._id }, credNonExistingUser)) - - // Other user in same org: - await 
expectReadAllowed(async () => BucketSecurity.allowReadAccess(userB, bucket._id)) - await expectAllowed(async () => BucketSecurity.allowWriteAccess(userB, bucket._id)) - // expectAccess(() => BucketSecurity.allowWriteAccessPiece({ _id: bucket._id }, credUserB)) - - // Other user in other org: - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/User is not in the same organization as the studio/i) - } - await expectReadNotAllowed(async () => BucketSecurity.allowReadAccess(wrongOrg, bucket._id)) - await expectNotAllowed(async () => BucketSecurity.allowWriteAccess(wrongOrg, bucket._id)) - // expectAccess(() => BucketSecurity.allowWriteAccessPiece({ _id: bucket._id }, credUserInWrongOrganization)) - }) - }) - - // eslint-disable-next-line jest/expect-expect - test('NoSecurity', async () => { - await changeEnableUserAccounts(async () => { - await expectAllowed(async () => NoSecurityReadAccess.any()) - }) - }) - // eslint-disable-next-line jest/expect-expect - test('Organization', async () => { - const token = generateToken() - const snapshotId = await storeSystemSnapshot(superAdmin, hashSingleUseToken(token), env.studio._id, 'for test') - - await changeEnableUserAccounts(async () => { - const selectorId = org0._id - const selectorOrg = { organizationId: org0._id } - - // === Read access: === - - // No user credentials: - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/No organization in credentials/i) - } - await expectReadNotAllowed(async () => OrganizationReadAccess.adminUsers(selectorId, nothing)) - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/No organization in credentials/i) - } - await expectReadNotAllowed(async () => OrganizationReadAccess.organization(selectorId, nothing)) - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/No organization in credentials/i) - } - await expectReadNotAllowed(async () => 
OrganizationReadAccess.organizationContent(selectorId, nothing)) - // Normal user: - await expectReadAllowed(async () => OrganizationReadAccess.adminUsers(selectorId, creator)) - await expectReadAllowed(async () => OrganizationReadAccess.organization(selectorId, creator)) - await expectReadAllowed(async () => OrganizationReadAccess.organizationContent(selectorId, creator)) - // Other normal user: - await expectReadAllowed(async () => OrganizationReadAccess.adminUsers(selectorId, userB)) - await expectReadAllowed(async () => OrganizationReadAccess.organization(selectorId, userB)) - await expectReadAllowed(async () => OrganizationReadAccess.organizationContent(selectorId, userB)) - // Non-existing user: - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/No organization in credentials/i) - } - await expectReadNotAllowed(async () => OrganizationReadAccess.adminUsers(selectorId, nonExisting)) - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/No organization in credentials/i) - } - await expectReadNotAllowed(async () => OrganizationReadAccess.organization(selectorId, nonExisting)) - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/No organization in credentials/i) - } - await expectReadNotAllowed(async () => OrganizationReadAccess.organizationContent(selectorId, nonExisting)) - // User in wrong organization: - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/User is not in the organization/i) - } - await expectReadNotAllowed(async () => OrganizationReadAccess.adminUsers(selectorId, wrongOrg)) - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/User is not in the organization/i) - } - await expectReadNotAllowed(async () => OrganizationReadAccess.organization(selectorId, wrongOrg)) - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/User is not in the organization/i) - } - await expectReadNotAllowed(async () => 
OrganizationReadAccess.organizationContent(selectorId, wrongOrg)) - // SuperAdmin: - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/User is not in the organization/i) - } - await expectReadNotAllowed(async () => OrganizationReadAccess.adminUsers(selectorId, otherSuperAdmin)) - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/User is not in the organization/i) - } - await expectReadNotAllowed(async () => OrganizationReadAccess.organization(selectorId, otherSuperAdmin)) - if (Settings.enableUserAccounts) { - SupressLogMessages.suppressLogMessage(/User is not in the organization/i) - } - await expectReadNotAllowed(async () => - OrganizationReadAccess.organizationContent(selectorId, otherSuperAdmin) - ) - - // === Write access: === - - // No user credentials: - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.organization(nothing, org0._id)) - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.studio(nothing, env.studio)) - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.evaluation(nothing)) - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.mediaWorkFlows(nothing)) - await expectNotLoggedIn(async () => - OrganizationContentWriteAccess.blueprint(nothing, env.studioBlueprint._id) - ) - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.snapshot(nothing, snapshotId)) - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.dataFromSnapshot(nothing, org0._id)) - await expectNotLoggedIn(async () => - OrganizationContentWriteAccess.showStyleBase(nothing, env.showStyleBaseId) - ) - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.translationBundle(nothing, selectorOrg)) - - // Normal user: - await expectAllowed(async () => OrganizationContentWriteAccess.organization(creator, org0._id)) - await expectAllowed(async () => OrganizationContentWriteAccess.studio(creator, env.studio)) - await expectAllowed(async 
() => OrganizationContentWriteAccess.evaluation(creator)) - await expectAllowed(async () => OrganizationContentWriteAccess.mediaWorkFlows(creator)) - await expectAllowed(async () => OrganizationContentWriteAccess.blueprint(creator, env.studioBlueprint._id)) - await expectAllowed(async () => OrganizationContentWriteAccess.snapshot(creator, snapshotId)) - await expectAllowed(async () => OrganizationContentWriteAccess.dataFromSnapshot(creator, org0._id)) - await expectAllowed(async () => OrganizationContentWriteAccess.showStyleBase(creator, env.showStyleBaseId)) - await expectAllowed(async () => OrganizationContentWriteAccess.translationBundle(creator, selectorOrg)) - // Other normal user: - await expectAllowed(async () => OrganizationContentWriteAccess.organization(userB, org0._id)) - await expectAllowed(async () => OrganizationContentWriteAccess.studio(userB, env.studio)) - await expectAllowed(async () => OrganizationContentWriteAccess.evaluation(userB)) - await expectAllowed(async () => OrganizationContentWriteAccess.mediaWorkFlows(userB)) - await expectAllowed(async () => OrganizationContentWriteAccess.blueprint(userB, env.studioBlueprint._id)) - await expectAllowed(async () => OrganizationContentWriteAccess.snapshot(userB, snapshotId)) - await expectAllowed(async () => OrganizationContentWriteAccess.dataFromSnapshot(userB, org0._id)) - await expectAllowed(async () => OrganizationContentWriteAccess.showStyleBase(userB, env.showStyleBaseId)) - await expectAllowed(async () => OrganizationContentWriteAccess.translationBundle(userB, selectorOrg)) - // Non-existing user: - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.organization(nonExisting, org0._id)) - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.studio(nonExisting, env.studio)) - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.evaluation(nonExisting)) - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.mediaWorkFlows(nonExisting)) - 
await expectNotLoggedIn(async () => - OrganizationContentWriteAccess.blueprint(nonExisting, env.studioBlueprint._id) - ) - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.snapshot(nonExisting, snapshotId)) - await expectNotLoggedIn(async () => OrganizationContentWriteAccess.dataFromSnapshot(nonExisting, org0._id)) - await expectNotLoggedIn(async () => - OrganizationContentWriteAccess.showStyleBase(nonExisting, env.showStyleBaseId) - ) - await expectNotLoggedIn(async () => - OrganizationContentWriteAccess.translationBundle(nonExisting, selectorOrg) - ) - // User in wrong organization: - await expectNotAllowed(async () => OrganizationContentWriteAccess.organization(wrongOrg, org0._id)) - await expectNotAllowed(async () => OrganizationContentWriteAccess.studio(wrongOrg, env.studio)) - // expectNotAllowed(async() => OrganizationContentWriteAccess.evaluation(wrongOrg)) - // expectNotAllowed(async() => OrganizationContentWriteAccess.mediaWorkFlows(wrongOrg)) - await expectNotAllowed(async () => - OrganizationContentWriteAccess.blueprint(wrongOrg, env.studioBlueprint._id) - ) - await expectNotAllowed(async () => OrganizationContentWriteAccess.snapshot(wrongOrg, snapshotId)) - await expectNotAllowed(async () => OrganizationContentWriteAccess.dataFromSnapshot(wrongOrg, org0._id)) - await expectNotAllowed(async () => - OrganizationContentWriteAccess.showStyleBase(wrongOrg, env.showStyleBaseId) - ) - await expectNotAllowed(async () => OrganizationContentWriteAccess.translationBundle(wrongOrg, selectorOrg)) - - // Other SuperAdmin - await expectNotAllowed(async () => OrganizationContentWriteAccess.organization(otherSuperAdmin, org0._id)) - await expectNotAllowed(async () => OrganizationContentWriteAccess.studio(otherSuperAdmin, env.studio)) - // expectNotAllowed(async() => OrganizationContentWriteAccess.evaluation(otherSuperAdmin)) - // expectNotAllowed(async() => OrganizationContentWriteAccess.mediaWorkFlows(otherSuperAdmin)) - await expectNotAllowed(async 
() => - OrganizationContentWriteAccess.blueprint(otherSuperAdmin, env.studioBlueprint._id) - ) - await expectNotAllowed(async () => OrganizationContentWriteAccess.snapshot(otherSuperAdmin, snapshotId)) - await expectNotAllowed(async () => - OrganizationContentWriteAccess.dataFromSnapshot(otherSuperAdmin, org0._id) - ) - await expectNotAllowed(async () => - OrganizationContentWriteAccess.showStyleBase(otherSuperAdmin, env.showStyleBaseId) - ) - await expectNotAllowed(async () => - OrganizationContentWriteAccess.translationBundle(otherSuperAdmin, selectorOrg) - ) - }) - }) -}) diff --git a/meteor/server/security/_security.ts b/meteor/server/security/_security.ts deleted file mode 100644 index 320d5b5bbba..00000000000 --- a/meteor/server/security/_security.ts +++ /dev/null @@ -1,11 +0,0 @@ -import './lib/lib' - -import './buckets' -import './noSecurity' -import './organization' -import './peripheralDevice' -import './rundown' -import './rundownPlaylist' -import './showStyle' -import './studio' -import './system' diff --git a/meteor/server/security/lib/lib.ts b/meteor/server/security/allowDeny.ts similarity index 84% rename from meteor/server/security/lib/lib.ts rename to meteor/server/security/allowDeny.ts index a5c0d244d9f..089032c9f83 100644 --- a/meteor/server/security/lib/lib.ts +++ b/meteor/server/security/allowDeny.ts @@ -1,5 +1,5 @@ import { FieldNames } from '@sofie-automation/meteor-lib/dist/collections/lib' -import { logger } from '../../logging' + /** * Allow only edits to the fields specified. 
Edits to any other fields will be rejected * @param doc @@ -32,8 +32,3 @@ export function rejectFields(_doc: T, fieldNames: FieldNames, rejectFields return true } - -export function logNotAllowed(area: string, reason: string): false { - logger.warn(`Not allowed access to ${area}: ${reason}`) - return false -} diff --git a/meteor/server/security/auth.ts b/meteor/server/security/auth.ts new file mode 100644 index 00000000000..60702838fa0 --- /dev/null +++ b/meteor/server/security/auth.ts @@ -0,0 +1,87 @@ +import { + parseUserPermissions, + USER_PERMISSIONS_HEADER, + UserPermissions, +} from '@sofie-automation/meteor-lib/dist/userPermissions' +import { Settings } from '../Settings' +import { Meteor } from 'meteor/meteor' +import Koa from 'koa' +import { triggerWriteAccess } from './securityVerify' +import { UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { unprotectString } from '../lib/tempLib' +import { logger } from '../logging' +import { CollectionName } from '@sofie-automation/corelib/dist/dataModel/Collections' + +export type RequestCredentials = Meteor.Connection | Koa.ParameterizedContext + +export function parseConnectionPermissions(conn: RequestCredentials): UserPermissions { + if (!Settings.enableHeaderAuth) { + // If auth is disabled, return all permissions + return { + studio: true, + configure: true, + developer: true, + testing: true, + service: true, + gateway: true, + } + } + + let header: string | string[] | undefined + if ('httpHeaders' in conn) { + header = conn.httpHeaders[USER_PERMISSIONS_HEADER] + } else { + header = conn.request.headers[USER_PERMISSIONS_HEADER] + } + + // This shouldn't happen, but take the first header if it does + if (Array.isArray(header)) header = header[0] + + return parseUserPermissions(header) +} + +export function assertConnectionHasOneOfPermissions( + conn: RequestCredentials | null, + ...allowedPermissions: Array +): void { + if (allowedPermissions.length === 0) throw new Meteor.Error(403, 'No 
permissions specified') + + triggerWriteAccess() + + if (!conn) throw new Meteor.Error(403, 'Can only be invoked by clients') + + // Skip if auth is disabled + if (!Settings.enableHeaderAuth) return + + const permissions = parseConnectionPermissions(conn) + for (const permission of allowedPermissions) { + if (permissions[permission]) return + } + + // Nothing matched + throw new Meteor.Error(403, 'Not authorized') +} + +export function checkUserIdHasOneOfPermissions( + userId: UserId | null, + collectionName: CollectionName, + ...allowedPermissions: Array +): boolean { + if (allowedPermissions.length === 0) throw new Meteor.Error(403, 'No permissions specified') + + triggerWriteAccess() + + // Skip if auth is disabled + if (!Settings.enableHeaderAuth) return true + + if (!userId) throw new Meteor.Error(403, 'UserId is null') + + const permissions: UserPermissions = JSON.parse(unprotectString(userId)) + for (const permission of allowedPermissions) { + if (permissions[permission]) return true + } + + // Nothing matched + logger.warn(`Not allowed access to ${collectionName}`) + return false +} diff --git a/meteor/server/security/buckets.ts b/meteor/server/security/buckets.ts deleted file mode 100644 index d7160f974c1..00000000000 --- a/meteor/server/security/buckets.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { Bucket } from '@sofie-automation/meteor-lib/dist/collections/Buckets' -import { Credentials, ResolvedCredentials } from './lib/credentials' -import { triggerWriteAccess } from './lib/securityVerify' -import { check } from '../lib/check' -import { Meteor } from 'meteor/meteor' -import { StudioReadAccess, StudioContentWriteAccess, StudioContentAccess } from './studio' -import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' -import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibAction' -import { AdLibActionId, BucketId, PieceId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { 
BucketAdLibActions, BucketAdLibs, Buckets } from '../collections' - -export namespace BucketSecurity { - export interface BucketContentAccess extends StudioContentAccess { - bucket: Bucket - } - export interface BucketAdlibPieceContentAccess extends StudioContentAccess { - adlib: BucketAdLib - } - export interface BucketAdlibActionContentAccess extends StudioContentAccess { - action: BucketAdLibAction - } - - // Sometimes a studio ID is passed, others the peice / bucket id - export async function allowReadAccess( - cred: Credentials | ResolvedCredentials, - bucketId: BucketId - ): Promise { - check(bucketId, String) - - const bucket = await Buckets.findOneAsync(bucketId) - if (!bucket) throw new Meteor.Error(404, `Bucket "${bucketId}" not found!`) - - return StudioReadAccess.studioContent(bucket.studioId, cred) - } - export async function allowWriteAccess(cred: Credentials, bucketId: BucketId): Promise { - triggerWriteAccess() - - check(bucketId, String) - - const bucket = await Buckets.findOneAsync(bucketId) - if (!bucket) throw new Meteor.Error(404, `Bucket "${bucketId}" not found!`) - - return { - ...(await StudioContentWriteAccess.bucket(cred, bucket.studioId)), - bucket, - } - } - export async function allowWriteAccessPiece( - cred: Credentials, - pieceId: PieceId - ): Promise { - triggerWriteAccess() - - check(pieceId, String) - - const bucketAdLib = await BucketAdLibs.findOneAsync(pieceId) - if (!bucketAdLib) throw new Meteor.Error(404, `Bucket AdLib "${pieceId}" not found!`) - - return { - ...(await StudioContentWriteAccess.bucket(cred, bucketAdLib.studioId)), - adlib: bucketAdLib, - } - } - export async function allowWriteAccessAction( - cred: Credentials, - actionId: AdLibActionId - ): Promise { - triggerWriteAccess() - - check(actionId, String) - - const bucketAdLibAction = await BucketAdLibActions.findOneAsync(actionId) - if (!bucketAdLibAction) throw new Meteor.Error(404, `Bucket AdLib Actions "${actionId}" not found!`) - - return { - ...(await 
StudioContentWriteAccess.bucket(cred, bucketAdLibAction.studioId)), - action: bucketAdLibAction, - } - } -} diff --git a/meteor/server/security/check.ts b/meteor/server/security/check.ts new file mode 100644 index 00000000000..da6d38ad1de --- /dev/null +++ b/meteor/server/security/check.ts @@ -0,0 +1,104 @@ +import { PeripheralDeviceId, RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { Meteor } from 'meteor/meteor' +import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { assertConnectionHasOneOfPermissions, RequestCredentials } from './auth' +import { PeripheralDevices, RundownPlaylists, Rundowns } from '../collections' +import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' +import { MethodContext } from '../api/methodContext' +import { profiler } from '../api/profiler' +import { SubscriptionContext } from '../publications/lib/lib' + +/** + * Check that the current user has write access to the specified playlist, and ensure that the playlist exists + * @param context + * @param playlistId Id of the playlist + */ +export async function checkAccessToPlaylist( + cred: RequestCredentials | null, + playlistId: RundownPlaylistId +): Promise { + assertConnectionHasOneOfPermissions(cred, 'studio') + + const playlist = (await RundownPlaylists.findOneAsync(playlistId, { + projection: { + _id: 1, + studioId: 1, + organizationId: 1, + name: 1, + }, + })) as Pick | undefined + if (!playlist) throw new Meteor.Error(404, `RundownPlaylist "${playlistId}" not found`) + + return playlist +} +export type VerifiedRundownPlaylistForUserAction = Pick< + DBRundownPlaylist, + '_id' | 'studioId' | 'organizationId' | 'name' +> + +/** + * Check that the current user has write access to the specified rundown, and ensure that the rundown exists + * @param context + * @param rundownId Id of 
the rundown + */ +export async function checkAccessToRundown( + cred: RequestCredentials | null, + rundownId: RundownId +): Promise { + assertConnectionHasOneOfPermissions(cred, 'studio') + + const rundown = (await Rundowns.findOneAsync(rundownId, { + projection: { + _id: 1, + studioId: 1, + externalId: 1, + showStyleVariantId: 1, + source: 1, + }, + })) as Pick | undefined + if (!rundown) throw new Meteor.Error(404, `Rundown "${rundownId}" not found`) + + return rundown +} +export type VerifiedRundownForUserAction = Pick< + DBRundown, + '_id' | 'studioId' | 'externalId' | 'showStyleVariantId' | 'source' +> + +/** Check Access and return PeripheralDevice, throws otherwise */ +export async function checkAccessAndGetPeripheralDevice( + deviceId: PeripheralDeviceId, + token: string | undefined, + context: MethodContext | SubscriptionContext +): Promise { + const span = profiler.startSpan('lib.checkAccessAndGetPeripheralDevice') + + assertConnectionHasOneOfPermissions(context.connection, 'gateway') + + // If no token, we will never match + if (!token) throw new Meteor.Error(401, `Not allowed access to peripheralDevice`) + + const device = await PeripheralDevices.findOneAsync({ _id: deviceId }) + if (!device) throw new Meteor.Error(404, `PeripheralDevice "${deviceId}" not found`) + + // Check if the device has a token, and if it matches: + if (device.token && device.token === token) { + span?.end() + return device + } + + // If the device has a parent, try that for access control: + const parentDevice = device.parentDeviceId ? 
await PeripheralDevices.findOneAsync(device.parentDeviceId) : device + if (!parentDevice) throw new Meteor.Error(404, `PeripheralDevice parentDevice "${device.parentDeviceId}" not found`) + + // Check if the parent device has a token, and if it matches: + if (parentDevice.token && parentDevice.token === token) { + span?.end() + return device + } + + // No match for token found + span?.end() + throw new Meteor.Error(401, `Not allowed access to peripheralDevice`) +} diff --git a/meteor/server/security/lib/access.ts b/meteor/server/security/lib/access.ts deleted file mode 100644 index 2f913afb5b1..00000000000 --- a/meteor/server/security/lib/access.ts +++ /dev/null @@ -1,64 +0,0 @@ -import * as _ from 'underscore' - -export interface Access { - // Direct database access: - read: boolean - insert: boolean - update: boolean - remove: boolean - - // Methods access: - playout: boolean - configure: boolean - - // For debugging - reason: string - - // The document in question - document: T | null -} - -/** - * Grant all access to all of the document - * @param document The document - * @param reason The reason for the access being granted - */ -export function allAccess(document: T | null, reason?: string): Access { - return { - read: true, - insert: true, - update: true, - remove: true, - - playout: true, - configure: true, - reason: reason || '', - document: document, - } -} - -/** - * Deny all access to all of the document - * @param reason The reason for the access being denied - */ -export function noAccess(reason: string): Access { - return combineAccess({}, allAccess(null, reason)) -} - -/** - * Combine access objects to find the minimum common overlap - * @param access0 - * @param access1 - */ -export function combineAccess( - access0: Access | { reason?: string; document?: null }, - access1: Access -): Access { - const a: any = {} - _.each(_.keys(access0).concat(_.keys(access1)), (key) => { - a[key] = (access0 as any)[key] && (access1 as any)[key] - }) - a.reason = 
_.compact([access0.reason, access1.reason]).join(',') - a.document = access0.document || access1.document || null - return a -} diff --git a/meteor/server/security/lib/credentials.ts b/meteor/server/security/lib/credentials.ts deleted file mode 100644 index b9b480a712b..00000000000 --- a/meteor/server/security/lib/credentials.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { User } from '@sofie-automation/meteor-lib/dist/collections/Users' -import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { cacheResult, clearCacheResult } from '../../lib/cacheResult' -import { LIMIT_CACHE_TIME } from './security' -import { profiler } from '../../api/profiler' -import { OrganizationId, UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PeripheralDevices, Users } from '../../collections' -import { isProtectedString } from '@sofie-automation/shared-lib/dist/lib/protectedString' - -export interface Credentials { - userId: UserId | null - token?: string -} - -/** - * A minimal set of properties about the user. - * We keep it small so that we don't cache too much in memory or have to invalidate the credentials when something insignificant changes - */ -export type ResolvedUser = Pick - -/** - * A minimal set of properties about the OeripheralDevice. - * We keep it small so that we don't cache too much in memory or have to invalidate the credentials when something insignificant changes - */ -export type ResolvedPeripheralDevice = Pick - -export interface ResolvedCredentials { - organizationId: OrganizationId | null - user?: ResolvedUser - device?: ResolvedPeripheralDevice -} -export interface ResolvedUserCredentials { - organizationId: OrganizationId - user: ResolvedUser -} -export interface ResolvedPeripheralDeviceCredentials { - organizationId: OrganizationId - device: ResolvedPeripheralDevice -} - -/** - * Resolve the provided credentials, and retrieve the PeripheralDevice and Organization for the provided credentials. 
- * @returns null if the PeripheralDevice was not found - */ -export async function resolveAuthenticatedPeripheralDevice( - cred: Credentials -): Promise { - const resolved = await resolveCredentials({ userId: null, token: cred.token }) - - if (resolved.device && resolved.organizationId) { - return { - organizationId: resolved.organizationId, - device: resolved.device, - } - } else { - return null - } -} - -/** - * Resolve the provided credentials, and retrieve the User and Organization for the provided credentials. - * Note: this requies that the UserId came from a trusted source,it must not be from user input - * @returns null if the user was not found - */ -export async function resolveAuthenticatedUser(cred: Credentials): Promise { - const resolved = await resolveCredentials({ userId: cred.userId }) - - if (resolved.user && resolved.organizationId) { - return { - organizationId: resolved.organizationId, - user: resolved.user, - } - } else { - return null - } -} - -/** - * Resolve the provided credentials/identifier, and fetch the authenticating document from the database. - * Note: this requires that the provided UserId comes from an up-to-date location in meteor, it must not be from user input - * @returns The resolved object. 
If the identifiers were invalid then this object will have no properties - */ -export async function resolveCredentials(cred: Credentials | ResolvedCredentials): Promise { - const span = profiler.startSpan('security.lib.credentials') - - if (isResolvedCredentials(cred)) { - span?.end() - return cred - } - - const resolved = cacheResult( - credCacheName(cred), - async () => { - const resolved: ResolvedCredentials = { - organizationId: null, - } - - if (cred.token && typeof cred.token !== 'string') cred.token = undefined - if (cred.userId && !isProtectedString(cred.userId)) cred.userId = null - - // Lookup user, using userId: - if (cred.userId && isProtectedString(cred.userId)) { - const user = (await Users.findOneAsync(cred.userId, { - fields: { - _id: 1, - organizationId: 1, - superAdmin: 1, - }, - })) as ResolvedUser - if (user) { - resolved.user = user - resolved.organizationId = user.organizationId - } - } - // Lookup device, using token - if (cred.token) { - // TODO - token is not enforced to be unique and can be defined by a connecting gateway. - // This is rather flawed in the current model.. 
- const device = (await PeripheralDevices.findOneAsync( - { token: cred.token }, - { - fields: { - _id: 1, - organizationId: 1, - token: 1, - studioId: 1, - }, - } - )) as ResolvedPeripheralDevice - if (device) { - resolved.device = device - resolved.organizationId = device.organizationId - } - } - - // TODO: Implement user-token / API-key - // Lookup user, using token - // if (!resolved.user && !resolved.device && cred.token) { - // user = Users.findOne({ token: cred.token}) - // if (user) resolved.user = user - // } - - // // Make sure the organizationId is valid - // if (resolved.organizationId) { - // const org = (await Organizations.findOneAsync(resolved.organizationId, { - // fields: { _id: 1 }, - // })) as Pick | undefined - // if (org) { - // resolved.organizationId = null - // } - // } - - return resolved - }, - LIMIT_CACHE_TIME - ) - - span?.end() - return resolved -} -/** To be called whenever a user is changed */ -export function resetCredentials(cred: Credentials): void { - clearCacheResult(credCacheName(cred)) -} -function credCacheName(cred: Credentials) { - return `resolveCredentials_${cred.userId}_${cred.token}` -} -export function isResolvedCredentials(cred: Credentials | ResolvedCredentials): cred is ResolvedCredentials { - const c = cred as ResolvedCredentials - return !!(c.user || c.organizationId || c.device) -} diff --git a/meteor/server/security/lib/security.ts b/meteor/server/security/lib/security.ts deleted file mode 100644 index ed27ed18461..00000000000 --- a/meteor/server/security/lib/security.ts +++ /dev/null @@ -1,349 +0,0 @@ -import * as _ from 'underscore' -import { MongoQueryKey } from '@sofie-automation/corelib/dist/mongo' -import { Settings } from '../../Settings' -import { resolveCredentials, ResolvedCredentials, Credentials, isResolvedCredentials } from './credentials' -import { allAccess, noAccess, combineAccess, Access } from './access' -import { DBRundownPlaylist } from 
'@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' -import { isProtectedString } from '../../lib/tempLib' -import { DBOrganization } from '@sofie-automation/meteor-lib/dist/collections/Organization' -import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' -import { profiler } from '../../api/profiler' -import { fetchShowStyleBasesLight, fetchStudioLight, ShowStyleBaseLight } from '../../optimizations' -import { Organizations, PeripheralDevices, RundownPlaylists, Rundowns, ShowStyleVariants } from '../../collections' -import { - OrganizationId, - PeripheralDeviceId, - RundownId, - RundownPlaylistId, - ShowStyleBaseId, - ShowStyleVariantId, - StudioId, - UserId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' -import { StudioLight } from '@sofie-automation/corelib/dist/dataModel/Studio' - -export const LIMIT_CACHE_TIME = 1000 * 60 * 15 // 15 minutes - -// TODO: add caching - -/** - * Grant access to everything if security is disabled - * @returns Access granting access to everything - */ -export function allowAccessToAnythingWhenSecurityDisabled(): Access { - if (!Settings.enableUserAccounts) return allAccess(null, 'No security') - else return noAccess('Security is enabled') -} - -/** - * Check if access is allowed to the coreSystem collection - * @param cred0 Credentials to check - */ -export async function allowAccessToCoreSystem(cred: ResolvedCredentials): Promise> { - if (!Settings.enableUserAccounts) return allAccess(null, 'No security') - - return AccessRules.accessCoreSystem(cred) -} - -/** - * Check if access is allowed to a User, and that user is the current User - * @param cred0 Credentials to check - */ -export async function allowAccessToCurrentUser( - cred0: Credentials | ResolvedCredentials, - userId: UserId | null -): Promise> { - 
if (!Settings.enableUserAccounts) return allAccess(null, 'No security') - if (!userId) return noAccess('userId missing') - if (!isProtectedString(userId)) return noAccess('userId is not a string') - - return { - ...(await AccessRules.accessCurrentUser(cred0, userId)), - insert: false, // only allowed through methods - update: false, // only allowed through methods - remove: false, // only allowed through methods - } -} - -/** - * Check if access is allowed to the systemStatus collection - * @param cred0 Credentials to check - */ -export async function allowAccessToSystemStatus(cred0: Credentials | ResolvedCredentials): Promise> { - if (!Settings.enableUserAccounts) return allAccess(null, 'No security') - - return { - ...AccessRules.accessSystemStatus(cred0), - insert: false, // only allowed through methods - update: false, // only allowed through methods - remove: false, // only allowed through methods - } -} - -export async function allowAccessToOrganization( - cred0: Credentials | ResolvedCredentials, - organizationId: OrganizationId | null -): Promise> { - if (!Settings.enableUserAccounts) return allAccess(null, 'No security') - if (!organizationId) return noAccess('organizationId not set') - if (!isProtectedString(organizationId)) return noAccess('organizationId is not a string') - const cred = await resolveCredentials(cred0) - - const organization = await Organizations.findOneAsync(organizationId) - if (!organization) return noAccess('Organization not found') - - return { - ...AccessRules.accessOrganization(organization, cred), - insert: false, // only allowed through methods - remove: false, // only allowed through methods - } -} -export async function allowAccessToShowStyleBase( - cred0: Credentials | ResolvedCredentials, - showStyleBaseId: MongoQueryKey -): Promise> { - if (!Settings.enableUserAccounts) return allAccess(null, 'No security') - if (!showStyleBaseId) return noAccess('showStyleBaseId not set') - const cred = await resolveCredentials(cred0) - - 
const showStyleBases = await fetchShowStyleBasesLight({ - _id: showStyleBaseId, - }) - let access: Access = allAccess(null) - for (const showStyleBase of showStyleBases) { - access = combineAccess(access, AccessRules.accessShowStyleBase(showStyleBase, cred)) - } - return { - ...access, - insert: false, // only allowed through methods - remove: false, // only allowed through methods - } -} -export async function allowAccessToShowStyleVariant( - cred0: Credentials | ResolvedCredentials, - showStyleVariantId: MongoQueryKey -): Promise> { - if (!Settings.enableUserAccounts) return allAccess(null, 'No security') - if (!showStyleVariantId) return noAccess('showStyleVariantId not set') - const cred = await resolveCredentials(cred0) - - const showStyleVariants = await ShowStyleVariants.findFetchAsync({ - _id: showStyleVariantId, - }) - const showStyleBaseIds = _.uniq(_.map(showStyleVariants, (v) => v.showStyleBaseId)) - const showStyleBases = await fetchShowStyleBasesLight({ - _id: { $in: showStyleBaseIds }, - }) - let access: Access = allAccess(null) - for (const showStyleBase of showStyleBases) { - access = combineAccess(access, AccessRules.accessShowStyleBase(showStyleBase, cred)) - } - return { ...access, document: _.last(showStyleVariants) || null } -} -export async function allowAccessToStudio( - cred0: Credentials | ResolvedCredentials, - studioId: StudioId -): Promise> { - if (!Settings.enableUserAccounts) return allAccess(null, 'No security') - if (!studioId) return noAccess('studioId not set') - if (!isProtectedString(studioId)) return noAccess('studioId is not a string') - const cred = await resolveCredentials(cred0) - - const studio = await fetchStudioLight(studioId) - if (!studio) return noAccess('Studio not found') - - return { - ...AccessRules.accessStudio(studio, cred), - insert: false, // only allowed through methods - remove: false, // only allowed through methods - } -} -export async function allowAccessToRundownPlaylist( - cred0: Credentials | 
ResolvedCredentials, - playlistId: RundownPlaylistId -): Promise> { - if (!Settings.enableUserAccounts) return allAccess(null, 'No security') - if (!playlistId) return noAccess('playlistId not set') - const cred = await resolveCredentials(cred0) - - const playlist = await RundownPlaylists.findOneAsync(playlistId) - if (playlist) { - return AccessRules.accessRundownPlaylist(playlist, cred) - } else { - return allAccess(null) - } -} -export async function allowAccessToRundown( - cred0: Credentials | ResolvedCredentials, - rundownId: MongoQueryKey -): Promise> { - const access = await allowAccessToRundownContent(cred0, rundownId) - return { - ...access, - insert: false, // only allowed through methods - update: false, // only allowed through methods - remove: false, // only allowed through methods - } -} -export async function allowAccessToRundownContent( - cred0: Credentials | ResolvedCredentials, - rundownId: MongoQueryKey -): Promise> { - if (!Settings.enableUserAccounts) return allAccess(null, 'No security') - if (!rundownId) return noAccess('rundownId missing') - const cred = await resolveCredentials(cred0) - - const rundowns = await Rundowns.findFetchAsync({ _id: rundownId }) - let access: Access = allAccess(null) - for (const rundown of rundowns) { - // TODO - this is reeally inefficient on db queries - access = combineAccess(access, await AccessRules.accessRundown(rundown, cred)) - } - return access -} -export async function allowAccessToPeripheralDevice( - cred0: Credentials | ResolvedCredentials, - deviceId: PeripheralDeviceId -): Promise> { - if (!deviceId) return noAccess('deviceId missing') - if (!isProtectedString(deviceId)) return noAccess('deviceId is not a string') - - const device = await PeripheralDevices.findOneAsync(deviceId) - if (!device) return noAccess('Device not found') - - const access = await allowAccessToPeripheralDeviceContent(cred0, device) - return { - ...access, - insert: false, // only allowed through methods - remove: false, // only 
allowed through methods - } -} - -export async function allowAccessToPeripheralDeviceContent( - cred0: Credentials | ResolvedCredentials, - device: PeripheralDevice -): Promise> { - const span = profiler.startSpan('security.lib.security.allowAccessToPeripheralDeviceContent') - if (!Settings.enableUserAccounts) return allAccess(null, 'No security') - const cred = await resolveCredentials(cred0) - - const access = AccessRules.accessPeripheralDevice(device, cred) - - span?.end() - return access -} - -namespace AccessRules { - /** - * Check if access is allowed to the coreSystem collection - * @param cred0 Credentials to check - */ - export function accessCoreSystem(cred: ResolvedCredentials): Access { - if (cred.user && cred.user.superAdmin) { - return { - ...allAccess(null), - insert: false, // only allowed through methods - remove: false, // only allowed through methods - } - } else { - return { - ...noAccess('User is not superAdmin'), - read: true, - } - } - } - - /** - * Check the allowed access to a user (and verify that user is the current user) - * @param cred0 Credentials to check - * @param userId User to check access to - */ - export async function accessCurrentUser( - cred0: Credentials | ResolvedCredentials, - userId: UserId - ): Promise> { - let credUserId: UserId | undefined = undefined - if (isResolvedCredentials(cred0) && cred0.user) { - credUserId = cred0.user._id - } else if (!isResolvedCredentials(cred0) && cred0.userId) { - credUserId = cred0.userId - } else { - const cred = await resolveCredentials(cred0) - if (!cred.user) return noAccess('User in cred not found') - credUserId = cred.user._id - } - - if (credUserId) { - if (credUserId === userId) { - // TODO: user role access - return allAccess(null) - } else return noAccess('Not accessing current user') - } else return noAccess('Requested user not found') - } - - export function accessSystemStatus(_cred0: Credentials | ResolvedCredentials): Access { - // No restrictions on systemStatus - return 
allAccess(null) - } - // export function accessUser (cred: ResolvedCredentials, user: User): Access { - // if (!cred.organizationId) return noAccess('No organization in credentials') - // if (user.organizationId === cred.organizationId) { - // // TODO: user role access - // return allAccess() - // } else return noAccess('User is not in the same organization as requested user') - // } - export function accessOrganization( - organization: DBOrganization, - cred: ResolvedCredentials - ): Access { - if (!cred.organizationId) return noAccess('No organization in credentials') - if (organization._id === cred.organizationId) { - // TODO: user role access - return allAccess(organization) - } else return noAccess(`User is not in the organization "${organization._id}"`) - } - export function accessShowStyleBase( - showStyleBase: ShowStyleBaseLight, - cred: ResolvedCredentials - ): Access { - if (!showStyleBase.organizationId) return noAccess('ShowStyleBase has no organization') - if (!cred.organizationId) return noAccess('No organization in credentials') - if (showStyleBase.organizationId === cred.organizationId) { - // TODO: user role access - return allAccess(showStyleBase) - } else return noAccess(`User is not in the same organization as the showStyleBase "${showStyleBase._id}"`) - } - export function accessStudio(studio: StudioLight, cred: ResolvedCredentials): Access { - if (!studio.organizationId) return noAccess('Studio has no organization') - if (!cred.organizationId) return noAccess('No organization in credentials') - if (studio.organizationId === cred.organizationId) { - // TODO: user role access - return allAccess(studio) - } else return noAccess(`User is not in the same organization as the studio ${studio._id}`) - } - export async function accessRundownPlaylist( - playlist: DBRundownPlaylist, - cred: ResolvedCredentials - ): Promise> { - const studio = await fetchStudioLight(playlist.studioId) - if (!studio) return noAccess(`Studio of playlist "${playlist._id}" 
not found`) - return { ...accessStudio(studio, cred), document: playlist } - } - export async function accessRundown(rundown: Rundown, cred: ResolvedCredentials): Promise> { - const playlist = await RundownPlaylists.findOneAsync(rundown.playlistId) - if (!playlist) return noAccess(`Rundown playlist of rundown "${rundown._id}" not found`) - return { ...(await accessRundownPlaylist(playlist, cred)), document: rundown } - } - export function accessPeripheralDevice( - device: PeripheralDevice, - cred: ResolvedCredentials - ): Access { - if (!cred.organizationId) return noAccess('No organization in credentials') - if (!device.organizationId) return noAccess('Device has no organizationId') - if (device.organizationId === cred.organizationId) { - return allAccess(device) - } else return noAccess(`Device "${device._id}" is not in the same organization as user`) - } -} diff --git a/meteor/server/security/noSecurity.ts b/meteor/server/security/noSecurity.ts deleted file mode 100644 index 73236204eb9..00000000000 --- a/meteor/server/security/noSecurity.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { allowAccessToAnythingWhenSecurityDisabled } from './lib/security' - -export namespace NoSecurityReadAccess { - /** - * Grant read access if security is disabled - */ - export function any(): boolean { - const access = allowAccessToAnythingWhenSecurityDisabled() - if (!access.read) return false // don't even log anything - return true - } -} diff --git a/meteor/server/security/organization.ts b/meteor/server/security/organization.ts deleted file mode 100644 index 8fd686c20ac..00000000000 --- a/meteor/server/security/organization.ts +++ /dev/null @@ -1,165 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { SnapshotItem } from '@sofie-automation/meteor-lib/dist/collections/Snapshots' -import { Blueprint } from '@sofie-automation/corelib/dist/dataModel/Blueprint' -import { logNotAllowed } from './lib/lib' -import { MongoQueryKey } from '@sofie-automation/corelib/dist/mongo' -import 
{ allowAccessToOrganization } from './lib/security' -import { Credentials, ResolvedCredentials, resolveCredentials } from './lib/credentials' -import { Settings } from '../Settings' -import { MethodContext } from '../api/methodContext' -import { triggerWriteAccess } from './lib/securityVerify' -import { isProtectedString } from '../lib/tempLib' -import { fetchShowStyleBaseLight, fetchStudioLight, ShowStyleBaseLight } from '../optimizations' -import { - BlueprintId, - OrganizationId, - ShowStyleBaseId, - SnapshotId, - StudioId, - UserId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' -import { Blueprints, Snapshots } from '../collections' -import { StudioLight } from '@sofie-automation/corelib/dist/dataModel/Studio' - -export type BasicAccessContext = { organizationId: OrganizationId | null; userId: UserId | null } - -export interface OrganizationContentAccess { - userId: UserId | null - organizationId: OrganizationId | null - cred: ResolvedCredentials | Credentials -} - -export namespace OrganizationReadAccess { - export async function organization( - organizationId: MongoQueryKey, - cred: Credentials | ResolvedCredentials - ): Promise { - return organizationContent(organizationId, cred) - } - /** Handles read access for all organization content (UserActions, Evaluations etc..) 
*/ - export async function organizationContent( - organizationId: MongoQueryKey | undefined, - cred: Credentials | ResolvedCredentials - ): Promise { - if (!Settings.enableUserAccounts) return true - if (!organizationId || !isProtectedString(organizationId)) - throw new Meteor.Error(400, 'selector must contain organizationId') - - const access = await allowAccessToOrganization(cred, organizationId) - if (!access.read) return logNotAllowed('Organization content', access.reason) - - return true - } - export async function adminUsers( - organizationId: MongoQueryKey | undefined, - cred: Credentials | ResolvedCredentials - ): Promise { - // TODO: User roles - return organizationContent(organizationId, cred) - } -} -export namespace OrganizationContentWriteAccess { - // These functions throws if access is not allowed. - - export async function organization( - cred0: Credentials, - organizationId: OrganizationId - ): Promise { - return anyContent(cred0, { organizationId }) - } - - export async function studio( - cred0: Credentials, - existingStudio?: StudioLight | StudioId - ): Promise { - triggerWriteAccess() - if (existingStudio && isProtectedString(existingStudio)) { - const studioId = existingStudio - existingStudio = await fetchStudioLight(studioId) - if (!existingStudio) throw new Meteor.Error(404, `Studio "${studioId}" not found!`) - } - return { ...(await anyContent(cred0, existingStudio)), studio: existingStudio } - } - export async function evaluation(cred0: Credentials): Promise { - return anyContent(cred0) - } - export async function mediaWorkFlows(cred0: Credentials): Promise { - // "All mediaWOrkflows in all devices of an organization" - return anyContent(cred0) - } - export async function blueprint( - cred0: Credentials, - existingBlueprint?: Blueprint | BlueprintId, - allowMissing?: boolean - ): Promise { - triggerWriteAccess() - if (existingBlueprint && isProtectedString(existingBlueprint)) { - const blueprintId = existingBlueprint - existingBlueprint = 
await Blueprints.findOneAsync(blueprintId) - if (!existingBlueprint && !allowMissing) - throw new Meteor.Error(404, `Blueprint "${blueprintId}" not found!`) - } - return { ...(await anyContent(cred0, existingBlueprint)), blueprint: existingBlueprint } - } - export async function snapshot( - cred0: Credentials, - existingSnapshot?: SnapshotItem | SnapshotId - ): Promise { - triggerWriteAccess() - if (existingSnapshot && isProtectedString(existingSnapshot)) { - const snapshotId = existingSnapshot - existingSnapshot = await Snapshots.findOneAsync(snapshotId) - if (!existingSnapshot) throw new Meteor.Error(404, `Snapshot "${snapshotId}" not found!`) - } - return { ...(await anyContent(cred0, existingSnapshot)), snapshot: existingSnapshot } - } - export async function dataFromSnapshot( - cred0: Credentials, - organizationId: OrganizationId - ): Promise { - return anyContent(cred0, { organizationId: organizationId }) - } - export async function translationBundle( - cred0: Credentials, - existingObj?: { organizationId: OrganizationId | null } - ): Promise { - return anyContent(cred0, existingObj) - } - export async function showStyleBase( - cred0: Credentials, - existingShowStyleBase?: ShowStyleBaseLight | ShowStyleBaseId - ): Promise { - triggerWriteAccess() - if (existingShowStyleBase && isProtectedString(existingShowStyleBase)) { - const showStyleBaseId = existingShowStyleBase - existingShowStyleBase = await fetchShowStyleBaseLight(showStyleBaseId) - if (!existingShowStyleBase) throw new Meteor.Error(404, `ShowStyleBase "${showStyleBaseId}" not found!`) - } - return { ...(await anyContent(cred0, existingShowStyleBase)), showStyleBase: existingShowStyleBase } - } - /** Return credentials if writing is allowed, throw otherwise */ - async function anyContent( - cred0: Credentials | MethodContext, - existingObj?: { organizationId: OrganizationId | null } - ): Promise { - triggerWriteAccess() - if (!Settings.enableUserAccounts) { - return { userId: null, organizationId: 
null, cred: cred0 } - } - const cred = await resolveCredentials(cred0) - if (!cred.user) throw new Meteor.Error(403, `Not logged in`) - if (!cred.organizationId) throw new Meteor.Error(500, `User has no organization`) - - const access = await allowAccessToOrganization( - cred, - existingObj ? existingObj.organizationId : cred.organizationId - ) - if (!access.update) throw new Meteor.Error(403, `Not allowed: ${access.reason}`) - - return { - userId: cred.user._id, - organizationId: cred.organizationId, - cred: cred, - } - } -} diff --git a/meteor/server/security/peripheralDevice.ts b/meteor/server/security/peripheralDevice.ts deleted file mode 100644 index a773b199d11..00000000000 --- a/meteor/server/security/peripheralDevice.ts +++ /dev/null @@ -1,180 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { check } from '../lib/check' -import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { isProtectedString } from '../lib/tempLib' -import { logNotAllowed } from './lib/lib' -import { MediaWorkFlow } from '@sofie-automation/shared-lib/dist/core/model/MediaWorkFlows' -import { MongoQueryKey } from '@sofie-automation/corelib/dist/mongo' -import { Credentials, ResolvedCredentials, resolveCredentials } from './lib/credentials' -import { allowAccessToPeripheralDevice, allowAccessToPeripheralDeviceContent } from './lib/security' -import { Settings } from '../Settings' -import { triggerWriteAccess } from './lib/securityVerify' -import { profiler } from '../api/profiler' -import { StudioContentWriteAccess } from './studio' -import { - MediaWorkFlowId, - OrganizationId, - PeripheralDeviceId, - StudioId, - UserId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' -import { MediaWorkFlows, PeripheralDevices } from '../collections' - -export namespace PeripheralDeviceReadAccess { - /** Check for read access for a peripheral device */ - export async function peripheralDevice( - deviceId: MongoQueryKey, - cred: Credentials | 
ResolvedCredentials - ): Promise { - return peripheralDeviceContent(deviceId, cred) - } - /** Check for read access for all peripheraldevice content (commands, mediaWorkFlows, etc..) */ - export async function peripheralDeviceContent( - deviceId: MongoQueryKey | undefined, - cred: Credentials | ResolvedCredentials - ): Promise { - if (!Settings.enableUserAccounts) return true - if (!deviceId || !isProtectedString(deviceId)) throw new Meteor.Error(400, 'selector must contain deviceId') - - const access = await allowAccessToPeripheralDevice(cred, deviceId) - if (!access.read) return logNotAllowed('PeripheralDevice content', access.reason) - - return true - } -} -export interface MediaWorkFlowContentAccess extends PeripheralDeviceContentWriteAccess.ContentAccess { - mediaWorkFlow: MediaWorkFlow -} - -export namespace PeripheralDeviceContentWriteAccess { - export interface ContentAccess { - userId: UserId | null - organizationId: OrganizationId | null - deviceId: PeripheralDeviceId - device: PeripheralDevice - cred: ResolvedCredentials | Credentials - } - - // These functions throws if access is not allowed. 
- - /** - * Check if a user is allowed to execute a PeripheralDevice function in a Studio - */ - export async function executeFunction(cred0: Credentials, deviceId: PeripheralDeviceId): Promise { - triggerWriteAccess() - const device = await PeripheralDevices.findOneAsync(deviceId) - if (!device) throw new Meteor.Error(404, `PeripheralDevice "${deviceId}" not found`) - - let studioId: StudioId - if (device.studioId) { - studioId = device.studioId - } else if (device.parentDeviceId) { - // Child devices aren't assigned to the studio themselves, instead look up the parent device and use it's studioId: - const parentDevice = await PeripheralDevices.findOneAsync(device.parentDeviceId) - if (!parentDevice) - throw new Meteor.Error( - 404, - `Parent PeripheralDevice "${device.parentDeviceId}" of "${deviceId}" not found!` - ) - if (!parentDevice.studioId) - throw new Meteor.Error( - 404, - `Parent PeripheralDevice "${device.parentDeviceId}" of "${deviceId}" doesn't have any studioId set` - ) - studioId = parentDevice.studioId - } else { - throw new Meteor.Error(404, `PeripheralDevice "${deviceId}" doesn't have any studioId set`) - } - - const access = await StudioContentWriteAccess.executeFunction(cred0, studioId) - - const access2 = await allowAccessToPeripheralDeviceContent(access.cred, device) - if (!access2.playout) throw new Meteor.Error(403, `Not allowed: ${access2.reason}`) - - return { - ...access, - deviceId: device._id, - device, - } - } - - /** Check for permission to modify a peripheralDevice */ - export async function peripheralDevice(cred0: Credentials, deviceId: PeripheralDeviceId): Promise { - await backwardsCompatibilityfix(cred0, deviceId) - return anyContent(cred0, deviceId) - } - - /** Check for permission to modify a mediaWorkFlow */ - export async function mediaWorkFlow( - cred0: Credentials, - existingWorkFlow: MediaWorkFlow | MediaWorkFlowId - ): Promise { - triggerWriteAccess() - if (existingWorkFlow && isProtectedString(existingWorkFlow)) { - 
const workFlowId = existingWorkFlow - const m = await MediaWorkFlows.findOneAsync(workFlowId) - if (!m) throw new Meteor.Error(404, `MediaWorkFlow "${workFlowId}" not found!`) - existingWorkFlow = m - } - await backwardsCompatibilityfix(cred0, existingWorkFlow.deviceId) - return { ...(await anyContent(cred0, existingWorkFlow.deviceId)), mediaWorkFlow: existingWorkFlow } - } - - /** Return credentials if writing is allowed, throw otherwise */ - async function anyContent(cred0: Credentials, deviceId: PeripheralDeviceId): Promise { - const span = profiler.startSpan('PeripheralDeviceContentWriteAccess.anyContent') - triggerWriteAccess() - check(deviceId, String) - const device = await PeripheralDevices.findOneAsync(deviceId) - if (!device) throw new Meteor.Error(404, `PeripheralDevice "${deviceId}" not found`) - - // If the device has a parent, use that for access control: - const parentDevice = device.parentDeviceId - ? await PeripheralDevices.findOneAsync(device.parentDeviceId) - : device - if (!parentDevice) - throw new Meteor.Error(404, `PeripheralDevice parentDevice "${device.parentDeviceId}" not found`) - - if (!Settings.enableUserAccounts) { - // Note: this is kind of a hack to keep backwards compatibility.. 
- if (!device.parentDeviceId && parentDevice.token !== cred0.token) { - throw new Meteor.Error(401, `Not allowed access to peripheralDevice`) - } - - span?.end() - return { - userId: null, - organizationId: null, - deviceId: deviceId, - device: device, - cred: cred0, - } - } else { - if (!cred0.userId && parentDevice.token !== cred0.token) { - throw new Meteor.Error(401, `Not allowed access to peripheralDevice`) - } - const cred = await resolveCredentials(cred0) - const access = await allowAccessToPeripheralDeviceContent(cred, parentDevice) - if (!access.update) throw new Meteor.Error(403, `Not allowed: ${access.reason}`) - if (!access.document) throw new Meteor.Error(500, `Internal error: access.document not set`) - - span?.end() - return { - userId: cred.user ? cred.user._id : null, - organizationId: cred.organizationId, - deviceId: deviceId, - device: device, - cred: cred, - } - } - } -} -async function backwardsCompatibilityfix(cred0: Credentials, deviceId: PeripheralDeviceId) { - if (!Settings.enableUserAccounts) { - // Note: This is a temporary hack to keep backwards compatibility: - const device = (await PeripheralDevices.findOneAsync(deviceId, { fields: { token: 1 } })) as - | Pick - | undefined - if (device) cred0.token = device.token - } -} diff --git a/meteor/server/security/rundown.ts b/meteor/server/security/rundown.ts deleted file mode 100644 index 8f4bf30ba94..00000000000 --- a/meteor/server/security/rundown.ts +++ /dev/null @@ -1,126 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { check } from '../lib/check' -import * as _ from 'underscore' -import { Credentials, ResolvedCredentials } from './lib/credentials' -import { logNotAllowed } from './lib/lib' -import { allowAccessToRundown } from './lib/security' -import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' -import { ExpectedMediaItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedMediaItem' -import { PeripheralDeviceType, PeripheralDevice } from 
'@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { ExpectedPlayoutItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' -import { Settings } from '../Settings' -import { RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PeripheralDevices, Segments } from '../collections' -import { getStudioIdFromDevice } from '../api/studio/lib' -import { MongoQuery, MongoQueryKey } from '@sofie-automation/corelib/dist/mongo' - -export namespace RundownReadAccess { - /** Check for read access to the rundown collection */ - export async function rundown( - rundownId: MongoQueryKey, - cred: Credentials | ResolvedCredentials - ): Promise { - return rundownContent(rundownId, cred) - } - /** Check for read access for all rundown content (segments, parts, pieces etc..) */ - export async function rundownContent( - rundownId: MongoQueryKey | undefined, - cred: Credentials | ResolvedCredentials - ): Promise { - if (!Settings.enableUserAccounts) return true - if (!rundownId) throw new Meteor.Error(400, 'selector must contain rundownId') - - const access = await allowAccessToRundown(cred, rundownId) - if (!access.read) return logNotAllowed('Rundown content', access.reason) - - return true - } - /** Check for read access for segments in a rundown */ - export async function segments(segmentId: MongoQueryKey, cred: Credentials): Promise { - if (!Settings.enableUserAccounts) return true - if (!segmentId) throw new Meteor.Error(400, 'selector must contain _id') - - const segments = (await Segments.findFetchAsync(segmentId, { - fields: { - _id: 1, - rundownId: 1, - }, - })) as Array> - const rundownIds = _.uniq(_.map(segments, (s) => s.rundownId)) - - const access = await allowAccessToRundown(cred, { $in: rundownIds }) - if (!access.read) return logNotAllowed('Segments', access.reason) - - return true - } - /** Check for read access for pieces in a rundown */ - export async function pieces(rundownId: MongoQueryKey, cred: 
Credentials): Promise { - if (!Settings.enableUserAccounts) return true - if (!rundownId) throw new Meteor.Error(400, 'selector must contain rundownId') - - const access = await allowAccessToRundown(cred, rundownId) - if (!access.read) return logNotAllowed('Piece', access.reason) - - return true - } - /** Check for read access for exoected media items in a rundown */ - export async function expectedMediaItems( - selector: MongoQuery | any, - cred: Credentials - ): Promise { - check(selector, Object) - if (selector.mediaFlowId) { - check(selector.mediaFlowId, Object) - check(selector.mediaFlowId.$in, Array) - } - if (!(await rundownContent(selector.rundownId, cred))) return null - - const mediaManagerDevice = await PeripheralDevices.findOneAsync({ - type: PeripheralDeviceType.MEDIA_MANAGER, - token: cred.token, - }) - - if (!mediaManagerDevice) return false - - mediaManagerDevice.studioId = await getStudioIdFromDevice(mediaManagerDevice) - - if (mediaManagerDevice && cred.token) { - // mediaManagerDevice.settings - - return mediaManagerDevice - } else { - // TODO: implement access logic here - // use context.userId - - // just returning true for now - return true - } - } - - /** Check for read access to expectedPlayoutItems */ - export async function expectedPlayoutItems( - selector: MongoQuery | any, - cred: Credentials - ): Promise { - check(selector, Object) - check(selector.studioId, String) - - if (!(await rundownContent(selector.rundownId, cred))) return null - - const playoutDevice = await PeripheralDevices.findOneAsync({ - type: PeripheralDeviceType.PLAYOUT, - token: cred.token, - }) - if (!playoutDevice) return false - - playoutDevice.studioId = await getStudioIdFromDevice(playoutDevice) - - if (playoutDevice && cred.token) { - return playoutDevice - } else { - // TODO: implement access logic here - // just returning true for now - return true - } - } -} diff --git a/meteor/server/security/rundownPlaylist.ts b/meteor/server/security/rundownPlaylist.ts 
deleted file mode 100644 index 4666e718f8f..00000000000 --- a/meteor/server/security/rundownPlaylist.ts +++ /dev/null @@ -1,126 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { check } from '../lib/check' -import { logNotAllowed } from './lib/lib' -import { allowAccessToRundownPlaylist } from './lib/security' -import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { Credentials, ResolvedCredentials, resolveCredentials } from './lib/credentials' -import { triggerWriteAccess } from './lib/securityVerify' -import { isProtectedString } from '../lib/tempLib' -import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' -import { Settings } from '../Settings' -import { - OrganizationId, - RundownId, - RundownPlaylistId, - StudioId, - UserId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' -import { RundownPlaylists, Rundowns } from '../collections' - -export namespace RundownPlaylistReadAccess { - /** Handles read access for all playlist document */ - export async function rundownPlaylist( - id: RundownPlaylistId, - cred: Credentials | ResolvedCredentials - ): Promise { - return rundownPlaylistContent(id, cred) - } - /** Handles read access for all playlist content (segments, parts, pieces etc..) */ - export async function rundownPlaylistContent( - id: RundownPlaylistId, - cred: Credentials | ResolvedCredentials - ): Promise { - triggerWriteAccess() - check(id, String) - if (!Settings.enableUserAccounts) return true - if (!id) throw new Meteor.Error(400, 'selector must contain playlistId') - - const access = await allowAccessToRundownPlaylist(cred, id) - if (!access.read) return logNotAllowed('RundownPlaylist content', access.reason) - - return true - } -} - -/** - * This is returned from a check of access to a playlist. 
- * Fields will be populated about the user, and the playlist if they have permission - */ -export interface RundownPlaylistContentAccess { - userId: UserId | null - organizationId: OrganizationId | null - studioId: StudioId | null - playlist: DBRundownPlaylist | null - cred: ResolvedCredentials | Credentials -} - -/** - * This is returned from a check of access to a rundown. - * Fields will be populated about the user, and the rundown if they have permission - */ -export interface RundownContentAccess { - userId: UserId | null - organizationId: OrganizationId | null - studioId: StudioId | null - rundown: Rundown | null - cred: ResolvedCredentials | Credentials -} - -export namespace RundownPlaylistContentWriteAccess { - /** Access to playout for a playlist, from a rundown. ie the playlist and everything inside it. */ - export async function rundown( - cred0: Credentials, - existingRundown: Rundown | RundownId - ): Promise { - triggerWriteAccess() - if (existingRundown && isProtectedString(existingRundown)) { - const rundownId = existingRundown - const m = await Rundowns.findOneAsync(rundownId) - if (!m) throw new Meteor.Error(404, `Rundown "${rundownId}" not found!`) - existingRundown = m - } - - const access = await anyContent(cred0, existingRundown.playlistId) - return { ...access, rundown: existingRundown } - } - /** Access to playout for a playlist. ie the playlist and everything inside it. 
*/ - export async function playout( - cred0: Credentials, - playlistId: RundownPlaylistId - ): Promise { - return anyContent(cred0, playlistId) - } - /** - * We don't have user levels, so we can use a simple check for all cases - * Return credentials if writing is allowed, throw otherwise - */ - async function anyContent( - cred0: Credentials, - playlistId: RundownPlaylistId - ): Promise { - triggerWriteAccess() - if (!Settings.enableUserAccounts) { - const playlist = await RundownPlaylists.findOneAsync(playlistId) - return { - userId: null, - organizationId: null, - studioId: playlist?.studioId || null, - playlist: playlist || null, - cred: cred0, - } - } - const cred = await resolveCredentials(cred0) - if (!cred.user) throw new Meteor.Error(403, `Not logged in`) - if (!cred.organizationId) throw new Meteor.Error(500, `User has no organization`) - const access = await allowAccessToRundownPlaylist(cred, playlistId) - if (!access.update) throw new Meteor.Error(403, `Not allowed: ${access.reason}`) - - return { - userId: cred.user._id, - organizationId: cred.organizationId, - studioId: access.document?.studioId || null, - playlist: access.document, - cred: cred, - } - } -} diff --git a/meteor/server/security/lib/securityVerify.ts b/meteor/server/security/securityVerify.ts similarity index 99% rename from meteor/server/security/lib/securityVerify.ts rename to meteor/server/security/securityVerify.ts index edde48cb35d..e7edc63cfcc 100644 --- a/meteor/server/security/lib/securityVerify.ts +++ b/meteor/server/security/securityVerify.ts @@ -1,6 +1,6 @@ import { Meteor } from 'meteor/meteor' -import { AllMeteorMethods, suppressExtraErrorLogging } from '../../methods' -import { disableChecks, enableChecks as restoreChecks } from '../../lib/check' +import { AllMeteorMethods, suppressExtraErrorLogging } from '../methods' +import { disableChecks, enableChecks as restoreChecks } from '../lib/check' import { stringifyError } from 
'@sofie-automation/shared-lib/dist/lib/stringifyError' /** These function are used to verify that all methods defined are using security functions */ diff --git a/meteor/server/security/showStyle.ts b/meteor/server/security/showStyle.ts deleted file mode 100644 index bd3e83811c8..00000000000 --- a/meteor/server/security/showStyle.ts +++ /dev/null @@ -1,154 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { check } from '../lib/check' -import { logNotAllowed } from './lib/lib' -import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' -import { RundownLayoutBase } from '@sofie-automation/meteor-lib/dist/collections/RundownLayouts' -import { MongoQuery, MongoQueryKey } from '@sofie-automation/corelib/dist/mongo' -import { Credentials, ResolvedCredentials, resolveCredentials } from './lib/credentials' -import { allowAccessToShowStyleBase, allowAccessToShowStyleVariant } from './lib/security' -import { triggerWriteAccess } from './lib/securityVerify' -import { Settings } from '../Settings' -import { isProtectedString } from '../lib/tempLib' -import { TriggeredActionsObj } from '@sofie-automation/meteor-lib/dist/collections/TriggeredActions' -import { SystemWriteAccess } from './system' -import { fetchShowStyleBaseLight, ShowStyleBaseLight } from '../optimizations' -import { - OrganizationId, - RundownLayoutId, - ShowStyleBaseId, - ShowStyleVariantId, - TriggeredActionId, - UserId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' -import { RundownLayouts, ShowStyleVariants, TriggeredActions } from '../collections' - -export interface ShowStyleContentAccess { - userId: UserId | null - organizationId: OrganizationId | null - showStyleBaseId: ShowStyleBaseId | null - showStyleBase: ShowStyleBaseLight | null - cred: ResolvedCredentials | Credentials -} - -export namespace ShowStyleReadAccess { - /** Handles read access for all showstyle document */ - export async function showStyleBase( - showStyleBaseId: MongoQueryKey, - 
cred: Credentials | ResolvedCredentials - ): Promise { - return showStyleBaseContent({ showStyleBaseId }, cred) - } - - /** Handles read access for all showstyle content */ - export async function showStyleBaseContent( - selector: MongoQuery, - cred: Credentials | ResolvedCredentials - ): Promise { - check(selector, Object) - if (!Settings.enableUserAccounts) return true - if (!selector.showStyleBaseId || !isProtectedString(selector.showStyleBaseId)) - throw new Meteor.Error(400, 'selector must contain showStyleBaseId') - - const access = await allowAccessToShowStyleBase(cred, selector.showStyleBaseId) - if (!access.read) return logNotAllowed('ShowStyleBase content', access.reason) - - return true - } - - /** Check for read access to the showstyle variants */ - export async function showStyleVariant( - showStyleVariantId: MongoQueryKey, - cred: Credentials | ResolvedCredentials - ): Promise { - if (!Settings.enableUserAccounts) return true - if (!showStyleVariantId) throw new Meteor.Error(400, 'selector must contain _id') - - const access = await allowAccessToShowStyleVariant(cred, showStyleVariantId) - if (!access.read) return logNotAllowed('ShowStyleVariant', access.reason) - - return true - } -} -export namespace ShowStyleContentWriteAccess { - // These functions throws if access is not allowed. 
- - /** Check permissions for write access to a showStyleVariant */ - export async function showStyleVariant( - cred0: Credentials, - existingVariant: DBShowStyleVariant | ShowStyleVariantId - ): Promise { - triggerWriteAccess() - if (existingVariant && isProtectedString(existingVariant)) { - const variantId = existingVariant - const m = await ShowStyleVariants.findOneAsync(variantId) - if (!m) throw new Meteor.Error(404, `ShowStyleVariant "${variantId}" not found!`) - existingVariant = m - } - return { ...(await anyContent(cred0, existingVariant.showStyleBaseId)), showStyleVariant: existingVariant } - } - /** Check permissions for write access to a rundownLayout */ - export async function rundownLayout( - cred0: Credentials, - existingLayout: RundownLayoutBase | RundownLayoutId - ): Promise { - triggerWriteAccess() - if (existingLayout && isProtectedString(existingLayout)) { - const layoutId = existingLayout - const m = await RundownLayouts.findOneAsync(layoutId) - if (!m) throw new Meteor.Error(404, `RundownLayout "${layoutId}" not found!`) - existingLayout = m - } - return { ...(await anyContent(cred0, existingLayout.showStyleBaseId)), rundownLayout: existingLayout } - } - /** Check permissions for write access to a triggeredAction */ - export async function triggeredActions( - cred0: Credentials, - existingTriggeredAction: TriggeredActionsObj | TriggeredActionId - ): Promise<(ShowStyleContentAccess & { triggeredActions: TriggeredActionsObj }) | boolean> { - triggerWriteAccess() - if (existingTriggeredAction && isProtectedString(existingTriggeredAction)) { - const layoutId = existingTriggeredAction - const m = await TriggeredActions.findOneAsync(layoutId) - if (!m) throw new Meteor.Error(404, `RundownLayout "${layoutId}" not found!`) - existingTriggeredAction = m - } - if (existingTriggeredAction.showStyleBaseId) { - return { - ...(await anyContent(cred0, existingTriggeredAction.showStyleBaseId)), - triggeredActions: existingTriggeredAction, - } - } else { - 
return SystemWriteAccess.coreSystem(cred0) - } - } - /** Return credentials if writing is allowed, throw otherwise */ - export async function anyContent( - cred0: Credentials, - showStyleBaseId: ShowStyleBaseId - ): Promise { - triggerWriteAccess() - if (!Settings.enableUserAccounts) { - return { - userId: null, - organizationId: null, - showStyleBaseId: showStyleBaseId, - showStyleBase: (await fetchShowStyleBaseLight(showStyleBaseId)) || null, - cred: cred0, - } - } - const cred = await resolveCredentials(cred0) - if (!cred.user) throw new Meteor.Error(403, `Not logged in`) - if (!cred.organizationId) throw new Meteor.Error(500, `User has no organization`) - - const access = await allowAccessToShowStyleBase(cred, showStyleBaseId) - if (!access.update) throw new Meteor.Error(403, `Not allowed: ${access.reason}`) - - return { - userId: cred.user._id, - organizationId: cred.organizationId, - showStyleBaseId: showStyleBaseId, - showStyleBase: access.document, - cred: cred, - } - } -} diff --git a/meteor/server/security/studio.ts b/meteor/server/security/studio.ts deleted file mode 100644 index 3b52624f846..00000000000 --- a/meteor/server/security/studio.ts +++ /dev/null @@ -1,155 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { allowAccessToStudio } from './lib/security' -import { MongoQueryKey } from '@sofie-automation/corelib/dist/mongo' -import { logNotAllowed } from './lib/lib' -import { ExternalMessageQueueObj } from '@sofie-automation/corelib/dist/dataModel/ExternalMessageQueue' -import { Credentials, ResolvedCredentials, resolveCredentials } from './lib/credentials' -import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { Settings } from '../Settings' -import { triggerWriteAccess } from './lib/securityVerify' -import { isProtectedString } from '../lib/tempLib' -import { fetchStudioLight } from '../optimizations' -import { - ExternalMessageQueueObjId, - OrganizationId, - RundownPlaylistId, - StudioId, - 
UserId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' -import { ExternalMessageQueue, RundownPlaylists } from '../collections' -import { StudioLight } from '@sofie-automation/corelib/dist/dataModel/Studio' - -export namespace StudioReadAccess { - /** Handles read access for all studio document */ - export async function studio( - studioId: MongoQueryKey, - cred: Credentials | ResolvedCredentials - ): Promise { - return studioContent(studioId, cred) - } - /** Handles read access for all studioId content */ - export async function studioContent( - studioId: MongoQueryKey | undefined, - cred: Credentials | ResolvedCredentials - ): Promise { - if (!Settings.enableUserAccounts) return true - if (!studioId || !isProtectedString(studioId)) throw new Meteor.Error(400, 'selector must contain studioId') - - const access = await allowAccessToStudio(cred, studioId) - if (!access.read) return logNotAllowed('Studio content', access.reason) - - return true - } -} - -/** - * This is returned from a check of access to a studio. - * Fields will be populated about the user, and the studio if they have permission - */ -export interface StudioContentAccess { - userId: UserId | null - organizationId: OrganizationId | null - studioId: StudioId - studio: StudioLight - cred: ResolvedCredentials | Credentials -} - -export interface ExternalMessageContentAccess extends StudioContentAccess { - message: ExternalMessageQueueObj -} - -export namespace StudioContentWriteAccess { - // These functions throws if access is not allowed. 
- - export async function rundownPlaylist( - cred0: Credentials, - existingPlaylist: DBRundownPlaylist | RundownPlaylistId - ): Promise { - triggerWriteAccess() - if (existingPlaylist && isProtectedString(existingPlaylist)) { - const playlistId = existingPlaylist - const m = await RundownPlaylists.findOneAsync(playlistId) - if (!m) throw new Meteor.Error(404, `RundownPlaylist "${playlistId}" not found!`) - existingPlaylist = m - } - return { ...(await anyContent(cred0, existingPlaylist.studioId)), playlist: existingPlaylist } - } - - /** Check for permission to restore snapshots into the studio */ - export async function dataFromSnapshot(cred0: Credentials, studioId: StudioId): Promise { - return anyContent(cred0, studioId) - } - - /** Check for permission to select active routesets in the studio */ - export async function routeSet(cred0: Credentials, studioId: StudioId): Promise { - return anyContent(cred0, studioId) - } - - export async function timelineDatastore(cred0: Credentials, studioId: StudioId): Promise { - return anyContent(cred0, studioId) - } - /** Check for permission to update the studio baseline */ - export async function baseline(cred0: Credentials, studioId: StudioId): Promise { - return anyContent(cred0, studioId) - } - - /** Check for permission to modify a bucket or its contents belonging to the studio */ - export async function bucket(cred0: Credentials, studioId: StudioId): Promise { - return anyContent(cred0, studioId) - } - - /** Check for permission to execute a function on a PeripheralDevice in the studio */ - export async function executeFunction(cred0: Credentials, studioId: StudioId): Promise { - return anyContent(cred0, studioId) - } - - /** Check for permission to modify an ExternalMessageQueueObj */ - export async function externalMessage( - cred0: Credentials, - existingMessage: ExternalMessageQueueObj | ExternalMessageQueueObjId - ): Promise { - triggerWriteAccess() - if (existingMessage && isProtectedString(existingMessage)) { - 
const messageId = existingMessage - const m = await ExternalMessageQueue.findOneAsync(messageId) - if (!m) throw new Meteor.Error(404, `ExternalMessage "${messageId}" not found!`) - existingMessage = m - } - return { ...(await anyContent(cred0, existingMessage.studioId)), message: existingMessage } - } - - /** - * We don't have user levels, so we can use a simple check for all cases - * Return credentials if writing is allowed, throw otherwise - */ - async function anyContent(cred0: Credentials, studioId: StudioId): Promise { - triggerWriteAccess() - if (!Settings.enableUserAccounts) { - const studio = await fetchStudioLight(studioId) - if (!studio) throw new Meteor.Error(404, `Studio "${studioId}" not found`) - - return { - userId: null, - organizationId: null, - studioId: studioId, - studio: studio, - cred: cred0, - } - } - const cred = await resolveCredentials(cred0) - if (!cred.user) throw new Meteor.Error(403, `Not logged in`) - if (!cred.organizationId) throw new Meteor.Error(500, `User has no organization`) - - const access = await allowAccessToStudio(cred, studioId) - if (!access.update) throw new Meteor.Error(403, `Not allowed: ${access.reason}`) - if (!access.document) throw new Meteor.Error(404, `Studio "${studioId}" not found`) - - return { - userId: cred.user._id, - organizationId: cred.organizationId, - studioId: studioId, - studio: access.document, - cred: cred, - } - } -} diff --git a/meteor/server/security/system.ts b/meteor/server/security/system.ts deleted file mode 100644 index d7d13b760e8..00000000000 --- a/meteor/server/security/system.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { Credentials, resolveAuthenticatedUser, resolveCredentials } from './lib/credentials' -import { logNotAllowed } from './lib/lib' -import { allowAccessToCoreSystem, allowAccessToCurrentUser, allowAccessToSystemStatus } from './lib/security' -import { Settings } from '../Settings' -import { triggerWriteAccess } from 
'./lib/securityVerify' -import { UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' - -export namespace SystemReadAccess { - /** Handles read access for all organization content (segments, parts, pieces etc..) */ - export async function coreSystem(cred0: Credentials): Promise { - const cred = await resolveCredentials(cred0) - - const access = await allowAccessToCoreSystem(cred) - if (!access.read) return logNotAllowed('CoreSystem', access.reason) - - return true - } - /** Check if access is allowed to read a User, and that user is the current User */ - export async function currentUser(userId: UserId, cred: Credentials): Promise { - const access = await allowAccessToCurrentUser(cred, userId) - if (!access.read) return logNotAllowed('Current user', access.reason) - - return true - } - /** Check permissions to get the system status */ - export async function systemStatus(cred0: Credentials): Promise { - // For reading only - triggerWriteAccess() - const access = await allowAccessToSystemStatus(cred0) - if (!access.read) throw new Meteor.Error(403, `Not allowed: ${access.reason}`) - - return true - } -} -export namespace SystemWriteAccess { - // These functions throws if access is not allowed. 
- - export async function coreSystem(cred0: Credentials): Promise { - triggerWriteAccess() - if (!Settings.enableUserAccounts) return true - const cred = await resolveAuthenticatedUser(cred0) - if (!cred) throw new Meteor.Error(403, `Not logged in`) - - const access = await allowAccessToCoreSystem(cred) - if (!access.configure) throw new Meteor.Error(403, `Not allowed: ${access.reason}`) - - return true - } - /** Check if access is allowed to modify a User, and that user is the current User */ - export async function currentUser(userId: UserId | null, cred: Credentials): Promise { - const access = await allowAccessToCurrentUser(cred, userId) - if (!access.update) return logNotAllowed('Current user', access.reason) - - return true - } - /** Check permissions to run migrations of all types */ - export async function migrations(cred0: Credentials): Promise { - return coreSystem(cred0) - } - /** Check permissions to perform a system-level action */ - export async function systemActions(cred0: Credentials): Promise { - return coreSystem(cred0) - } -} diff --git a/meteor/server/security/translationsBundles.ts b/meteor/server/security/translationsBundles.ts deleted file mode 100644 index b7733f15172..00000000000 --- a/meteor/server/security/translationsBundles.ts +++ /dev/null @@ -1,8 +0,0 @@ -export namespace TranslationsBundlesSecurity { - export function allowReadAccess(_selector: object, _token: string | undefined, _context: unknown): boolean { - return true - } - export function allowWriteAccess(): boolean { - return false - } -} diff --git a/meteor/server/systemStatus/api.ts b/meteor/server/systemStatus/api.ts index d81b351114a..6a95a37388a 100644 --- a/meteor/server/systemStatus/api.ts +++ b/meteor/server/systemStatus/api.ts @@ -6,7 +6,6 @@ import { } from '@sofie-automation/meteor-lib/dist/api/systemStatus' import { getDebugStates, getSystemStatus } from './systemStatus' import { protectString } from '../lib/tempLib' -import { Settings } from '../Settings' import 
{ MethodContextAPI } from '../api/methodContext' import { profiler } from '../api/profiler' import { PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' @@ -22,53 +21,38 @@ const apmNamespace = 'http' export const metricsRouter = new KoaRouter() export const healthRouter = new KoaRouter() -if (!Settings.enableUserAccounts) { - // For backwards compatibility: +metricsRouter.get('/', async (ctx) => { + const transaction = profiler.startTransaction('metrics', apmNamespace) + try { + ctx.response.type = PrometheusHTTPContentType - metricsRouter.get('/', async (ctx) => { - const transaction = profiler.startTransaction('metrics', apmNamespace) - try { - ctx.response.type = PrometheusHTTPContentType + const [meteorMetrics, workerMetrics] = await Promise.all([ + getPrometheusMetricsString(), + collectWorkerPrometheusMetrics(), + ]) - const [meteorMetrics, workerMetrics] = await Promise.all([ - getPrometheusMetricsString(), - collectWorkerPrometheusMetrics(), - ]) - - ctx.body = [meteorMetrics, ...workerMetrics].join('\n\n') - } catch (ex) { - ctx.response.status = 500 - ctx.body = ex + '' - } - transaction?.end() - }) - - healthRouter.get('/', async (ctx) => { - const transaction = profiler.startTransaction('health', apmNamespace) - const status = await getSystemStatus({ userId: null }) - health(status, ctx) - transaction?.end() - }) + ctx.body = [meteorMetrics, ...workerMetrics].join('\n\n') + } catch (ex) { + ctx.response.status = 500 + ctx.body = ex + '' + } + transaction?.end() +}) - healthRouter.get('/:studioId', async (ctx) => { - const transaction = profiler.startTransaction('health', apmNamespace) - const status = await getSystemStatus({ userId: null }, protectString(ctx.params.studioId)) - health(status, ctx) - transaction?.end() - }) -} -healthRouter.get('/:token', async (ctx) => { +healthRouter.get('/', async (ctx) => { const transaction = profiler.startTransaction('health', apmNamespace) - const status = await getSystemStatus({ userId: 
null, token: ctx.params.token }) + const status = await getSystemStatus(ctx) health(status, ctx) transaction?.end() }) -healthRouter.get('/:token/:studioId', async (ctx) => { + +healthRouter.get('/:studioId', async (ctx) => { const transaction = profiler.startTransaction('health', apmNamespace) - const status = await getSystemStatus({ userId: null, token: ctx.params.token }, protectString(ctx.params.studioId)) + const status = await getSystemStatus(ctx, protectString(ctx.params.studioId)) health(status, ctx) transaction?.end() }) + function health(status: StatusResponse, ctx: Koa.ParameterizedContext) { ctx.response.type = 'application/json' @@ -79,7 +63,7 @@ function health(status: StatusResponse, ctx: Koa.ParameterizedContext) { class ServerSystemStatusAPI extends MethodContextAPI implements NewSystemStatusAPI { async getSystemStatus() { - return getSystemStatus(this) + return getSystemStatus(this.connection) } async getDebugStates(peripheralDeviceId: PeripheralDeviceId) { diff --git a/meteor/server/systemStatus/systemStatus.ts b/meteor/server/systemStatus/systemStatus.ts index 9e48106430a..34ae34ce499 100644 --- a/meteor/server/systemStatus/systemStatus.ts +++ b/meteor/server/systemStatus/systemStatus.ts @@ -1,4 +1,3 @@ -import { Meteor } from 'meteor/meteor' import { PeripheralDevice, PERIPHERAL_SUBTYPE_PROCESS } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { Time, getRandomId, literal } from '../lib/tempLib' import { getCurrentTime } from '../lib/lib' @@ -18,19 +17,15 @@ import { Component, } from '@sofie-automation/meteor-lib/dist/api/systemStatus' import { RelevantSystemVersions } from '../coreSystem' -import { Settings } from '../Settings' -import { StudioReadAccess } from '../security/studio' -import { OrganizationReadAccess } from '../security/organization' -import { resolveCredentials, Credentials } from '../security/lib/credentials' -import { SystemReadAccess } from '../security/system' import { StatusCode } from 
'@sofie-automation/blueprints-integration' import { PeripheralDevices, Workers, WorkerThreadStatuses } from '../collections' import { PeripheralDeviceId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ServerPeripheralDeviceAPI } from '../api/peripheralDevice' -import { PeripheralDeviceContentWriteAccess } from '../security/peripheralDevice' import { MethodContext } from '../api/methodContext' import { getBlueprintVersions } from './blueprintVersions' import { getUpgradeSystemStatusMessages } from './blueprintUpgradeStatus' +import { triggerWriteAccessBecauseNoCheckNecessary } from '../security/securityVerify' +import { assertConnectionHasOneOfPermissions, RequestCredentials } from '../security/auth' const PackageInfo = require('../../package.json') const integrationVersionRange = parseCoreIntegrationCompatabilityRange(PackageInfo.version) @@ -166,10 +161,12 @@ function getSystemStatusForDevice(device: PeripheralDevice): StatusResponse { * Returns system status * @param studioId (Optional) If provided, limits the status to what's affecting the studio */ -export async function getSystemStatus(cred0: Credentials, studioId?: StudioId): Promise { - const checks: Array = [] +export async function getSystemStatus(_cred: RequestCredentials | null, studioId?: StudioId): Promise { + // Future: this should consider the studioId + // For now, all users should have access to all statuses + triggerWriteAccessBecauseNoCheckNecessary() - await SystemReadAccess.systemStatus(cred0) + const checks: Array = [] // Check systemStatuses: for (const [key, status] of Object.entries(systemStatuses)) { @@ -251,25 +248,11 @@ export async function getSystemStatus(cred0: Credentials, studioId?: StudioId): if (studioId) { // Check status for a certain studio: - if (!(await StudioReadAccess.studioContent(studioId, cred0))) { - throw new Meteor.Error(403, `Not allowed`) - } devices = await PeripheralDevices.findFetchAsync({ studioId: studioId }) } else { - if 
(Settings.enableUserAccounts) { - // Check status for the user's studios: + // Check status for all studios: - const cred = await resolveCredentials(cred0) - if (!cred.organizationId) throw new Meteor.Error(500, 'user has no organization') - if (!(await OrganizationReadAccess.organizationContent(cred.organizationId, cred))) { - throw new Meteor.Error(403, `Not allowed`) - } - devices = await PeripheralDevices.findFetchAsync({ organizationId: cred.organizationId }) - } else { - // Check status for all studios: - - devices = await PeripheralDevices.findFetchAsync({}) - } + devices = await PeripheralDevices.findFetchAsync({}) } for (const device of devices) { const so = getSystemStatusForDevice(device) @@ -405,6 +388,7 @@ export async function getDebugStates( methodContext: MethodContext, peripheralDeviceId: PeripheralDeviceId ): Promise { - const access = await PeripheralDeviceContentWriteAccess.peripheralDevice(methodContext, peripheralDeviceId) - return ServerPeripheralDeviceAPI.getDebugStates(access) + assertConnectionHasOneOfPermissions(methodContext.connection, 'developer') + + return ServerPeripheralDeviceAPI.getDebugStates(peripheralDeviceId) } diff --git a/meteor/server/worker/worker.ts b/meteor/server/worker/worker.ts index 6a4b8651cff..cdc1bbbb6c0 100644 --- a/meteor/server/worker/worker.ts +++ b/meteor/server/worker/worker.ts @@ -21,6 +21,7 @@ import { initializeWorkerStatus, setWorkerStatus } from './workerStatus' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { UserActionsLog } from '../collections' import { MetricsCounter } from '@sofie-automation/corelib/dist/prometheus' +import { isInTestWrite } from '../security/securityVerify' const FREEZE_LIMIT = 1000 // how long to wait for a response to a Ping const RESTART_TIMEOUT = 30000 // how long to wait for a restart to complete before throwing an error @@ -459,6 +460,7 @@ export async function QueueStudioJob( studioId: StudioId, jobParameters: Parameters[0] ): Promise>> { + if 
(isInTestWrite()) throw new Meteor.Error(404, 'Should not be reachable during startup tests') if (!studioId) throw new Meteor.Error(500, 'Missing studioId') const now = getCurrentTime() diff --git a/packages/corelib/src/dataModel/Collections.ts b/packages/corelib/src/dataModel/Collections.ts index 6560aab026a..8f2dd0f5fcd 100644 --- a/packages/corelib/src/dataModel/Collections.ts +++ b/packages/corelib/src/dataModel/Collections.ts @@ -45,7 +45,6 @@ export enum CollectionName { TriggeredActions = 'triggeredActions', TranslationsBundles = 'translationsBundles', UserActionsLog = 'userActionsLog', - Users = 'Users', Workers = 'workers', WorkerThreads = 'workersThreads', } diff --git a/packages/meteor-lib/src/Settings.ts b/packages/meteor-lib/src/Settings.ts index 11c26a3a476..69931224067 100644 --- a/packages/meteor-lib/src/Settings.ts +++ b/packages/meteor-lib/src/Settings.ts @@ -17,8 +17,8 @@ export interface ISettings { defaultTimeScale: number // Allow grabbing the entire timeline allowGrabbingTimeline: boolean - /** If true, enables security measures, access control and user accounts. */ - enableUserAccounts: boolean + /** If true, enable http header based security measures */ + enableHeaderAuth: boolean /** Default duration to use to render parts when no duration is provided */ defaultDisplayDuration: number /** If true, allows creation of new playlists in the Lobby Gui (rundown list). If false; only pre-existing playlists are allowed. 
*/ @@ -69,7 +69,7 @@ export const DEFAULT_SETTINGS = Object.freeze({ disableBlurBorder: false, defaultTimeScale: 1, allowGrabbingTimeline: true, - enableUserAccounts: false, + enableHeaderAuth: false, defaultDisplayDuration: 3000, allowMultiplePlaylistsInGUI: false, poisonKey: 'Escape', diff --git a/packages/meteor-lib/src/api/pubsub.ts b/packages/meteor-lib/src/api/pubsub.ts index 49c18b78165..9c61409c183 100644 --- a/packages/meteor-lib/src/api/pubsub.ts +++ b/packages/meteor-lib/src/api/pubsub.ts @@ -20,7 +20,6 @@ import { SnapshotItem } from '../collections/Snapshots' import { TranslationsBundle } from '../collections/TranslationsBundles' import { DBTriggeredActions, UITriggeredActionsObj } from '../collections/TriggeredActions' import { UserActionsLogItem } from '../collections/UserActionsLog' -import { DBUser } from '../collections/Users' import { UIBucketContentStatus, UIPieceContentStatus, UISegmentPartNote } from './rundownNotifications' import { UIShowStyleBase } from './showStyles' import { UIStudio } from './studios' @@ -218,8 +217,6 @@ export interface MeteorPubSubTypes { showStyleBaseIds: ShowStyleBaseId[] | null, token?: string ) => CollectionName.RundownLayouts - [MeteorPubSub.loggedInUser]: (token?: string) => CollectionName.Users - [MeteorPubSub.usersInOrganization]: (organizationId: OrganizationId, token?: string) => CollectionName.Users [MeteorPubSub.organization]: (organizationId: OrganizationId | null, token?: string) => CollectionName.Organizations [MeteorPubSub.buckets]: (studioId: StudioId, bucketId: BucketId | null, token?: string) => CollectionName.Buckets [MeteorPubSub.translationsBundles]: (token?: string) => CollectionName.TranslationsBundles @@ -297,7 +294,6 @@ export type MeteorPubSubCollections = { [CollectionName.Organizations]: DBOrganization [CollectionName.Buckets]: Bucket [CollectionName.TranslationsBundles]: TranslationsBundle - [CollectionName.Users]: DBUser [CollectionName.ExpectedPlayoutItems]: ExpectedPlayoutItem 
[CollectionName.Notifications]: DBNotificationObj diff --git a/packages/meteor-lib/src/api/user.ts b/packages/meteor-lib/src/api/user.ts index f1f737f2711..83881d2819e 100644 --- a/packages/meteor-lib/src/api/user.ts +++ b/packages/meteor-lib/src/api/user.ts @@ -1,33 +1,8 @@ -import { UserProfile } from '../collections/Users' -import { UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { UserPermissions } from '../userPermissions' export interface NewUserAPI { - enrollUser(email: string, name: string): Promise - requestPasswordReset(email: string): Promise - removeUser(): Promise + getUserPermissions(): Promise } export enum UserAPIMethods { - 'enrollUser' = 'user.enrollUser', - 'requestPasswordReset' = 'user.requestPasswordReset', - 'removeUser' = 'user.removeUser', -} - -export interface CreateNewUserData { - email: string - profile: UserProfile - password?: string - createOrganization?: { - name: string - applications: string[] - broadcastMediums: string[] - } -} -export async function createUser(_newUser: CreateNewUserData): Promise { - // This is available both client-side and server side. - // The reason for that is that the client-side should use Accounts.createUser right away - // so that the password aren't sent in "plaintext" to the server. 
- - // const userId = await Accounts.createUserAsync(newUser) - // return protectString(userId) - throw new Error('Not implemented') + 'getUserPermissions' = 'user.getUserPermissions', } diff --git a/packages/meteor-lib/src/api/userActions.ts b/packages/meteor-lib/src/api/userActions.ts index 91f521b617f..01db5ba8fe5 100644 --- a/packages/meteor-lib/src/api/userActions.ts +++ b/packages/meteor-lib/src/api/userActions.ts @@ -212,16 +212,19 @@ export interface NewUserActionAPI { mediaRestartWorkflow( userEvent: string, eventTime: Time, + deviceId: PeripheralDeviceId, workflowId: MediaWorkFlowId ): Promise> mediaAbortWorkflow( userEvent: string, eventTime: Time, + deviceId: PeripheralDeviceId, workflowId: MediaWorkFlowId ): Promise> mediaPrioritizeWorkflow( userEvent: string, eventTime: Time, + deviceId: PeripheralDeviceId, workflowId: MediaWorkFlowId ): Promise> mediaRestartAllWorkflows(userEvent: string, eventTime: Time): Promise> diff --git a/packages/meteor-lib/src/collections/Users.ts b/packages/meteor-lib/src/collections/Users.ts deleted file mode 100644 index 04d5b6b8875..00000000000 --- a/packages/meteor-lib/src/collections/Users.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { UserId, OrganizationId } from '@sofie-automation/corelib/dist/dataModel/Ids' - -export interface UserProfile { - name: string -} - -export interface DBUser { - // Note: This interface is partly defined by the dataset from the Meteor.users collection - - _id: UserId - createdAt: string - services: { - password: { - bcrypt: string - } - } - username: string - emails: [ - { - address: string - verified: boolean - } - ] - profile: UserProfile - organizationId: OrganizationId - superAdmin?: boolean -} - -export type User = DBUser // to be replaced by a class somet ime later? 
diff --git a/packages/meteor-lib/src/userPermissions.ts b/packages/meteor-lib/src/userPermissions.ts new file mode 100644 index 00000000000..2d0f1246f39 --- /dev/null +++ b/packages/meteor-lib/src/userPermissions.ts @@ -0,0 +1,58 @@ +/** + * The header to use for user permissions + * This is currently limited to a small set that sockjs supports: https://github.com/sockjs/sockjs-node/blob/46d2f846653a91822a02794b852886c7f137378c/lib/session.js#L137-L150 + * Any other headers are not exposed in a way we can access, no matter how deep we look into meteor internals. + */ +export const USER_PERMISSIONS_HEADER = 'dnt' + +export interface UserPermissions { + studio: boolean + configure: boolean + developer: boolean + testing: boolean + service: boolean + gateway: boolean +} +const allowedPermissions = new Set([ + 'studio', + 'configure', + 'developer', + 'testing', + 'service', + 'gateway', +]) + +export function parseUserPermissions(encodedPermissions: string | undefined): UserPermissions { + if (encodedPermissions === 'admin') { + return { + studio: true, + configure: true, + developer: true, + testing: true, + service: true, + gateway: true, + } + } + + const result: UserPermissions = { + studio: false, + configure: false, + developer: false, + testing: false, + service: false, + gateway: false, + } + + if (encodedPermissions && typeof encodedPermissions === 'string') { + const parts = encodedPermissions.split(',') + + for (const part of parts) { + const part2 = part.trim() as keyof UserPermissions + if (allowedPermissions.has(part2)) { + result[part2] = true + } + } + } + + return result +} diff --git a/packages/shared-lib/src/peripheralDevice/methodsAPI.ts b/packages/shared-lib/src/peripheralDevice/methodsAPI.ts index 80dd4dd3666..1f8a142f7bf 100644 --- a/packages/shared-lib/src/peripheralDevice/methodsAPI.ts +++ b/packages/shared-lib/src/peripheralDevice/methodsAPI.ts @@ -116,7 +116,7 @@ export interface NewPeripheralDeviceAPI { timelineTriggerTime(deviceId: 
PeripheralDeviceId, deviceToken: string, r: TimelineTriggerTimeResult): Promise requestUserAuthToken(deviceId: PeripheralDeviceId, deviceToken: string, authUrl: string): Promise storeAccessToken(deviceId: PeripheralDeviceId, deviceToken: string, authToken: string): Promise - removePeripheralDevice(deviceId: PeripheralDeviceId): Promise + removePeripheralDevice(deviceId: PeripheralDeviceId, deviceToken?: string): Promise reportResolveDone( deviceId: PeripheralDeviceId, deviceToken: string, diff --git a/packages/webui/src/__mocks__/meteor.ts b/packages/webui/src/__mocks__/meteor.ts index f11ac9b9998..170ed3d5dd5 100644 --- a/packages/webui/src/__mocks__/meteor.ts +++ b/packages/webui/src/__mocks__/meteor.ts @@ -1,5 +1,4 @@ import * as _ from 'underscore' -import { MongoMock } from './mongo' import type { DDP } from 'meteor/ddp' let controllableDefer = false @@ -11,7 +10,7 @@ export function useNextTickDefer(): void { controllableDefer = false } -namespace Meteor { +export namespace Meteor { export interface Settings { public: { [id: string]: any @@ -19,19 +18,6 @@ namespace Meteor { [id: string]: any } - export interface UserEmail { - address: string - verified: boolean - } - export interface User { - _id?: string - username?: string - emails?: UserEmail[] - createdAt?: number - profile?: any - services?: any - } - export interface ErrorStatic { new (error: string | number, reason?: string, details?: string): Error } @@ -89,7 +75,6 @@ export namespace MeteorMock { export const settings: any = {} export const mockMethods: { [name: string]: Function } = {} - export let mockUser: Meteor.User | undefined = undefined export const mockStartupFunctions: Function[] = [] export function status(): DDP.DDPStatus { @@ -100,15 +85,8 @@ export namespace MeteorMock { } } - export function user(): Meteor.User | undefined { - return mockUser - } - export function userId(): string | undefined { - return mockUser ? 
mockUser._id : undefined - } function getMethodContext() { return { - userId: mockUser ? mockUser._id : undefined, connection: { clientAddress: '1.1.1.1', }, @@ -223,7 +201,6 @@ export namespace MeteorMock { export function bindEnvironment(_fcn: Function): any { throw new Error(500, 'bindEnvironment not supported on client') } - export let users: MongoMock.Collection | undefined = undefined // -- Mock functions: -------------------------- /** @@ -236,12 +213,6 @@ export namespace MeteorMock { await waitTimeNoFakeTimers(10) // So that any observers or defers has had time to run. } - export function mockLoginUser(newUser: Meteor.User): void { - mockUser = newUser - } - export function mockSetUsersCollection(usersCollection: MongoMock.Collection): void { - users = usersCollection - } /** Wait for time to pass ( unaffected by jest.useFakeTimers() ) */ export async function sleepNoFakeTimers(time: number): Promise { diff --git a/packages/webui/src/__mocks__/mongo.ts b/packages/webui/src/__mocks__/mongo.ts index 2f31d6400bc..7a0d8566cb6 100644 --- a/packages/webui/src/__mocks__/mongo.ts +++ b/packages/webui/src/__mocks__/mongo.ts @@ -349,5 +349,3 @@ export function setup(): any { Mongo: MongoMock, } } - -MeteorMock.mockSetUsersCollection(new MongoMock.Collection('Meteor.users')) diff --git a/packages/webui/src/client/ui/App.tsx b/packages/webui/src/client/ui/App.tsx index 77c4d7afa77..73d5ad43d0f 100644 --- a/packages/webui/src/client/ui/App.tsx +++ b/packages/webui/src/client/ui/App.tsx @@ -52,7 +52,7 @@ export const App: React.FC = function App() { const [lastStart] = useState(Date.now()) - const roles = useUserPermissions() + const [roles, _rolesReady] = useUserPermissions() const featureFlags = useFeatureFlags() useEffect(() => { diff --git a/packages/webui/src/client/ui/Status/MediaManager.tsx b/packages/webui/src/client/ui/Status/MediaManager.tsx index 3471ce6b0b0..d7401b0e886 100644 --- a/packages/webui/src/client/ui/Status/MediaManager.tsx +++ 
b/packages/webui/src/client/ui/Status/MediaManager.tsx @@ -364,7 +364,7 @@ export function MediaManagerStatus(): JSX.Element { const actionRestart = useCallback( (event: React.MouseEvent, workflow: MediaWorkFlowUi) => { doUserAction(t, event, UserAction.RESTART_MEDIA_WORKFLOW, (e, ts) => - MeteorCall.userAction.mediaRestartWorkflow(e, ts, workflow._id) + MeteorCall.userAction.mediaRestartWorkflow(e, ts, workflow.deviceId, workflow._id) ) }, [t] @@ -372,7 +372,7 @@ export function MediaManagerStatus(): JSX.Element { const actionAbort = useCallback( (event: React.MouseEvent, workflow: MediaWorkFlowUi) => { doUserAction(t, event, UserAction.ABORT_MEDIA_WORKFLOW, (e, ts) => - MeteorCall.userAction.mediaAbortWorkflow(e, ts, workflow._id) + MeteorCall.userAction.mediaAbortWorkflow(e, ts, workflow.deviceId, workflow._id) ) }, [t] @@ -380,7 +380,7 @@ export function MediaManagerStatus(): JSX.Element { const actionPrioritize = useCallback( (event: React.MouseEvent, workflow: MediaWorkFlowUi) => { doUserAction(t, event, UserAction.PRIORITIZE_MEDIA_WORKFLOW, (e, ts) => - MeteorCall.userAction.mediaPrioritizeWorkflow(e, ts, workflow._id) + MeteorCall.userAction.mediaPrioritizeWorkflow(e, ts, workflow.deviceId, workflow._id) ) }, [t] diff --git a/packages/webui/src/client/ui/Status/SystemStatus/SystemStatus.tsx b/packages/webui/src/client/ui/Status/SystemStatus/SystemStatus.tsx index bc047ccc19c..346d19f6a92 100644 --- a/packages/webui/src/client/ui/Status/SystemStatus/SystemStatus.tsx +++ b/packages/webui/src/client/ui/Status/SystemStatus/SystemStatus.tsx @@ -1,4 +1,4 @@ -import { useEffect, useMemo, useState } from 'react' +import { useContext, useEffect, useMemo, useState } from 'react' import { useSubscription, useTracker } from '../../../lib/ReactMeteorData/react-meteor-data' import { PeripheralDevice, PeripheralDeviceType } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { useTranslation } from 'react-i18next' @@ -13,10 +13,13 @@ import { 
CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { CoreItem } from './CoreItem' import { DeviceItem } from './DeviceItem' +import { UserPermissions, UserPermissionsContext } from '../../UserPermissions' export function SystemStatus(): JSX.Element { const { t } = useTranslation() + const userPermissions = useContext(UserPermissionsContext) + // Subscribe to data: useSubscription(CorelibPubSub.peripheralDevices, null) @@ -24,7 +27,7 @@ export function SystemStatus(): JSX.Element { const devices = useTracker(() => PeripheralDevices.find({}, { sort: { lastConnected: -1 } }).fetch(), [], []) const systemStatus = useSystemStatus() - const playoutDebugStates = usePlayoutDebugStates(devices) + const playoutDebugStates = usePlayoutDebugStates(devices, userPermissions) const devicesHierarchy = convertDevicesIntoHeirarchy(devices) @@ -98,7 +101,10 @@ function useSystemStatus(): StatusResponse | undefined { return sytemStatus } -function usePlayoutDebugStates(devices: PeripheralDevice[]): Map { +function usePlayoutDebugStates( + devices: PeripheralDevice[], + userPermissions: UserPermissions +): Map { const { t } = useTranslation() const [playoutDebugStates, setPlayoutDebugStates] = useState>(new Map()) @@ -117,6 +123,11 @@ function usePlayoutDebugStates(devices: PeripheralDevice[]): Map { + if (!userPermissions.developer) { + setPlayoutDebugStates(new Map()) + return + } + let destroyed = false const refreshDebugStates = () => { @@ -145,7 +156,7 @@ function usePlayoutDebugStates(devices: PeripheralDevice[]): Map>({ +const NO_PERMISSIONS: UserPermissions = Object.freeze({ studio: false, configure: false, developer: false, testing: false, service: false, + gateway: false, }) -export function useUserPermissions(): UserPermissions { +export const UserPermissionsContext = React.createContext>(NO_PERMISSIONS) + +export function useUserPermissions(): [roles: UserPermissions, 
ready: boolean] { const location = window.location - const [permissions, setPermissions] = useState({ - studio: getLocalAllowStudio(), - configure: getLocalAllowConfigure(), - developer: getLocalAllowDeveloper(), - testing: getLocalAllowTesting(), - service: getLocalAllowService(), - }) + const [ready, setReady] = useState(!Settings.enableHeaderAuth) + + const [permissions, setPermissions] = useState( + Settings.enableHeaderAuth + ? NO_PERMISSIONS + : { + studio: getLocalAllowStudio(), + configure: getLocalAllowConfigure(), + developer: getLocalAllowDeveloper(), + testing: getLocalAllowTesting(), + service: getLocalAllowService(), + gateway: false, + } + ) + + const isConnected = useTracker(() => Meteor.status().connected, [], false) + + useEffect(() => { + if (!Settings.enableHeaderAuth) return + + // Do nothing when not connected. Persist the previous values. + if (!isConnected) return + + const checkPermissions = () => { + MeteorCall.user + .getUserPermissions() + .then((v) => { + setPermissions(v || NO_PERMISSIONS) + setReady(true) + }) + .catch((e) => { + console.error('Failed to set level', e) + setPermissions(NO_PERMISSIONS) + }) + } + + const interval = setInterval(checkPermissions, 30000) // Arbitrary poll interval + + // Initial check now + checkPermissions() + + return () => { + clearInterval(interval) + } + }, [Settings.enableHeaderAuth, isConnected]) useEffect(() => { + if (Settings.enableHeaderAuth) return + if (!location.search) return const params = queryStringParse(location.search) @@ -66,9 +108,10 @@ export function useUserPermissions(): UserPermissions { developer: getLocalAllowDeveloper(), testing: getLocalAllowTesting(), service: getLocalAllowService(), + gateway: false, }) - }, [location.search]) + }, [location.search, Settings.enableHeaderAuth]) // A naive memoizing of the value, to avoid reactions when the value is identical - return useMemo(() => permissions, [JSON.stringify(permissions)]) + return [useMemo(() => permissions, 
[JSON.stringify(permissions)]), ready] } diff --git a/scripts/run.mjs b/scripts/run.mjs index 3d15810fc55..5302a5e2f5d 100644 --- a/scripts/run.mjs +++ b/scripts/run.mjs @@ -1,15 +1,22 @@ import process from "process"; +import fs from "fs"; import concurrently from "concurrently"; import { EXTRA_PACKAGES, config } from "./lib.js"; +function joinCommand(...parts) { + return parts.filter((part) => !!part).join(" "); +} + function watchPackages() { return [ { - command: config.uiOnly - ? `yarn watch ${EXTRA_PACKAGES.map((pkg) => `--ignore ${pkg}`).join( + command: joinCommand('yarn watch', + config.uiOnly + ? EXTRA_PACKAGES.map((pkg) => `--ignore ${pkg}`).join( " " - )}` - : "yarn watch", + ) + : "", + ), cwd: "packages", name: "PACKAGES-TSC", prefixColor: "red", @@ -29,6 +36,13 @@ function watchWorker() { } function watchMeteor() { + const settingsFileExists = fs.existsSync("meteor-settings.json"); + if (settingsFileExists) { + console.log('Found meteor-settings.json') + } else { + console.log('No meteor-settings.json') + } + return [ { command: "yarn watch-types --preserveWatchOutput", @@ -37,9 +51,12 @@ function watchMeteor() { prefixColor: "blue", }, { - command: `yarn debug${config.inspectMeteor ? " --inspect" : ""}${ - config.verbose ? " --verbose" : "" - }`, + command: joinCommand( + 'yarn debug', + config.inspectMeteor ? " --inspect" : "", + config.verbose ? " --verbose" : "", + settingsFileExists ? 
" --settings ../meteor-settings.json" : "" + ), cwd: "meteor", name: "METEOR", prefixColor: "cyan", From 9c2338eb8af87c5974c738597f044eb1a57db6bb Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 26 Nov 2024 14:13:29 +0000 Subject: [PATCH 72/81] feat: exclude pieces from part keepalive (#29) --- .../src/documents/piece.ts | 5 + .../src/playout/__tests__/timings.test.ts | 43 + packages/corelib/src/playout/timings.ts | 3 + .../job-worker/src/blueprints/context/lib.ts | 2 + .../__snapshots__/rundown.test.ts.snap | 2350 +++++++++++++++++ .../timeline/__tests__/rundown.test.ts | 853 ++++++ .../src/playout/timeline/generate.ts | 7 +- .../job-worker/src/playout/timeline/part.ts | 139 +- .../src/playout/timeline/pieceGroup.ts | 3 + .../src/playout/timeline/rundown.ts | 280 +- .../lib/__tests__/rundownTiming.test.ts | 2 + 11 files changed, 3504 insertions(+), 183 deletions(-) create mode 100644 packages/job-worker/src/playout/timeline/__tests__/__snapshots__/rundown.test.ts.snap create mode 100644 packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts diff --git a/packages/blueprints-integration/src/documents/piece.ts b/packages/blueprints-integration/src/documents/piece.ts index d537c2dda78..42a77db1755 100644 --- a/packages/blueprints-integration/src/documents/piece.ts +++ b/packages/blueprints-integration/src/documents/piece.ts @@ -35,6 +35,11 @@ export interface IBlueprintPiece * User editing definitions for this piece */ userEditOperations?: UserEditingDefinition[] + + /** + * Whether to stop this piece before the 'keepalive' period of the part + */ + excludeDuringPartKeepalive?: boolean } export interface IBlueprintPieceDB extends IBlueprintPiece { diff --git a/packages/corelib/src/playout/__tests__/timings.test.ts b/packages/corelib/src/playout/__tests__/timings.test.ts index 443fab8af6f..29d3c0670bc 100644 --- a/packages/corelib/src/playout/__tests__/timings.test.ts +++ b/packages/corelib/src/playout/__tests__/timings.test.ts @@ -80,6 +80,7 @@ 
describe('Part Playout Timings', () => { fromPartRemaining: 0, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -102,6 +103,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 0, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -124,6 +126,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -146,6 +149,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 289, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -168,6 +172,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 289, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -191,6 +196,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 452, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -214,6 +220,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 452, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -238,6 +245,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 452, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -262,6 +270,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 452, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -286,6 +295,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 2256, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -310,6 +320,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 2256, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -331,6 +342,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -345,6 +357,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500, 
fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -365,6 +378,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 500, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -387,6 +401,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -409,6 +424,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 500, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -431,6 +447,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 823, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -453,6 +470,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 823, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -476,6 +494,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500 + 452, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -499,6 +518,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 500 + 452, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -523,6 +543,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500 + 452, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -547,6 +568,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 500 + 452, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -571,6 +593,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 2256, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -595,6 +618,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 2256, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -624,6 +648,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500, fromPartPostroll: 0, 
toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -653,6 +678,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500 + 452, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -682,6 +708,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 500 + 452, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 452, }) ) }) @@ -710,6 +737,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -738,6 +766,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 500, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -764,6 +793,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 5000, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 5000, }) ) }) @@ -790,6 +820,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -816,6 +847,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -843,6 +875,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 5000, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 5000, }) ) }) @@ -869,6 +902,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 500, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 0, }) ) }) @@ -895,6 +929,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 500, fromPartPostroll: 231 + 0, toPartPostroll: 0 + 0, + fromPartKeepalive: 0, }) ) }) @@ -924,6 +959,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500 - 345 + 628, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 628, }) ) }) @@ -950,6 +986,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 500 - 345 + 628, fromPartPostroll: 231, 
toPartPostroll: 0, + fromPartKeepalive: 628, }) ) }) @@ -976,6 +1013,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 628, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 628, }) ) }) @@ -1002,6 +1040,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 628, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 628, }) ) }) @@ -1030,6 +1069,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 500 - 345 + 628, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 628, }) ) }) @@ -1058,6 +1098,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 500 - 345 + 628, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 628, }) ) }) @@ -1086,6 +1127,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 987, fromPartPostroll: 0, toPartPostroll: 0, + fromPartKeepalive: 628, }) ) }) @@ -1114,6 +1156,7 @@ describe('Part Playout Timings', () => { fromPartRemaining: 231 + 987, fromPartPostroll: 231, toPartPostroll: 0, + fromPartKeepalive: 628, }) ) }) diff --git a/packages/corelib/src/playout/timings.ts b/packages/corelib/src/playout/timings.ts index 77209d90e79..b2bd4b63adf 100644 --- a/packages/corelib/src/playout/timings.ts +++ b/packages/corelib/src/playout/timings.ts @@ -58,6 +58,7 @@ export interface PartCalculatedTimings { toPartPostroll: number fromPartRemaining: number // How long after the start of toPartGroup should fromPartGroup continue? 
fromPartPostroll: number + fromPartKeepalive: number } export type CalculateTimingsPiece = Pick @@ -117,6 +118,7 @@ export function calculatePartTimings( // The old part needs to continue for a while fromPartRemaining: takeOffset + fromPartPostroll, fromPartPostroll: fromPartPostroll, + fromPartKeepalive: 0, } } else { // The amount of time needed to complete the outTransition before the 'take' point @@ -136,6 +138,7 @@ export function calculatePartTimings( toPartPostroll: toPartPostroll, fromPartRemaining: takeOffset + inTransition.previousPartKeepaliveDuration + fromPartPostroll, fromPartPostroll: fromPartPostroll, + fromPartKeepalive: inTransition.previousPartKeepaliveDuration, } } } diff --git a/packages/job-worker/src/blueprints/context/lib.ts b/packages/job-worker/src/blueprints/context/lib.ts index 2a2d8fa740f..72934978ad1 100644 --- a/packages/job-worker/src/blueprints/context/lib.ts +++ b/packages/job-worker/src/blueprints/context/lib.ts @@ -97,6 +97,7 @@ export const IBlueprintPieceObjectsSampleKeys = allKeysOfObject notInVision: true, abSessions: true, userEditOperations: true, + excludeDuringPartKeepalive: true, }) // Compile a list of the keys which are allowed to be set @@ -239,6 +240,7 @@ export function convertPieceToBlueprints(piece: ReadonlyDeep extendOnHold: piece.extendOnHold, notInVision: piece.notInVision, userEditOperations: translateUserEditsToBlueprint(piece.userEditOperations), + excludeDuringPartKeepalive: piece.excludeDuringPartKeepalive, } return obj diff --git a/packages/job-worker/src/playout/timeline/__tests__/__snapshots__/rundown.test.ts.snap b/packages/job-worker/src/playout/timeline/__tests__/__snapshots__/rundown.test.ts.snap new file mode 100644 index 00000000000..60c9724e57a --- /dev/null +++ b/packages/job-worker/src/playout/timeline/__tests__/__snapshots__/rundown.test.ts.snap @@ -0,0 +1,2350 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`buildTimelineObjsForRundown current and previous parts 1`] = ` +{ + 
"timeline": [ + { + "classes": [ + "rundown_active", + "last_part", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "previous_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece9", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece9.end + 0", + "start": "#piece_group_control_piece9.start - 0", + }, + "id": "piece_group_piece9", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "end": "#part_group_part0.start + 0", + "start": 1235, + }, + "id": "part_group_part9", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part9", + "priority": -1, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": undefined, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + 
"nextPartGroup": undefined, + "previousPartOverlap": 0, + }, +} +`; + +exports[`buildTimelineObjsForRundown current part with startedPlayback 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + "last_part", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": 5678, + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": undefined, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": 5678, + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": undefined, + }, +} +`; + +exports[`buildTimelineObjsForRundown infinite pieces infinite continuing from previous 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + "last_part", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "previous_part", + ], + "enable": { + "start": 0, + }, + "id": 
"piece_group_control_piece9", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece9.end + 0", + "start": "#piece_group_control_piece9.start - 0", + }, + "id": "piece_group_piece9", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "end": "#part_group_part0.start + 0", + "start": 1235, + }, + "id": "part_group_part9", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part9", + "priority": -1, + }, + { + "children": [ + { + "classes": [ + "current_part", + "continues_infinite", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece6b", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece6b.end + 0", + "start": "#piece_group_control_piece6b.start - 0", + }, + "id": "piece_group_piece6b", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": 123, + }, + "id": "part_group_piece6b_infinite", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 1, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": undefined, + "currentPartGroup": { + "children": 3, + "content": { + 
"deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": undefined, + "previousPartOverlap": 0, + }, +} +`; + +exports[`buildTimelineObjsForRundown infinite pieces infinite continuing into next with autonext 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece6", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece6.end + 0", + "start": "#piece_group_control_piece6.start - 0", + }, + "id": "piece_group_piece6", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": 123, + }, + "id": "part_group_piece6_infinite", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 1, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "duration": 5000, + "start": 1235, + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", 
+ "priority": 5, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part1", + "layer": "group_first_object", + "partInstanceId": "part1", + "priority": 0, + }, + { + "classes": [ + "next_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece1", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece1.end + 0", + "start": "#piece_group_control_piece1.start - 0", + }, + "id": "piece_group_piece1", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "#part_group_part0.end - 0", + }, + "id": "part_group_part1", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part1", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": 5000, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "duration": 5000, + "start": 1235, + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "#part_group_part0.end - 0", + }, + "id": "part_group_part1", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part1", + "priority": 5, + }, + "nextPartOverlap": 0, + }, +} +`; + +exports[`buildTimelineObjsForRundown infinite pieces infinite ending with previous 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + "last_part", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "previous_part", + ], + 
"enable": { + "start": 0, + }, + "id": "piece_group_control_piece9", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece9.end + 0", + "start": "#piece_group_control_piece9.start - 0", + }, + "id": "piece_group_piece9", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + { + "classes": [ + "previous_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece6", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece6.end + 0", + "start": "#piece_group_control_piece6.start - 0", + }, + "id": "piece_group_piece6", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "end": "#part_group_part0.start + 0", + "start": 1235, + }, + "id": "part_group_part9", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part9", + "priority": -1, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": undefined, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + 
"isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": undefined, + "previousPartOverlap": 0, + }, +} +`; + +exports[`buildTimelineObjsForRundown infinite pieces infinite ending with previous excludeDuringPartKeepalive=true 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + "last_part", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "previous_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece9", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece9.end + 0", + "start": "#piece_group_control_piece9.start - 0", + }, + "id": "piece_group_piece9", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "previous_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece6", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece6.end + 0", + "start": "#piece_group_control_piece6.start - 0", + }, + "id": "piece_group_piece6", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "end": "#part_group_part9.end - 0", + "start": 0, + }, + "id": "part_group_part9_no_keepalive", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part9", + "priority": 5, + }, + ], + "enable": { + "end": "#part_group_part0.start + 0", + "start": 1235, + }, + "id": "part_group_part9", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part9", + "priority": -1, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + 
"enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": undefined, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": undefined, + "previousPartOverlap": 0, + }, +} +`; + +exports[`buildTimelineObjsForRundown infinite pieces infinite starting in current 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + "last_part", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "previous_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece9", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece9.end + 0", + "start": "#piece_group_control_piece9.start - 0", + }, + "id": "piece_group_piece9", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "end": "#part_group_part0.start + 0", + "start": 1235, + }, + "id": "part_group_part9", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part9", + "priority": -1, + }, + { + "children": [ + { + "classes": [ + "current_part", + ], + 
"enable": { + "start": 0, + }, + "id": "piece_group_control_piece1", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece1.end + 0", + "start": "#piece_group_control_piece1.start - 0", + }, + "id": "piece_group_piece1", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "#part_group_part0.start", + }, + "id": "part_group_piece1_infinite", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 1, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": undefined, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": undefined, + "previousPartOverlap": 0, + }, +} +`; + +exports[`buildTimelineObjsForRundown infinite pieces infinite stopping in current with autonext 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + ], + "enable": { + "while": 1, + }, + "id": 
"mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece6", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece6.end + 0", + "start": "#piece_group_control_piece6.start - 0", + }, + "id": "piece_group_piece6", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "end": "#part_group_part0.end + 0", + "start": 123, + }, + "id": "part_group_piece6_infinite", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 1, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "duration": 5000, + "start": 1235, + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part1", + "layer": "group_first_object", + "partInstanceId": "part1", + "priority": 0, + }, + { + "classes": [ + "next_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece1", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece1.end + 0", + "start": "#piece_group_control_piece1.start - 0", + }, + "id": "piece_group_piece1", + 
"isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "#part_group_part0.end - 0", + }, + "id": "part_group_part1", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part1", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": 5000, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "duration": 5000, + "start": 1235, + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "#part_group_part0.end - 0", + }, + "id": "part_group_part1", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part1", + "priority": 5, + }, + "nextPartOverlap": 0, + }, +} +`; + +exports[`buildTimelineObjsForRundown infinite pieces infinite stopping in current with autonext excludeDuringPartKeepalive=true 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece6", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece6.end + 0", + "start": "#piece_group_control_piece6.start - 0", + }, + "id": "piece_group_piece6", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "end": "#part_group_part0.end + -100", + "start": 123, + }, + "id": "part_group_piece6_infinite", + "isPieceTimeline": true, + "layer": "", 
+ "partInstanceId": "part0", + "priority": 1, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "duration": 5000, + "start": 1235, + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part1", + "layer": "group_first_object", + "partInstanceId": "part1", + "priority": 0, + }, + { + "classes": [ + "next_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece1", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece1.end + 0", + "start": "#piece_group_control_piece1.start - 0", + }, + "id": "piece_group_piece1", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "#part_group_part0.end - 0", + }, + "id": "part_group_part1", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part1", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": 5000, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "duration": 5000, + "start": 1235, + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + 
"nextPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "#part_group_part0.end - 0", + }, + "id": "part_group_part1", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part1", + "priority": 5, + }, + "nextPartOverlap": 0, + }, +} +`; + +exports[`buildTimelineObjsForRundown next part no autonext 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": undefined, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": undefined, + }, +} +`; + +exports[`buildTimelineObjsForRundown next part with autonext 1`] = ` +{ + "timeline": [ + { + "classes": [ + 
"rundown_active", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "duration": 5000, + "start": "now", + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part1", + "layer": "group_first_object", + "partInstanceId": "part1", + "priority": 0, + }, + { + "classes": [ + "next_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece1", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece1.end + 0", + "start": "#piece_group_control_piece1.start - 0", + }, + "id": "piece_group_piece1", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "#part_group_part0.end - 0", + }, + "id": "part_group_part1", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part1", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": 5000, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "duration": 5000, + "start": "now", + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": 
{ + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "#part_group_part0.end - 0", + }, + "id": "part_group_part1", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part1", + "priority": 5, + }, + "nextPartOverlap": 0, + }, +} +`; + +exports[`buildTimelineObjsForRundown overlap and keepalive autonext into next part 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "duration": 5000, + "start": "now", + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part1", + "layer": "group_first_object", + "partInstanceId": "part1", + "priority": 0, + }, + { + "classes": [ + "next_part", + ], + "enable": { + "start": 500, + }, + "id": "piece_group_control_piece1", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece1.end + 
0", + "start": "#piece_group_control_piece1.start - 0", + }, + "id": "piece_group_piece1", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "#part_group_part0.end - 900", + }, + "id": "part_group_part1", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part1", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": 5000, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "duration": 5000, + "start": "now", + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "#part_group_part0.end - 900", + }, + "id": "part_group_part1", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part1", + "priority": 5, + }, + "nextPartOverlap": 900, + }, +} +`; + +exports[`buildTimelineObjsForRundown overlap and keepalive autonext into next part with excludeDuringPartKeepalive 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": 
"piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece9", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece9.end + 0", + "start": "#piece_group_control_piece9.start - 0", + }, + "id": "piece_group_piece9", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "end": "#part_group_part0.end - 100", + "start": 0, + }, + "id": "part_group_part0_no_keepalive", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + ], + "enable": { + "duration": 5000, + "start": 1235, + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part1", + "layer": "group_first_object", + "partInstanceId": "part1", + "priority": 0, + }, + { + "classes": [ + "next_part", + ], + "enable": { + "start": 500, + }, + "id": "piece_group_control_piece1", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece1.end + 0", + "start": "#piece_group_control_piece1.start - 0", + }, + "id": "piece_group_piece1", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "#part_group_part0.end - 900", + }, + "id": "part_group_part1", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part1", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": 5000, + "currentPartGroup": { + "children": 4, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "duration": 5000, + "start": 1235, + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": 
true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "#part_group_part0.end - 900", + }, + "id": "part_group_part1", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part1", + "priority": 5, + }, + "nextPartOverlap": 900, + }, +} +`; + +exports[`buildTimelineObjsForRundown overlap and keepalive current and previous parts 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + "last_part", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "previous_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece9", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece9.end + 0", + "start": "#piece_group_control_piece9.start - 0", + }, + "id": "piece_group_piece9", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + { + "classes": [ + "previous_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece8", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece8.end + 0", + "start": "#piece_group_control_piece8.start - 0", + }, + "id": "piece_group_piece8", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "end": "#part_group_part0.start + 900", + "start": 1235, + }, + "id": "part_group_part9", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part9", + "priority": -1, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + 
"priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 500, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": undefined, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": undefined, + "previousPartOverlap": 900, + }, +} +`; + +exports[`buildTimelineObjsForRundown overlap and keepalive current and previous parts with excludeDuringPartKeepalive 1`] = ` +{ + "timeline": [ + { + "classes": [ + "rundown_active", + "last_part", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "previous_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece9", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece9.end + 0", + "start": "#piece_group_control_piece9.start - 0", + }, + "id": "piece_group_piece9", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + { + "children": [ + { + "classes": [ + "previous_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece8", + "isPieceTimeline": true, + 
"priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece8.end + 0", + "start": "#piece_group_control_piece8.start - 0", + }, + "id": "piece_group_piece8", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "end": "#part_group_part9.end - 100", + "start": 0, + }, + "id": "part_group_part9_no_keepalive", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part9", + "priority": 5, + }, + ], + "enable": { + "end": "#part_group_part0.start + 900", + "start": 1235, + }, + "id": "part_group_part9", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part9", + "priority": -1, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 500, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": undefined, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": undefined, + "previousPartOverlap": 900, + }, +} +`; + +exports[`buildTimelineObjsForRundown simple current part 1`] = ` +{ + "timeline": [ + { + 
"classes": [ + "rundown_active", + "last_part", + ], + "enable": { + "while": 1, + }, + "id": "mockPlaylist_status", + "layer": "rundown_status", + "partInstanceId": null, + "priority": 0, + }, + { + "children": [ + { + "enable": { + "start": 0, + }, + "id": "part_group_firstobject_part0", + "layer": "group_first_object", + "partInstanceId": "part0", + "priority": 0, + }, + { + "classes": [ + "current_part", + ], + "enable": { + "start": 0, + }, + "id": "piece_group_control_piece0", + "isPieceTimeline": true, + "priority": 5, + }, + { + "children": [], + "enable": { + "end": "#piece_group_control_piece0.end + 0", + "start": "#piece_group_control_piece0.start - 0", + }, + "id": "piece_group_piece0", + "isPieceTimeline": true, + "layer": "", + "priority": 0, + }, + ], + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isPieceTimeline": true, + "layer": "", + "partInstanceId": "part0", + "priority": 5, + }, + ], + "timingContext": { + "currentPartDuration": undefined, + "currentPartGroup": { + "children": 3, + "content": { + "deviceType": "ABSTRACT", + "objects": [], + "type": "group", + }, + "enable": { + "start": "now", + }, + "id": "part_group_part0", + "isGroup": true, + "layer": "", + "metaData": { + "isPieceTimeline": true, + }, + "objectType": "rundown", + "partInstanceId": "part0", + "priority": 5, + }, + "nextPartGroup": undefined, + }, +} +`; diff --git a/packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts b/packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts new file mode 100644 index 00000000000..003ee434b7b --- /dev/null +++ b/packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts @@ -0,0 +1,853 @@ +/* eslint-disable @typescript-eslint/no-non-null-assertion */ +import { DBRundownPlaylist, SelectedPartInstance } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { setupDefaultJobEnvironment } from '../../../__mocks__/context' +import { buildTimelineObjsForRundown, 
RundownTimelineResult, RundownTimelineTimingContext } from '../rundown' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { SelectedPartInstancesTimelineInfo, SelectedPartInstanceTimelineInfo } from '../generate' +import { PartCalculatedTimings } from '@sofie-automation/corelib/dist/playout/timings' +import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' +import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' +import { transformTimeline } from '@sofie-automation/corelib/dist/playout/timeline' +import { deleteAllUndefinedProperties, getRandomId } from '@sofie-automation/corelib/dist/lib' +import { PieceInstance, PieceInstancePiece } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' +import { PieceInstanceWithTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { EmptyPieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' +import { IBlueprintPieceType, PieceLifespan } from '@sofie-automation/blueprints-integration' +import { getPartGroupId } from '@sofie-automation/corelib/dist/playout/ids' + +const DEFAULT_PART_TIMINGS: PartCalculatedTimings = Object.freeze({ + inTransitionStart: null, + toPartDelay: 0, + toPartPostroll: 0, + fromPartRemaining: 0, + fromPartPostroll: 0, + fromPartKeepalive: 0, +}) + +function transformTimelineIntoSimplifiedForm(res: RundownTimelineResult) { + const deepTimeline = transformTimeline(res.timeline) + + function simplifyTimelineObject(obj: any): any { + const newObj = { + id: obj.id, + enable: obj.enable, + layer: obj.layer, + partInstanceId: obj.partInstanceId, + priority: obj.priority, + children: obj.children?.map(simplifyTimelineObject), + isPieceTimeline: obj.metaData?.isPieceTimeline, + classes: obj.classes?.length > 0 ? 
obj.classes : undefined, + } + + deleteAllUndefinedProperties(newObj) + + return newObj + } + + return { + timeline: deepTimeline.map(simplifyTimelineObject), + timingContext: res.timingContext + ? ({ + ...res.timingContext, + currentPartGroup: { + ...res.timingContext.currentPartGroup, + children: res.timingContext.currentPartGroup.children.length as any, + }, + nextPartGroup: res.timingContext.nextPartGroup + ? { + ...res.timingContext.nextPartGroup, + children: res.timingContext.nextPartGroup.children.length as any, + } + : undefined, + } satisfies RundownTimelineTimingContext) + : undefined, + } +} + +/** + * This is a set of tests to get a general overview of the shape of the generated timeline. + * It is not intended to look in much detail at everything, it is expected that methods used + * inside of this will have their own tests to stress difference scenarios. + */ +describe('buildTimelineObjsForRundown', () => { + function createMockPlaylist(selectedPartInfos: SelectedPartInstancesTimelineInfo): DBRundownPlaylist { + function convertSelectedPartInstance( + info: SelectedPartInstanceTimelineInfo | undefined + ): SelectedPartInstance | null { + if (!info) return null + return { + partInstanceId: info.partInstance._id, + rundownId: info.partInstance.rundownId, + manuallySelected: false, + consumesQueuedSegmentId: false, + } + } + return { + _id: protectString('mockPlaylist'), + nextPartInfo: convertSelectedPartInstance(selectedPartInfos.next), + currentPartInfo: convertSelectedPartInstance(selectedPartInfos.current), + previousPartInfo: convertSelectedPartInstance(selectedPartInfos.previous), + activationId: protectString('mockActivationId'), + rehearsal: false, + } as Partial as any + } + function createMockPartInstance( + id: string, + partProps?: Partial, + partInstanceProps?: Partial + ): DBPartInstance { + return { + _id: protectString(id), + part: { + ...partProps, + } as Partial as any, + ...partInstanceProps, + } as Partial as any + } + function 
createMockPieceInstance( + id: string, + pieceProps?: Partial, + pieceInstanceProps?: Partial + ): PieceInstanceWithTimings { + return { + _id: protectString(id), + + piece: { + enable: { start: 0 }, + pieceType: IBlueprintPieceType.Normal, + timelineObjectsString: EmptyPieceTimelineObjectsBlob, + ...pieceProps, + } as Partial as any, + + resolvedEndCap: undefined, + priority: 5, + + ...pieceInstanceProps, + } as Partial as any + } + function createMockInfinitePieceInstance( + id: string, + pieceProps?: Partial, + pieceInstanceProps?: Partial, + infiniteIndex = 0 + ): PieceInstanceWithTimings { + return createMockPieceInstance( + id, + { + lifespan: PieceLifespan.OutOnSegmentEnd, + ...pieceProps, + }, + { + plannedStartedPlayback: 123, + ...pieceInstanceProps, + infinite: { + infinitePieceId: getRandomId(), + infiniteInstanceId: getRandomId(), + infiniteInstanceIndex: infiniteIndex, + fromPreviousPart: infiniteIndex !== 0, + }, + } + ) + } + function continueInfinitePiece(piece: PieceInstanceWithTimings): PieceInstanceWithTimings { + if (!piece.infinite) throw new Error('Not an infinite piece!') + return { + ...piece, + _id: protectString(piece._id + 'b'), + infinite: { + ...piece.infinite, + fromPreviousPart: true, + infiniteInstanceIndex: piece.infinite.infiniteInstanceIndex + 1, + }, + } + } + + it('playlist with no parts', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = {} + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).toHaveLength(1) + expect(objs.timingContext).toBeUndefined() + expect(objs.timeline).toEqual([ + { + classes: ['rundown_active', 'before_first_part', 'last_part'], + content: { + deviceType: 'ABSTRACT', + }, + enable: { + while: 1, + }, + id: 'mockPlaylist_status', + layer: 'rundown_status', + metaData: undefined, + objectType: 'rundown', + partInstanceId: 
null, + priority: 0, + }, + ]) + }) + + it('with previous and but no current part', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + previous: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0'), + pieceInstances: [], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).toHaveLength(1) + expect(objs.timingContext).toBeUndefined() + }) + + it('simple current part', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0'), + pieceInstances: [createMockPieceInstance('piece0')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + + expect(objs.timingContext?.currentPartGroup.enable).toEqual({ + start: 'now', + }) + }) + + it('current part with startedPlayback', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance( + 'part0', + {}, + { + timings: { + plannedStartedPlayback: 5678, + }, + } + ), + pieceInstances: [createMockPieceInstance('piece0')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + 
expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + + expect(objs.timingContext?.currentPartGroup.enable).toEqual({ + start: 5678, + }) + }) + + it('next part no autonext', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0'), + pieceInstances: [createMockPieceInstance('piece0')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + next: { + nowInPart: 0, + partStarted: undefined, + partInstance: createMockPartInstance('part1'), + pieceInstances: [createMockPieceInstance('piece1')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + + // make sure the next part was not generated + expect(objs.timingContext?.nextPartGroup).toBeUndefined() + const nextPartGroupId = getPartGroupId(selectedPartInfos.next!.partInstance) + expect(objs.timeline.find((obj) => obj.id === nextPartGroupId)).toBeUndefined() + }) + + it('next part with autonext', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0', { autoNext: true, expectedDuration: 5000 }), + pieceInstances: [createMockPieceInstance('piece0')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + next: { + nowInPart: 0, + partStarted: undefined, + partInstance: createMockPartInstance('part1'), + pieceInstances: [createMockPieceInstance('piece1')], + calculatedTimings: 
DEFAULT_PART_TIMINGS, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).toBeTruthy() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + + // make sure the next part was generated + expect(objs.timingContext?.nextPartGroup).toBeTruthy() + const nextPartGroupId = getPartGroupId(selectedPartInfos.next!.partInstance) + expect(objs.timeline.find((obj) => obj.id === nextPartGroupId)).toBeTruthy() + }) + + it('current and previous parts', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + previous: { + nowInPart: 9999, + partStarted: 1234, + partInstance: createMockPartInstance( + 'part9', + { autoNext: true, expectedDuration: 5000 }, + { + timings: { + plannedStartedPlayback: 1235, + }, + } + ), + pieceInstances: [createMockPieceInstance('piece9')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0'), + pieceInstances: [createMockPieceInstance('piece0')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + + // make sure the previous part was generated + const previousPartGroupId = getPartGroupId(selectedPartInfos.previous!.partInstance) + expect(objs.timeline.find((obj) => obj.id === previousPartGroupId)).toBeTruthy() + expect(objs.timingContext?.previousPartOverlap).not.toBeUndefined() + }) + + describe('overlap and keepalive', () => { + it('current and previous parts', () => { + const context = 
setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + previous: { + nowInPart: 9999, + partStarted: 1234, + partInstance: createMockPartInstance( + 'part9', + { autoNext: true, expectedDuration: 5000 }, + { + timings: { + plannedStartedPlayback: 1235, + }, + } + ), + pieceInstances: [createMockPieceInstance('piece9'), createMockPieceInstance('piece8')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0'), + pieceInstances: [createMockPieceInstance('piece0')], + calculatedTimings: { + inTransitionStart: 200, + toPartDelay: 500, + toPartPostroll: 0, + fromPartRemaining: 500 + 400, + fromPartPostroll: 400, + fromPartKeepalive: 100, + }, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + + // make sure the previous part was generated + const previousPartGroupId = getPartGroupId(selectedPartInfos.previous!.partInstance) + expect(objs.timeline.find((obj) => obj.id === previousPartGroupId)).toBeTruthy() + expect(objs.timingContext?.previousPartOverlap).not.toBeUndefined() + }) + + it('current and previous parts with excludeDuringPartKeepalive', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + previous: { + nowInPart: 9999, + partStarted: 1234, + partInstance: createMockPartInstance( + 'part9', + { autoNext: true, expectedDuration: 5000 }, + { + timings: { + plannedStartedPlayback: 1235, + }, + } + ), + pieceInstances: [ + createMockPieceInstance('piece9'), + createMockPieceInstance('piece8', { + excludeDuringPartKeepalive: true, + }), + ], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + current: { 
+ nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0'), + pieceInstances: [createMockPieceInstance('piece0')], + calculatedTimings: { + inTransitionStart: 200, + toPartDelay: 500, + toPartPostroll: 0, + fromPartRemaining: 500 + 400, + fromPartPostroll: 400, + fromPartKeepalive: 100, + }, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + + // make sure the previous part was generated + const previousPartGroupId = getPartGroupId(selectedPartInfos.previous!.partInstance) + expect(objs.timeline.find((obj) => obj.id === previousPartGroupId)).toBeTruthy() + expect(objs.timingContext?.previousPartOverlap).not.toBeUndefined() + }) + + it('autonext into next part', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0', { autoNext: true, expectedDuration: 5000 }), + pieceInstances: [createMockPieceInstance('piece0')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + next: { + nowInPart: 0, + partStarted: undefined, + partInstance: createMockPartInstance('part1'), + pieceInstances: [createMockPieceInstance('piece1')], + calculatedTimings: { + inTransitionStart: 200, + toPartDelay: 500, + toPartPostroll: 0, + fromPartRemaining: 500 + 400, + fromPartPostroll: 400, + fromPartKeepalive: 100, + }, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).toBeTruthy() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + + // make sure the 
next part was generated + expect(objs.timingContext?.nextPartGroup).toBeTruthy() + const nextPartGroupId = getPartGroupId(selectedPartInfos.next!.partInstance) + expect(objs.timeline.find((obj) => obj.id === nextPartGroupId)).toBeTruthy() + }) + + it('autonext into next part with excludeDuringPartKeepalive', () => { + const context = setupDefaultJobEnvironment() + + jest.spyOn(global.Date, 'now').mockImplementation(() => 3000) + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance( + 'part0', + { autoNext: true, expectedDuration: 5000 }, + { + timings: { + plannedStartedPlayback: 1235, + }, + } + ), + pieceInstances: [ + createMockPieceInstance('piece0'), + createMockPieceInstance('piece9', { + excludeDuringPartKeepalive: true, + }), + ], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + next: { + nowInPart: 0, + partStarted: undefined, + partInstance: createMockPartInstance( + 'part1', + {}, + { + timings: { + plannedStartedPlayback: 5000, + }, + } + ), + pieceInstances: [createMockPieceInstance('piece1')], + calculatedTimings: { + inTransitionStart: 200, + toPartDelay: 500, + toPartPostroll: 0, + fromPartRemaining: 500 + 400, + fromPartPostroll: 400, + fromPartKeepalive: 100, + }, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + + // make sure the previous part was generated + expect(objs.timingContext?.nextPartGroup).toBeTruthy() + const nextPartGroupId = getPartGroupId(selectedPartInfos.next!.partInstance) + expect(objs.timeline.find((obj) => obj.id === nextPartGroupId)).toBeTruthy() + }) + }) + + describe('infinite pieces', () => { + const PREVIOUS_PART_INSTANCE: SelectedPartInstanceTimelineInfo 
= { + nowInPart: 9999, + partStarted: 1234, + partInstance: createMockPartInstance( + 'part9', + { autoNext: true, expectedDuration: 5000 }, + { + timings: { + plannedStartedPlayback: 1235, + }, + } + ), + pieceInstances: [createMockPieceInstance('piece9')], + calculatedTimings: DEFAULT_PART_TIMINGS, + } + + it('infinite starting in current', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + previous: PREVIOUS_PART_INSTANCE, + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0'), + pieceInstances: [ + createMockPieceInstance('piece0'), + createMockInfinitePieceInstance('piece1', {}, { plannedStartedPlayback: undefined }), + ], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + }) + + it('infinite ending with previous', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + previous: { + ...PREVIOUS_PART_INSTANCE, + pieceInstances: [ + ...PREVIOUS_PART_INSTANCE.pieceInstances, + createMockInfinitePieceInstance('piece6', {}, {}, 1), + ], + }, + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0'), + pieceInstances: [createMockPieceInstance('piece0')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + }) + + it('infinite ending with 
previous excludeDuringPartKeepalive=true', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + previous: { + ...PREVIOUS_PART_INSTANCE, + pieceInstances: [ + ...PREVIOUS_PART_INSTANCE.pieceInstances, + createMockInfinitePieceInstance('piece6', { excludeDuringPartKeepalive: true }, {}, 1), + ], + }, + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0'), + pieceInstances: [createMockPieceInstance('piece0')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + }) + + it('infinite continuing from previous', () => { + const context = setupDefaultJobEnvironment() + + const infinitePiece = createMockInfinitePieceInstance('piece6') + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + previous: { + ...PREVIOUS_PART_INSTANCE, + pieceInstances: [...PREVIOUS_PART_INSTANCE.pieceInstances, infinitePiece], + }, + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance('part0'), + pieceInstances: [createMockPieceInstance('piece0'), continueInfinitePiece(infinitePiece)], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + }) + + it('infinite continuing into next with autonext', () => { + const context = setupDefaultJobEnvironment() + + const infinitePiece = createMockInfinitePieceInstance('piece6') + + const 
selectedPartInfos: SelectedPartInstancesTimelineInfo = { + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance( + 'part0', + { autoNext: true, expectedDuration: 5000 }, + { + timings: { + plannedStartedPlayback: 1235, + }, + } + ), + pieceInstances: [createMockPieceInstance('piece0'), infinitePiece], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + next: { + nowInPart: 0, + partStarted: undefined, + partInstance: createMockPartInstance( + 'part1', + {}, + { + timings: { + plannedStartedPlayback: 5000, + }, + } + ), + pieceInstances: [createMockPieceInstance('piece1'), continueInfinitePiece(infinitePiece)], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + }) + + it('infinite stopping in current with autonext', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance( + 'part0', + { autoNext: true, expectedDuration: 5000 }, + { + timings: { + plannedStartedPlayback: 1235, + }, + } + ), + pieceInstances: [createMockPieceInstance('piece0'), createMockInfinitePieceInstance('piece6')], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + next: { + nowInPart: 0, + partStarted: undefined, + partInstance: createMockPartInstance( + 'part1', + {}, + { + timings: { + plannedStartedPlayback: 5000, + }, + } + ), + pieceInstances: [createMockPieceInstance('piece1')], + calculatedTimings: { + ...DEFAULT_PART_TIMINGS, + fromPartKeepalive: 100, + }, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, 
selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + }) + + it('infinite stopping in current with autonext excludeDuringPartKeepalive=true', () => { + const context = setupDefaultJobEnvironment() + + const selectedPartInfos: SelectedPartInstancesTimelineInfo = { + current: { + nowInPart: 1234, + partStarted: 5678, + partInstance: createMockPartInstance( + 'part0', + { autoNext: true, expectedDuration: 5000 }, + { + timings: { + plannedStartedPlayback: 1235, + }, + } + ), + pieceInstances: [ + createMockPieceInstance('piece0'), + createMockInfinitePieceInstance('piece6', { excludeDuringPartKeepalive: true }), + ], + calculatedTimings: DEFAULT_PART_TIMINGS, + }, + next: { + nowInPart: 0, + partStarted: undefined, + partInstance: createMockPartInstance( + 'part1', + {}, + { + timings: { + plannedStartedPlayback: 5000, + }, + } + ), + pieceInstances: [createMockPieceInstance('piece1')], + calculatedTimings: { + ...DEFAULT_PART_TIMINGS, + fromPartKeepalive: 100, + }, + }, + } + + const playlist = createMockPlaylist(selectedPartInfos) + const objs = buildTimelineObjsForRundown(context, playlist, selectedPartInfos) + + expect(objs.timeline).not.toHaveLength(0) + expect(objs.timingContext).not.toBeUndefined() + expect(transformTimelineIntoSimplifiedForm(objs)).toMatchSnapshot() + }) + }) +}) diff --git a/packages/job-worker/src/playout/timeline/generate.ts b/packages/job-worker/src/playout/timeline/generate.ts index 24635a16750..8bc040653b2 100644 --- a/packages/job-worker/src/playout/timeline/generate.ts +++ b/packages/job-worker/src/playout/timeline/generate.ts @@ -336,12 +336,7 @@ async function getTimelineRundown( logger.warn(`Missing Baseline objects for Rundown "${activeRundown.rundown._id}"`) } - const rundownTimelineResult = buildTimelineObjsForRundown( - context, - playoutModel, - activeRundown.rundown, - partInstancesInfo - ) + 
const rundownTimelineResult = buildTimelineObjsForRundown(context, playoutModel.playlist, partInstancesInfo) timelineObjs = timelineObjs.concat(rundownTimelineResult.timeline) timelineObjs = timelineObjs.concat(await pLookaheadObjs) diff --git a/packages/job-worker/src/playout/timeline/part.ts b/packages/job-worker/src/playout/timeline/part.ts index 4a20c2ef23f..2c9dddbc3d1 100644 --- a/packages/job-worker/src/playout/timeline/part.ts +++ b/packages/job-worker/src/playout/timeline/part.ts @@ -13,11 +13,12 @@ import { assertNever, literal } from '@sofie-automation/corelib/dist/lib' import { getPartGroupId, getPartFirstObjectId } from '@sofie-automation/corelib/dist/playout/ids' import { PieceInstanceWithTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' import { PieceTimelineMetadata } from './pieceGroup' -import { PartCalculatedTimings } from '@sofie-automation/corelib/dist/playout/timings' import { JobContext } from '../../jobs' import { ReadonlyDeep } from 'type-fest' import { getPieceEnableInsidePart, transformPieceGroupAndObjects } from './piece' import { PlayoutChangedType } from '@sofie-automation/shared-lib/dist/peripheralDevice/peripheralDeviceAPI' +import { SelectedPartInstanceTimelineInfo } from './generate' +import { PartCalculatedTimings } from '@sofie-automation/corelib/dist/playout/timings' export function transformPartIntoTimeline( context: JobContext, @@ -25,61 +26,43 @@ export function transformPartIntoTimeline( pieceInstances: ReadonlyDeep>, pieceGroupFirstObjClasses: string[], parentGroup: TimelineObjGroupPart & OnGenerateTimelineObjExt, - nowInParentGroup: number, - partTimings: PartCalculatedTimings, - isInHold: boolean, - outTransition: IBlueprintPartOutTransition | null + partInfo: SelectedPartInstanceTimelineInfo, + nextPartTimings: PartCalculatedTimings | null, + isInHold: boolean ): Array { const span = context.startSpan('transformPartIntoTimeline') + + const nowInParentGroup = partInfo.nowInPart + const partTimings = 
partInfo.calculatedTimings + const outTransition = partInfo.partInstance.part.outTransition ?? null + + let parentGroupNoKeepalive: (TimelineObjGroupPart & OnGenerateTimelineObjExt) | undefined + const timelineObjs: Array = [] for (const pieceInstance of pieceInstances) { if (pieceInstance.disabled) continue - let pieceEnable: TSR.Timeline.TimelineEnable | undefined - switch (pieceInstance.piece.pieceType) { - case IBlueprintPieceType.InTransition: - if (typeof partTimings.inTransitionStart === 'number') { - // Respect the start time of the piece, in case there is a reason for it being non-zero - const startOffset = - typeof pieceInstance.piece.enable.start === 'number' ? pieceInstance.piece.enable.start : 0 - - pieceEnable = { - start: partTimings.inTransitionStart + startOffset, - duration: pieceInstance.piece.enable.duration, - } - } - break - case IBlueprintPieceType.OutTransition: - if (outTransition) { - pieceEnable = { - start: `#${parentGroup.id}.end - ${outTransition.duration}`, - } - if (partTimings.toPartPostroll) { - pieceEnable.start += ' - ' + partTimings.toPartPostroll - } - } - break - case IBlueprintPieceType.Normal: - pieceEnable = getPieceEnableInsidePart( - pieceInstance, - partTimings, - parentGroup.id, - parentGroup.enable.duration !== undefined || parentGroup.enable.end !== undefined - ) - break - default: - assertNever(pieceInstance.piece.pieceType) - break - } + const pieceEnable = getPieceEnableForPieceInstance(partTimings, outTransition, parentGroup, pieceInstance) // Not able to enable this piece if (!pieceEnable) continue + // Determine which group to add to + let partGroupToAddTo = parentGroup + if (pieceInstance.piece.excludeDuringPartKeepalive) { + if (!parentGroupNoKeepalive) { + // Only generate the no-keepalive group if is is needed + parentGroupNoKeepalive = createPartNoKeepaliveGroup(parentGroup, nextPartTimings) + timelineObjs.push(parentGroupNoKeepalive) + } + partGroupToAddTo = parentGroupNoKeepalive + } + timelineObjs.push( 
...transformPieceGroupAndObjects( playlistId, - parentGroup, + partGroupToAddTo, nowInParentGroup, pieceInstance, pieceEnable, @@ -94,6 +77,49 @@ export function transformPartIntoTimeline( return timelineObjs } +function getPieceEnableForPieceInstance( + partTimings: PartCalculatedTimings, + outTransition: IBlueprintPartOutTransition | null, + parentGroup: TimelineObjGroupPart & OnGenerateTimelineObjExt, + pieceInstance: ReadonlyDeep +): TSR.Timeline.TimelineEnable | undefined { + switch (pieceInstance.piece.pieceType) { + case IBlueprintPieceType.InTransition: { + if (typeof partTimings.inTransitionStart !== 'number') return undefined + // Respect the start time of the piece, in case there is a reason for it being non-zero + const startOffset = + typeof pieceInstance.piece.enable.start === 'number' ? pieceInstance.piece.enable.start : 0 + + return { + start: partTimings.inTransitionStart + startOffset, + duration: pieceInstance.piece.enable.duration, + } + } + case IBlueprintPieceType.OutTransition: { + if (!outTransition) return undefined + + const pieceEnable: TSR.Timeline.TimelineEnable = { + start: `#${parentGroup.id}.end - ${outTransition.duration}`, + } + if (partTimings.toPartPostroll) { + pieceEnable.start += ' - ' + partTimings.toPartPostroll + } + + return pieceEnable + } + case IBlueprintPieceType.Normal: + return getPieceEnableInsidePart( + pieceInstance, + partTimings, + parentGroup.id, + parentGroup.enable.duration !== undefined || parentGroup.enable.end !== undefined + ) + default: + assertNever(pieceInstance.piece.pieceType) + return undefined + } +} + export interface PartEnable { start: number | 'now' | string duration?: number @@ -154,3 +180,32 @@ export function createPartGroupFirstObject( priority: 0, }) } + +export function createPartNoKeepaliveGroup( + partGroup: TimelineObjGroupPart & OnGenerateTimelineObjExt, + nextPartTimings: PartCalculatedTimings | null +): TimelineObjGroupPart & OnGenerateTimelineObjExt { + const keepaliveDuration = 
nextPartTimings?.fromPartKeepalive ?? 0 + + return { + id: `${partGroup.id}_no_keepalive`, + objectType: TimelineObjType.RUNDOWN, + enable: { + start: 0, + end: `#${partGroup.id}.end - ${keepaliveDuration}`, + }, + priority: 5, + layer: '', // These should coexist + content: { + deviceType: TSR.DeviceType.ABSTRACT, + type: TimelineContentTypeOther.GROUP, + }, + children: [], + isGroup: true, + partInstanceId: partGroup.partInstanceId, + metaData: literal({ + isPieceTimeline: true, + }), + inGroup: partGroup.id, + } +} diff --git a/packages/job-worker/src/playout/timeline/pieceGroup.ts b/packages/job-worker/src/playout/timeline/pieceGroup.ts index bf4da90ef86..45ceeb43e7b 100644 --- a/packages/job-worker/src/playout/timeline/pieceGroup.ts +++ b/packages/job-worker/src/playout/timeline/pieceGroup.ts @@ -42,8 +42,11 @@ export function createPieceGroupAndCap( partGroup?: TimelineObjRundown, pieceStartOffset?: number ): { + /** The 'control' object which defines the bounds of the group. This triggers the timing, and does not include and pre/postroll */ controlObj: TimelineObjPieceAbstract & OnGenerateTimelineObjExt + /** The 'group' object that should contain all the content. This uses the control object for its timing, and adds the pre/postroll. */ childGroup: TimelineObjGroupRundown & OnGenerateTimelineObjExt + /** Any additional objects which are used to determine points in time that the piece should start/end relative to. 
*/ capObjs: Array> } { const controlObj = literal>({ diff --git a/packages/job-worker/src/playout/timeline/rundown.ts b/packages/job-worker/src/playout/timeline/rundown.ts index 941c0a80d97..0fcb057a886 100644 --- a/packages/job-worker/src/playout/timeline/rundown.ts +++ b/packages/job-worker/src/playout/timeline/rundown.ts @@ -22,11 +22,9 @@ import { JobContext } from '../../jobs' import { ReadonlyDeep } from 'type-fest' import { SelectedPartInstancesTimelineInfo, SelectedPartInstanceTimelineInfo } from './generate' import { createPartGroup, createPartGroupFirstObject, PartEnable, transformPartIntoTimeline } from './part' -import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { literal, normalizeArrayToMapFunc } from '@sofie-automation/corelib/dist/lib' import { getCurrentTime } from '../../lib' import _ = require('underscore') -import { PlayoutModel } from '../model/PlayoutModel' import { getPieceEnableInsidePart, transformPieceGroupAndObjects } from './piece' import { logger } from '../../logging' @@ -50,14 +48,12 @@ export interface RundownTimelineResult { export function buildTimelineObjsForRundown( context: JobContext, - playoutModel: PlayoutModel, - _activeRundown: ReadonlyDeep, + activePlaylist: ReadonlyDeep, partInstancesInfo: SelectedPartInstancesTimelineInfo ): RundownTimelineResult { const span = context.startSpan('buildTimelineObjsForRundown') const timelineObjs: Array = [] - const activePlaylist = playoutModel.playlist const currentTime = getCurrentTime() timelineObjs.push( @@ -102,136 +98,110 @@ export function buildTimelineObjsForRundown( logger.info(`No next part and no current part set on RundownPlaylist "${activePlaylist._id}".`) } - let timingContext: RundownTimelineTimingContext | undefined - // Currently playing: - if (partInstancesInfo.current) { - const [currentInfinitePieces, currentNormalItems] = _.partition( - partInstancesInfo.current.pieceInstances, - (l) => !!(l.infinite && (l.piece.lifespan !== 
PieceLifespan.WithinPart || l.infinite.fromHold)) - ) - - // Find all the infinites in each of the selected parts - const currentInfinitePieceIds = new Set( - _.compact(currentInfinitePieces.map((l) => l.infinite?.infiniteInstanceId)) - ) - const nextPartInfinites = new Map() - if (partInstancesInfo.current.partInstance.part.autoNext && partInstancesInfo.next) { - partInstancesInfo.next.pieceInstances.forEach((piece) => { - if (piece.infinite) { - nextPartInfinites.set(piece.infinite.infiniteInstanceId, piece) - } - }) + if (!partInstancesInfo.current) { + if (span) span.end() + return { + timeline: timelineObjs, + timingContext: undefined, } + } - const previousPartInfinites: Map = - partInstancesInfo.previous - ? normalizeArrayToMapFunc(partInstancesInfo.previous.pieceInstances, (inst) => - inst.infinite ? inst.infinite.infiniteInstanceId : undefined - ) - : new Map() - - // The startTime of this start is used as the reference point for the calculated timings, so we can use 'now' and everything will lie after this point - const currentPartEnable: PartEnable = { start: 'now' } - if (partInstancesInfo.current.partInstance.timings?.plannedStartedPlayback) { - // If we are recalculating the currentPart, then ensure it doesnt think it is starting now - currentPartEnable.start = partInstancesInfo.current.partInstance.timings.plannedStartedPlayback - } + const [currentInfinitePieces, currentNormalItems] = _.partition( + partInstancesInfo.current.pieceInstances, + (l) => !!(l.infinite && (l.piece.lifespan !== PieceLifespan.WithinPart || l.infinite.fromHold)) + ) - if ( - partInstancesInfo.next && - partInstancesInfo.current.partInstance.part.autoNext && - partInstancesInfo.current.partInstance.part.expectedDuration !== undefined - ) { - // If there is a valid autonext out of the current part, then calculate the duration - currentPartEnable.duration = - partInstancesInfo.current.partInstance.part.expectedDuration + - partInstancesInfo.current.calculatedTimings.toPartDelay 
+ - partInstancesInfo.current.calculatedTimings.toPartPostroll // autonext should have the postroll added to it to not confuse the timeline - - if ( - typeof currentPartEnable.start === 'number' && - currentPartEnable.start + currentPartEnable.duration < getCurrentTime() - ) { - logger.warn('Prevented setting the end of an autonext in the past') - // note - this will cause a small glitch on air where the next part is skipped into because this calculation does not account - // for the time it takes between timeline generation and timeline execution. That small glitch is preferable to setting the time - // very far in the past however. To do this properly we should support setting the "end" to "now" and have that calculated after - // timeline generation as we do for start times. - currentPartEnable.duration = getCurrentTime() - currentPartEnable.start + // Find all the infinites in each of the selected parts + const currentInfinitePieceIds = new Set(_.compact(currentInfinitePieces.map((l) => l.infinite?.infiniteInstanceId))) + const nextPartInfinites = new Map() + if (partInstancesInfo.current.partInstance.part.autoNext && partInstancesInfo.next) { + partInstancesInfo.next.pieceInstances.forEach((piece) => { + if (piece.infinite) { + nextPartInfinites.set(piece.infinite.infiniteInstanceId, piece) } - } - const currentPartGroup = createPartGroup(partInstancesInfo.current.partInstance, currentPartEnable) + }) + } - timingContext = { - currentPartGroup, - currentPartDuration: currentPartEnable.duration, - } + const previousPartInfinites: Map = + partInstancesInfo.previous + ? normalizeArrayToMapFunc(partInstancesInfo.previous.pieceInstances, (inst) => + inst.infinite ? 
inst.infinite.infiniteInstanceId : undefined + ) + : new Map() - // Start generating objects - if (partInstancesInfo.previous) { - timelineObjs.push( - ...generatePreviousPartInstanceObjects( - context, - activePlaylist, - partInstancesInfo.previous, - currentInfinitePieceIds, - timingContext, - partInstancesInfo.current.calculatedTimings - ) - ) - } + // The startTime of this start is used as the reference point for the calculated timings, so we can use 'now' and everything will lie after this point + const currentPartEnable = createCurrentPartGroupEnable(partInstancesInfo.current, !!partInstancesInfo.next) + const currentPartGroup = createPartGroup(partInstancesInfo.current.partInstance, currentPartEnable) - // any continued infinite lines need to skip the group, as they need a different start trigger - for (const infinitePiece of currentInfinitePieces) { - timelineObjs.push( - ...generateCurrentInfinitePieceObjects( - activePlaylist, - partInstancesInfo.current, - previousPartInfinites, - nextPartInfinites, - timingContext, - infinitePiece, - currentTime, - partInstancesInfo.current.calculatedTimings - ) - ) - } + const timingContext: RundownTimelineTimingContext = { + currentPartGroup, + currentPartDuration: currentPartEnable.duration, + } - const groupClasses: string[] = ['current_part'] + // Start generating objects + if (partInstancesInfo.previous) { timelineObjs.push( - currentPartGroup, - createPartGroupFirstObject( - activePlaylist._id, - partInstancesInfo.current.partInstance, - currentPartGroup, - partInstancesInfo.previous?.partInstance - ), - ...transformPartIntoTimeline( + ...generatePreviousPartInstanceObjects( context, - activePlaylist._id, - currentNormalItems, - groupClasses, - currentPartGroup, - partInstancesInfo.current.nowInPart, + activePlaylist, + partInstancesInfo.previous, + currentInfinitePieceIds, + timingContext, + partInstancesInfo.current.calculatedTimings + ) + ) + } + + // any continued infinite lines need to skip the group, as 
they need a different start trigger + for (const infinitePiece of currentInfinitePieces) { + timelineObjs.push( + ...generateCurrentInfinitePieceObjects( + activePlaylist, + partInstancesInfo.current, + previousPartInfinites, + nextPartInfinites, + timingContext, + infinitePiece, + currentTime, partInstancesInfo.current.calculatedTimings, - activePlaylist.holdState === RundownHoldState.ACTIVE, - partInstancesInfo.current.partInstance.part.outTransition ?? null + partInstancesInfo.next?.calculatedTimings ?? null ) ) + } + + const groupClasses: string[] = ['current_part'] + timelineObjs.push( + currentPartGroup, + createPartGroupFirstObject( + activePlaylist._id, + partInstancesInfo.current.partInstance, + currentPartGroup, + partInstancesInfo.previous?.partInstance + ), + ...transformPartIntoTimeline( + context, + activePlaylist._id, + currentNormalItems, + groupClasses, + currentPartGroup, + partInstancesInfo.current, + partInstancesInfo.next?.calculatedTimings ?? null, + activePlaylist.holdState === RundownHoldState.ACTIVE + ) + ) - // only add the next objects into the timeline if the current partgroup has a duration, and can autoNext - if (partInstancesInfo.next && currentPartEnable.duration) { - timelineObjs.push( - ...generateNextPartInstanceObjects( - context, - activePlaylist, - partInstancesInfo.current, - partInstancesInfo.next, - timingContext - ) + // only add the next objects into the timeline if the current partgroup has a duration, and can autoNext + if (partInstancesInfo.next && currentPartEnable.duration) { + timelineObjs.push( + ...generateNextPartInstanceObjects( + context, + activePlaylist, + partInstancesInfo.current, + partInstancesInfo.next, + timingContext ) - } + ) } if (span) span.end() @@ -241,6 +211,44 @@ export function buildTimelineObjsForRundown( } } +function createCurrentPartGroupEnable( + currentPartInfo: SelectedPartInstanceTimelineInfo, + hasNextPart: boolean +): PartEnable { + // The startTime of this start is used as the 
reference point for the calculated timings, so we can use 'now' and everything will lie after this point + const currentPartEnable: PartEnable = { start: 'now' } + if (currentPartInfo.partInstance.timings?.plannedStartedPlayback) { + // If we are recalculating the currentPart, then ensure it doesnt think it is starting now + currentPartEnable.start = currentPartInfo.partInstance.timings.plannedStartedPlayback + } + + if ( + hasNextPart && + currentPartInfo.partInstance.part.autoNext && + currentPartInfo.partInstance.part.expectedDuration !== undefined + ) { + // If there is a valid autonext out of the current part, then calculate the duration + currentPartEnable.duration = + currentPartInfo.partInstance.part.expectedDuration + + currentPartInfo.calculatedTimings.toPartDelay + + currentPartInfo.calculatedTimings.toPartPostroll // autonext should have the postroll added to it to not confuse the timeline + + if ( + typeof currentPartEnable.start === 'number' && + currentPartEnable.start + currentPartEnable.duration < getCurrentTime() + ) { + logger.warn('Prevented setting the end of an autonext in the past') + // note - this will cause a small glitch on air where the next part is skipped into because this calculation does not account + // for the time it takes between timeline generation and timeline execution. That small glitch is preferable to setting the time + // very far in the past however. To do this properly we should support setting the "end" to "now" and have that calculated after + // timeline generation as we do for start times. 
+ currentPartEnable.duration = getCurrentTime() - currentPartEnable.start + } + } + + return currentPartEnable +} + export function getInfinitePartGroupId(pieceInstanceId: PieceInstanceId): string { return getPartGroupId(protectString(unprotectString(pieceInstanceId))) + '_infinite' } @@ -253,7 +261,8 @@ function generateCurrentInfinitePieceObjects( timingContext: RundownTimelineTimingContext, pieceInstance: PieceInstanceWithTimings, currentTime: Time, - currentPartInstanceTimings: PartCalculatedTimings + currentPartInstanceTimings: PartCalculatedTimings, + nextPartInstanceTimings: PartCalculatedTimings | null ): Array { if (!pieceInstance.infinite) { // Type guard, should never be hit @@ -344,14 +353,17 @@ function generateCurrentInfinitePieceObjects( infiniteGroup.enable.duration === undefined && infiniteGroup.enable.end === undefined ) { + let endOffset = 0 + + if (currentPartInstanceTimings.fromPartPostroll) endOffset -= currentPartInstanceTimings.fromPartPostroll + + if (pieceInstance.piece.postrollDuration) endOffset += pieceInstance.piece.postrollDuration + + if (pieceInstance.piece.excludeDuringPartKeepalive && nextPartInstanceTimings) + endOffset -= nextPartInstanceTimings.fromPartKeepalive + // cap relative to the currentPartGroup - infiniteGroup.enable.end = `#${timingContext.currentPartGroup.id}.end` - if (currentPartInstanceTimings.fromPartPostroll) { - infiniteGroup.enable.end += ' - ' + currentPartInstanceTimings.fromPartPostroll - } - if (pieceInstance.piece.postrollDuration) { - infiniteGroup.enable.end += ' + ' + pieceInstance.piece.postrollDuration - } + infiniteGroup.enable.end = `#${timingContext.currentPartGroup.id}.end + ${endOffset}` } // Still show objects flagged as 'HoldMode.EXCEPT' if this is a infinite continuation as they belong to the previous too @@ -409,10 +421,9 @@ function generatePreviousPartInstanceObjects( previousContinuedPieces, groupClasses, previousPartGroup, - previousPartInfo.nowInPart, - 
previousPartInfo.calculatedTimings, - activePlaylist.holdState === RundownHoldState.ACTIVE, - previousPartInfo.partInstance.part.outTransition ?? null + previousPartInfo, + currentPartInstanceTimings, + activePlaylist.holdState === RundownHoldState.ACTIVE ), ] } else { @@ -453,10 +464,9 @@ function generateNextPartInstanceObjects( nextPieceInstances, groupClasses, nextPartGroup, - 0, - nextPartInfo.calculatedTimings, - false, - nextPartInfo.partInstance.part.outTransition ?? null + nextPartInfo, + null, + false ), ] } diff --git a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts index 89c4b34f05b..960ca4bb263 100644 --- a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts +++ b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts @@ -1450,6 +1450,7 @@ describe('rundown Timing Calculator', () => { toPartPostroll: 500, fromPartRemaining: 0, fromPartPostroll: 0, + fromPartKeepalive: 0, } const partInstance2 = wrapPartToTemporaryInstance(protectString(''), parts[1]) partInstance2.isTemporary = false @@ -1463,6 +1464,7 @@ describe('rundown Timing Calculator', () => { toPartPostroll: 0, fromPartRemaining: 500, fromPartPostroll: 500, + fromPartKeepalive: 0, } const partInstances = [partInstance1, partInstance2, ...convertPartsToPartInstances([parts[2], parts[3]])] const partInstancesMap: Map = new Map() From bc9695fa1e3ce1de250525a5349ba7ea90462da8 Mon Sep 17 00:00:00 2001 From: olzzon Date: Mon, 9 Sep 2024 12:26:50 +0200 Subject: [PATCH 73/81] feat: support for http header in packagemanager --- .../server-core-integration/src/lib/coreConnection.ts | 2 +- packages/server-core-integration/src/lib/ddpClient.ts | 9 ++++++++- packages/server-core-integration/src/lib/ddpConnector.ts | 1 + 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/packages/server-core-integration/src/lib/coreConnection.ts b/packages/server-core-integration/src/lib/coreConnection.ts index 
0b7185ea497..ec03973683b 100644 --- a/packages/server-core-integration/src/lib/coreConnection.ts +++ b/packages/server-core-integration/src/lib/coreConnection.ts @@ -126,7 +126,7 @@ export class CoreConnection< } }) - const ddpOptions = ddpOptions0 || { + const ddpOptions: DDPConnectorOptions = ddpOptions0 || { host: '127.0.0.1', port: 3000, } diff --git a/packages/server-core-integration/src/lib/ddpClient.ts b/packages/server-core-integration/src/lib/ddpClient.ts index 73782ea5aea..427f7b6f2e5 100644 --- a/packages/server-core-integration/src/lib/ddpClient.ts +++ b/packages/server-core-integration/src/lib/ddpClient.ts @@ -34,6 +34,7 @@ export interface TLSOpts { export interface DDPConnectorOptions { host: string port: number + headers?: { [header: string]: string } path?: string ssl?: boolean debug?: boolean @@ -343,6 +344,10 @@ export class DDPClient extends EventEmitter { public get port(): number { return this.portInt } + private headersInt: { [header: string]: string } = {} + public get headers(): { [header: string]: string } { + return this.headersInt + } private pathInt?: string public get path(): string | undefined { return this.pathInt @@ -410,6 +415,7 @@ export class DDPClient extends EventEmitter { // console.log(opts) this.hostInt = opts.host || '127.0.0.1' this.portInt = opts.port || 3000 + this.headersInt = opts.headers || {} this.pathInt = opts.path this.sslInt = opts.ssl || this.port === 443 this.tlsOpts = opts.tlsOpts || {} @@ -722,6 +728,7 @@ export class DDPClient extends EventEmitter { try { const response = await got(url, { + headers: this.headers, https: { certificateAuthority: this.tlsOpts.ca, key: this.tlsOpts.key, @@ -762,7 +769,7 @@ export class DDPClient extends EventEmitter { private makeWebSocketConnection(url: string): void { // console.log('About to create WebSocket client') - this.socket = new WebSocket.Client(url, null, { tls: this.tlsOpts }) + this.socket = new WebSocket.Client(url, null, { tls: this.tlsOpts, headers: this.headers 
}) this.socket.on('open', () => { // just go ahead and open the connection on connect diff --git a/packages/server-core-integration/src/lib/ddpConnector.ts b/packages/server-core-integration/src/lib/ddpConnector.ts index a1f1fd17bb5..5905863169d 100644 --- a/packages/server-core-integration/src/lib/ddpConnector.ts +++ b/packages/server-core-integration/src/lib/ddpConnector.ts @@ -31,6 +31,7 @@ export class DDPConnector extends EventEmitter { const o: DDPConnectorOptions = { host: this._options.host, port: this._options.port, + headers: this._options.headers, path: this._options.path || '', ssl: this._options.ssl || false, tlsOpts: this._options.tlsOpts || {}, From 9445b4573f9225e60ed94df2988600bc12ac1fec Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 27 Nov 2024 10:13:07 +0000 Subject: [PATCH 74/81] feat: always add `dnt=gateway` header for gateway connections --- packages/server-core-integration/src/lib/ddpClient.ts | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/packages/server-core-integration/src/lib/ddpClient.ts b/packages/server-core-integration/src/lib/ddpClient.ts index 427f7b6f2e5..08a7a067043 100644 --- a/packages/server-core-integration/src/lib/ddpClient.ts +++ b/packages/server-core-integration/src/lib/ddpClient.ts @@ -719,6 +719,13 @@ export class DDPClient extends EventEmitter { }) } + private getHeadersWithDefaults(): { [header: string]: string } { + return { + dnt: 'gateway', // Provide the header needed for the header based auth to work when not connected through a reverse proxy + ...this.headers, + } + } + private async makeSockJSConnection(): Promise { const protocol = this.ssl ? 
'https://' : 'http://' if (this.path && !this.path?.endsWith('/')) { @@ -728,7 +735,7 @@ export class DDPClient extends EventEmitter { try { const response = await got(url, { - headers: this.headers, + headers: this.getHeadersWithDefaults(), https: { certificateAuthority: this.tlsOpts.ca, key: this.tlsOpts.key, @@ -769,7 +776,7 @@ export class DDPClient extends EventEmitter { private makeWebSocketConnection(url: string): void { // console.log('About to create WebSocket client') - this.socket = new WebSocket.Client(url, null, { tls: this.tlsOpts, headers: this.headers }) + this.socket = new WebSocket.Client(url, null, { tls: this.tlsOpts, headers: this.getHeadersWithDefaults() }) this.socket.on('open', () => { // just go ahead and open the connection on connect From 9806ed9083c8fc9827ce2feebbf7cfaa700dcbe8 Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 3 Dec 2024 12:06:48 +0000 Subject: [PATCH 75/81] fix: Ensure CoreSystem.settingsWithOverrides is valid --- meteor/server/migration/X_X_X.ts | 60 ++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/meteor/server/migration/X_X_X.ts b/meteor/server/migration/X_X_X.ts index 62abd9a8f84..d0e41eb9765 100644 --- a/meteor/server/migration/X_X_X.ts +++ b/meteor/server/migration/X_X_X.ts @@ -310,6 +310,66 @@ export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [ } }, }, + + { + id: 'Ensure CoreSystem.settingsWithOverrides is valid', + dependOnResultFrom: `convert CoreSystem.settingsWithOverrides`, + canBeRunAutomatically: true, + validate: async () => { + const systems = await CoreSystem.findFetchAsync({ + $or: [ + { + 'settingsWithOverrides.defaults': { $exists: false }, + }, + { + 'settingsWithOverrides.overrides': { $exists: false }, + }, + ], + }) + + if (systems.length > 0) { + return 'settings must be converted to an ObjectWithOverrides' + } + + return false + }, + migrate: async () => { + const systems = await CoreSystem.findFetchAsync({ + $or: [ + { + 
'settingsWithOverrides.defaults': { $exists: false }, + }, + { + 'settingsWithOverrides.overrides': { $exists: false }, + }, + ], + }) + + for (const system of systems) { + const newSettings = wrapDefaultObject({ + cron: { + casparCGRestart: { + enabled: false, + }, + storeRundownSnapshots: { + enabled: false, + }, + }, + support: { message: '' }, + evaluationsMessage: { enabled: false, heading: '', message: '' }, + }) + + await CoreSystem.updateAsync(system._id, { + $set: { + settingsWithOverrides: { + ...newSettings, + ...system.settingsWithOverrides, + }, + }, + }) + } + }, + }, ]) interface PartialOldICoreSystem { From a7adc6e357e11a72b7dcdee51ad747aa9f86dfba Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Tue, 3 Dec 2024 12:17:34 +0000 Subject: [PATCH 76/81] fix: unable to remove peripheralDevice --- meteor/server/api/peripheralDevice.ts | 16 ++++++++-------- .../src/peripheralDevice/methodsAPI.ts | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/meteor/server/api/peripheralDevice.ts b/meteor/server/api/peripheralDevice.ts index 90604fb61aa..b6b29b218fd 100644 --- a/meteor/server/api/peripheralDevice.ts +++ b/meteor/server/api/peripheralDevice.ts @@ -67,6 +67,7 @@ import { convertPeripheralDeviceForGateway } from '../publications/peripheralDev import { executePeripheralDeviceFunction } from './peripheralDevice/executeFunction' import KoaRouter from '@koa/router' import bodyParser from 'koa-bodyparser' +import { assertConnectionHasOneOfPermissions } from '../security/auth' const apmNamespace = 'peripheralDevice' export namespace ServerPeripheralDeviceAPI { @@ -513,12 +514,11 @@ export namespace ServerPeripheralDeviceAPI { }, }) } - export async function removePeripheralDevice( - context: MethodContext, - deviceId: PeripheralDeviceId, - token?: string - ): Promise { - const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, context) + export async function removePeripheralDevice(context: MethodContext, deviceId: 
PeripheralDeviceId): Promise { + assertConnectionHasOneOfPermissions(context.connection, 'configure') + + const peripheralDevice = await PeripheralDevices.findOneAsync(deviceId) + if (!peripheralDevice) throw new Meteor.Error(404, `PeripheralDevice "${deviceId}" not found`) logger.info(`Removing PeripheralDevice ${peripheralDevice._id}`) @@ -850,8 +850,8 @@ class ServerPeripheralDeviceAPIClass extends MethodContextAPI implements NewPeri async testMethod(deviceId: PeripheralDeviceId, deviceToken: string, returnValue: string, throwError?: boolean) { return ServerPeripheralDeviceAPI.testMethod(this, deviceId, deviceToken, returnValue, throwError) } - async removePeripheralDevice(deviceId: PeripheralDeviceId, token?: string) { - return ServerPeripheralDeviceAPI.removePeripheralDevice(this, deviceId, token) + async removePeripheralDevice(deviceId: PeripheralDeviceId) { + return ServerPeripheralDeviceAPI.removePeripheralDevice(this, deviceId) } // ------ Playout Gateway -------- diff --git a/packages/shared-lib/src/peripheralDevice/methodsAPI.ts b/packages/shared-lib/src/peripheralDevice/methodsAPI.ts index 1f8a142f7bf..80dd4dd3666 100644 --- a/packages/shared-lib/src/peripheralDevice/methodsAPI.ts +++ b/packages/shared-lib/src/peripheralDevice/methodsAPI.ts @@ -116,7 +116,7 @@ export interface NewPeripheralDeviceAPI { timelineTriggerTime(deviceId: PeripheralDeviceId, deviceToken: string, r: TimelineTriggerTimeResult): Promise requestUserAuthToken(deviceId: PeripheralDeviceId, deviceToken: string, authUrl: string): Promise storeAccessToken(deviceId: PeripheralDeviceId, deviceToken: string, authToken: string): Promise - removePeripheralDevice(deviceId: PeripheralDeviceId, deviceToken?: string): Promise + removePeripheralDevice(deviceId: PeripheralDeviceId): Promise reportResolveDone( deviceId: PeripheralDeviceId, deviceToken: string, From f2bada1bbbb27293f2d2c5ffcce4fb0af4bdbf1b Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 4 Dec 2024 09:51:26 +0000 Subject: 
[PATCH 77/81] chore: fix test --- meteor/server/api/__tests__/peripheralDevice.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meteor/server/api/__tests__/peripheralDevice.test.ts b/meteor/server/api/__tests__/peripheralDevice.test.ts index 4a3b69fe5a3..30e78c928d8 100644 --- a/meteor/server/api/__tests__/peripheralDevice.test.ts +++ b/meteor/server/api/__tests__/peripheralDevice.test.ts @@ -618,7 +618,7 @@ describe('test peripheralDevice general API methods', () => { const deviceObj = await PeripheralDevices.findOneAsync(device?._id) expect(deviceObj).toBeDefined() - await MeteorCall.peripheralDevice.removePeripheralDevice(device._id, device.token) + await MeteorCall.peripheralDevice.removePeripheralDevice(device._id) } { From 5219a71d4b335994d071abbd136f0aa33fbf6a8b Mon Sep 17 00:00:00 2001 From: Julian Waller Date: Wed, 4 Dec 2024 09:54:17 +0000 Subject: [PATCH 78/81] feat: configure peripheral device settings from blueprints SOFIE-91 (#37) --- meteor/__mocks__/defaultCollectionObjects.ts | 1 + meteor/__mocks__/helpers/database.ts | 3 +- meteor/server/__tests__/cronjobs.test.ts | 1 - .../api/__tests__/peripheralDevice.test.ts | 45 +- .../api/__tests__/userActions/system.test.ts | 2 - meteor/server/api/deviceTriggers/observer.ts | 2 +- .../api/integration/expectedPackages.ts | 41 +- .../server/api/integration/mediaWorkFlows.ts | 16 +- meteor/server/api/packageManager.ts | 2 +- meteor/server/api/peripheralDevice.ts | 78 ++- meteor/server/api/rest/v1/studios.ts | 68 ++- meteor/server/api/rest/v1/typeConversion.ts | 1 + meteor/server/api/snapshot.ts | 4 +- meteor/server/api/studio/api.ts | 45 +- meteor/server/api/studio/lib.ts | 10 +- meteor/server/collections/collection.ts | 2 +- meteor/server/collections/index.ts | 23 +- meteor/server/lib/rest/v1/studios.ts | 4 +- meteor/server/migration/0_1_0.ts | 1 + meteor/server/migration/1_50_0.ts | 29 +- meteor/server/migration/X_X_X.ts | 162 +++++- .../migration/__tests__/migrations.test.ts | 3 + 
.../publications/deviceTriggersPreview.ts | 2 +- meteor/server/publications/mountedTriggers.ts | 4 +- .../expectedPackages/publication.ts | 2 +- .../packageManager/packageContainers.ts | 2 +- .../packageManager/playoutContext.ts | 2 +- .../server/publications/peripheralDevice.ts | 2 +- .../publications/peripheralDeviceForDevice.ts | 35 +- meteor/server/publications/rundown.ts | 7 +- meteor/server/publications/studio.ts | 2 +- meteor/server/publications/timeline.ts | 4 +- meteor/server/systemStatus/systemStatus.ts | 2 +- .../blueprints-integration/src/api/studio.ts | 10 + .../corelib/src/dataModel/PeripheralDevice.ts | 20 +- .../PeripheralDeviceSettings/ingestDevice.ts | 2 +- packages/corelib/src/dataModel/Studio.ts | 12 + packages/corelib/src/overrideOpHelper.ts | 24 +- .../src/__mocks__/defaultCollectionObjects.ts | 1 + .../src/__mocks__/presetCollections.ts | 6 +- packages/job-worker/src/events/handle.ts | 2 +- packages/job-worker/src/peripheralDevice.ts | 2 +- .../model/implementation/LoadPlayoutModel.ts | 4 +- packages/job-worker/src/playout/upgrade.ts | 12 + .../studio/model/StudioPlayoutModelImpl.ts | 2 +- packages/meteor-lib/src/api/studios.ts | 10 +- packages/openapi/api/definitions/studios.yaml | 3 + .../src/core/model/peripheralDevice.ts | 8 +- .../src/__mocks__/defaultCollectionObjects.ts | 1 + .../client/lib/reactiveData/reactiveData.ts | 2 +- packages/webui/src/client/ui/RundownView.tsx | 4 +- .../ui/RundownView/RundownSystemStatus.tsx | 2 +- .../Settings/Studio/Devices/ParentDevices.tsx | 513 ++++++++++++++++++ .../Settings/Studio/Devices/SelectDevices.tsx | 169 ------ .../ui/Settings/Studio/Devices/index.tsx | 6 +- .../src/client/ui/Settings/StudioSettings.tsx | 2 +- .../components/ConfigManifestOAuthFlow.tsx | 8 +- .../GenericDeviceSettingsComponent.tsx | 62 +-- .../useDebugStatesForPlayoutDevice.tsx | 62 +++ .../ui/Status/SystemStatus/SystemStatus.tsx | 92 +--- 60 files changed, 1142 insertions(+), 506 deletions(-) create mode 100644 
packages/webui/src/client/ui/Settings/Studio/Devices/ParentDevices.tsx delete mode 100644 packages/webui/src/client/ui/Settings/Studio/Devices/SelectDevices.tsx create mode 100644 packages/webui/src/client/ui/Settings/components/useDebugStatesForPlayoutDevice.tsx diff --git a/meteor/__mocks__/defaultCollectionObjects.ts b/meteor/__mocks__/defaultCollectionObjects.ts index 052ede4a903..5e4784ece25 100644 --- a/meteor/__mocks__/defaultCollectionObjects.ts +++ b/meteor/__mocks__/defaultCollectionObjects.ts @@ -121,6 +121,7 @@ export function defaultStudio(_id: StudioId): DBStudio { previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { + deviceSettings: wrapDefaultObject({}), playoutDevices: wrapDefaultObject({}), ingestDevices: wrapDefaultObject({}), inputDevices: wrapDefaultObject({}), diff --git a/meteor/__mocks__/helpers/database.ts b/meteor/__mocks__/helpers/database.ts index 7c4bd82c412..f15db2942b2 100644 --- a/meteor/__mocks__/helpers/database.ts +++ b/meteor/__mocks__/helpers/database.ts @@ -127,8 +127,7 @@ export async function setupMockPeripheralDevice( _id: protectString('mockDevice' + dbI++), name: 'mockDevice', organizationId: null, - studioId: studio ? studio._id : undefined, - settings: {}, + studioAndConfigId: studio ? 
{ studioId: studio._id, configId: 'test' } : undefined, category: category, type: type, diff --git a/meteor/server/__tests__/cronjobs.test.ts b/meteor/server/__tests__/cronjobs.test.ts index 3d6cc33c433..705587a6068 100644 --- a/meteor/server/__tests__/cronjobs.test.ts +++ b/meteor/server/__tests__/cronjobs.test.ts @@ -498,7 +498,6 @@ describe('cronjobs', () => { statusCode: StatusCode.GOOD, }, token: '', - settings: {}, ...props, }) diff --git a/meteor/server/api/__tests__/peripheralDevice.test.ts b/meteor/server/api/__tests__/peripheralDevice.test.ts index 30e78c928d8..7c0c8503767 100644 --- a/meteor/server/api/__tests__/peripheralDevice.test.ts +++ b/meteor/server/api/__tests__/peripheralDevice.test.ts @@ -11,10 +11,7 @@ import { getCurrentTime } from '../../lib/lib' import { waitUntil } from '../../../__mocks__/helpers/jest' import { setupDefaultStudioEnvironment, DefaultEnvironment } from '../../../__mocks__/helpers/database' import { setLogLevel } from '../../logging' -import { - IngestDeviceSettings, - IngestDeviceSecretSettings, -} from '@sofie-automation/corelib/dist/dataModel/PeripheralDeviceSettings/ingestDevice' +import { IngestDeviceSecretSettings } from '@sofie-automation/corelib/dist/dataModel/PeripheralDeviceSettings/ingestDevice' import { MediaWorkFlow } from '@sofie-automation/shared-lib/dist/core/model/MediaWorkFlows' import { MediaWorkFlowStep } from '@sofie-automation/shared-lib/dist/core/model/MediaWorkFlowSteps' import { MediaManagerAPI } from '@sofie-automation/meteor-lib/dist/api/mediaManager' @@ -412,7 +409,7 @@ describe('test peripheralDevice general API methods', () => { expect(QueueStudioJobSpy).toHaveBeenNthCalledWith( 1, StudioJobs.OnPlayoutPlaybackChanged, - device.studioId, + device.studioAndConfigId!.studioId, literal[0]>({ playlistId: rundownPlaylistID, changes: [ @@ -474,7 +471,7 @@ describe('test peripheralDevice general API methods', () => { expect(QueueStudioJobSpy).toHaveBeenNthCalledWith( 1, StudioJobs.OnTimelineTriggerTime, 
- device.studioId, + device.studioAndConfigId!.studioId, literal({ results: timelineTriggerTimeResult, }) @@ -556,7 +553,7 @@ describe('test peripheralDevice general API methods', () => { expect((deviceWithSecretToken.secretSettings as IngestDeviceSecretSettings).accessToken).toBe( 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' ) - expect((deviceWithSecretToken.settings as IngestDeviceSettings).secretAccessToken).toBe(true) + expect(deviceWithSecretToken.secretSettingsStatus?.accessToken).toBe(true) }) test('uninitialize', async () => { @@ -643,8 +640,10 @@ describe('test peripheralDevice general API methods', () => { organizationId: null, name: 'Mock Media Manager', deviceName: 'Media Manager', - studioId: env.studio._id, - settings: {}, + studioAndConfigId: { + studioId: env.studio._id, + configId: 'test', + }, category: PeripheralDeviceCategory.MEDIA_MANAGER, configManifest: { deviceConfigSchema: JSONBlobStringify({}), @@ -670,7 +669,7 @@ describe('test peripheralDevice general API methods', () => { deviceId: device._id, priority: 1, source: 'MockSource', - studioId: device.studioId!, + studioId: device.studioAndConfigId!.studioId, finished: false, success: false, }) @@ -682,7 +681,7 @@ describe('test peripheralDevice general API methods', () => { deviceId: device._id, priority: 2, status: MediaManagerAPI.WorkStepStatus.IDLE, - studioId: device.studioId!, + studioId: device.studioAndConfigId!.studioId, workFlowId: workFlowId, }) await MediaWorkFlowSteps.insertAsync({ @@ -693,14 +692,14 @@ describe('test peripheralDevice general API methods', () => { deviceId: device._id, priority: 1, status: MediaManagerAPI.WorkStepStatus.IDLE, - studioId: device.studioId!, + studioId: device.studioAndConfigId!.studioId, workFlowId: workFlowId, }) }) test('getMediaWorkFlowRevisions', async () => { const workFlows = ( await MediaWorkFlows.findFetchAsync({ - studioId: device.studioId, + studioId: device.studioAndConfigId!.studioId, }) ).map((wf) => ({ _id: wf._id, @@ -714,7 +713,7 @@ describe('test 
peripheralDevice general API methods', () => { test('getMediaWorkFlowStepRevisions', async () => { const workFlowSteps = ( await MediaWorkFlowSteps.findFetchAsync({ - studioId: device.studioId, + studioId: device.studioAndConfigId!.studioId, }) ).map((wf) => ({ _id: wf._id, @@ -799,8 +798,10 @@ describe('test peripheralDevice general API methods', () => { organizationId: null, name: 'Mock Media Manager', deviceName: 'Media Manager', - studioId: env.studio._id, - settings: {}, + studioAndConfigId: { + studioId: env.studio._id, + configId: 'test', + }, category: PeripheralDeviceCategory.MEDIA_MANAGER, configManifest: { deviceConfigSchema: JSONBlobStringify({}), @@ -834,7 +835,7 @@ describe('test peripheralDevice general API methods', () => { mediaSize: 10, mediaTime: 0, objId: MOCK_OBJID, - studioId: device.studioId!, + studioId: device.studioAndConfigId!.studioId, thumbSize: 0, thumbTime: 0, tinf: '', @@ -843,7 +844,7 @@ describe('test peripheralDevice general API methods', () => { test('getMediaObjectRevisions', async () => { const mobjects = ( await MediaObjects.findFetchAsync({ - studioId: device.studioId, + studioId: device.studioAndConfigId!.studioId, }) ).map((mo) => ({ _id: mo._id, @@ -864,7 +865,7 @@ describe('test peripheralDevice general API methods', () => { test('update', async () => { const mo = (await MediaObjects.findOneAsync({ collectionId: MOCK_COLLECTION, - studioId: device.studioId!, + studioId: device.studioAndConfigId!.studioId, })) as MediaObject expect(mo).toBeTruthy() @@ -882,14 +883,14 @@ describe('test peripheralDevice general API methods', () => { const updateMo = await MediaObjects.findOneAsync({ collectionId: MOCK_COLLECTION, - studioId: device.studioId!, + studioId: device.studioAndConfigId!.studioId, }) expect(updateMo).toMatchObject(newMo) }) test('remove', async () => { const mo = (await MediaObjects.findOneAsync({ collectionId: MOCK_COLLECTION, - studioId: device.studioId!, + studioId: device.studioAndConfigId!.studioId, })) as 
MediaObject expect(mo).toBeTruthy() @@ -903,7 +904,7 @@ describe('test peripheralDevice general API methods', () => { const updateMo = await MediaObjects.findOneAsync({ collectionId: MOCK_COLLECTION, - studioId: device.studioId!, + studioId: device.studioAndConfigId!.studioId, }) expect(updateMo).toBeFalsy() }) diff --git a/meteor/server/api/__tests__/userActions/system.test.ts b/meteor/server/api/__tests__/userActions/system.test.ts index 29bf0161c9d..1c6ed9711f3 100644 --- a/meteor/server/api/__tests__/userActions/system.test.ts +++ b/meteor/server/api/__tests__/userActions/system.test.ts @@ -40,7 +40,6 @@ describe('User Actions - Disable Peripheral SubDevice', () => { env.studio, { organizationId, - settings: {}, configManifest: { deviceConfigSchema: JSONBlobStringify({}), // unused subdeviceManifest: { @@ -165,7 +164,6 @@ describe('User Actions - Disable Peripheral SubDevice', () => { env.studio, { organizationId: null, - settings: {}, configManifest: { deviceConfigSchema: JSONBlobStringify({}), // unused subdeviceManifest: { diff --git a/meteor/server/api/deviceTriggers/observer.ts b/meteor/server/api/deviceTriggers/observer.ts index c155bcb6004..0d45085c03c 100644 --- a/meteor/server/api/deviceTriggers/observer.ts +++ b/meteor/server/api/deviceTriggers/observer.ts @@ -102,7 +102,7 @@ export async function receiveInputDeviceTrigger( check(deviceId, String) check(triggerId, String) - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId?.studioId if (!studioId) throw new Meteor.Error(400, `Peripheral Device "${peripheralDevice._id}" not assigned to a studio`) logger.debug( diff --git a/meteor/server/api/integration/expectedPackages.ts b/meteor/server/api/integration/expectedPackages.ts index 8e823b968fb..0127307b4f3 100644 --- a/meteor/server/api/integration/expectedPackages.ts +++ b/meteor/server/api/integration/expectedPackages.ts @@ -32,6 +32,7 @@ import { PackageInfos, } from '../../collections' import { logger } 
from '../../logging' +import _ from 'underscore' export namespace PackageManagerIntegration { export async function updateExpectedPackageWorkStatuses( @@ -58,7 +59,7 @@ export namespace PackageManagerIntegration { type FromPackage = Omit & { id: ExpectedPackageId } const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, context) - if (!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Meteor.Error(400, 'Device "' + peripheralDevice._id + '" has no studio') const bulkChanges: AnyBulkWriteOperation[] = [] @@ -150,11 +151,11 @@ export namespace PackageManagerIntegration { const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, context) await ExpectedPackageWorkStatuses.removeAsync({ - $or: [ + $or: _.compact([ { deviceId: peripheralDevice._id }, // Since we only have one PM in a studio, we can remove everything in the studio: - { studioId: peripheralDevice.studioId }, - ], + peripheralDevice.studioAndConfigId ? 
{ studioId: peripheralDevice.studioAndConfigId.studioId } : null, + ]), }) } @@ -177,10 +178,10 @@ export namespace PackageManagerIntegration { )[] ): Promise { const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, context) - if (!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Meteor.Error(400, 'Device "' + peripheralDevice._id + '" has no studio') - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId.studioId const removedIds: PackageContainerPackageId[] = [] const ps: Promise[] = [] @@ -189,7 +190,7 @@ export namespace PackageManagerIntegration { check(change.packageId, String) const id = getPackageContainerPackageId( - peripheralDevice.studioId, + peripheralDevice.studioAndConfigId.studioId, change.containerId, protectString(change.packageId) ) @@ -245,11 +246,11 @@ export namespace PackageManagerIntegration { const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, context) await PackageContainerPackageStatuses.removeAsync({ - $or: [ + $or: _.compact([ { deviceId: peripheralDevice._id }, // Since we only have one PM in a studio, we can remove everything in the studio: - { studioId: peripheralDevice.studioId }, - ], + peripheralDevice.studioAndConfigId ? 
{ studioId: peripheralDevice.studioAndConfigId.studioId } : null, + ]), }) } @@ -270,17 +271,17 @@ export namespace PackageManagerIntegration { )[] ): Promise { const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, context) - if (!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Meteor.Error(400, 'Device "' + peripheralDevice._id + '" has no studio') - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId.studioId const removedIds: PackageContainerId[] = [] const ps: Promise[] = [] for (const change of changes) { check(change.containerId, String) - const id = getPackageContainerId(peripheralDevice.studioId, change.containerId) + const id = getPackageContainerId(peripheralDevice.studioAndConfigId.studioId, change.containerId) if (change.type === 'delete') { removedIds.push(id) @@ -332,11 +333,11 @@ export namespace PackageManagerIntegration { const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, context) await PackageContainerStatuses.removeAsync({ - $or: [ + $or: _.compact([ { deviceId: peripheralDevice._id }, // Since we only have one PM in a studio, we can remove everything in the studio: - { studioId: peripheralDevice.studioId }, - ], + peripheralDevice.studioAndConfigId ? 
{ studioId: peripheralDevice.studioAndConfigId.studioId } : null, + ]), }) } @@ -352,7 +353,7 @@ export namespace PackageManagerIntegration { const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, context) check(packageIds, [String]) check(type, String) - if (!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Meteor.Error(400, 'Device "' + peripheralDevice._id + '" has no studio') const ids = packageIds.map((packageId) => getPackageInfoId(packageId, type)) @@ -386,7 +387,7 @@ export namespace PackageManagerIntegration { const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, context) check(packageId, String) check(type, String) - if (!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Meteor.Error(400, 'Device "' + peripheralDevice._id + '" has no studio') const id = getPackageInfoId(packageId, type) @@ -398,7 +399,7 @@ export namespace PackageManagerIntegration { expectedContentVersionHash: expectedContentVersionHash, actualContentVersionHash: actualContentVersionHash, - studioId: peripheralDevice.studioId, + studioId: peripheralDevice.studioAndConfigId.studioId, deviceId: peripheralDevice._id, @@ -425,7 +426,7 @@ export namespace PackageManagerIntegration { const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, context) check(packageId, String) check(type, String) - if (!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Meteor.Error(400, 'Device "' + peripheralDevice._id + '" has no studio') const id = getPackageInfoId(packageId, type) diff --git a/meteor/server/api/integration/mediaWorkFlows.ts b/meteor/server/api/integration/mediaWorkFlows.ts index 36fb3e2461d..c2167e63659 100644 --- a/meteor/server/api/integration/mediaWorkFlows.ts +++ b/meteor/server/api/integration/mediaWorkFlows.ts @@ -22,10 +22,10 @@ export namespace MediaManagerIntegration { 
logger.debug('getMediaWorkFlowStepRevisions') const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, context) - if (peripheralDevice.studioId) { + if (peripheralDevice.studioAndConfigId) { const rawSteps = (await MediaWorkFlowSteps.findFetchAsync( { - studioId: peripheralDevice.studioId, + studioId: peripheralDevice.studioAndConfigId.studioId, }, { fields: { @@ -54,10 +54,10 @@ export namespace MediaManagerIntegration { logger.debug('getMediaWorkFlowRevisions') const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, context) - if (peripheralDevice.studioId) { + if (peripheralDevice.studioAndConfigId) { const rawWorkflows = (await MediaWorkFlows.findFetchAsync( { - studioId: peripheralDevice.studioId, + studioId: peripheralDevice.studioAndConfigId.studioId, }, { fields: { @@ -91,7 +91,7 @@ export namespace MediaManagerIntegration { 400, `Device "${peripheralDevice._id}".type is "${peripheralDevice.type}", should be MEDIA_MANAGER ` ) - if (!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Meteor.Error(400, 'Device "' + peripheralDevice._id + '" has no studio') check(workFlowId, String) @@ -100,7 +100,7 @@ export namespace MediaManagerIntegration { if (obj) { check(obj._id, String) obj.deviceId = peripheralDevice._id - obj.studioId = peripheralDevice.studioId + obj.studioId = peripheralDevice.studioAndConfigId.studioId await MediaWorkFlows.upsertAsync(workFlowId, obj) @@ -131,7 +131,7 @@ export namespace MediaManagerIntegration { 400, `Device "${peripheralDevice._id}".type is "${peripheralDevice.type}", should be MEDIA_MANAGER ` ) - if (!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Meteor.Error(400, 'Device "' + peripheralDevice._id + '" has no studio') check(stepId, String) @@ -147,7 +147,7 @@ export namespace MediaManagerIntegration { obj.workFlowId = workflow._id obj.deviceId = peripheralDevice._id - obj.studioId = 
peripheralDevice.studioId + obj.studioId = peripheralDevice.studioAndConfigId.studioId await MediaWorkFlowSteps.upsertAsync(stepId, obj) } else { diff --git a/meteor/server/api/packageManager.ts b/meteor/server/api/packageManager.ts index c446852f257..dcad87fe041 100644 --- a/meteor/server/api/packageManager.ts +++ b/meteor/server/api/packageManager.ts @@ -16,7 +16,7 @@ export async function abortExpectation(deviceId: PeripheralDeviceId, workId: str export async function restartAllExpectationsInStudio(studioId: StudioId): Promise { const packageManagerDevices = await PeripheralDevices.findFetchAsync({ - studioId: studioId, + 'studioAndConfigId.studioId': studioId, category: PeripheralDeviceCategory.PACKAGE_MANAGER, type: PeripheralDeviceType.PACKAGE_MANAGER, subType: PERIPHERAL_SUBTYPE_PROCESS, diff --git a/meteor/server/api/peripheralDevice.ts b/meteor/server/api/peripheralDevice.ts index b6b29b218fd..e20cdae255d 100644 --- a/meteor/server/api/peripheralDevice.ts +++ b/meteor/server/api/peripheralDevice.ts @@ -68,6 +68,7 @@ import { executePeripheralDeviceFunction } from './peripheralDevice/executeFunct import KoaRouter from '@koa/router' import bodyParser from 'koa-bodyparser' import { assertConnectionHasOneOfPermissions } from '../security/auth' +import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' const apmNamespace = 'peripheralDevice' export namespace ServerPeripheralDeviceAPI { @@ -146,7 +147,6 @@ export namespace ServerPeripheralDeviceAPI { status: { statusCode: StatusCode.UNKNOWN, }, - settings: {}, connected: true, connectionId: options.connectionId, lastSeen: getCurrentTime(), @@ -161,7 +161,6 @@ export namespace ServerPeripheralDeviceAPI { deviceName: options.name, parentDeviceId: options.parentDeviceId, versions: options.versions, - // settings: {}, configManifest: options.configManifest ? 
{ @@ -267,7 +266,7 @@ export namespace ServerPeripheralDeviceAPI { const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, context) - if (!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Meteor.Error(401, `peripheralDevice "${deviceId}" not attached to a studio`) // check(r.time, Number) @@ -278,9 +277,13 @@ export namespace ServerPeripheralDeviceAPI { }) if (results.length > 0) { - const job = await QueueStudioJob(StudioJobs.OnTimelineTriggerTime, peripheralDevice.studioId, { - results, - }) + const job = await QueueStudioJob( + StudioJobs.OnTimelineTriggerTime, + peripheralDevice.studioAndConfigId.studioId, + { + results, + } + ) await job.complete } @@ -298,16 +301,20 @@ export namespace ServerPeripheralDeviceAPI { // Note that this function can / might be called several times from playout-gateway for the same part const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, context) - if (!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Error(`PeripheralDevice "${peripheralDevice._id}" sent piecePlaybackStarted, but has no studioId`) if (changedResults.changes.length) { check(changedResults.rundownPlaylistId, String) - const job = await QueueStudioJob(StudioJobs.OnPlayoutPlaybackChanged, peripheralDevice.studioId, { - playlistId: changedResults.rundownPlaylistId, - changes: changedResults.changes, - }) + const job = await QueueStudioJob( + StudioJobs.OnPlayoutPlaybackChanged, + peripheralDevice.studioAndConfigId.studioId, + { + playlistId: changedResults.rundownPlaylistId, + changes: changedResults.changes, + } + ) await job.complete } @@ -370,10 +377,10 @@ export namespace ServerPeripheralDeviceAPI { throw new Meteor.Error(405, `PeripheralDevice "${deviceId}" cannot have subdevice disabled`) if (!peripheralDevice.configManifest) throw new Meteor.Error(405, `PeripheralDevice "${deviceId}" does not provide a configuration manifest`) - if 
(!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Meteor.Error(405, `PeripheralDevice "${deviceId}" does not belong to a Studio`) - const studio = await Studios.findOneAsync(peripheralDevice.studioId) + const studio = await Studios.findOneAsync(peripheralDevice.studioAndConfigId.studioId) if (!studio) throw new Meteor.Error(405, `PeripheralDevice "${deviceId}" does not belong to a Studio`) const playoutDevices = applyAndValidateOverrides(studio.peripheralDeviceSettings.playoutDevices).obj @@ -421,13 +428,13 @@ export namespace ServerPeripheralDeviceAPI { (o) => o.path === propPath ) if (existingIndex !== -1) { - await Studios.updateAsync(peripheralDevice.studioId, { + await Studios.updateAsync(peripheralDevice.studioAndConfigId.studioId, { $set: { [`${overridesPath}.${existingIndex}`]: newOverrideOp, }, }) } else { - await Studios.updateAsync(peripheralDevice.studioId, { + await Studios.updateAsync(peripheralDevice.studioAndConfigId.studioId, { $push: { [overridesPath]: newOverrideOp, }, @@ -441,12 +448,29 @@ export namespace ServerPeripheralDeviceAPI { if ( // Debug states are only valid for Playout devices and must be enabled with the `debugState` option peripheralDevice.type !== PeripheralDeviceType.PLAYOUT || - !peripheralDevice.settings || - !(peripheralDevice.settings as any)['debugState'] + !peripheralDevice.studioAndConfigId // Must be attached to a studio ) { return {} } + // Fetch the relevant studio + const studioForDevice = (await Studios.findOneAsync(peripheralDevice.studioAndConfigId.studioId, { + fields: { + peripheralDeviceSettings: 1, + }, + })) as Pick | undefined + if (!studioForDevice) return {} + + const studioDeviceSettings = applyAndValidateOverrides( + studioForDevice.peripheralDeviceSettings.deviceSettings + ).obj + + const settingsForDevice = studioDeviceSettings[peripheralDevice.studioAndConfigId.configId] + if (!settingsForDevice) return {} + + // Make sure debugState is enabled + if 
(!(settingsForDevice.options as Record | undefined)?.['debugState']) return {} + try { return await executePeripheralDeviceFunction(peripheralDevice._id, 'getDebugStates') } catch (e) { @@ -510,7 +534,7 @@ export namespace ServerPeripheralDeviceAPI { $set: { accessTokenUrl: '', 'secretSettings.accessToken': accessToken, - 'settings.secretAccessToken': true, + 'secretSettingsStatus.accessToken': true, }, }) } @@ -610,7 +634,7 @@ peripheralDeviceRouter.post('/:deviceId/uploadCredentials', bodyParser(), async await PeripheralDevices.updateAsync(peripheralDevice._id, { $set: { 'secretSettings.credentials': body, - 'settings.secretCredentials': true, + 'secretSettingsStatus.credentials': true, }, }) @@ -633,11 +657,11 @@ peripheralDeviceRouter.get('/:deviceId/oauthResponse', async (ctx) => { const peripheralDevice = await PeripheralDevices.findOneAsync(deviceId) if (!peripheralDevice) throw new Meteor.Error(404, `Peripheral device "${deviceId}" not found`) - if (!peripheralDevice.studioId) + if (!peripheralDevice.studioAndConfigId) throw new Meteor.Error(400, `Peripheral device "${deviceId}" is not attached to a studio`) - if (!(await checkStudioExists(peripheralDevice.studioId))) - throw new Meteor.Error(404, `Studio "${peripheralDevice.studioId}" not found`) + if (!(await checkStudioExists(peripheralDevice.studioAndConfigId.studioId))) + throw new Meteor.Error(404, `Studio "${peripheralDevice.studioAndConfigId.studioId}" not found`) let accessToken = ctx.query['code'] || undefined const scopes = ctx.query['scope'] || undefined @@ -681,7 +705,7 @@ peripheralDeviceRouter.post('/:deviceId/resetAuth', async (ctx) => { $unset: { // User credentials 'secretSettings.accessToken': true, - 'settings.secretAccessToken': true, + 'secretSettingsStatus.accessToken': true, accessTokenUrl: true, }, }) @@ -711,10 +735,10 @@ peripheralDeviceRouter.post('/:deviceId/resetAppCredentials', async (ctx) => { $unset: { // App credentials 'secretSettings.credentials': true, - 
'settings.secretCredentials': true, + 'secretSettingsStatus.credentials': true, // User credentials 'secretSettings.accessToken': true, - 'settings.secretAccessToken': true, + 'secretSettingsStatus.accessToken': true, accessTokenUrl: true, }, }) @@ -832,7 +856,9 @@ class ServerPeripheralDeviceAPIClass extends MethodContextAPI implements NewPeri async getPeripheralDevice(deviceId: PeripheralDeviceId, deviceToken: string) { const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, deviceToken, this) - const studio = peripheralDevice.studioId && (await Studios.findOneAsync(peripheralDevice.studioId)) + const studio = + peripheralDevice.studioAndConfigId?.studioId && + (await Studios.findOneAsync(peripheralDevice.studioAndConfigId.studioId)) return convertPeripheralDeviceForGateway(peripheralDevice, studio) } diff --git a/meteor/server/api/rest/v1/studios.ts b/meteor/server/api/rest/v1/studios.ts index aaabedc6933..33d31e6df5e 100644 --- a/meteor/server/api/rest/v1/studios.ts +++ b/meteor/server/api/rest/v1/studios.ts @@ -12,15 +12,19 @@ import { APIStudioFrom, studioFrom, validateAPIBlueprintConfigForStudio } from ' import { runUpgradeForStudio, validateConfigForStudio } from '../../../migration/upgrades' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { ServerClientAPI } from '../../client' -import { assertNever } from '../../../lib/tempLib' +import { assertNever, literal } from '../../../lib/tempLib' import { getCurrentTime } from '../../../lib/lib' import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { DBStudio, StudioDeviceSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { ServerPlayoutAPI } from '../../playout/playout' import { checkValidation } from '.' 
import { assertConnectionHasOneOfPermissions } from '../../../security/auth' import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' +import { + applyAndValidateOverrides, + ObjectOverrideSetOp, +} from '@sofie-automation/corelib/dist/settings/objectWithOverrides' const PERMISSIONS_FOR_PLAYOUT_USERACTION: Array = ['studio'] @@ -167,7 +171,11 @@ class StudiosServerAPI implements StudiosRestAPI { } } - await PeripheralDevices.updateAsync({ studioId }, { $unset: { studioId: 1 } }, { multi: true }) + await PeripheralDevices.updateAsync( + { 'studioAndConfigId.studioId': studioId }, + { $unset: { studioAndConfigId: 1 } }, + { multi: true } + ) const rundownPlaylists = (await RundownPlaylists.findFetchAsync( { studioId }, @@ -231,7 +239,7 @@ class StudiosServerAPI implements StudiosRestAPI { studioId: StudioId ): Promise>> { const peripheralDevices = (await PeripheralDevices.findFetchAsync( - { studioId }, + { 'studioAndConfigId.studioId': studioId }, { projection: { _id: 1 } } )) as Array> @@ -242,7 +250,8 @@ class StudiosServerAPI implements StudiosRestAPI { _connection: Meteor.Connection, _event: string, studioId: StudioId, - deviceId: PeripheralDeviceId + deviceId: PeripheralDeviceId, + configId: string | undefined ): Promise> { const studio = await Studios.findOneAsync(studioId) if (!studio) @@ -258,7 +267,7 @@ class StudiosServerAPI implements StudiosRestAPI { 404 ) - if (device.studioId !== undefined && device.studioId !== studio._id) { + if (device.studioAndConfigId !== undefined && device.studioAndConfigId.studioId !== studio._id) { return ClientAPI.responseError( UserError.from( new Error(`Device already attached to studio`), @@ -267,9 +276,33 @@ class StudiosServerAPI implements StudiosRestAPI { 412 ) } + + // If no configId is provided, use the id of the device + configId = configId || unprotectString(device._id) + + // Ensure that the requested config blob exists + const availableDeviceSettings = 
applyAndValidateOverrides(studio.peripheralDeviceSettings.deviceSettings).obj + if (!availableDeviceSettings[configId]) { + await Studios.updateAsync(studioId, { + $push: { + 'peripheralDeviceSettings.deviceSettings.overrides': literal({ + op: 'set', + path: configId, + value: literal({ + name: device.name, + options: {}, + }), + }), + }, + }) + } + await PeripheralDevices.updateAsync(deviceId, { $set: { - studioId, + studioAndConfigId: { + studioId, + configId, + }, }, }) @@ -288,11 +321,17 @@ class StudiosServerAPI implements StudiosRestAPI { UserError.from(new Error(`Studio does not exist`), UserErrorMessage.StudioNotFound), 404 ) - await PeripheralDevices.updateAsync(deviceId, { - $unset: { - studioId: 1, + await PeripheralDevices.updateAsync( + { + _id: deviceId, + 'studioAndConfigId.studioId': studioId, }, - }) + { + $unset: { + studioAndConfigId: 1, + }, + } + ) return ClientAPI.responseSuccess(undefined, 200) } @@ -452,7 +491,7 @@ export function registerRoutes(registerRoute: APIRegisterHook): } ) - registerRoute<{ studioId: string }, { deviceId: string }, void>( + registerRoute<{ studioId: string }, { deviceId: string; configId: string | undefined }, void>( 'put', '/studios/:studioId/devices', new Map([ @@ -463,9 +502,10 @@ export function registerRoutes(registerRoute: APIRegisterHook): async (serverAPI, connection, events, params, body) => { const studioId = protectString(params.studioId) const deviceId = protectString(body.deviceId) - logger.info(`API PUT: Attach device ${deviceId} to studio ${studioId}`) + const configId = body.configId + logger.info(`API PUT: Attach device ${deviceId} to studio ${studioId} (${configId})`) - return await serverAPI.attachDeviceToStudio(connection, events, studioId, deviceId) + return await serverAPI.attachDeviceToStudio(connection, events, studioId, deviceId, configId) } ) diff --git a/meteor/server/api/rest/v1/typeConversion.ts b/meteor/server/api/rest/v1/typeConversion.ts index 24481613bf6..e1ec97bea5b 100644 --- 
a/meteor/server/api/rest/v1/typeConversion.ts +++ b/meteor/server/api/rest/v1/typeConversion.ts @@ -328,6 +328,7 @@ export async function studioFrom(apiStudio: APIStudio, existingId?: StudioId): P previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { + deviceSettings: wrapDefaultObject({}), playoutDevices: wrapDefaultObject({}), ingestDevices: wrapDefaultObject({}), inputDevices: wrapDefaultObject({}), diff --git a/meteor/server/api/snapshot.ts b/meteor/server/api/snapshot.ts index 3ca719a2d4c..d2557df17be 100644 --- a/meteor/server/api/snapshot.ts +++ b/meteor/server/api/snapshot.ts @@ -184,7 +184,7 @@ async function createSystemSnapshot( queryRundownLayouts = { showStyleBaseId: { $in: showStyleBaseIds } } queryTriggeredActions = { showStyleBaseIds: { $in: [null, ...showStyleBaseIds] } } - if (studioId) queryDevices = { studioId: studioId } + if (studioId) queryDevices = { 'studioAndConfigId.studioId': studioId } else if (organizationId) queryDevices = { organizationId: organizationId } const [showStyleVariants, rundownLayouts, devices, triggeredActions] = await Promise.all([ @@ -623,7 +623,7 @@ async function restoreFromSystemSnapshot(snapshot: SystemSnapshot): Promise { - if (peripheralDevice.studioId) { - return peripheralDevice.studioId + if (peripheralDevice.studioAndConfigId?.studioId) { + return peripheralDevice.studioAndConfigId.studioId } if (peripheralDevice.parentDeviceId) { // Also check the parent device: const parentDevice = (await PeripheralDevices.findOneAsync(peripheralDevice.parentDeviceId, { fields: { _id: 1, - studioId: 1, + studioAndConfigId: 1, }, - })) as Pick | undefined + })) as Pick | undefined if (parentDevice) { - return parentDevice.studioId + return parentDevice.studioAndConfigId?.studioId } } return undefined diff --git a/meteor/server/collections/collection.ts b/meteor/server/collections/collection.ts index 700ceda53d5..8ae8aae4a84 100644 --- a/meteor/server/collections/collection.ts +++ 
b/meteor/server/collections/collection.ts @@ -149,7 +149,7 @@ export interface AsyncOnlyMongoCollection>), modifier: MongoModifier, options?: UpdateOptions ): Promise diff --git a/meteor/server/collections/index.ts b/meteor/server/collections/index.ts index ee4a0280387..eb8ad9ba289 100644 --- a/meteor/server/collections/index.ts +++ b/meteor/server/collections/index.ts @@ -129,27 +129,22 @@ export const PeripheralDevices = createAsyncOnlyMongoCollection> /** * Detaches a device from a studio. diff --git a/meteor/server/migration/0_1_0.ts b/meteor/server/migration/0_1_0.ts index 72fba7b0c29..53682747795 100644 --- a/meteor/server/migration/0_1_0.ts +++ b/meteor/server/migration/0_1_0.ts @@ -46,6 +46,7 @@ export const addSteps = addMigrationSteps('0.1.0', [ thumbnailContainerIds: [], previewContainerIds: [], peripheralDeviceSettings: { + deviceSettings: wrapDefaultObject({}), playoutDevices: wrapDefaultObject({}), ingestDevices: wrapDefaultObject({}), inputDevices: wrapDefaultObject({}), diff --git a/meteor/server/migration/1_50_0.ts b/meteor/server/migration/1_50_0.ts index 9d85f90c983..2d7fd6e4147 100644 --- a/meteor/server/migration/1_50_0.ts +++ b/meteor/server/migration/1_50_0.ts @@ -171,7 +171,7 @@ export const addSteps = addMigrationSteps('1.50.0', [ }) const badObject = objects.find( (device) => - !!Object.values((device.settings as any)?.['devices'] ?? {}).find( + !!Object.values((device as any).settings?.['devices'] ?? 
{}).find( (subdev: any) => !subdev?.type || !subdev?.options ) ) @@ -187,7 +187,7 @@ export const addSteps = addMigrationSteps('1.50.0', [ 'settings.device': { $exists: true }, }) for (const obj of objects) { - const newDevices: any = clone((obj.settings as any)?.['devices'] || {}) + const newDevices: any = clone((obj as any).settings?.['devices'] || {}) for (const [id, subdev] of Object.entries(newDevices)) { if (!subdev) continue @@ -436,6 +436,7 @@ export const addSteps = addMigrationSteps('1.50.0', [ await Studios.updateAsync(studio._id, { $set: { peripheralDeviceSettings: { + deviceSettings: wrapDefaultObject({}), playoutDevices: wrapDefaultObject({}), ingestDevices: wrapDefaultObject({}), inputDevices: wrapDefaultObject({}), @@ -496,11 +497,13 @@ export const addSteps = addMigrationSteps('1.50.0', [ 'settings.devices': { $exists: true }, }) for (const device of objects) { - if (!device.studioId) continue + // @ts-expect-error removed in 1.52 + const studioId: StudioId = device.studioId + if (!studioId) continue const newOverrides: SomeObjectOverrideOp[] = [] - for (const [id, subDevice] of Object.entries((device.settings as any)?.['devices'] || {})) { + for (const [id, subDevice] of Object.entries((device as any).settings?.['devices'] || {})) { newOverrides.push( literal({ op: 'set', @@ -513,7 +516,7 @@ export const addSteps = addMigrationSteps('1.50.0', [ ) } - await Studios.updateAsync(device.studioId, { + await Studios.updateAsync(studioId, { $set: { [`peripheralDeviceSettings.playoutDevices.overrides`]: newOverrides, }, @@ -550,11 +553,13 @@ export const addSteps = addMigrationSteps('1.50.0', [ 'settings.devices': { $exists: true }, }) for (const device of objects) { - if (!device.studioId) continue + // @ts-expect-error removed in 1.52 + const studioId: StudioId = device.studioId + if (!studioId) continue const newOverrides: SomeObjectOverrideOp[] = [] - for (const [id, subDevice] of Object.entries((device.settings as any)?.['devices'] || {})) { + for 
(const [id, subDevice] of Object.entries((device as any).settings?.['devices'] || {})) { newOverrides.push( literal({ op: 'set', @@ -567,7 +572,7 @@ export const addSteps = addMigrationSteps('1.50.0', [ ) } - await Studios.updateAsync(device.studioId, { + await Studios.updateAsync(studioId, { $set: { [`peripheralDeviceSettings.ingestDevices.overrides`]: newOverrides, }, @@ -604,11 +609,13 @@ export const addSteps = addMigrationSteps('1.50.0', [ 'settings.devices': { $exists: true }, }) for (const device of objects) { - if (!device.studioId) continue + // @ts-expect-error removed in 1.52 + const studioId: StudioId = device.studioId + if (!studioId) continue const newOverrides: SomeObjectOverrideOp[] = [] - for (const [id, subDevice] of Object.entries((device.settings as any)?.['devices'] || {})) { + for (const [id, subDevice] of Object.entries((device as any).settings?.['devices'] || {})) { newOverrides.push( literal({ op: 'set', @@ -621,7 +628,7 @@ export const addSteps = addMigrationSteps('1.50.0', [ ) } - await Studios.updateAsync(device.studioId, { + await Studios.updateAsync(studioId, { $set: { [`peripheralDeviceSettings.inputDevices.overrides`]: newOverrides, }, diff --git a/meteor/server/migration/X_X_X.ts b/meteor/server/migration/X_X_X.ts index d0e41eb9765..2b526a53396 100644 --- a/meteor/server/migration/X_X_X.ts +++ b/meteor/server/migration/X_X_X.ts @@ -1,8 +1,9 @@ import { addMigrationSteps } from './databaseMigration' import { CURRENT_SYSTEM_VERSION } from './currentSystemVersion' -import { CoreSystem, Studios, TriggeredActions } from '../collections' +import { CoreSystem, PeripheralDevices, Studios, TriggeredActions } from '../collections' import { convertObjectIntoOverrides, + ObjectOverrideSetOp, wrapDefaultObject, } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { @@ -10,10 +11,13 @@ import { StudioRouteSetExclusivityGroup, StudioPackageContainer, IStudioSettings, + StudioDeviceSettings, } from 
'@sofie-automation/corelib/dist/dataModel/Studio' import { DEFAULT_CORE_TRIGGER_IDS } from './upgrades/defaultSystemActionTriggers' import { ICoreSystem } from '@sofie-automation/meteor-lib/dist/collections/CoreSystem' import { ICoreSystemSettings } from '@sofie-automation/shared-lib/dist/core/model/CoreSystemSettings' +import { logger } from '../logging' +import { literal, unprotectString } from '../lib/tempLib' /* * ************************************************************************************** @@ -370,6 +374,162 @@ export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [ } }, }, + + { + id: `studios create peripheralDeviceSettings.deviceSettings`, + canBeRunAutomatically: true, + validate: async () => { + const studios = await Studios.findFetchAsync({ + 'peripheralDeviceSettings.deviceSettings.defaults': { $exists: false }, + }) + + if (studios.length > 0) { + return 'studio is missing peripheralDeviceSettings.deviceSettings' + } + + return false + }, + migrate: async () => { + const studios = await Studios.findFetchAsync({ + 'peripheralDeviceSettings.deviceSettings.defaults': { $exists: false }, + }) + + for (const studio of studios) { + await Studios.updateAsync(studio._id, { + $set: { + 'peripheralDeviceSettings.deviceSettings': { + // Ensure the object is setup, preserving anything already configured + ...wrapDefaultObject({}), + ...studio.peripheralDeviceSettings.deviceSettings, + }, + }, + }) + } + }, + }, + + { + id: `PeripheralDevice populate secretSettingsStatus`, + canBeRunAutomatically: true, + dependOnResultFrom: `studios create peripheralDeviceSettings.deviceSettings`, + validate: async () => { + const devices = await PeripheralDevices.findFetchAsync({ + secretSettings: { $exists: true }, + settings: { $exists: true }, + secretSettingsStatus: { $exists: false }, + }) + + if (devices.length > 0) { + return 'settings must be moved to the studio' + } + + return false + }, + migrate: async () => { + const devices = await 
PeripheralDevices.findFetchAsync({ + secretSettings: { $exists: true }, + settings: { $exists: true }, + secretSettingsStatus: { $exists: false }, + }) + + for (const device of devices) { + // @ts-expect-error settings is typed as Record + const oldSettings = device.settings as Record | undefined + + await PeripheralDevices.updateAsync(device._id, { + $set: { + secretSettingsStatus: { + credentials: oldSettings?.secretCredentials, + accessToken: oldSettings?.secretAccessToken, + }, + }, + $unset: { + 'settings.secretCredentials': 1, + 'settings.secretAccessToken': 1, + }, + }) + } + }, + }, + + { + id: `move PeripheralDevice settings to studio`, + canBeRunAutomatically: true, + dependOnResultFrom: `PeripheralDevice populate secretSettingsStatus`, + validate: async () => { + const devices = await PeripheralDevices.findFetchAsync({ + studioId: { $exists: true }, + settings: { $exists: true }, + }) + + if (devices.length > 0) { + return 'settings must be moved to the studio' + } + + return false + }, + migrate: async () => { + const devices = await PeripheralDevices.findFetchAsync({ + studioId: { $exists: true }, + settings: { $exists: true }, + }) + + for (const device of devices) { + // @ts-expect-error settings is typed as Record + const oldSettings = device.settings + // @ts-expect-error studioId is typed as StudioId + const oldStudioId: StudioId = device.studioId + + // Will never happen, but make types match query + if (!oldSettings || !oldStudioId) { + logger.warn(`Skipping migration of device ${device._id} as it is missing settings or studioId`) + continue + } + + // If the studio is not found, then something is a little broken so skip + const existingStudio = await Studios.findOneAsync(oldStudioId) + if (!existingStudio) { + logger.warn(`Skipping migration of device ${device._id} as the studio ${oldStudioId} is missing`) + continue + } + + // Use the device id as the settings id + const newConfigId = unprotectString(device._id) + + // Compile the new list of 
overrides + const newOverrides = [ + ...existingStudio.peripheralDeviceSettings.deviceSettings.overrides, + literal({ + op: 'set', + path: newConfigId, + value: literal({ + name: device.name, + options: oldSettings, + }), + }), + ] + + await Studios.updateAsync(existingStudio._id, { + $set: { + 'peripheralDeviceSettings.deviceSettings.overrides': newOverrides, + }, + }) + + await PeripheralDevices.updateAsync(device._id, { + $set: { + studioAndConfigId: { + studioId: oldStudioId, + configId: newConfigId, + }, + }, + $unset: { + settings: 1, + studioId: 1, + }, + }) + } + }, + }, ]) interface PartialOldICoreSystem { diff --git a/meteor/server/migration/__tests__/migrations.test.ts b/meteor/server/migration/__tests__/migrations.test.ts index 6702dba9c6d..8c6e02c4fac 100644 --- a/meteor/server/migration/__tests__/migrations.test.ts +++ b/meteor/server/migration/__tests__/migrations.test.ts @@ -138,6 +138,7 @@ describe('Migrations', () => { previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { + deviceSettings: wrapDefaultObject({}), playoutDevices: wrapDefaultObject({}), ingestDevices: wrapDefaultObject({}), inputDevices: wrapDefaultObject({}), @@ -179,6 +180,7 @@ describe('Migrations', () => { previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { + deviceSettings: wrapDefaultObject({}), playoutDevices: wrapDefaultObject({}), ingestDevices: wrapDefaultObject({}), inputDevices: wrapDefaultObject({}), @@ -220,6 +222,7 @@ describe('Migrations', () => { previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { + deviceSettings: wrapDefaultObject({}), playoutDevices: wrapDefaultObject({}), ingestDevices: wrapDefaultObject({}), inputDevices: wrapDefaultObject({}), diff --git a/meteor/server/publications/deviceTriggersPreview.ts b/meteor/server/publications/deviceTriggersPreview.ts index c8352ba51fa..d4cbbaeb7f8 100644 --- a/meteor/server/publications/deviceTriggersPreview.ts +++ 
b/meteor/server/publications/deviceTriggersPreview.ts @@ -39,7 +39,7 @@ export async function insertInputDeviceTriggerIntoPreview( if (!pDevice) throw new Meteor.Error(404, `Could not find peripheralDevice "${deviceId}"`) - const studioId = unprotectString(pDevice.studioId) + const studioId = unprotectString(pDevice.studioAndConfigId?.studioId) if (!studioId) throw new Meteor.Error(501, `Device "${pDevice._id}" is not assigned to any studio`) const lastTriggersStudio = prepareTriggerBufferForStudio(studioId) diff --git a/meteor/server/publications/mountedTriggers.ts b/meteor/server/publications/mountedTriggers.ts index 13c520221b8..63a68b9de1d 100644 --- a/meteor/server/publications/mountedTriggers.ts +++ b/meteor/server/publications/mountedTriggers.ts @@ -25,7 +25,7 @@ meteorCustomPublish( const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId?.studioId if (!studioId) throw new Meteor.Error(400, `Peripheral Device "${deviceId}" not attached to a studio`) cursorCustomPublish( @@ -48,7 +48,7 @@ meteorCustomPublish( const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId?.studioId if (!studioId) throw new Meteor.Error(400, `Peripheral Device "${deviceId}" not attached to a studio`) cursorCustomPublish( diff --git a/meteor/server/publications/packageManager/expectedPackages/publication.ts b/meteor/server/publications/packageManager/expectedPackages/publication.ts index 66ee316ae7f..c200a25d733 100644 --- a/meteor/server/publications/packageManager/expectedPackages/publication.ts +++ b/meteor/server/publications/packageManager/expectedPackages/publication.ts @@ -207,7 +207,7 @@ meteorCustomPublish( const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const studioId = 
peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId?.studioId if (!studioId) { logger.warn(`Pub.packageManagerExpectedPackages: device "${peripheralDevice._id}" has no studioId`) return this.ready() diff --git a/meteor/server/publications/packageManager/packageContainers.ts b/meteor/server/publications/packageManager/packageContainers.ts index 133569a882d..0c41c5cf295 100644 --- a/meteor/server/publications/packageManager/packageContainers.ts +++ b/meteor/server/publications/packageManager/packageContainers.ts @@ -97,7 +97,7 @@ meteorCustomPublish( const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId?.studioId if (!studioId) { logger.warn(`Pub.packageManagerPackageContainers: device "${peripheralDevice._id}" has no studioId`) return this.ready() diff --git a/meteor/server/publications/packageManager/playoutContext.ts b/meteor/server/publications/packageManager/playoutContext.ts index 70b55955ca1..5959ae236e0 100644 --- a/meteor/server/publications/packageManager/playoutContext.ts +++ b/meteor/server/publications/packageManager/playoutContext.ts @@ -115,7 +115,7 @@ meteorCustomPublish( const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId?.studioId if (!studioId) { logger.warn(`Pub.packageManagerPlayoutContext: device "${peripheralDevice._id}" has no studioId`) return this.ready() diff --git a/meteor/server/publications/peripheralDevice.ts b/meteor/server/publications/peripheralDevice.ts index a6add93fdc4..59bedc9eff4 100644 --- a/meteor/server/publications/peripheralDevice.ts +++ b/meteor/server/publications/peripheralDevice.ts @@ -49,7 +49,7 @@ meteorPublish(CorelibPubSub.peripheralDevicesAndSubDevices, async function (stud triggerWriteAccessBecauseNoCheckNecessary() const 
selector: MongoQuery = { - studioId, + 'studioAndConfigId.studioId': studioId, } // TODO - this is not correctly reactive when changing the `studioId` property of a parent device diff --git a/meteor/server/publications/peripheralDeviceForDevice.ts b/meteor/server/publications/peripheralDeviceForDevice.ts index cb45ec57ee0..50e5e1585a2 100644 --- a/meteor/server/publications/peripheralDeviceForDevice.ts +++ b/meteor/server/publications/peripheralDeviceForDevice.ts @@ -43,14 +43,13 @@ const studioFieldsSpecifier = literal> >({ _id: 1, category: 1, - studioId: 1, - settings: 1, + studioAndConfigId: 1, secretSettings: 1, }) @@ -62,7 +61,16 @@ export function convertPeripheralDeviceForGateway( const ingestDevices: PeripheralDeviceForDevice['ingestDevices'] = {} const inputDevices: PeripheralDeviceForDevice['inputDevices'] = {} + let deviceSettings: PeripheralDeviceForDevice['deviceSettings'] = {} + if (studio) { + if (peripheralDevice.studioAndConfigId?.configId) { + const allDeviceSettingsInStudio = applyAndValidateOverrides( + studio.peripheralDeviceSettings.deviceSettings + ).obj + deviceSettings = allDeviceSettingsInStudio[peripheralDevice.studioAndConfigId.configId] ?? 
deviceSettings + } + switch (peripheralDevice.category) { case PeripheralDeviceCategory.INGEST: { const resolvedDevices = applyAndValidateOverrides(studio.peripheralDeviceSettings.ingestDevices).obj @@ -110,9 +118,9 @@ export function convertPeripheralDeviceForGateway( return literal>({ _id: peripheralDevice._id, - studioId: peripheralDevice.studioId, + studioId: peripheralDevice.studioAndConfigId?.studioId, - deviceSettings: peripheralDevice.settings, + deviceSettings: deviceSettings, secretSettings: peripheralDevice.secretSettings, playoutDevices, @@ -127,13 +135,13 @@ async function setupPeripheralDevicePublicationObservers( ): Promise { const studioObserver = await ReactiveMongoObserverGroup(async () => { const peripheralDeviceCompact = (await PeripheralDevices.findOneAsync(args.deviceId, { - fields: { studioId: 1 }, - })) as Pick | undefined + fields: { studioAndConfigId: 1 }, + })) as Pick | undefined - if (peripheralDeviceCompact?.studioId) { + if (peripheralDeviceCompact?.studioAndConfigId?.studioId) { return [ Studios.observeChanges( - peripheralDeviceCompact.studioId, + peripheralDeviceCompact.studioAndConfigId.studioId, { added: () => triggerUpdate({ invalidatePublication: true }), changed: () => triggerUpdate({ invalidatePublication: true }), @@ -160,7 +168,7 @@ async function setupPeripheralDevicePublicationObservers( triggerUpdate({ invalidatePublication: true }) }, changed: (_id, fields) => { - if ('studioId' in fields) studioObserver.restart() + if ('studioAndConfigId' in fields) studioObserver.restart() triggerUpdate({ invalidatePublication: true }) }, @@ -191,9 +199,10 @@ async function manipulatePeripheralDevicePublicationData( })) as Pick | undefined if (!peripheralDevice) return [] + const studioId = peripheralDevice.studioAndConfigId?.studioId const studio = - peripheralDevice.studioId && - ((await Studios.findOneAsync(peripheralDevice.studioId, { projection: studioFieldsSpecifier })) as + studioId && + ((await Studios.findOneAsync(studioId, { 
projection: studioFieldsSpecifier })) as | Pick | undefined) @@ -208,7 +217,7 @@ meteorCustomPublish( const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId?.studioId if (!studioId) return await setUpOptimizedObserverArray< diff --git a/meteor/server/publications/rundown.ts b/meteor/server/publications/rundown.ts index e4be7f6dac7..6e21b949568 100644 --- a/meteor/server/publications/rundown.ts +++ b/meteor/server/publications/rundown.ts @@ -57,11 +57,12 @@ meteorPublish( const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) // No studio, then no rundowns - if (!peripheralDevice.studioId) return null + const studioId = peripheralDevice.studioAndConfigId?.studioId + if (!studioId) return null return Rundowns.findWithCursor( { - studioId: peripheralDevice.studioId, + studioId: studioId, }, { fields: { @@ -448,7 +449,7 @@ meteorPublish( const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId?.studioId if (!studioId) return null return ExpectedPlayoutItems.findWithCursor({ studioId }) diff --git a/meteor/server/publications/studio.ts b/meteor/server/publications/studio.ts index 633f2bd3936..955d0ba8da7 100644 --- a/meteor/server/publications/studio.ts +++ b/meteor/server/publications/studio.ts @@ -121,7 +121,7 @@ meteorCustomPublish( const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId?.studioId if (!studioId) return await createObserverForMappingsPublication(pub, studioId) diff --git a/meteor/server/publications/timeline.ts b/meteor/server/publications/timeline.ts index c32c42b938b..cbe91128f3e 100644 --- a/meteor/server/publications/timeline.ts 
+++ b/meteor/server/publications/timeline.ts @@ -58,7 +58,7 @@ meteorCustomPublish( const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId?.studioId if (!studioId) return await createObserverForTimelinePublication(pub, studioId) @@ -71,7 +71,7 @@ meteorPublish( const peripheralDevice = await checkAccessAndGetPeripheralDevice(deviceId, token, this) - const studioId = peripheralDevice.studioId + const studioId = peripheralDevice.studioAndConfigId?.studioId if (!studioId) return null const modifier: FindOptions = { diff --git a/meteor/server/systemStatus/systemStatus.ts b/meteor/server/systemStatus/systemStatus.ts index 34ae34ce499..566e6336414 100644 --- a/meteor/server/systemStatus/systemStatus.ts +++ b/meteor/server/systemStatus/systemStatus.ts @@ -248,7 +248,7 @@ export async function getSystemStatus(_cred: RequestCredentials | null, studioId if (studioId) { // Check status for a certain studio: - devices = await PeripheralDevices.findFetchAsync({ studioId: studioId }) + devices = await PeripheralDevices.findFetchAsync({ 'studioAndConfigId.studioId': studioId }) } else { // Check status for all studios: diff --git a/packages/blueprints-integration/src/api/studio.ts b/packages/blueprints-integration/src/api/studio.ts index e905e361fe7..5392c4e221e 100644 --- a/packages/blueprints-integration/src/api/studio.ts +++ b/packages/blueprints-integration/src/api/studio.ts @@ -149,6 +149,8 @@ export interface BlueprintResultApplyStudioConfig { /** Playout Mappings */ mappings: BlueprintMappings + /** Parent device settings */ + parentDevices: Record /** Playout-gateway subdevices */ playoutDevices: Record /** Ingest-gateway subdevices, the types here depend on the gateway you use */ @@ -164,6 +166,14 @@ export interface BlueprintResultApplyStudioConfig { studioSettings?: IStudioSettings } +export interface BlueprintParentDeviceSettings { + /** 
+ * User friendly name for the device + */ + name: string + + options: Record +} export interface IStudioConfigPreset { name: string diff --git a/packages/corelib/src/dataModel/PeripheralDevice.ts b/packages/corelib/src/dataModel/PeripheralDevice.ts index bcba365e0c6..40c7ca2bb01 100644 --- a/packages/corelib/src/dataModel/PeripheralDevice.ts +++ b/packages/corelib/src/dataModel/PeripheralDevice.ts @@ -1,6 +1,10 @@ import { Time } from '@sofie-automation/blueprints-integration' import { DeviceConfigManifest } from '../deviceConfig' import { OrganizationId, PeripheralDeviceId, StudioId } from './Ids' +import type { + IngestDeviceSecretSettings, + IngestDeviceSecretSettingsStatus, +} from '@sofie-automation/shared-lib/dist/core/model/peripheralDevice' import { PeripheralDeviceStatusObject, @@ -18,12 +22,6 @@ export { PERIPHERAL_SUBTYPE_PROCESS, } -import { - GenericPeripheralDeviceSettings, - IngestDeviceSecretSettings, - IngestDeviceSettings, -} from '@sofie-automation/shared-lib/dist/core/model/peripheralDevice' - export interface PeripheralDevice { _id: PeripheralDeviceId @@ -33,8 +31,11 @@ export interface PeripheralDevice { /** Name of the device (set by the device) */ deviceName: string - /** The studio this device is assigned to. Will be undefined for sub-devices */ - studioId?: StudioId + /** The studio and config this device is assigned to. 
Will be undefined for sub-devices */ + studioAndConfigId?: { + studioId: StudioId + configId: string + } category: PeripheralDeviceCategory type: PeripheralDeviceType @@ -46,8 +47,6 @@ export interface PeripheralDevice { created: number status: PeripheralDeviceStatusObject - settings: IngestDeviceSettings | GenericPeripheralDeviceSettings - /** If set, this device is owned by that organization */ organizationId: OrganizationId | null @@ -70,6 +69,7 @@ export interface PeripheralDevice { token: string secretSettings?: IngestDeviceSecretSettings | { [key: string]: any } + secretSettingsStatus?: IngestDeviceSecretSettingsStatus /** If the device is of category ingest, the name of the NRCS being used */ nrcsName?: string diff --git a/packages/corelib/src/dataModel/PeripheralDeviceSettings/ingestDevice.ts b/packages/corelib/src/dataModel/PeripheralDeviceSettings/ingestDevice.ts index a8b0b15dbed..0dd830b45a7 100644 --- a/packages/corelib/src/dataModel/PeripheralDeviceSettings/ingestDevice.ts +++ b/packages/corelib/src/dataModel/PeripheralDeviceSettings/ingestDevice.ts @@ -1,6 +1,6 @@ export * from '@sofie-automation/shared-lib/dist/peripheralDevice/ingest' export { - IngestDeviceSettings, + IngestDeviceSecretSettingsStatus, IngestDeviceSecretSettings, } from '@sofie-automation/shared-lib/dist/core/model/peripheralDevice' diff --git a/packages/corelib/src/dataModel/Studio.ts b/packages/corelib/src/dataModel/Studio.ts index 5a440f9d186..dad72ea2eaf 100644 --- a/packages/corelib/src/dataModel/Studio.ts +++ b/packages/corelib/src/dataModel/Studio.ts @@ -89,6 +89,9 @@ export interface DBStudio { } export interface StudioPeripheralDeviceSettings { + /** Settings for gateway parent-devices */ + deviceSettings: ObjectWithOverrides> + /** Playout gateway sub-devices */ playoutDevices: ObjectWithOverrides> @@ -128,3 +131,12 @@ export interface StudioPlayoutDevice { options: TSR.DeviceOptionsAny } + +export interface StudioDeviceSettings { + /** + * User friendly name for the 
device + */ + name: string + + options: unknown +} diff --git a/packages/corelib/src/overrideOpHelper.ts b/packages/corelib/src/overrideOpHelper.ts index fd04bc3814b..a0043224ca6 100644 --- a/packages/corelib/src/overrideOpHelper.ts +++ b/packages/corelib/src/overrideOpHelper.ts @@ -44,13 +44,25 @@ export function getAllCurrentAndDeletedItemsFromOverrides( // Sort and wrap in the return type const sortedItems = getAllCurrentItemsFromOverrides(rawObject, comparitor) - const removedOutputLayers: WrappedOverridableItemDeleted[] = [] + const computedItemIds = new Set(sortedItems.map((l) => l.id)) + const removedItems = getAllRemovedItemsFromOverrides(rawObject, comparitor, computedItemIds) + + return [...sortedItems, ...removedItems] +} + +export function getAllRemovedItemsFromOverrides( + rawObject: ReadonlyDeep>>, + comparitor: + | ((a: [id: string, obj: T | ReadonlyDeep], b: [id: string, obj: T | ReadonlyDeep]) => number) + | null, + validItemIds: Set // TODO - should this be optional? +): WrappedOverridableItemDeleted[] { + const removedItems: WrappedOverridableItemDeleted[] = [] // Find the items which have been deleted with an override - const computedOutputLayerIds = new Set(sortedItems.map((l) => l.id)) for (const [id, output] of Object.entries>(rawObject.defaults)) { - if (!computedOutputLayerIds.has(id) && output) { - removedOutputLayers.push( + if (!validItemIds.has(id) && output) { + removedItems.push( literal>({ type: 'deleted', id: id, @@ -62,9 +74,9 @@ export function getAllCurrentAndDeletedItemsFromOverrides( } } - if (comparitor) removedOutputLayers.sort((a, b) => comparitor([a.id, a.defaults], [b.id, b.defaults])) + if (comparitor) removedItems.sort((a, b) => comparitor([a.id, a.defaults], [b.id, b.defaults])) - return [...sortedItems, ...removedOutputLayers] + return removedItems } /** diff --git a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts index 
ebd99423359..77f56c6b60e 100644 --- a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts @@ -117,6 +117,7 @@ export function defaultStudio(_id: StudioId): DBStudio { previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { + deviceSettings: wrapDefaultObject({}), playoutDevices: wrapDefaultObject({}), ingestDevices: wrapDefaultObject({}), inputDevices: wrapDefaultObject({}), diff --git a/packages/job-worker/src/__mocks__/presetCollections.ts b/packages/job-worker/src/__mocks__/presetCollections.ts index 7c1cf9e9918..9fd120815f6 100644 --- a/packages/job-worker/src/__mocks__/presetCollections.ts +++ b/packages/job-worker/src/__mocks__/presetCollections.ts @@ -420,8 +420,10 @@ export async function setupMockPeripheralDevice( name: 'mockDevice', deviceName: 'Mock Gateway', organizationId: null, - studioId: context.studioId, - settings: {}, + studioAndConfigId: { + studioId: context.studioId, + configId: 'test', + }, nrcsName: category === PeripheralDeviceCategory.INGEST ? 
'JEST-NRCS' : undefined, category: category, diff --git a/packages/job-worker/src/events/handle.ts b/packages/job-worker/src/events/handle.ts index d9efe5387f4..e195e1641e1 100644 --- a/packages/job-worker/src/events/handle.ts +++ b/packages/job-worker/src/events/handle.ts @@ -256,7 +256,7 @@ export async function handleNotifyCurrentlyPlayingPart( } const parentDevice = await context.directCollections.PeripheralDevices.findOne({ _id: device.parentDeviceId, - studioId: context.studioId, + 'studioAndConfigId.studioId': context.studioId, parentDeviceId: { $exists: false }, }) if (!parentDevice) { diff --git a/packages/job-worker/src/peripheralDevice.ts b/packages/job-worker/src/peripheralDevice.ts index 7ac13b0747e..0b8b1c36391 100644 --- a/packages/job-worker/src/peripheralDevice.ts +++ b/packages/job-worker/src/peripheralDevice.ts @@ -199,7 +199,7 @@ export async function listPlayoutDevices( ): Promise { const parentDevicesMap = normalizeArrayToMap( playoutModel.peripheralDevices.filter( - (doc) => doc.studioId === context.studioId && doc.type === PeripheralDeviceType.PLAYOUT + (doc) => doc.studioAndConfigId?.studioId === context.studioId && doc.type === PeripheralDeviceType.PLAYOUT ), '_id' ) diff --git a/packages/job-worker/src/playout/model/implementation/LoadPlayoutModel.ts b/packages/job-worker/src/playout/model/implementation/LoadPlayoutModel.ts index 210524ff2d7..0099896c701 100644 --- a/packages/job-worker/src/playout/model/implementation/LoadPlayoutModel.ts +++ b/packages/job-worker/src/playout/model/implementation/LoadPlayoutModel.ts @@ -46,7 +46,7 @@ export async function loadPlayoutModelPreInit( } const [PeripheralDevices, Playlist, Rundowns] = await Promise.all([ - context.directCollections.PeripheralDevices.findFetch({ studioId: tmpPlaylist.studioId }), + context.directCollections.PeripheralDevices.findFetch({ 'studioAndConfigId.studioId': tmpPlaylist.studioId }), reloadPlaylist ? 
context.directCollections.RundownPlaylists.findOne(tmpPlaylist._id) : clone(tmpPlaylist), context.directCollections.Rundowns.findFetch({ playlistId: tmpPlaylist._id }), ]) @@ -121,7 +121,7 @@ async function loadInitData( existingRundowns: ReadonlyDeep | undefined ): Promise<[ReadonlyDeep, DBRundownPlaylist, ReadonlyDeep]> { const [peripheralDevices, reloadedPlaylist, rundowns] = await Promise.all([ - context.directCollections.PeripheralDevices.findFetch({ studioId: tmpPlaylist.studioId }), + context.directCollections.PeripheralDevices.findFetch({ 'studioAndConfigId.studioId': tmpPlaylist.studioId }), reloadPlaylist ? await context.directCollections.RundownPlaylists.findOne(tmpPlaylist._id) : clone(tmpPlaylist), diff --git a/packages/job-worker/src/playout/upgrade.ts b/packages/job-worker/src/playout/upgrade.ts index fda503f079a..37a4f120abd 100644 --- a/packages/job-worker/src/playout/upgrade.ts +++ b/packages/job-worker/src/playout/upgrade.ts @@ -2,12 +2,14 @@ import { BlueprintMapping, BlueprintMappings, IStudioSettings, + BlueprintParentDeviceSettings, JSONBlobParse, StudioRouteBehavior, TSR, } from '@sofie-automation/blueprints-integration' import { MappingsExt, + StudioDeviceSettings, StudioIngestDevice, StudioInputDevice, StudioPackageContainer, @@ -51,6 +53,15 @@ export async function handleBlueprintUpgradeForStudio(context: JobContext, _data compileCoreConfigValues(context.studio.settings) ) + const parentDevices = Object.fromEntries( + Object.entries(result.parentDevices ?? {}).map((dev) => [ + dev[0], + literal>({ + name: dev[1].name ?? '', + options: dev[1], + }), + ]) + ) const playoutDevices = Object.fromEntries( Object.entries(result.playoutDevices ?? 
{}).map((dev) => [ dev[0], @@ -124,6 +135,7 @@ export async function handleBlueprintUpgradeForStudio(context: JobContext, _data $set: { 'settingsWithOverrides.defaults': studioSettings, 'mappingsWithOverrides.defaults': translateMappings(result.mappings), + 'peripheralDeviceSettings.deviceSettings.defaults': parentDevices, 'peripheralDeviceSettings.playoutDevices.defaults': playoutDevices, 'peripheralDeviceSettings.ingestDevices.defaults': ingestDevices, 'peripheralDeviceSettings.inputDevices.defaults': inputDevices, diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts index 8abd587defe..f00630f5a11 100644 --- a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts +++ b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts @@ -177,7 +177,7 @@ export async function loadStudioPlayoutModel( const studioId = context.studioId const collections = await Promise.all([ - context.directCollections.PeripheralDevices.findFetch({ studioId }), + context.directCollections.PeripheralDevices.findFetch({ 'studioAndConfigId.studioId': studioId }), context.directCollections.RundownPlaylists.findFetch({ studioId }), context.directCollections.Timelines.findOne(studioId), ]) diff --git a/packages/meteor-lib/src/api/studios.ts b/packages/meteor-lib/src/api/studios.ts index ee232fe4360..4aee06ed448 100644 --- a/packages/meteor-lib/src/api/studios.ts +++ b/packages/meteor-lib/src/api/studios.ts @@ -1,4 +1,4 @@ -import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PeripheralDeviceId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { IStudioSettings, MappingsExt, @@ -9,11 +9,19 @@ import { export interface NewStudiosAPI { insertStudio(): Promise removeStudio(studioId: StudioId): Promise + + assignConfigToPeripheralDevice( + studioId: StudioId, + configId: string, + deviceId: PeripheralDeviceId | null + ): Promise } export enum 
StudiosAPIMethods { 'insertStudio' = 'studio.insertStudio', 'removeStudio' = 'studio.removeStudio', + + 'assignConfigToPeripheralDevice' = 'studio.assignConfigToPeripheralDevice', } /** diff --git a/packages/openapi/api/definitions/studios.yaml b/packages/openapi/api/definitions/studios.yaml index 2c27bbd1cbd..ae1e4851d8c 100644 --- a/packages/openapi/api/definitions/studios.yaml +++ b/packages/openapi/api/definitions/studios.yaml @@ -317,6 +317,9 @@ resources: properties: deviceId: type: string + configId: + type: string + description: Id of the studio owned configuration to assign to the device. If not specified, one will be created. required: - deviceId responses: diff --git a/packages/shared-lib/src/core/model/peripheralDevice.ts b/packages/shared-lib/src/core/model/peripheralDevice.ts index cbd40c5f000..e6441956f0b 100644 --- a/packages/shared-lib/src/core/model/peripheralDevice.ts +++ b/packages/shared-lib/src/core/model/peripheralDevice.ts @@ -1,12 +1,10 @@ import { TSR } from '../../tsr' import { PeripheralDeviceId, StudioId } from './Ids' -export type GenericPeripheralDeviceSettings = Record - -export interface IngestDeviceSettings { +export interface IngestDeviceSecretSettingsStatus { /** OAuth: Set to true when secret value exists */ - secretCredentials: boolean - secretAccessToken: boolean + credentials?: boolean + accessToken?: boolean } export interface IngestDeviceSecretSettings { /** OAuth: */ diff --git a/packages/webui/src/__mocks__/defaultCollectionObjects.ts b/packages/webui/src/__mocks__/defaultCollectionObjects.ts index edf6ddf181a..a17d5e18fb5 100644 --- a/packages/webui/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/webui/src/__mocks__/defaultCollectionObjects.ts @@ -116,6 +116,7 @@ export function defaultStudio(_id: StudioId): DBStudio { previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { + deviceSettings: wrapDefaultObject({}), playoutDevices: wrapDefaultObject({}), ingestDevices: 
wrapDefaultObject({}), inputDevices: wrapDefaultObject({}), diff --git a/packages/webui/src/client/lib/reactiveData/reactiveData.ts b/packages/webui/src/client/lib/reactiveData/reactiveData.ts index cb95386f6a3..f1fce992774 100644 --- a/packages/webui/src/client/lib/reactiveData/reactiveData.ts +++ b/packages/webui/src/client/lib/reactiveData/reactiveData.ts @@ -131,7 +131,7 @@ export namespace reactiveData { const allDevices: PeripheralDevice[] = [] const peripheralDevices = PeripheralDevices.find( { - studioId: studioId, + 'studioAndConfigId.studioId': studioId, ignore: { $ne: true, }, diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index bb807da0f90..05ec66863f5 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -1461,7 +1461,7 @@ const RundownViewContent = translateWithTracker i._id), @@ -2794,7 +2794,7 @@ const RundownViewContent = translateWithTracker PeripheralDevices.find({ - studioId: props.studioId, + 'studioAndConfigId.studioId': props.studioId, }).fetch(), [], [] diff --git a/packages/webui/src/client/ui/Settings/Studio/Devices/ParentDevices.tsx b/packages/webui/src/client/ui/Settings/Studio/Devices/ParentDevices.tsx new file mode 100644 index 00000000000..fe73cf2ddf3 --- /dev/null +++ b/packages/webui/src/client/ui/Settings/Studio/Devices/ParentDevices.tsx @@ -0,0 +1,513 @@ +import React, { useCallback, useMemo } from 'react' +import { PeripheralDeviceId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { useTranslation } from 'react-i18next' +import { + getAllCurrentAndDeletedItemsFromOverrides, + OverrideOpHelper, + useOverrideOpHelper, + WrappedOverridableItem, + WrappedOverridableItemDeleted, + WrappedOverridableItemNormal, +} from '../../util/OverrideOpHelper' +import { faCheck, faPencilAlt, faPlus, faSync, faTrash } from '@fortawesome/free-solid-svg-icons' +import { FontAwesomeIcon } from 
'@fortawesome/react-fontawesome' +import { JSONBlob, JSONBlobParse, JSONSchema } from '@sofie-automation/blueprints-integration' +import { DropdownInputControl, DropdownInputOption } from '../../../../lib/Components/DropdownInput' +import { useToggleExpandHelper } from '../../../util/useToggleExpandHelper' +import { doModalDialog } from '../../../../lib/ModalDialog' +import classNames from 'classnames' +import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' +import { SchemaFormWithOverrides } from '../../../../lib/forms/SchemaFormWithOverrides' +import { LabelActual, LabelAndOverrides } from '../../../../lib/Components/LabelAndOverrides' +import { getRandomString, literal } from '@sofie-automation/corelib/dist/lib' +import { StudioDeviceSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { + SomeObjectOverrideOp, + wrapDefaultObject, + ObjectOverrideSetOp, +} from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import Tooltip from 'rc-tooltip' +import { PeripheralDevices, Studios } from '../../../../collections' +import { getHelpMode } from '../../../../lib/localStorage' +import { useTracker } from '../../../../lib/ReactMeteorData/ReactMeteorData' +import { TextInputControl } from '../../../../lib/Components/TextInput' +import { MomentFromNow } from '../../../../lib/Moment' +import { MeteorCall } from '../../../../lib/meteorApi' +import { ReadonlyDeep } from 'type-fest' +import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' + +interface StudioParentDevicesProps { + studioId: StudioId +} +export function StudioParentDevices({ studioId }: Readonly): JSX.Element { + const { t } = useTranslation() + + const studio = useTracker(() => Studios.findOne(studioId), [studioId]) + + const saveOverrides = useCallback( + (newOps: SomeObjectOverrideOp[]) => { + if (studio?._id) { + Studios.update(studio._id, { + $set: { + 'peripheralDeviceSettings.deviceSettings.overrides': 
newOps, + }, + }) + } + }, + [studio?._id] + ) + + const deviceSettings = useMemo( + () => + studio?.peripheralDeviceSettings?.deviceSettings ?? wrapDefaultObject>({}), + [studio?.peripheralDeviceSettings?.deviceSettings] + ) + + const overrideHelper = useOverrideOpHelper(saveOverrides, deviceSettings) + + const wrappedDeviceSettings = useMemo( + () => + getAllCurrentAndDeletedItemsFromOverrides(deviceSettings, (a, b) => + a[0].localeCompare(b[0]) + ), + [deviceSettings] + ) + + const addNewItem = useCallback( + (id?: string) => { + const newId = id ?? getRandomString() + const newDevice = literal({ + // peripheralDeviceId: undefined, + name: 'New Device', + options: {}, + }) + + const addOp = literal({ + op: 'set', + path: newId, + value: newDevice, + }) + + Studios.update(studioId, { + $push: { + 'peripheralDeviceSettings.deviceSettings.overrides': addOp, + }, + }) + }, + [studioId] + ) + const addNewItemClick = useCallback(() => addNewItem(), [studioId]) + + const hasCurrentDevice = wrappedDeviceSettings.find((d) => d.type === 'normal') + + return ( +
+

+ + {t('Parent Devices')} + +

+ + + +
+ +
+
+ ) +} + +interface PeripheralDeviceTranslated { + _id: PeripheralDeviceId + name: string + lastSeen: number + deviceConfigSchema: JSONBlob +} + +interface ParentDevicesTableProps { + studioId: StudioId + devices: WrappedOverridableItem[] + overrideHelper: OverrideOpHelper + createItemWithId: (id: string) => void +} +function GenericParentDevicesTable({ + studioId, + devices, + overrideHelper, + createItemWithId, +}: Readonly): JSX.Element { + const { t } = useTranslation() + const { toggleExpanded, isExpanded } = useToggleExpandHelper() + + const allParentDevices = useTracker(() => PeripheralDevices.find({ parentDeviceId: undefined }).fetch(), [], []) + + const studioParentDevices = useTracker( + () => PeripheralDevices.find({ parentDeviceId: undefined, 'studioAndConfigId.studioId': studioId }).fetch(), + [studioId], + [] + ) + const allKnownConfigIds = new Set(devices.map((d) => d.id)) + + const peripheralDevicesByConfigIdMap = useMemo(() => { + const devicesMap = new Map() + + for (const device of allParentDevices) { + if (!device.studioAndConfigId) continue + if (device.studioAndConfigId.studioId !== studioId) continue + + devicesMap.set( + device.studioAndConfigId.configId, + literal({ + _id: device._id, + name: device.name || unprotectString(device._id), + lastSeen: device.lastSeen, + deviceConfigSchema: device.configManifest.deviceConfigSchema, + }) + ) + } + + return devicesMap + }, [studioId, allParentDevices]) + + const confirmRemove = useCallback( + (parentdeviceId: string) => { + doModalDialog({ + title: t('Remove this device?'), + no: t('Cancel'), + yes: t('Remove'), + onAccept: () => { + overrideHelper().deleteItem(parentdeviceId).commit() + }, + message: ( + +

+ {t('Are you sure you want to remove {{type}} "{{deviceId}}"?', { + type: 'device', + deviceId: parentdeviceId, + })} +

+

{t('Please note: This action is irreversible!')}

+
+ ), + }) + }, + [t, overrideHelper] + ) + + const peripheralDeviceOptions = useMemo(() => { + const options: DropdownInputOption[] = [ + { + value: undefined, + name: 'Unassigned', + i: 0, + }, + ] + + for (const device of allParentDevices) { + options.push({ + value: device._id, + name: device.name || unprotectString(device._id), + i: options.length, + }) + } + + return options + }, [allParentDevices]) + + const undeleteItemWithId = useCallback( + (itemId: string) => overrideHelper().resetItem(itemId).commit(), + [overrideHelper] + ) + + return ( + + + + + + + + + + + {devices.map((item) => { + if (item.type === 'deleted') { + return + } else { + const peripheralDevice = peripheralDevicesByConfigIdMap.get(item.id) + + return ( + + + {isExpanded(item.id) && ( + + )} + + ) + } + })} + {studioParentDevices.map((device) => { + if (!device.studioAndConfigId) return null + if (allKnownConfigIds.has(device.studioAndConfigId.configId)) return null + + return ( + + ) + })} + +
{t('Name')}{t('Gateway')}{t('Last Seen')} 
+ ) +} + +interface SummaryRowProps { + item: WrappedOverridableItemNormal + peripheralDevice: PeripheralDeviceTranslated | undefined + isEdited: boolean + editItemWithId: (itemId: string) => void + removeItemWithId: (itemId: string) => void +} +function SummaryRow({ + item, + peripheralDevice, + isEdited, + editItemWithId, + removeItemWithId, +}: Readonly): JSX.Element { + const editItem = useCallback(() => editItemWithId(item.id), [editItemWithId, item.id]) + const removeItem = useCallback(() => removeItemWithId(item.id), [removeItemWithId, item.id]) + + return ( + + {item.computed.name} + + {peripheralDevice?.name || '-'} + + + {peripheralDevice ? : '-'} + + + + + + + + ) +} + +interface DeletedSummaryRowProps { + item: WrappedOverridableItemDeleted + undeleteItemWithId: (itemId: string) => void +} +function DeletedSummaryRow({ item, undeleteItemWithId }: Readonly): JSX.Element { + const undeleteItem = useCallback(() => undeleteItemWithId(item.id), [undeleteItemWithId, item.id]) + + return ( + + {item.defaults.name} + + - + + - + + + + + + ) +} + +interface OrphanedSummaryRowProps { + configId: string + device: ReadonlyDeep + createItemWithId: (itemId: string) => void +} +function OrphanedSummaryRow({ configId, device, createItemWithId }: Readonly): JSX.Element { + const createItem = useCallback(() => createItemWithId(configId), [createItemWithId, configId]) + + return ( + + - + + {device.name || unprotectString(device._id)} + + {} + + + + + + ) +} + +interface ParentDeviceEditRowProps { + studioId: StudioId + peripheralDevice: PeripheralDeviceTranslated | undefined + peripheralDeviceOptions: DropdownInputOption[] + editItemWithId: (parentdeviceId: string, forceState?: boolean) => void + item: WrappedOverridableItemNormal + overrideHelper: OverrideOpHelper +} +function ParentDeviceEditRow({ + studioId, + peripheralDevice, + peripheralDeviceOptions, + editItemWithId, + item, + overrideHelper, +}: Readonly) { + const { t } = useTranslation() + + const 
finishEditItem = useCallback(() => editItemWithId(item.id, false), [editItemWithId, item.id]) + + return ( + + +
+ + {(value, handleUpdate) => ( + + )} + + + + + {!peripheralDevice &&

{t('A device must be assigned to the config to edit the settings')}

} + + {peripheralDevice && ( + + )} +
+
+ +
+ + + ) +} + +interface AssignPeripheralDeviceConfigIdProps { + studioId: StudioId + configId: string + value: PeripheralDeviceId | undefined + peripheralDeviceOptions: DropdownInputOption[] +} + +function AssignPeripheralDeviceConfigId({ + studioId, + configId, + value, + peripheralDeviceOptions, +}: AssignPeripheralDeviceConfigIdProps) { + const handleUpdate = useCallback( + (peripheralDeviceId: PeripheralDeviceId | undefined) => { + MeteorCall.studio.assignConfigToPeripheralDevice(studioId, configId, peripheralDeviceId ?? null).catch((e) => { + console.error('assignConfigToPeripheralDevice failed', e) + }) + }, + [configId] + ) + + return ( + + ) +} + +interface ParentDeviceEditFormProps { + peripheralDevice: PeripheralDeviceTranslated + item: WrappedOverridableItemNormal + overrideHelper: OverrideOpHelper +} +function ParentDeviceEditForm({ peripheralDevice, item, overrideHelper }: Readonly) { + const { t } = useTranslation() + + const parsedSchema = useMemo((): JSONSchema | undefined => { + if (peripheralDevice?.deviceConfigSchema) { + return JSONBlobParse(peripheralDevice.deviceConfigSchema) + } + + return undefined + }, [peripheralDevice]) + + const translationNamespaces = useMemo(() => ['peripheralDevice_' + peripheralDevice._id], [peripheralDevice._id]) + + return ( + <> + {parsedSchema ? ( + + ) : ( +

{t('Device is missing configuration schema')}

+ )} + + ) +} diff --git a/packages/webui/src/client/ui/Settings/Studio/Devices/SelectDevices.tsx b/packages/webui/src/client/ui/Settings/Studio/Devices/SelectDevices.tsx deleted file mode 100644 index 9245171e51a..00000000000 --- a/packages/webui/src/client/ui/Settings/Studio/Devices/SelectDevices.tsx +++ /dev/null @@ -1,169 +0,0 @@ -import { useCallback, useState } from 'react' -import Tooltip from 'rc-tooltip' -import { doModalDialog } from '../../../../lib/ModalDialog' -import { FontAwesomeIcon } from '@fortawesome/react-fontawesome' -import { faExclamationTriangle, faTrash, faPlus } from '@fortawesome/free-solid-svg-icons' -import { PeripheralDevice, PeripheralDeviceType } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { Link } from 'react-router-dom' -import { MomentFromNow } from '../../../../lib/Moment' -import { useTranslation } from 'react-i18next' -import { getHelpMode } from '../../../../lib/localStorage' -import { unprotectString } from '../../../../lib/tempLib' -import { PeripheralDevices } from '../../../../collections' -import { PeripheralDeviceId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { useTracker } from '../../../../lib/ReactMeteorData/ReactMeteorData' - -interface StudioSelectDevicesProps { - studioId: StudioId - studioDevices: PeripheralDevice[] -} -export function StudioSelectDevices({ studioId, studioDevices }: Readonly): JSX.Element { - const { t } = useTranslation() - - const availableDevices = useTracker( - () => - PeripheralDevices.find( - { - studioId: { - $not: { - $eq: studioId, - }, - }, - parentDeviceId: { - $exists: false, - }, - }, - { - sort: { - lastConnected: -1, - }, - } - ).fetch(), - [studioId], - [] - ) - - const [showAvailableDevices, setShowAvailableDevices] = useState(false) - const toggleAvailableDevices = useCallback(() => setShowAvailableDevices((show) => !show), []) - - const isPlayoutConnected = !!studioDevices.find((device) => device.type === 
PeripheralDeviceType.PLAYOUT) - - const confirmRemove = useCallback((deviceId: PeripheralDeviceId, deviceName: string | undefined) => { - doModalDialog({ - title: t('Remove this device?'), - yes: t('Remove'), - no: t('Cancel'), - onAccept: () => { - PeripheralDevices.update(deviceId, { - $unset: { - studioId: 1, - }, - }) - }, - message: ( -

- {t('Are you sure you want to remove device "{{deviceId}}"?', { - deviceId: deviceName || deviceId, - })} -

- ), - }) - }, []) - - const addDevice = useCallback((deviceId: PeripheralDeviceId) => { - PeripheralDevices.update(deviceId, { - $set: { - studioId: studioId, - }, - }) - }, []) - - return ( -
-

- - {t('Peripheral Devices')} - -

-   - {!studioDevices.length ? ( -
- {t('No devices connected')} -
- ) : null} - {!isPlayoutConnected ? ( -
- {t('Playout gateway not connected')} -
- ) : null} - - - {studioDevices.map((device) => ( - - ))} - -
-
- - {showAvailableDevices && ( -
-
- {availableDevices.map((device) => ( - - ))} -
-
- )} -
-
- ) -} - -interface StudioDeviceEntryProps { - device: PeripheralDevice - confirmRemove: (deviceId: PeripheralDeviceId, deviceName: string | undefined) => void -} -function StudioDeviceEntry({ device, confirmRemove }: Readonly) { - const doConfirmRemove = useCallback( - () => confirmRemove(device._id, device.name), - [confirmRemove, device._id, device.name] - ) - return ( - - - {device.name} - - {unprotectString(device._id)} - - - - - - - - ) -} - -interface AvailableDeviceEntryProps { - device: PeripheralDevice - addDevice: (deviceId: PeripheralDeviceId) => void -} -function AvailableDeviceEntry({ device, addDevice }: Readonly) { - const doAddDevice = useCallback(() => { - addDevice(device._id) - }, [addDevice, device._id]) - - return ( -
- {device.name} ({unprotectString(device._id)}) -
- ) -} diff --git a/packages/webui/src/client/ui/Settings/Studio/Devices/index.tsx b/packages/webui/src/client/ui/Settings/Studio/Devices/index.tsx index b88c4fa2e67..119bde6e36f 100644 --- a/packages/webui/src/client/ui/Settings/Studio/Devices/index.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/Devices/index.tsx @@ -1,10 +1,10 @@ import { PeripheralDevices } from '../../../../collections' import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { useTracker } from '../../../../lib/ReactMeteorData/ReactMeteorData' -import { StudioSelectDevices } from './SelectDevices' import { StudioPlayoutSubDevices } from './PlayoutSubDevices' import { StudioInputSubDevices } from './InputSubDevices' import { StudioIngestSubDevices } from './IngestSubDevices' +import { StudioParentDevices } from './ParentDevices' interface IStudioDevicesProps { studioId: StudioId @@ -14,7 +14,7 @@ export function StudioDevices({ studioId }: Readonly): JSX. const studioDevices = useTracker( () => PeripheralDevices.find({ - studioId: studioId, + 'studioAndConfigId.studioId': studioId, }).fetch(), [studioId], [] @@ -22,7 +22,7 @@ export function StudioDevices({ studioId }: Readonly): JSX. 
return ( <> - + diff --git a/packages/webui/src/client/ui/Settings/StudioSettings.tsx b/packages/webui/src/client/ui/Settings/StudioSettings.tsx index 1adb7c17d9d..19d10f98e98 100644 --- a/packages/webui/src/client/ui/Settings/StudioSettings.tsx +++ b/packages/webui/src/client/ui/Settings/StudioSettings.tsx @@ -50,7 +50,7 @@ export default function StudioSettings(): JSX.Element { () => PeripheralDevices.findOne( { - studioId: { + 'studioAndConfigId.studioId': { $eq: studioId, }, parentDeviceId: { diff --git a/packages/webui/src/client/ui/Settings/components/ConfigManifestOAuthFlow.tsx b/packages/webui/src/client/ui/Settings/components/ConfigManifestOAuthFlow.tsx index d1b286f1ce5..543da251685 100644 --- a/packages/webui/src/client/ui/Settings/components/ConfigManifestOAuthFlow.tsx +++ b/packages/webui/src/client/ui/Settings/components/ConfigManifestOAuthFlow.tsx @@ -2,7 +2,7 @@ import * as React from 'react' import { withTranslation } from 'react-i18next' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { Translated } from '../../../lib/ReactMeteorData/react-meteor-data' -import { IngestDeviceSettings } from '@sofie-automation/corelib/dist/dataModel/PeripheralDeviceSettings/ingestDevice' +import { IngestDeviceSecretSettingsStatus } from '@sofie-automation/corelib/dist/dataModel/PeripheralDeviceSettings/ingestDevice' import { NotificationCenter, Notification, NoticeLevel } from '../../../lib/notifications/notifications' import { fetchFrom } from '../../../lib/lib' @@ -126,12 +126,12 @@ export const ConfigManifestOAuthFlowComponent = withTranslation()( render(): JSX.Element { const { t } = this.props - const settings = (this.props.device.settings || {}) as IngestDeviceSettings + const secretStatus = (this.props.device.secretSettingsStatus || {}) as IngestDeviceSecretSettingsStatus const device = this.props.device return (
- {settings.secretAccessToken ? ( + {secretStatus.accessToken ? ( // If this is set, we have completed the authentication procedure. // A reset button is provided to begin the flow again if authorization is revoked by the user.
@@ -145,7 +145,7 @@ export const ConfigManifestOAuthFlowComponent = withTranslation()(
) : (
- {!settings.secretCredentials ? ( + {!secretStatus.credentials ? (
@@ -101,66 +91,6 @@ function useSystemStatus(): StatusResponse | undefined { return sytemStatus } -function usePlayoutDebugStates( - devices: PeripheralDevice[], - userPermissions: UserPermissions -): Map { - const { t } = useTranslation() - - const [playoutDebugStates, setPlayoutDebugStates] = useState>(new Map()) - - const playoutDeviceIds = useMemo(() => { - const deviceIds: PeripheralDeviceId[] = [] - - for (const device of devices) { - if (device.type === PeripheralDeviceType.PLAYOUT && device.settings && (device.settings as any)['debugState']) { - deviceIds.push(device._id) - } - } - - deviceIds.sort() - return deviceIds - }, [devices]) - - useEffect(() => { - if (!userPermissions.developer) { - setPlayoutDebugStates(new Map()) - return - } - - let destroyed = false - - const refreshDebugStates = () => { - for (const deviceId of playoutDeviceIds) { - MeteorCall.systemStatus - .getDebugStates(deviceId) - .then((res) => { - if (destroyed) return - - setPlayoutDebugStates((oldState) => { - // Create a new map based on the old one - const newStates = new Map(oldState.entries()) - for (const [key, state] of Object.entries(res)) { - newStates.set(protectString(key), state) - } - return newStates - }) - }) - .catch((err) => console.log(`Error fetching device states: ${stringifyError(err)}`)) - } - } - - const interval = setInterval(refreshDebugStates, 1000) - - return () => { - clearInterval(interval) - destroyed = true - } - }, [t, JSON.stringify(playoutDeviceIds), userPermissions.developer]) - - return playoutDebugStates -} - function convertDevicesIntoHeirarchy(devices: PeripheralDevice[]): DeviceInHierarchy[] { const devicesMap = new Map() const devicesToAdd: DeviceInHierarchy[] = [] @@ -194,8 +124,18 @@ function convertDevicesIntoHeirarchy(devices: PeripheralDevice[]): DeviceInHiera return devicesHeirarchy } +interface ParentDeviceItemWithChildrenProps { + device: DeviceInHierarchy +} + +function ParentDeviceItemWithChildren({ device }: 
ParentDeviceItemWithChildrenProps) { + const playoutDebugStates = useDebugStatesForPlayoutDevice(device.device) + + return +} + interface DeviceItemWithChildrenProps { - playoutDebugStates: Map + playoutDebugStates: ReadonlyMap parentDevice: DeviceInHierarchy | null device: DeviceInHierarchy } From 147f3b4f590778881369cd66b42c2d69a226aac8 Mon Sep 17 00:00:00 2001 From: olzzon Date: Wed, 4 Dec 2024 12:03:27 +0100 Subject: [PATCH 79/81] feat: add in-out words to the VT type --- packages/blueprints-integration/src/content.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/blueprints-integration/src/content.ts b/packages/blueprints-integration/src/content.ts index 67824be9059..975860172ca 100644 --- a/packages/blueprints-integration/src/content.ts +++ b/packages/blueprints-integration/src/content.ts @@ -55,6 +55,10 @@ export interface VTContent extends BaseContent { /** Duration of extra content past sourceDuration. Not planned to play back but present on the media and playable. 
*/ postrollDuration?: number editable?: VTEditableParameters + /** This is for the VT's in out words */ + firstWords?: string + lastWords?: string + fullScript?: string } export interface GraphicsContent extends BaseContent { From 53758bc329c79a619d05f44263aa3c14575ce8e6 Mon Sep 17 00:00:00 2001 From: olzzon Date: Wed, 4 Dec 2024 13:25:09 +0100 Subject: [PATCH 80/81] feat: dummy VT preview on ON AIR playhead when arg ignore_piece_content_status=1 --- .../ui/FloatingInspectors/VTFloatingInspector.tsx | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/packages/webui/src/client/ui/FloatingInspectors/VTFloatingInspector.tsx b/packages/webui/src/client/ui/FloatingInspectors/VTFloatingInspector.tsx index 5edb6e68e8e..df7d8fa2848 100644 --- a/packages/webui/src/client/ui/FloatingInspectors/VTFloatingInspector.tsx +++ b/packages/webui/src/client/ui/FloatingInspectors/VTFloatingInspector.tsx @@ -13,6 +13,7 @@ import { UIStudio } from '@sofie-automation/meteor-lib/dist/api/studios' import { ITranslatableMessage, translateMessage } from '@sofie-automation/corelib/dist/TranslatableMessage' import { IFloatingInspectorPosition, useInspectorPosition } from './IFloatingInspectorPosition' import { ReadonlyDeep } from 'type-fest' +import { getIgnorePieceContentStatus } from '../../lib/localStorage' interface IProps { status: PieceStatusCode | undefined @@ -109,19 +110,22 @@ export const VTFloatingInspector: React.FC = ({ const { t } = useTranslation() const inspectorRef = useRef(null) + const debugMode = getIgnorePieceContentStatus() + const playPreviewUrl = debugMode ? 'http://some-ip-here:3000/preview.mp4' : previewUrl || '' + const itemDuration = content?.sourceDuration || renderedDuration || 0 const seek = content?.seek ?? 0 const loop = content?.loop ?? 
false const offsetTimePosition = timePosition + seek - const showVideoPlayerInspector = !hideHoverscrubPreview && previewUrl + const showVideoPlayerInspector = !hideHoverscrubPreview && (previewUrl || debugMode) const showMiniInspectorClipData = shouldShowFloatingInspectorContent(status ?? PieceStatusCode.UNKNOWN, content) const showMiniInspectorNotice = noticeLevel !== null const showMiniInspectorData = showMiniInspectorNotice || showMiniInspectorClipData const showAnyFloatingInspector = Boolean(showVideoPlayerInspector) || showMiniInspectorData - const shown = showMiniInspector && itemElement !== undefined && showAnyFloatingInspector + const shown = showMiniInspector && (itemElement !== undefined || debugMode) && showAnyFloatingInspector const { style: floatingInspectorStyle, isFlipped } = useInspectorPosition(position, inspectorRef, shown) @@ -157,7 +161,7 @@ export const VTFloatingInspector: React.FC = ({ ref={inspectorRef} loop={loop} seek={seek} - previewUrl={previewUrl} + previewUrl={playPreviewUrl} timePosition={offsetTimePosition} studioSettings={studio?.settings} floatingInspectorStyle={floatingInspectorStyle} From 1bf7f424ee7d51b65d63177aae16dd443e6c3ccc Mon Sep 17 00:00:00 2001 From: olzzon Date: Thu, 5 Dec 2024 12:58:52 +0100 Subject: [PATCH 81/81] feat: hack add in out work in floating inspector for testing without styling --- .../src/client/ui/FloatingInspectors/VTFloatingInspector.tsx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/webui/src/client/ui/FloatingInspectors/VTFloatingInspector.tsx b/packages/webui/src/client/ui/FloatingInspectors/VTFloatingInspector.tsx index df7d8fa2848..2439779927a 100644 --- a/packages/webui/src/client/ui/FloatingInspectors/VTFloatingInspector.tsx +++ b/packages/webui/src/client/ui/FloatingInspectors/VTFloatingInspector.tsx @@ -148,6 +148,8 @@ export const VTFloatingInspector: React.FC = ({ {showMiniInspectorClipData && (
{content?.fileName} + {content?.firstWords} + {content?.lastWords}
)}