diff --git a/Gemfile b/Gemfile index f032ad84c..1efa68d04 100644 --- a/Gemfile +++ b/Gemfile @@ -73,6 +73,7 @@ gem "aws-sdk-s3" gem "csv" gem "excon" gem "faraday", "~> 0.17.4" +gem "link-header-parser", "~> 6.0", ">= 6.0.1" gem "fiddle" gem "hyperresource" gem "mutex_m" diff --git a/Gemfile.lock b/Gemfile.lock index b969ef24d..8ef8baa8f 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -267,6 +267,7 @@ GEM kaminari-core (= 1.2.2) kaminari-core (1.2.2) language_server-protocol (3.17.0.3) + link-header-parser (6.0.1) local_time (2.1.0) logger (1.6.1) lograge (0.14.0) @@ -783,6 +784,7 @@ DEPENDENCIES importmap-rails jbuilder kaminari + link-header-parser (~> 6.0, >= 6.0.1) local_time logger lograge diff --git a/app/controllers/feeds_controller.rb b/app/controllers/feeds_controller.rb index 4de1b6918..c2a137af3 100644 --- a/app/controllers/feeds_controller.rb +++ b/app/controllers/feeds_controller.rb @@ -24,7 +24,7 @@ def new end def get_apple_show_options(feed) - if feed.apple? && feed.apple_config&.key + if feed.integration_type == :apple && feed.apple_config&.key feed.apple_show_options end end @@ -39,6 +39,15 @@ def new_apple render "new" end + def new_megaphone + @feed = Feeds::MegaphoneFeed.new(podcast: @podcast, private: true) + @feed.build_megaphone_config + authorize @feed + + @feed.assign_attributes(feed_params) + render "new" + end + # POST /feeds def create @feed = @podcast.feeds.new(feed_params) @@ -154,7 +163,8 @@ def nilified_feed_params feed_tokens_attributes: %i[id label token _destroy], feed_images_attributes: %i[id original_url size alt_text caption credit _destroy _retry], itunes_images_attributes: %i[id original_url size alt_text caption credit _destroy _retry], - apple_config_attributes: [:id, :publish_enabled, :sync_blocks_rss, {key_attributes: %i[id provider_id key_id key_pem_b64]}] + apple_config_attributes: [:id, :publish_enabled, :sync_blocks_rss, {key_attributes: %i[id provider_id key_id key_pem_b64]}], + megaphone_config_attributes: [:id, :publish_enabled, :sync_blocks_rss, :network_id, :network_name, :token] ) end end diff --git a/app/controllers/placements_preview_controller.rb b/app/controllers/placements_preview_controller.rb index a670cc9e7..3356141f8 100644 --- a/app/controllers/placements_preview_controller.rb +++ b/app/controllers/placements_preview_controller.rb @@ -1,12 +1,12 @@ class PlacementsPreviewController < ApplicationController - include PrxAccess + include Prx::Api before_action :set_podcast # GET /podcasts/1/placements_preview/2 def show - @fetch_error = cached_placements.nil? @zones = get_zones(params[:id].to_i) + @fetch_error = @zones.nil? end private @@ -16,27 +16,9 @@ def set_podcast authorize @podcast, :show? end - def placements_href - "/api/v1/podcasts/#{@podcast.id}/placements" - end - - def fetch_placements - if ENV["AUGURY_HOST"].present? 
- api(root: augury_root, account: "*").tap { |a| a.href = placements_href }.get - end - rescue HyperResource::ClientError, HyperResource::ServerError, NotImplementedError => e - Rails.logger.error("Error fetching placements", error: e.message) - nil - end - - def cached_placements - Rails.cache.fetch(placements_href, expires_in: 1.minute) do - fetch_placements - end - end - def get_placement(original_count) - cached_placements&.find { |i| i.original_count == original_count } + placements = Prx::Augury.new.placements(@podcast.id) + placements&.find { |i| i.original_count == original_count } end def get_zones(original_count) diff --git a/app/controllers/podcasts_controller.rb b/app/controllers/podcasts_controller.rb index 6f4839903..7160fb035 100644 --- a/app/controllers/podcasts_controller.rb +++ b/app/controllers/podcasts_controller.rb @@ -1,5 +1,5 @@ class PodcastsController < ApplicationController - include PrxAccess + include Prx::Api include SlackHelper before_action :set_podcast, only: %i[show edit update destroy] diff --git a/app/helpers/embed_player_helper.rb b/app/helpers/embed_player_helper.rb index eeb41336c..05b26b441 100644 --- a/app/helpers/embed_player_helper.rb +++ b/app/helpers/embed_player_helper.rb @@ -1,5 +1,5 @@ module EmbedPlayerHelper - include PrxAccess + include Prx::Api EMBED_PLAYER_LANDING_PATH = "/listen" EMBED_PLAYER_PATH = "/e" diff --git a/app/helpers/episodes_helper.rb b/app/helpers/episodes_helper.rb index 69ac00c2a..b2670cbfb 100644 --- a/app/helpers/episodes_helper.rb +++ b/app/helpers/episodes_helper.rb @@ -15,6 +15,29 @@ def episode_explicit_options end end + def episode_integration_status(integration, episode) + status = episode.episode_delivery_status(integration, true) + if !status + "not_found" + elsif status.new_record? + "new" + elsif !status.uploaded? + "incomplete" + elsif !status.delivered? + "processing" + elsif status.delivered? 
+ "complete" + else + "not_found" + end + end + + def episode_integration_updated_at(integration, episode) + episode.sync_log(integration)&.updated_at || + episode.episode_delivery_status(integration)&.created_at || + episode.updated_at + end + def episode_apple_status(episode) apple_episode = episode.apple_episode if !apple_episode diff --git a/app/helpers/feeds_helper.rb b/app/helpers/feeds_helper.rb index c101bbd6d..926308846 100644 --- a/app/helpers/feeds_helper.rb +++ b/app/helpers/feeds_helper.rb @@ -72,4 +72,8 @@ def feed_retry_image_path(feed, form) def apple_feed?(feed) feed.type == "Feeds::AppleSubscription" end + + def megaphone_feed?(feed) + feed.type == "Feeds::MegaphoneFeed" + end end diff --git a/app/helpers/podcasts_helper.rb b/app/helpers/podcasts_helper.rb index ef3024ddf..0e0ad2e87 100644 --- a/app/helpers/podcasts_helper.rb +++ b/app/helpers/podcasts_helper.rb @@ -5,6 +5,23 @@ module PodcastsHelper RSS_LANGUAGE_CODES = %w[af sq eu be bg ca zh-cn zh-tw hr cs da nl nl-be nl-nl en en-au en-bz en-ca en-ie en-jm en-nz en-ph en-za en-tt en-gb en-us en-zw et fo fi fr fr-be fr-ca fr-fr fr-lu fr-mc fr-ch gl gd de de-at de-de de-li de-lu de-ch el haw hu is in ga it it-it it-ch ja ko mk no pl pt pt-br pt-pt ro ro-mo ro-ro ru ru-mo ru-ru sr sk sl es es-ar es-bo es-cl es-co es-cr es-do es-ec es-sv es-gt es-hn es-mx es-ni es-pa es-py es-pe es-pr es-es es-uy es-ve sv sv-fi sv-se tr uk] + def podcast_integration_status(integration, podcast) + sync = podcast.public_feed.sync_log(integration) + if !sync + "not_found" + elsif !sync.external_id + "new" + elsif sync.updated_at <= podcast.updated_at + "incomplete" + else + "complete" + end + end + + def podcast_integration_updated_at(integration, podcast) + podcast.public_feed.sync_log(integration)&.updated_at || podcast.updated_at + end + def feed_description(feed, podcast) [feed.description, podcast.description].detect { |d| d.present? } || "" end diff --git a/app/jobs/publish_apple_job.rb b/app/jobs/publish_apple_job.rb deleted file mode 100644 index ba43a635d..000000000 --- a/app/jobs/publish_apple_job.rb +++ /dev/null @@ -1,20 +0,0 @@ -class PublishAppleJob < ApplicationJob - queue_as :feeder_publishing - - def self.publish_to_apple(apple_config) - apple_config.build_publisher.publish! - end - - def self.do_perform(apple_config) - if !apple_config.publish_to_apple? - logger.info "Skipping publish to apple for #{apple_config.class.name} #{apple_config.id}" - return - end - - publish_to_apple(apple_config) - end - - def perform(apple_config) - self.class.do_perform(apple_config) - end -end diff --git a/app/jobs/publish_feed_job.rb b/app/jobs/publish_feed_job.rb index d25f9e271..eba1f5e2d 100644 --- a/app/jobs/publish_feed_job.rb +++ b/app/jobs/publish_feed_job.rb @@ -12,11 +12,17 @@ def perform(podcast, pub_item) # grab the current publishing pipeline. return :null if null_publishing_item?(podcast, pub_item) return :mismatched if mismatched_publishing_item?(podcast, pub_item) + Rails.logger.info("Starting publishing pipeline via PublishFeedJob", {podcast_id: podcast.id, publishing_queue_item_id: pub_item.id}) PublishingPipelineState.start!(podcast) - podcast.feeds.each { |feed| publish_apple(podcast, feed) } + + # Publish each integration for each feed (e.g. 
apple, megaphone) + podcast.feeds.each { |feed| publish_integration(podcast, feed) } + + # After integrations, publish RSS, if appropriate podcast.feeds.each { |feed| publish_rss(podcast, feed) } + PublishingPipelineState.complete!(podcast) rescue Apple::AssetStateTimeoutError => e fail_state(podcast, "apple_timeout", e) @@ -26,18 +32,17 @@ def perform(podcast, pub_item) PublishingPipelineState.settle_remaining!(podcast) end - def publish_apple(podcast, feed) - return unless feed.publish_to_apple? - - res = PublishAppleJob.do_perform(podcast.apple_config) - PublishingPipelineState.publish_apple!(podcast) + def publish_integration(podcast, feed) + return unless feed.publish_integration? + res = feed.publish_integration! + PublishingPipelineState.publish_integration!(podcast) res rescue => e - if podcast.apple_config.sync_blocks_rss - fail_state(podcast, "apple", e) + if feed.config.sync_blocks_rss + fail_state(podcast, feed.integration_type, e) else - Rails.logger.error("Error publishing to Apple, but continuing to publish RSS", {podcast_id: podcast.id, error: e.message}) - PublishingPipelineState.error_apple!(podcast) + Rails.logger.error("Error publishing to #{feed.integration_type}, but continuing to publish RSS", {podcast_id: podcast.id, error: e.message}) + PublishingPipelineState.error_integration!(podcast) end end @@ -51,9 +56,9 @@ def publish_rss(podcast, feed) def fail_state(podcast, type, error) (pipeline_method, log_level) = case type - when "apple" then [:error_apple!, :warn] - when "rss" then [:error_rss!, :warn] when "apple_timeout", "error" then [:error!, :error] + when "rss" then [:error_rss!, :warn] + else [:error_integration!, :warn] end PublishingPipelineState.public_send(pipeline_method, podcast) diff --git a/app/models/apple/config.rb b/app/models/apple/config.rb index a1a4a2db7..8bdfa007a 100644 --- a/app/models/apple/config.rb +++ b/app/models/apple/config.rb @@ -41,7 +41,7 @@ def self.build_apple_config(podcast, key) def self.mark_as_delivered!(apple_publisher) apple_publisher.episodes_to_sync.each do |episode| if episode.podcast_container&.needs_delivery? == false - episode.feeder_episode.apple_has_delivery! + episode.feeder_episode.apple_mark_as_delivered! end end end @@ -96,7 +96,7 @@ def apple_key def apple_data episode_data = [ - SyncLog.where(feeder_type: "episodes", feeder_id: podcast.episodes.pluck(:id)), + SyncLog.apple.where(feeder_type: "episodes", feeder_id: podcast.episodes.pluck(:id)), Apple::PodcastContainer.where(episode: podcast.episodes) ].flatten.compact diff --git a/app/models/apple/episode.rb b/app/models/apple/episode.rb index 0b6040130..dcd1ec957 100644 --- a/app/models/apple/episode.rb +++ b/app/models/apple/episode.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true module Apple - class Episode + class Episode < Integrations::Base::Episode include Apple::ApiWaiting include Apple::ApiResponse attr_accessor :show, @@ -15,10 +15,8 @@ class Episode EPISODE_ASSET_WAIT_TIMEOUT = 15.minutes.freeze EPISODE_ASSET_WAIT_INTERVAL = 10.seconds.freeze - # Cleans up old delivery/delivery files iff the episode is to be delivered + # Cleans up old delivery/delivery files iff the episode is to be uploaded def self.prepare_for_delivery(episodes) - episodes = episodes.select { |ep| ep.needs_delivery? } - episodes.map do |ep| Rails.logger.info("Preparing episode #{ep.feeder_id} for delivery", {episode_id: ep.feeder_id}) ep.feeder_episode.apple_prepare_for_delivery! 
@@ -205,7 +203,13 @@ def self.upsert_sync_log(ep, res) apple_id = res.dig("api_response", "val", "data", "id") raise "Missing remote apple id" unless apple_id.present? - sl = SyncLog.log!(feeder_id: ep.feeder_episode.id, feeder_type: :episodes, external_id: apple_id, api_response: res) + sl = SyncLog.log!( + integration: :apple, + feeder_id: ep.feeder_episode.id, + feeder_type: :episodes, + external_id: apple_id, + api_response: res + ) # reload local state if ep.feeder_episode.apple_sync_log.nil? ep.feeder_episode.reload @@ -221,6 +225,14 @@ def initialize(show:, feeder_episode:, api:) @api = api || Apple::Api.from_env end + def synced_with_integration? + synced_with_apple? + end + + def integration_new? + apple_new? + end + def api_response feeder_episode.apple_sync_log&.api_response end @@ -247,7 +259,7 @@ def podcast def enclosure_url url = EnclosureUrlBuilder.new.base_enclosure_url(podcast, feeder_episode, private_feed) - EnclosureUrlBuilder.mark_authorized(url, show.private_feed) + EnclosureUrlBuilder.mark_authorized(url, private_feed) end def enclosure_filename @@ -256,7 +268,7 @@ def enclosure_filename end def sync_log - SyncLog.episodes.find_by(feeder_id: feeder_episode.id, feeder_type: :episodes) + SyncLog.apple.episodes.find_by(feeder_id: feeder_episode.id, feeder_type: :episodes) end def self.get_episode_bridge_params(api, feeder_id, apple_id) @@ -420,10 +432,6 @@ def publishing_state_parameters(state) self.class.publishing_state_params(apple_id, state) end - def apple? - feeder_episode.apple? - end - def apple_json return nil unless api_response.present? @@ -498,16 +506,6 @@ def audio_asset_state_success? audio_asset_state == AUDIO_ASSET_SUCCESS end - def has_media_version? - return false unless delivery_status.present? && delivery_status.source_media_version_id.present? - - delivery_status.source_media_version_id == feeder_episode.media_version_id - end - - def needs_media_version? - !has_media_version? - end - def needs_delivery? return true if missing_container? @@ -575,5 +573,19 @@ def apple_episode_delivery_statuses alias_method :delivery_files, :podcast_delivery_files alias_method :delivery_status, :apple_episode_delivery_status alias_method :delivery_statuses, :apple_episode_delivery_statuses + alias_method :apple_status, :apple_episode_delivery_status + + # Delegate methods to feeder_episode + def method_missing(method_name, *arguments, &block) + if feeder_episode.respond_to?(method_name) + feeder_episode.send(method_name, *arguments, &block) + else + super + end + end + + def respond_to_missing?(method_name, include_private = false) + feeder_episode.respond_to?(method_name) || super + end end end diff --git a/app/models/apple/episode_delivery_status.rb b/app/models/apple/episode_delivery_status.rb deleted file mode 100644 index 5781c180c..000000000 --- a/app/models/apple/episode_delivery_status.rb +++ /dev/null @@ -1,25 +0,0 @@ -module Apple - class EpisodeDeliveryStatus < ApplicationRecord - belongs_to :episode, -> { with_deleted }, class_name: "::Episode" - - def self.update_status(episode, attrs) - new_status = (episode.apple_episode_delivery_status&.dup || default_status(episode)) - new_status.assign_attributes(attrs) - new_status.save! 
- episode.apple_episode_delivery_statuses.reset - new_status - end - - def self.default_status(episode) - new(episode: episode) - end - - def increment_asset_wait - self.class.update_status(episode, asset_processing_attempts: (asset_processing_attempts || 0) + 1) - end - - def reset_asset_wait - self.class.update_status(episode, asset_processing_attempts: 0) - end - end -end diff --git a/app/models/apple/podcast_container.rb b/app/models/apple/podcast_container.rb index 30c09a59a..60c1c379b 100644 --- a/app/models/apple/podcast_container.rb +++ b/app/models/apple/podcast_container.rb @@ -9,7 +9,7 @@ class PodcastContainer < ApplicationRecord default_scope { includes(:apple_sync_log) } - has_one :apple_sync_log, -> { podcast_containers }, foreign_key: :feeder_id, class_name: "SyncLog", dependent: :destroy + has_one :apple_sync_log, -> { podcast_containers.apple }, foreign_key: :feeder_id, class_name: "SyncLog", dependent: :destroy has_many :podcast_deliveries, dependent: :destroy has_many :podcast_delivery_files, through: :podcast_deliveries belongs_to :episode, -> { with_deleted }, class_name: "::Episode" @@ -145,7 +145,7 @@ def self.upsert_podcast_container(episode, row) external_id: external_id, feeder_episode_id: episode.feeder_id}) - SyncLog.log!(feeder_id: pc.id, feeder_type: :podcast_containers, external_id: external_id, api_response: row) + SyncLog.log!(integration: :apple, feeder_id: pc.id, feeder_type: :podcast_containers, external_id: external_id, api_response: row) # reset the episode's podcast container cached value pc.reload if action == :updated diff --git a/app/models/apple/podcast_delivery.rb b/app/models/apple/podcast_delivery.rb index 861bdc5d6..f6f74d188 100644 --- a/app/models/apple/podcast_delivery.rb +++ b/app/models/apple/podcast_delivery.rb @@ -8,7 +8,7 @@ class PodcastDelivery < ApplicationRecord default_scope { includes(:apple_sync_log) } - has_one :apple_sync_log, -> { podcast_deliveries }, foreign_key: :feeder_id, class_name: "SyncLog", dependent: :destroy + has_one :apple_sync_log, -> { podcast_deliveries.apple }, foreign_key: :feeder_id, class_name: "SyncLog", dependent: :destroy has_many :podcast_delivery_files, dependent: :destroy belongs_to :episode, -> { with_deleted }, class_name: "::Episode" belongs_to :podcast_container, class_name: "::Apple::PodcastContainer" @@ -143,7 +143,7 @@ def self.upsert_podcast_delivery(podcast_container, row) feeder_episode_id: podcast_container.episode.id, podcast_delivery_id: delivery.id}) - SyncLog.log!(feeder_id: pd.id, feeder_type: :podcast_deliveries, external_id: external_id, api_response: row) + SyncLog.log!(integration: :apple, feeder_id: pd.id, feeder_type: :podcast_deliveries, external_id: external_id, api_response: row) # Flush the cache on the podcast container podcast_container.podcast_deliveries.reset diff --git a/app/models/apple/podcast_delivery_file.rb b/app/models/apple/podcast_delivery_file.rb index 854b5e04a..2a6baf2ae 100644 --- a/app/models/apple/podcast_delivery_file.rb +++ b/app/models/apple/podcast_delivery_file.rb @@ -11,7 +11,7 @@ class DeliveryFileError < StandardError; end default_scope { includes(:apple_sync_log) } - has_one :apple_sync_log, -> { podcast_delivery_files }, foreign_key: :feeder_id, class_name: "SyncLog", autosave: true, dependent: :delete + has_one :apple_sync_log, -> { podcast_delivery_files.apple }, foreign_key: :feeder_id, class_name: "SyncLog", autosave: true, dependent: :delete belongs_to :podcast_delivery has_one :podcast_container, through: :podcast_delivery belongs_to 
:episode, -> { with_deleted }, class_name: "::Episode" @@ -94,7 +94,7 @@ def self.mark_uploaded(api, pdfs) join_on(PODCAST_DELIVERY_FILE_ID_ATTR, pdfs, episode_bridge_results).each do |(pdf, row)| external_id = row.dig("api_response", "val", "data", "id") pdf.update!(api_marked_as_uploaded: true) - SyncLog.log!(feeder_id: pdf.id, feeder_type: :podcast_delivery_files, external_id: external_id, api_response: row) + SyncLog.log!(integration: :apple, feeder_id: pdf.id, feeder_type: :podcast_delivery_files, external_id: external_id, api_response: row) end api.raise_bridge_api_error(errs) if errs.present? @@ -287,7 +287,7 @@ def self.upsert_podcast_delivery_file(podcast_delivery, row) feeder_episode_id: pdf.episode.id, podcast_delivery_file_id: pdf.podcast_delivery.id}) - SyncLog.log!(feeder_id: pdf.id, feeder_type: :podcast_delivery_files, external_id: external_id, api_response: row) + SyncLog.log!(integration: :apple, feeder_id: pdf.id, feeder_type: :podcast_delivery_files, external_id: external_id, api_response: row) # Flush the cache on the podcast container podcast_delivery.delivery_files.reset diff --git a/app/models/apple/publisher.rb b/app/models/apple/publisher.rb index 65ee96901..8728946c7 100644 --- a/app/models/apple/publisher.rb +++ b/app/models/apple/publisher.rb @@ -1,9 +1,5 @@ -# frozen_string_literal: true - module Apple - class Publisher - PUBLISH_CHUNK_LEN = 25 - + class Publisher < Integrations::Base::Publisher attr_reader :public_feed, :private_feed, :api, @@ -35,52 +31,6 @@ def podcast public_feed.podcast end - def filter_episodes_to_sync(eps) - # Reject episodes if the audio is marked as uploaded/complete - # or if the episode is a video - eps - .reject(&:synced_with_apple?) - .reject(&:video_content_type?) - end - - def episodes_to_sync - # only look at the private delegated delivery feed - filter_episodes_to_sync(show.apple_private_feed_episodes) - end - - def filter_episodes_to_archive(eps) - eps_in_private_feed = Set.new(show.apple_private_feed_episodes) - - # Episodes to archive can include: - # - episodes that are now excluded from the feed - # - episodes that are deleted or unpublished - # - episodes that have fallen off the end of the feed (Feed#display_episodes_count) - eps - .reject { |ep| eps_in_private_feed.include?(ep) } - .reject(&:apple_new?) - .reject(&:archived?) - end - - def episodes_to_archive - # look at the global list of episodes, not just the private feed - filter_episodes_to_archive(show.podcast_episodes) - end - - def filter_episodes_to_unarchive(eps) - eps.filter(&:archived?) - end - - def episodes_to_unarchive - # only look at the private delegated delivery feed - filter_episodes_to_unarchive(show.apple_private_feed_episodes) - end - - def only_episodes_with_apple_state(eps) - # Only select episodes that have an remote apple state, - # as determined by the sync log - eps.reject(&:apple_new?) - end - def poll_all_episodes! poll_episodes!(show.podcast_episodes) end @@ -125,39 +75,61 @@ def publish! 
deliver_and_publish!(episodes_to_sync) # success - SyncLog.log!(feeder_id: public_feed.id, feeder_type: :feeds, external_id: show.apple_id, api_response: {success: true}) + SyncLog.log!( + integration: :apple, + feeder_id: public_feed.id, + feeder_type: :feeds, + external_id: show.apple_id, + api_response: {success: true} + ) end def deliver_and_publish!(eps) Rails.logger.tagged("Apple::Publisher#deliver_and_publish!") do eps.each_slice(PUBLISH_CHUNK_LEN) do |eps| - # Soft delete any existing delivery and delivery files - prepare_for_delivery!(eps) + eps.filter(&:apple_needs_upload?).tap do |eps| + upload_media!(eps) + end - # only create if needed - sync_episodes!(eps) - sync_podcast_containers!(eps) + process_and_deliver!(eps) - wait_for_versioned_source_metadata(eps) + raise_delivery_processing_errors(eps) + end + end + end - sync_podcast_deliveries!(eps) - sync_podcast_delivery_files!(eps) + def upload_media!(eps) + # Soft delete any existing delivery and delivery files + prepare_for_delivery!(eps) - # upload and mark as uploaded - execute_upload_operations!(eps) - mark_delivery_files_uploaded!(eps) + # only create if needed + sync_episodes!(eps) + sync_podcast_containers!(eps) - wait_for_upload_processing(eps) + wait_for_versioned_source_metadata(eps) - increment_asset_wait!(eps) - wait_for_asset_state(eps) + sync_podcast_deliveries!(eps) + sync_podcast_delivery_files!(eps) - publish_drafting!(eps) - reset_asset_wait!(eps) + # upload and mark as uploaded, then update the audio container reference + execute_upload_operations!(eps) + mark_delivery_files_uploaded!(eps) + update_audio_container_reference!(eps) - raise_delivery_processing_errors(eps) - end - end + # finally mark the episode as uploaded + mark_as_uploaded!(eps) + end + + def process_and_deliver!(eps) + increment_asset_wait!(eps) + + wait_for_upload_processing(eps) + wait_for_asset_state(eps) + + mark_as_delivered!(eps) + + publish_drafting!(eps) + reset_asset_wait!(eps) end def prepare_for_delivery!(eps) @@ -231,9 +203,7 @@ def wait_for_upload_processing(eps) def increment_asset_wait!(eps) Rails.logger.tagged("##{__method__}") do - eps = eps.filter { |e| e.podcast_delivery_files.any?(&:api_marked_as_uploaded?) } - - # Mark the episodes as waiting again for asset processing + eps = eps.filter { |e| e.feeder_episode.apple_status.uploaded? } eps.each { |ep| ep.apple_episode_delivery_status.increment_asset_wait } end end @@ -361,16 +331,33 @@ def mark_delivery_files_uploaded!(eps) Rails.logger.tagged("##{__method__}") do pdfs = eps.map(&:podcast_delivery_files).flatten ::Apple::PodcastDeliveryFile.mark_uploaded(api, pdfs) + end + end + def update_audio_container_reference!(eps) + Rails.logger.tagged("##{__method__}") do # link the podcast container with the audio to the episode res = Apple::Episode.update_audio_container_reference(api, eps) - # update the feeder episode to indicate that delivery is no longer needed + + Rails.logger.info("Updated remote container references for episodes.", {count: res.length}) + end + end + + def mark_as_delivered!(eps) + Rails.logger.tagged("##{__method__}") do eps.each do |ep| Rails.logger.info("Marking episode as no longer needing delivery", {episode_id: ep.feeder_episode.id}) - ep.feeder_episode.apple_has_delivery! + ep.feeder_episode.apple_mark_as_delivered! 
end + end + end - Rails.logger.info("Updated remote container references for episodes.", {count: res.length}) + def mark_as_uploaded!(eps) + Rails.logger.tagged("##{__method__}") do + eps.each do |ep| + Rails.logger.info("Marking episode as no longer needing delivery", {episode_id: ep.feeder_episode.id}) + ep.feeder_episode.apple_mark_as_uploaded! + end end end diff --git a/app/models/apple/show.rb b/app/models/apple/show.rb index be1b979b0..df336445d 100644 --- a/app/models/apple/show.rb +++ b/app/models/apple/show.rb @@ -1,12 +1,10 @@ # frozen_string_literal: true module Apple - class Show + class Show < Integrations::Base::Show include Apple::ApiResponse - attr_reader :public_feed, - :private_feed, - :api + attr_reader :api def self.apple_shows_json(api) api.get_paged_collection("shows") @@ -17,17 +15,20 @@ def self.apple_episode_json(api, show_id) end def self.connect_existing(apple_show_id, apple_config) - if (sl = SyncLog.find_by(feeder_id: apple_config.public_feed.id, feeder_type: :feeds)) + if (sl = SyncLog.apple.find_by(feeder_id: apple_config.public_feed.id, feeder_type: :feeds)) if apple_show_id.blank? return sl.destroy! elsif sl.external_id != apple_show_id sl.update!(external_id: apple_show_id) end else - SyncLog.log!(feeder_id: apple_config.public_feed.id, + SyncLog.log!( + integration: :apple, + feeder_id: apple_config.public_feed.id, feeder_type: :feeds, sync_completed_at: Time.now.utc, - external_id: apple_show_id) + external_id: apple_show_id + ) end api = Apple::Api.from_apple_config(apple_config) @@ -137,7 +138,13 @@ def sync! Rails.logger.tagged("Apple::Show#sync!") do apple_json = create_or_update_show(sync_log) public_feed.reload - SyncLog.log!(feeder_id: public_feed.id, feeder_type: :feeds, external_id: apple_json["api_response"]["val"]["data"]["id"], api_response: apple_json) + SyncLog.log!( + integration: :apple, + feeder_id: public_feed.id, + feeder_type: :feeds, + external_id: apple_json["api_response"]["val"]["data"]["id"], + api_response: apple_json + ) end end @@ -176,55 +183,8 @@ def get_show self.class.get_show(api, apple_id) end - # In the case where there are duplicate guids in the feeds, we want to make - # sure that the most "current" episode is the one that maps to the remote state. - def sort_by_episode_properties(eps) - # Sort the episodes by: - # 1. Non-deleted episodes first - # 2. Published episodes first - # 3. Published date most recent first - # 4. Created date most recent first - eps = - eps.sort_by do |e| - [ - e.deleted_at.nil? ? 1 : -1, - e.published_at.present? ? 1 : -1, - e.published_at || e.created_at, - e.created_at - ] - end - - # return sorted list, reversed - # modeling a priority queue -- most important first - eps.reverse - end - - def podcast_feeder_episodes - @podcast_feeder_episodes ||= - podcast.episodes - .reset - .with_deleted - .group_by(&:item_guid) - .values - .map { |eps| sort_by_episode_properties(eps) } - .map(&:first) - end - - # All the episodes -- including deleted and unpublished - def podcast_episodes - @podcast_episodes ||= podcast_feeder_episodes.map { |e| Apple::Episode.new(api: api, show: self, feeder_episode: e) } - end - - # Does not include deleted episodes - def episodes - raise "Missing apple show id" unless apple_id.present? 
- - @episodes ||= begin - feed_episode_ids = Set.new(private_feed.feed_episodes.feed_ready.map(&:id)) - - podcast_episodes - .filter { |e| feed_episode_ids.include?(e.feeder_episode.id) } - end + def build_integration_episode(feeder_episode) + Apple::Episode.new(api: api, show: self, feeder_episode: feeder_episode) end def apple_private_feed_episodes diff --git a/app/models/concerns/apple_delivery.rb b/app/models/concerns/apple_delivery.rb index 0083481b7..daea990f6 100644 --- a/app/models/concerns/apple_delivery.rb +++ b/app/models/concerns/apple_delivery.rb @@ -4,59 +4,60 @@ module AppleDelivery extend ActiveSupport::Concern included do - has_one :apple_sync_log, -> { episodes }, foreign_key: :feeder_id, class_name: "SyncLog" + has_one :apple_sync_log, -> { episodes.apple }, foreign_key: :feeder_id, class_name: "SyncLog" has_one :apple_podcast_delivery, class_name: "Apple::PodcastDelivery" has_one :apple_podcast_container, class_name: "Apple::PodcastContainer" has_many :apple_podcast_deliveries, through: :apple_podcast_container, source: :podcast_deliveries, class_name: "Apple::PodcastDelivery" has_many :apple_podcast_delivery_files, through: :apple_podcast_deliveries, source: :podcast_delivery_files, class_name: "Apple::PodcastDeliveryFile" - has_many :apple_episode_delivery_statuses, -> { order(created_at: :desc) }, dependent: :destroy, class_name: "Apple::EpisodeDeliveryStatus" alias_method :podcast_container, :apple_podcast_container alias_method :apple_status, :apple_episode_delivery_status end def publish_to_apple? - podcast.apple_config&.publish_to_apple? + !!podcast.apple_config&.publish_to_apple? end def apple_update_delivery_status(attrs) - Apple::EpisodeDeliveryStatus.update_status(self, attrs) + update_episode_delivery_status(:apple, attrs) + end + + def apple_episode_delivery_statuses + episode_delivery_statuses.apple end def build_initial_delivery_status - Apple::EpisodeDeliveryStatus.default_status(self) + Integrations::EpisodeDeliveryStatus.default_status(:apple, self) end def apple_episode_delivery_status - apple_episode_delivery_statuses.order(created_at: :desc).first || build_initial_delivery_status + episode_delivery_status(:apple) || build_initial_delivery_status end def apple_needs_delivery? - return true if apple_episode_delivery_status.nil? - apple_episode_delivery_status.delivered == false end - def apple_needs_delivery! - apple_update_delivery_status(delivered: false) + def apple_needs_upload? + apple_episode_delivery_status.uploaded == false end - def apple_has_delivery! - apple_update_delivery_status(delivered: true) + def apple_mark_as_not_delivered! + apple_episode_delivery_status.mark_as_not_delivered! end - def measure_asset_processing_duration - statuses = apple_episode_delivery_statuses.to_a - - last_status = statuses.shift - return nil unless last_status&.asset_processing_attempts.to_i.positive? + def apple_mark_as_delivered! + apple_episode_delivery_status.mark_as_delivered! + end - start_status = statuses.find { |status| status.asset_processing_attempts.to_i.zero? } - return nil unless start_status + def apple_mark_as_uploaded! + apple_episode_delivery_status.mark_as_uploaded! + end - Time.now - start_status.created_at + def apple_mark_as_not_uploaded! + apple_episode_delivery_status.mark_as_not_uploaded! end def apple_prepare_for_delivery! @@ -68,7 +69,19 @@ def apple_prepare_for_delivery! end def apple_mark_for_reupload! - apple_needs_delivery! + apple_mark_as_not_delivered! 
+ end + + def measure_asset_processing_duration + statuses = apple_episode_delivery_statuses.to_a + + last_status = statuses.shift + return nil unless last_status&.asset_processing_attempts.to_i.positive? + + start_status = statuses.find { |status| status.asset_processing_attempts.to_i.zero? } + return nil unless start_status + + Time.now - start_status.created_at end def apple_episode diff --git a/app/models/concerns/episode_has_feeds.rb b/app/models/concerns/episode_has_feeds.rb index f2e5eb205..d5afbeef3 100644 --- a/app/models/concerns/episode_has_feeds.rb +++ b/app/models/concerns/episode_has_feeds.rb @@ -24,7 +24,7 @@ module EpisodeHasFeeds def set_default_feeds if feeds.blank? self.feeds = (podcast&.feeds || []).filter_map do |feed| - feed if feed.default? || feed.apple? + feed if feed.default? || feed.integration_type end end end diff --git a/app/models/concerns/import_utils.rb b/app/models/concerns/import_utils.rb index 11ba70dbe..547223962 100644 --- a/app/models/concerns/import_utils.rb +++ b/app/models/concerns/import_utils.rb @@ -1,5 +1,5 @@ require "active_support/concern" -require "prx_access" +require "prx/api" require "net/http" require "uri" require "text_sanitizer" diff --git a/app/models/episode.rb b/app/models/episode.rb index 36e9826e1..d9dde682d 100644 --- a/app/models/episode.rb +++ b/app/models/episode.rb @@ -8,6 +8,7 @@ class Episode < ApplicationRecord include EpisodeFilters include EpisodeHasFeeds include EpisodeMedia + include Integrations::EpisodeIntegrations include PublishingStatus include TextSanitizer include EmbedPlayerHelper @@ -245,7 +246,7 @@ def copy_media(force = false) def publish! Rails.logger.tagged("Episode#publish!") do - apple_mark_for_reupload! + feeds.each { |f| f.mark_as_not_delivered!(self) } podcast&.publish! end end diff --git a/app/models/feed.rb b/app/models/feed.rb index 8538400ae..5bda742df 100644 --- a/app/models/feed.rb +++ b/app/models/feed.rb @@ -34,7 +34,7 @@ class Feed < ApplicationRecord has_many :itunes_images, -> { order("created_at DESC") }, autosave: true, dependent: :destroy, inverse_of: :feed has_many :itunes_categories, validate: true, autosave: true, dependent: :destroy - has_one :apple_sync_log, -> { feeds }, foreign_key: :feeder_id, class_name: "SyncLog" + has_one :apple_sync_log, -> { feeds.apple }, foreign_key: :feeder_id, class_name: "SyncLog" accepts_nested_attributes_for :feed_images, allow_destroy: true, reject_if: ->(i) { i[:id].blank? && i[:original_url].blank? } accepts_nested_attributes_for :itunes_images, allow_destroy: true, reject_if: ->(i) { i[:id].blank? && i[:original_url].blank? } @@ -70,6 +70,28 @@ def self.enclosure_template_default "https://#{ENV["DOVETAIL_HOST"]}{/podcast_id,feed_slug,guid,original_basename}{feed_extension}" end + def mark_as_not_delivered!(episode) + # for default / RSS feeds, don't do anything + # TODO: we could mark an episode as needing to be published in this RSS feed file + # then later check to see if it is published in the feed yet + # a la "where's my episode?" publish tracking + end + + def integration_type + nil + end + + def publish_integration? + false + end + + def publish_integration! + end + + def sync_log(integration) + SyncLog.latest.find_by(integration: integration, feeder_id: id, feeder_type: :feeds) + end + def set_defaults self.file_name ||= DEFAULT_FILE_NAME self.enclosure_template ||= Feed.enclosure_template_default @@ -84,8 +106,8 @@ def sanitize_text def friendly_title if default? I18n.t("helpers.label.feed.friendly_titles.default") - elsif apple? 
- I18n.t("helpers.label.feed.friendly_titles.apple") + elsif integration_type + I18n.t("helpers.label.feed.friendly_titles.#{integration_type}") else title end @@ -146,10 +168,6 @@ def public? !private? end - def apple? - false - end - def default_runtime_settings? default? && public? && include_zones.nil? && audio_format.blank? end diff --git a/app/models/feeds/apple_subscription.rb b/app/models/feeds/apple_subscription.rb index 6e1d9a96b..31aa07485 100644 --- a/app/models/feeds/apple_subscription.rb +++ b/app/models/feeds/apple_subscription.rb @@ -25,6 +25,8 @@ class Feeds::AppleSubscription < Feed validate :must_be_private validate :must_have_token + alias_method :config, :apple_config + # for soft delete, need a unique slug to be able to make another def paranoia_destroy_attributes { @@ -33,6 +35,10 @@ def paranoia_destroy_attributes } end + def mark_as_not_delivered!(episode) + episode.apple_episode_delivery_status.mark_as_not_delivered! + end + def set_defaults self.slug ||= DEFAULT_FEED_SLUG self.title ||= DEFAULT_TITLE @@ -109,8 +115,18 @@ def must_have_token end end - def apple? - true + def integration_type + :apple + end + + def publish_integration! + if publish_integration? + apple_config.build_publisher.publish! + end + end + + def publish_integration? + publish_to_apple? end def publish_to_apple? diff --git a/app/models/feeds/megaphone_feed.rb b/app/models/feeds/megaphone_feed.rb new file mode 100644 index 000000000..54e012f12 --- /dev/null +++ b/app/models/feeds/megaphone_feed.rb @@ -0,0 +1,43 @@ +class Feeds::MegaphoneFeed < Feed + has_one :megaphone_config, class_name: "::Megaphone::Config", dependent: :destroy, autosave: true, validate: true, inverse_of: :feed + + after_initialize :set_defaults + + alias_method :config, :megaphone_config + + accepts_nested_attributes_for :megaphone_config, allow_destroy: true, reject_if: :all_blank + + def self.model_name + Feed.model_name + end + + def integration_type + :megaphone + end + + def set_defaults + self.slug = "prx-#{id}" + self.title = podcast&.title + self.private = true + + super + end + + def publish_integration? + publish_to_megaphone? + end + + def publish_integration! + if publish_integration? + Publisher.new(self).publish! + end + end + + def publish_to_megaphone? + valid? && persisted? && config&.publish_to_megaphone? + end + + def mark_as_not_delivered!(episode) + episode.episode_delivery_statuses.megaphone.first&.mark_as_not_delivered! + end +end diff --git a/app/models/integrations.rb b/app/models/integrations.rb new file mode 100644 index 000000000..f52e375f3 --- /dev/null +++ b/app/models/integrations.rb @@ -0,0 +1,7 @@ +module Integrations + def self.table_name_prefix + "integrations_" + end + + INTEGRATIONS = %i[apple megaphone] +end diff --git a/app/models/integrations/base/episode.rb b/app/models/integrations/base/episode.rb new file mode 100644 index 000000000..ed069fd92 --- /dev/null +++ b/app/models/integrations/base/episode.rb @@ -0,0 +1,46 @@ +module Integrations + module Base + class Episode + attr_accessor :feeder_episode + + def synced_with_integration? + raise NotImplementedError, "Subclasses must implement synced_with_integration?" + end + + def integration_new? + raise NotImplementedError, "Subclasses must implement integration_new?" + end + + def archived? + raise NotImplementedError, "Subclasses must implement archived?" + end + + def video_content_type? + feeder_episode.video_content_type? + end + + def has_media_version? + return false unless delivery_status.present? 
&& delivery_status.source_media_version_id.present? + + delivery_status.source_media_version_id == feeder_episode.media_version_id + end + + def needs_media_version? + !has_media_version? + end + + # Delegate methods to feeder_episode + def method_missing(method_name, *arguments, &block) + if feeder_episode.respond_to?(method_name) + feeder_episode.send(method_name, *arguments, &block) + else + super + end + end + + def respond_to_missing?(method_name, include_private = false) + feeder_episode.respond_to?(method_name) || super + end + end + end +end diff --git a/app/models/integrations/base/episode_set_operations.rb b/app/models/integrations/base/episode_set_operations.rb new file mode 100644 index 000000000..fcdfc2c7f --- /dev/null +++ b/app/models/integrations/base/episode_set_operations.rb @@ -0,0 +1,56 @@ +module Integrations + module Base + module EpisodeSetOperations + # In the case where there are duplicate guids in the feeds, we want to make + # sure that the most "current" episode is the one that maps to the remote state. + def sort_by_episode_properties(eps) + # Sort the episodes by: + # 1. Non-deleted episodes first + # 2. Published episodes first + # 3. Published date most recent first + # 4. Created date most recent first + eps = + eps.sort_by do |e| + [ + e.deleted_at.nil? ? 1 : -1, + e.published_at.present? ? 1 : -1, + e.published_at || e.created_at, + e.created_at + ] + end + + # return sorted list, reversed + # modeling a priority queue -- most important first + eps.reverse + end + + def filter_episodes_to_sync(eps) + # Reject episodes if the audio is marked as uploaded/complete + # or if the episode is a video + eps + .reject(&:synced_with_integration?) + .reject(&:video_content_type?) + end + + def filter_episodes_to_archive(eps, eps_in_feed) + # Episodes to archive can include: + # - episodes that are now excluded from the feed + # - episodes that are deleted or unpublished + # - episodes that have fallen off the end of the feed (Feed#display_episodes_count) + eps + .reject { |ep| eps_in_feed.include?(ep) } + .reject(&:integration_new?) + .reject(&:archived?) + end + + def filter_episodes_to_unarchive(eps) + eps.filter(&:archived?) + end + + # Only select episodes that have a remote integration state + def only_episodes_with_integration_state(eps) + eps.reject(&:integration_new?) + end + end + end +end diff --git a/app/models/integrations/base/publisher.rb b/app/models/integrations/base/publisher.rb new file mode 100644 index 000000000..36f106c95 --- /dev/null +++ b/app/models/integrations/base/publisher.rb @@ -0,0 +1,30 @@ +module Integrations + module Base + class Publisher + PUBLISH_CHUNK_LEN = 25 + include EpisodeSetOperations + + attr_accessor :show + + def initialize(show:) + @show = show + end + + def episodes_to_sync + filter_episodes_to_sync(show.episodes) + end + + def episodes_to_archive + filter_episodes_to_archive(show.podcast_episodes, Set.new(show.episodes)) + end + + def episodes_to_unarchive + filter_episodes_to_unarchive(show.episodes) + end + + def publish! + raise NotImplementedError, "Subclasses must implement publish!" 
+ end + end + end +end diff --git a/app/models/integrations/base/show.rb b/app/models/integrations/base/show.rb new file mode 100644 index 000000000..4b1e8851e --- /dev/null +++ b/app/models/integrations/base/show.rb @@ -0,0 +1,45 @@ +module Integrations + module Base + class Show + include EpisodeSetOperations + + attr_accessor :public_feed, :private_feed + + def podcast + private_feed.podcast + end + + def podcast_feeder_episodes + @podcast_feeder_episodes ||= + podcast.episodes + .reset + .with_deleted + .group_by(&:item_guid) + .values + .map { |eps| sort_by_episode_properties(eps) } + .map(&:first) + end + + # All the episodes -- including deleted and unpublished + def podcast_episodes + @podcast_episodes ||= podcast_feeder_episodes.map { |e| build_integration_episode(e) } + end + + # Does not include deleted episodes + def episodes + @episodes ||= begin + feed_episode_ids = Set.new(private_feed.feed_episodes.feed_ready.map(&:id)) + + podcast_episodes + .filter { |e| feed_episode_ids.include?(e.feeder_episode.id) } + end + end + + private + + def build_integration_episode(feeder_episode) + raise NotImplementedError, "Subclasses must implement build_integration_episode" + end + end + end +end diff --git a/app/models/integrations/episode_delivery_status.rb b/app/models/integrations/episode_delivery_status.rb new file mode 100644 index 000000000..9f5c4e017 --- /dev/null +++ b/app/models/integrations/episode_delivery_status.rb @@ -0,0 +1,46 @@ +module Integrations + class EpisodeDeliveryStatus < ApplicationRecord + belongs_to :episode, -> { with_deleted }, class_name: "::Episode" + + enum :integration, Integrations::INTEGRATIONS + + def self.update_status(integration, episode, attrs) + new_status = (episode.episode_delivery_status(integration)&.dup || default_status(integration, episode)) + attrs[:integration] = integration + new_status.assign_attributes(attrs) + new_status.save! + episode.episode_delivery_statuses.reset + new_status + end + + def self.default_status(integration, episode) + new(episode: episode, integration: integration) + end + + def increment_asset_wait + self.class.update_status(integration, episode, asset_processing_attempts: (asset_processing_attempts || 0) + 1) + end + + def reset_asset_wait + self.class.update_status(integration, episode, asset_processing_attempts: 0) + end + + def mark_as_uploaded! + self.class.update_status(integration, episode, uploaded: true) + end + + def mark_as_not_uploaded! + self.class.update_status(integration, episode, uploaded: false) + end + + # Whether the media file has been uploaded to the Integration + # is a subset of whether the episode has been delivered + def mark_as_delivered! + self.class.update_status(integration, episode, delivered: true, uploaded: true, asset_processing_attempts: 0) + end + + def mark_as_not_delivered! 
+ self.class.update_status(integration, episode, delivered: false, uploaded: false) + end + end +end diff --git a/app/models/integrations/episode_integrations.rb b/app/models/integrations/episode_integrations.rb new file mode 100644 index 000000000..bf0a7dea4 --- /dev/null +++ b/app/models/integrations/episode_integrations.rb @@ -0,0 +1,47 @@ +require "active_support/concern" + +module Integrations::EpisodeIntegrations + extend ActiveSupport::Concern + + included do + has_many :episode_delivery_statuses, -> { order(created_at: :desc) }, class_name: "Integrations::EpisodeDeliveryStatus" + has_many :sync_logs, -> { episodes }, foreign_key: "feeder_id" + + scope :unfinished, ->(integration) do + int = Integrations::EpisodeDeliveryStatus.integrations[integration] + frag = <<~SQL + left join lateral ( + select "integrations_episode_delivery_statuses".* + from "integrations_episode_delivery_statuses" + where "episodes"."id" = "integrations_episode_delivery_statuses"."episode_id" + order by "integrations_episode_delivery_statuses"."created_at" desc + limit 1 + ) eds on true + SQL + joins(frag) + .where('("eds"."episode_id" is null) or (("eds"."delivered" = false or "eds"."uploaded" = false) and "eds"."integration" = ?)', int) + end + end + + def publish_to_integration?(integration) + # see if there is an integration + podcast.feeds.any? { |f| f.integration_type == integration && f.publish_integration? } + end + + def sync_log(integration) + sync_logs.send(integration.intern).order(updated_at: :desc).first + end + + def episode_delivery_status(integration, with_default = false) + status = episode_delivery_statuses.reset.order(created_at: :desc).send(integration.intern).first + if !status && with_default + Integrations::EpisodeDeliveryStatus.default_status(integration, self) + else + status + end + end + + def update_episode_delivery_status(integration, attrs) + Integrations::EpisodeDeliveryStatus.update_status(integration, self, attrs) + end +end diff --git a/app/models/integrations/podcast_integrations.rb b/app/models/integrations/podcast_integrations.rb new file mode 100644 index 000000000..5ca65e8ad --- /dev/null +++ b/app/models/integrations/podcast_integrations.rb @@ -0,0 +1,13 @@ +require "active_support/concern" + +module Integrations::PodcastIntegrations + extend ActiveSupport::Concern + + # included do + # end + + def publish_to_integration?(integration) + # see if there is an integration + feeds.any? { |f| f.integration_type == integration && f.publish_integration? 
} + end +end diff --git a/app/models/megaphone.rb b/app/models/megaphone.rb new file mode 100644 index 000000000..bad25999f --- /dev/null +++ b/app/models/megaphone.rb @@ -0,0 +1,5 @@ +module Megaphone + def self.table_name_prefix + "megaphone_" + end +end diff --git a/app/models/megaphone/api.rb b/app/models/megaphone/api.rb new file mode 100644 index 000000000..f8494e8f1 --- /dev/null +++ b/app/models/megaphone/api.rb @@ -0,0 +1,127 @@ +module Megaphone + class Api + attr_accessor :token, :network_id, :endpoint_url + + DEFAULT_ENDPOINT = "https://cms.megaphone.fm/api" + + PAGINATION_HEADERS = %w[link x-per-page x-page x-total] + PAGINATION_LINKS = %i[first last next previous] + + def initialize(token:, network_id:, endpoint_url: nil) + @token = token + @network_id = network_id + @endpoint_url = endpoint_url + end + + def get(path, params = {}, headers = {}) + request = {url: join_url(path), headers: headers, params: params} + response = get_url(request) + result(request, response) + end + + def post(path, body, headers = {}) + request = {url: join_url(path), headers: headers, body: outgoing_body_filter(body)} + response = connection(request.slice(:url, :headers)).post do |req| + req.body = request[:body] + end + result(request, response) + end + + def put(path, body, headers = {}) + request = {url: join_url(path), headers: headers, body: outgoing_body_filter(body)} + response = connection(request.slice(:url, :headers)).put do |req| + req.body = request[:body] + end + result(request, response) + end + + def result(request, response) + data = incoming_body_filter(response.body) + if data.is_a?(Array) + pagination = pagination_from_headers(response.env.response_headers) + {items: data, pagination: pagination, request: request, response: response} + else + {items: [data], pagination: {}, request: request, response: response} + end + end + + # TODO: and we need delete... + + def api_base + @endpoint_url || DEFAULT_ENDPOINT + end + + def pagination_from_headers(headers) + paging = (headers || {}).slice(*PAGINATION_HEADERS).transform_keys do |h| + h.sub(/^x-/, "").tr("-", "_").to_sym + end + + [:page, :per_page, :total].each do |k| + paging[k] = paging[k].to_i if paging.key?(k) + end + + paging[:link] = parse_links(paging[:link]) + + paging + end + + def parse_links(link_headers) + return {} unless link_headers.present? 
+ collection = LinkHeaderParser.parse(link_headers, base: api_base) + links = collection.group_by_relation_type + PAGINATION_LINKS.each_with_object({}) do |key, map| + if (link = (links[key] || []).first) + map[key] = link.target_uri + end + map + end + end + + def get_url(options) + connection(options).get + end + + def join_url(*path) + File.join(api_base, "networks", network_id, *path) + end + + def incoming_body_filter(str) + result = JSON.parse(str || "") + transform_keys(result) + end + + def transform_keys(result) + if result.is_a?(Array) + result.map { |r| transform_keys(r) } + elsif result.respond_to?(:deep_transform_keys) + result.deep_transform_keys { |key| key.to_s.underscore.to_sym } + else + result + end + end + + def outgoing_body_filter(attr) + (attr || {}).deep_transform_keys { |key| key.to_s.camelize(:lower) }.to_json + end + + def default_headers + { + "Content-Type" => "application/json", + "Accept" => "application/json", + "User-Agent" => "PRX-Feeder-Megaphone/1.0 (Rails-#{Rails.env})" + } + end + + def connection(options) + url = options[:url] + headers = default_headers.merge(options[:headers] || {}) + params = options[:params] || {} + Faraday.new(url: url, headers: headers, params: params) do |builder| + builder.request :token_auth, token + builder.response :raise_error + builder.response :logger, Rails.logger + builder.adapter :excon + end + end + end +end diff --git a/app/models/megaphone/config.rb b/app/models/megaphone/config.rb new file mode 100644 index 000000000..77a440dce --- /dev/null +++ b/app/models/megaphone/config.rb @@ -0,0 +1,14 @@ +module Megaphone + class Config < ApplicationRecord + belongs_to :feed + + validates_presence_of :token, :network_id + + encrypts :token + encrypts :network_id + + def publish_to_megaphone? + valid? && publish_enabled? 
+ end + end +end diff --git a/app/models/megaphone/cuepoint.rb b/app/models/megaphone/cuepoint.rb new file mode 100644 index 000000000..6a0aaeaa1 --- /dev/null +++ b/app/models/megaphone/cuepoint.rb @@ -0,0 +1,62 @@ +module Megaphone + class Cuepoint + include Megaphone::Model + + CUEPOINT_TYPES = %i[preroll midroll postroll remove] + + AD_SOURCES = %i[auto promo span] + + CREATE_REQUIRED = %i[cuepoint_type ad_count start_time end_time ad_sources action] + + CREATE_ATTRIBUTES = CREATE_REQUIRED + %i[title end_time is_active offset notes] + + ALL_ATTRIBUTES = CREATE_ATTRIBUTES + + attr_accessor(*ALL_ATTRIBUTES) + + validates_presence_of CREATE_REQUIRED + + def self.from_zones_and_media(zones, media) + cuepoints = [] + current_cuepoint = nil + original_duration = 0 + original_count = 0 + zones.each do |zone| + # if this is an ad zone, add it to the cue point + if ["ad", "sonic_id", "house"].include?(zone[:type]) + if current_cuepoint + current_cuepoint.ad_count = current_cuepoint.ad_count + 1 + current_cuepoint.ad_sources << source_for_zone(zone) + else + current_cuepoint = new( + cuepoint_type: "#{zone[:section]}roll", + ad_count: 1, + start_time: original_duration, + ad_sources: [source_for_zone(zone)], + action: :insert, + is_active: true + ) + cuepoints << current_cuepoint + end + elsif zone[:type] == "original" + current_cuepoint = nil + original_duration += media[original_count].duration + original_count += 1 + end + end + cuepoints + end + + def self.source_for_zone(zone) + if ["sonic_id", "house"].include?(zone[:type]) + :promo + else + :auto + end + end + + def as_json_for_create + as_json(only: CREATE_ATTRIBUTES.map(&:to_s)) + end + end +end diff --git a/app/models/megaphone/episode.rb b/app/models/megaphone/episode.rb new file mode 100644 index 000000000..5cab9e362 --- /dev/null +++ b/app/models/megaphone/episode.rb @@ -0,0 +1,348 @@ +module Megaphone + class Episode < Integrations::Base::Episode + include Megaphone::Model + attr_accessor :podcast + + # track upload source data + SOURCE_ATTRIBUTES = [:source_media_version_id, :source_size, :source_fetch_count, :source_url, :source_filename] + attr_accessor(*SOURCE_ATTRIBUTES) + + # Used to form the adhash value + ADHASH_VALUES = {"pre" => "0", "mid" => "1", "post" => "2"}.freeze + + # Required attributes for a create + # external_id is not required by megaphone, but we need it to be set! 
+ CREATE_REQUIRED = %i[title external_id] + + CREATE_ATTRIBUTES = CREATE_REQUIRED + %i[pubdate pubdate_timezone author link explicit draft + subtitle summary background_image_file_url background_audio_file_url pre_count post_count + insertion_points guid pre_offset post_offset expected_adhash original_filename original_url + episode_number season_number retain_ad_locations advertising_tags ad_free] + + UPDATE_ATTRIBUTES = CREATE_ATTRIBUTES + + # All other attributes we might expect back from the Megaphone API + # (some documented, others not so much) + OTHER_ATTRIBUTES = %i[id podcast_id created_at updated_at status + download_url audio_file_processing audio_file_status audio_file_updated_at] + + DEPRECATED = %i[] + + ALL_ATTRIBUTES = (CREATE_ATTRIBUTES + DEPRECATED + OTHER_ATTRIBUTES) + + attr_accessor(*ALL_ATTRIBUTES) + + validates_presence_of CREATE_REQUIRED + + validates_presence_of :id, on: :update + + validates_absence_of :id, on: :create + + def self.find_by_episode(megaphone_podcast, feeder_episode) + episode = new_from_episode(megaphone_podcast, feeder_episode) + sync_log = feeder_episode.sync_log(:megaphone) + mp = episode.find_by_megaphone_id(sync_log&.external_id) + mp ||= episode.find_by_guid(feeder_episode.guid) + mp + end + + def self.new_from_episode(megaphone_podcast, feeder_episode) + # start with basic attributes copied from the feeder episode + episode = Megaphone::Episode.new(attributes_from_episode(feeder_episode)) + + # set relations to the feeder episode and megaphone podcast + episode.feeder_episode = feeder_episode + episode.podcast = megaphone_podcast + episode.config = megaphone_podcast.config + + # we should always be able to set these, published or not + # this does make a remote call to get the placements from augury + episode.set_placement_attributes + + # may move this later, but let's also see about audio + episode.set_audio_attributes + + episode + end + + def self.attributes_from_episode(e) + { + title: e.title, + external_id: e.guid, + guid: e.item_guid, + pubdate: e.published_at, + pubdate_timezone: e.published_at, + author: e.author_name, + link: e.url, + explicit: e.explicit, + draft: e.draft?, + subtitle: e.subtitle, + summary: e.description, + background_image_file_url: e.ready_image&.href, + episode_number: e.episode_number, + season_number: e.season_number, + advertising_tags: e.categories, + ad_free: e.categories.include?("adfree") + } + end + + def find_by_guid(guid = feeder_episode.guid) + return nil if guid.blank? + self.api_response = api.get("podcasts/#{podcast.id}/episodes", externalId: guid) + handle_response(api_response) + end + + def find_by_megaphone_id(mpid = id) + return nil if mpid.blank? + self.api_response = api.get("podcasts/#{podcast.id}/episodes/#{mpid}") + handle_response(api_response) + end + + def create! 
+ validate!(:create) + body = as_json(only: CREATE_ATTRIBUTES.map(&:to_s)) + self.api_response = api.post("podcasts/#{podcast.id}/episodes", body) + handle_response(api_response) + update_sync_log + update_delivery_status + self + end + + def update!(episode = nil) + if episode + self.attributes = self.class.attributes_from_episode(episode) + set_placement_attributes + set_audio_attributes + end + validate!(:update) + body = as_json(only: UPDATE_ATTRIBUTES.map(&:to_s)) + self.api_response = api.put("podcasts/#{podcast.id}/episodes/#{id}", body) + handle_response(api_response) + update_sync_log + update_delivery_status + self + end + + # call this when we need to update the audio on mp + # like when dtr wasn't ready at first + # so we can make that update and then mark uploaded + def upload_audio! + set_audio_attributes + update! + end + + # call this when audio has been updated on mp + # and we're checking to see if mp is done processing + # so we can update cuepoints and mark delivered + def check_audio! + # Re-request the megaphone api + find_by_megaphone_id + + # get the audio attributes + set_audio_attributes + + # check to see if the audio on mp matches + if original_filename == source_filename + if !audio_file_processing && audio_file_status == "success" + replace_cuepoints!(feeder_episode) + delivery_status(true).mark_as_delivered! + else + # still waiting - increment asset state + delivery_status(true).increment_asset_wait + end + else + # this would be a weird timing thing maybe, but ... + # if the files don't match, we need to go back and upload + delivery_status(true).mark_as_not_uploaded! + end + end + + def replace_cuepoints!(episode) + # retrieve the placement info from augury + zones = get_placement_zones(feeder_episode.segment_count) + media = feeder_episode.media + + # create cuepoint instances from that + cuepoints = Megaphone::Cuepoint.from_zones_and_media(zones, media) + + # put those as a list to the mp api + cuepoints_batch!(cuepoints) + end + + def cuepoints_batch!(cuepoints) + # validate all the cuepoints about to be created + cuepoints.all? { |cp| cp.validate!(:create) } + body = cuepoints.map { |cp| cp.as_json_for_create } + self.api_response = api.put("podcasts/#{podcast.id}/episodes/#{id}/cuepoints_batch", body) + update_sync_log + update_delivery_status + self + end + + def update_sync_log + SyncLog.log!( + integration: :megaphone, + feeder_id: feeder_episode.id, + feeder_type: :episodes, + external_id: id, + api_response: api_response_log_item + ) + end + + def handle_response(api_response) + if (item = (api_response[:items] || []).first) + self.attributes = item.slice(*ALL_ATTRIBUTES) + self + end + end + + # update delivery status after a create or update + def update_delivery_status + # if there's audio and we just uploaded it successfully, set attr, then check status + if feeder_episode.complete_media? && background_audio_file_url + attrs = source_attributes.merge(uploaded: true) + feeder_episode.update_episode_delivery_status(:megaphone, attrs) + # or if there's no audio yet or it didn't change + else + # we're done, mark it as delivered! + delivery_status(true).mark_as_delivered! + end + feeder_episode.episode_delivery_statuses.reset + end + + def private_feed + podcast.private_feed + end + + def synced_with_integration? + delivery_status&.delivered? + end + + def integration_new? + false + end + + def archived? 
+      false
+    end
+
+    def feeder_podcast
+      feeder_episode.podcast
+    end
+
+    def delivery_status(with_default = false)
+      feeder_episode&.episode_delivery_status(:megaphone, with_default)
+    end
+
+    def set_placement_attributes
+      if (zones = get_placement_zones(feeder_episode.segment_count))
+        self.expected_adhash = adhash_for_placement(zones)
+        self.pre_count = expected_adhash.count("0")
+        self.post_count = expected_adhash.count("2")
+      end
+    end
+
+    def adhash_for_placement(zones)
+      zones
+        .filter { |z| ["ad", "sonic_id", "house"].include?(z[:type]) }
+        .map { |z| ADHASH_VALUES[z[:section]] }
+        .join("")
+    end
+
+    def get_placement_zones(original_count = nil)
+      if original_count.to_i < 1
+        original_count = (feeder_episode&.segment_count || 1).to_i
+      end
+      placements = Prx::Augury.new.placements(feeder_podcast.id)
+      placement = placements&.find { |i| i.original_count == original_count }
+      (placement&.zones || []).map(&:with_indifferent_access)
+    end
+
+    # call this before create or update
+    def set_audio_attributes
+      return unless feeder_episode.complete_media?
+
+      # check if the version is different from what was saved before
+      if !has_media_version?
+        media_info = get_media_info(enclosure_url)
+
+        # if dovetail has the right media info, we can update
+        if media_info[:media_version] == feeder_episode.media_version_id
+          self.source_media_version_id = media_info[:media_version]
+          self.source_size = media_info[:size]
+          self.source_fetch_count = (delivery_status&.source_fetch_count || 0) + 1
+          self.source_url = arrangement_version_url(media_info[:location], media_info[:media_version], source_fetch_count)
+          self.source_filename = url_filename(source_url)
+          self.background_audio_file_url = source_url
+          self.insertion_points = timings
+          self.retain_ad_locations = true
+        else
+          # if not, mark it as not uploaded and move on
+          delivery_status.mark_as_not_uploaded!
+        end
+      end
+    end
+
+    def source_attributes
+      attributes.slice(*SOURCE_ATTRIBUTES)
+    end
+
+    def get_media_info(enclosure)
+      info = {
+        enclosure_url: enclosure,
+        media_version: nil,
+        location: nil,
+        size: nil
+      }
+      resp = Faraday.head(enclosure)
+      if resp.status == 302
+        info[:media_version] = resp.env.response_headers["x-episode-media-version"]
+        info[:location] = resp.env.response_headers["location"]
+        info[:size] = resp.env.response_headers["content-length"]
+      else
+        logger.error("DTR media redirect not returned: #{resp.status}", enclosure: enclosure, resp: resp)
+        raise("DTR media redirect not returned: #{resp.status}")
+      end
+      info
+    rescue => err
+      logger.error("Error getting DTR media info", enclosure: enclosure, err: err)
+      raise err
+    end
+
+    def arrangement_version_url(location, media_version, count)
+      uri = URI.parse(location)
+      path = uri.path.split("/")
+      ext = File.extname(path.last)
+      base = File.basename(path.last, ext)
+      filename = "#{base}_#{media_version}_#{count}#{ext}"
+      uri.path = (path[0..-2] + [filename]).join("/")
+      uri.to_s
+    end
+
+    def url_filename(url)
+      URI.parse(url).path.split("/").last
+    end
+
+    def enclosure_url
+      url = EnclosureUrlBuilder.new.base_enclosure_url(
+        feeder_podcast,
+        feeder_episode,
+        private_feed
+      )
+      EnclosureUrlBuilder.mark_authorized(url, private_feed)
+    end
+
+    def timings
+      feeder_episode.media[0..-2].map(&:duration)
+    end
+
+    def pre_after_original?(zones)
+      sections = zones.split { |z| z[:type] == "original" }
+      sections[1].any?
{ |z| %w[ad house].include?(z[:type]) && z[:id].match(/pre/) } + end + + def post_before_original?(zones) + sections = zones.split { |z| z[:type] == "original" } + sections[-2].any? { |z| %w[ad house].include?(z[:type]) && z[:id].match(/post/) } + end + end +end diff --git a/app/models/megaphone/model.rb b/app/models/megaphone/model.rb new file mode 100644 index 000000000..a9f3b6bec --- /dev/null +++ b/app/models/megaphone/model.rb @@ -0,0 +1,22 @@ +require "active_support/concern" + +module Megaphone + module Model + extend ActiveSupport::Concern + + included do + include ActiveModel::Model + attr_accessor :config + attr_writer :api + attr_accessor :api_response + end + + def api + @api ||= Megaphone::Api.new(token: config.token, network_id: config.network_id) + end + + def api_response_log_item + api_response&.slice(:request, :items, :pagination) + end + end +end diff --git a/app/models/megaphone/paged_collection.rb b/app/models/megaphone/paged_collection.rb new file mode 100644 index 000000000..a852f08f4 --- /dev/null +++ b/app/models/megaphone/paged_collection.rb @@ -0,0 +1,19 @@ +class Megaphone::PagedCollection + attr_accessor :model, :items, :per_page, :page, :total, :links + + def initialize(model, result) + @model = model + + @items = (result[:items] || []).map { |i| model.new(i) } + + paging = result[:pagination] || {} + @per_page = paging[:per_page] + @page = paging[:page] + @total = paging[:total] + @links = paging[:link] + end + + def count + items.length + end +end diff --git a/app/models/megaphone/podcast.rb b/app/models/megaphone/podcast.rb new file mode 100644 index 000000000..f4bc3f03a --- /dev/null +++ b/app/models/megaphone/podcast.rb @@ -0,0 +1,150 @@ +module Megaphone + class Podcast < Integrations::Base::Show + include Megaphone::Model + + # Required attributes for a create + # external_id is not required by megaphone, but we need it to be set! 
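+    # e.g. given a Feeds::MegaphoneFeed with a configured token and network, something
+    # like Megaphone::Podcast.new_from_feed(feed).create! would build the required
+    # attributes below from the feed and its podcast (values depend on that feed)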
+ CREATE_REQUIRED = %i[title subtitle summary itunes_categories language external_id] + + # Other attributes available on create + CREATE_ATTRIBUTES = CREATE_REQUIRED + %i[link copyright author background_image_file_url + explicit owner_name owner_email slug original_rss_url itunes_identifier podtrac_enabled + google_play_identifier episode_limit podcast_type advertising_tags excluded_categories] + + # Update also allows the span opt in, but we don't have permissions + # UPDATE_ATTRIBUTES = CREATE_ATTRIBUTES + %i[span_opt_in] + UPDATE_ATTRIBUTES = CREATE_ATTRIBUTES + + # Deprecated, so we shouldn't rely on these, but they show up as attributes + DEPRECATED = %i[category redirect_url itunes_active redirected_at itunes_rating + google_podcasts_identifier stitcher_identifier] + + # All other attributes we might expect back from the Megaphone API + # (some documented, others not so much) + OTHER_ATTRIBUTES = %i[id created_at updated_at image_file uid network_id recurring_import + episodes_count spotify_identifier default_ad_settings iheart_identifier feed_url + default_pre_count default_post_count cloned_feed_urls ad_free_feed_urls main_feed ad_free] + + ALL_ATTRIBUTES = (UPDATE_ATTRIBUTES + DEPRECATED + OTHER_ATTRIBUTES) + + attr_accessor(*ALL_ATTRIBUTES) + + validates_presence_of CREATE_REQUIRED + + validates_presence_of :id, on: :update + + validates_absence_of :id, on: :create + + def self.find_by_feed(feed) + podcast = new_from_feed(feed) + sync_log = podcast.public_feed.sync_log(:megaphone) + mp = podcast.find_by_megaphone_id(sync_log&.external_id) + mp ||= podcast.find_by_guid(feed.podcast.guid) + mp + end + + def self.new_from_feed(feed) + podcast = Megaphone::Podcast.new(attributes_from_feed(feed)) + podcast.private_feed = feed + podcast.config = feed.config + podcast + end + + def self.attributes_from_feed(feed) + podcast = feed.podcast + itunes_categories = feed.itunes_categories.present? ? feed.itunes_categories : podcast.itunes_categories + { + title: feed.title || podcast.title, + subtitle: feed.subtitle || podcast.subtitle, + summary: feed.description || podcast.description, + itunes_categories: (itunes_categories || []).map(&:name), + language: (podcast.language || "en-us").split("-").first, + link: podcast.link, + copyright: podcast.copyright, + author: podcast.author_name, + background_image_file_url: feed.ready_itunes_image || podcast.ready_itunes_image, + explicit: podcast.explicit, + owner_name: podcast.owner_name, + owner_email: podcast.owner_email, + slug: feed.slug, + # itunes_identifier: ????? TBD, + # handle prefix values in dt rss rendering of enclosure urls + podtrac_enabled: false, + episode_limit: feed.display_episodes_count, + external_id: podcast.guid, + podcast_type: podcast.itunes_type, + advertising_tags: podcast.categories + # set in augury, can we get it here? + # excluded_categories: ????? TBD, + } + end + + def public_feed + private_feed.podcast&.public_feed + end + + def build_integration_episode(feeder_episode) + Megaphone::Episode.new_from_episode(self, feeder_episode) + end + + def updated_at=(d) + d = Time.parse(d) if d.is_a?(String) + @updated_at = d + end + + def list + self.api_response = api.get("podcasts") + Megaphone::PagedCollection.new(Megaphone::Podcast, api_response) + end + + def find_by_guid(guid = podcast.guid) + return nil if guid.blank? + self.api_response = api.get("podcasts", externalId: guid) + handle_response(api_response) + end + + def find_by_megaphone_id(mpid = id) + return nil if mpid.blank? 
+      self.api_response = api.get("podcasts/#{mpid}")
+      handle_response(api_response)
+    end
+
+    def create!
+      validate!(:create)
+      body = as_json(only: CREATE_ATTRIBUTES.map(&:to_s))
+      self.api_response = api.post("podcasts", body)
+      handle_response(api_response)
+      update_sync_log
+      self
+    end
+
+    def update!(feed = nil)
+      if feed
+        self.attributes = self.class.attributes_from_feed(feed)
+      end
+      validate!(:update)
+      body = as_json(only: UPDATE_ATTRIBUTES.map(&:to_s))
+      self.api_response = api.put("podcasts/#{id}", body)
+      handle_response(api_response)
+      update_sync_log
+      self
+    end
+
+    def handle_response(api_response)
+      if (item = (api_response[:items] || []).first)
+        self.attributes = item.slice(*ALL_ATTRIBUTES)
+        self
+      end
+    end
+
+    def update_sync_log
+      SyncLog.log!(
+        integration: :megaphone,
+        feeder_type: :feeds,
+        feeder_id: public_feed.id,
+        external_id: id,
+        api_response: api_response_log_item
+      )
+    end
+  end
+end
diff --git a/app/models/megaphone/publisher.rb b/app/models/megaphone/publisher.rb
new file mode 100644
index 000000000..f6534bf18
--- /dev/null
+++ b/app/models/megaphone/publisher.rb
@@ -0,0 +1,132 @@
+module Megaphone
+  class Publisher < Integrations::Base::Publisher
+    WAIT_INTERVAL = 5.seconds
+    WAIT_TIMEOUT = 5.minutes
+
+    attr_reader :feed
+
+    def initialize(feed)
+      @feed = feed
+    end
+
+    def megaphone_podcast
+      @megaphone_podcast ||= Megaphone::Podcast.find_by_feed(feed)
+    end
+
+    alias_method :show, :megaphone_podcast
+    alias_method :private_feed, :feed
+
+    def publish!
+      sync_podcast!
+      sync_episodes!
+    end
+
+    def sync_episodes!
+      Rails.logger.tagged("Megaphone::Publisher#sync_episodes!") do
+        # delete or unpublish episodes we aren't including in the feed anymore
+        unpublish_and_delete_episodes!
+
+        # start with create and update, make sure they have been created at least
+        create_and_update_episodes!
+
+        # check if the upload has completed and the audio has finished processing
+        check_status_episodes!
+      end
+    end
+
+    # For status
+    # :uploaded = the latest media version was ready on dtr, and was saved to mp
+    # :delivered = attributes saved, media finished processing
+    # if not uploaded, and media ready, try to set that on the next update
+    # if uploaded and not delivered, check mp status, see if processing done
+    def check_status_episodes!
+      episodes = private_feed.episodes.unfinished(:megaphone)
+      timeout_at = Time.now.utc + WAIT_TIMEOUT
+
+      while episodes.size > 0 && Time.now.utc < timeout_at
+        sleep(WAIT_INTERVAL)
+        episodes = check_episodes(episodes)
+      end
+
+      # if after all those checks, still incomplete? throw an error
+      if episodes.size > 0
+        msg = "Megaphone::Publisher.check_status_episodes! timed out on: #{episodes.map(&:id)}"
+        Rails.logger.error(msg)
+        raise msg
+      end
+    end
+
+    def check_episodes(episodes)
+      remaining = []
+
+      episodes.each do |ep|
+        megaphone_episode = Megaphone::Episode.find_by_episode(megaphone_podcast, ep)
+
+        # check if it is uploaded yet
+        # if not go looking for the DTR media version
+        if !megaphone_episode.delivery_status.uploaded?
+          megaphone_episode.upload_audio!
+        end
+
+        # check if it is uploaded, but not delivered - see if megaphone has processed
+        status = megaphone_episode.delivery_status
+        if !status.delivered? && status.uploaded?
+          megaphone_episode.check_audio!
+        end
+
+        if !ep.episode_delivery_status(:megaphone).delivered?
+          remaining << ep
+        end
+      end
+      remaining
+    end
+
+    def create_and_update_episodes!
+ megaphone_episodes = [] + private_feed.episodes.unfinished(:megaphone).each do |ep| + # see if we can find it by guid or megaphone id + if (megaphone_episode = Megaphone::Episode.find_by_episode(megaphone_podcast, ep)) + megaphone_episode.update!(ep) + end + megaphone_episode ||= create_episode!(megaphone_podcast, ep) + megaphone_episodes << megaphone_episode + end + + megaphone_episodes + end + + def create_episode!(megaphone_podcast, ep) + me = Megaphone::Episode.new_from_episode(megaphone_podcast, ep) + me.create! + end + + def unpublish_and_delete_episodes! + end + + def sync_podcast! + Rails.logger.tagged("Megaphone::Publisher#sync_podcast!") do + # see if we need to update by comparing dates + # - there is no episode delivery status ;) + if megaphone_podcast && (megaphone_podcast.updated_at < podcast.updated_at) + megaphone_podcast.update!(feed) + end + + # if that didn't find & update a podcast, create it and set it + @megaphone_podcast ||= Megaphone::Podcast.new_from_feed(feed).create! + megaphone_podcast + end + end + + def config + feed&.config + end + + def public_feed + podcast&.public_feed + end + + def podcast + feed&.podcast + end + end +end diff --git a/app/models/podcast.rb b/app/models/podcast.rb index 238019921..55b5c0d03 100644 --- a/app/models/podcast.rb +++ b/app/models/podcast.rb @@ -15,6 +15,7 @@ class Podcast < ApplicationRecord include EmbedPlayerHelper include PodcastFilters include ReleaseEpisodes + include Integrations::PodcastIntegrations acts_as_paranoid diff --git a/app/models/prx/api.rb b/app/models/prx/api.rb new file mode 100644 index 000000000..1569cfce1 --- /dev/null +++ b/app/models/prx/api.rb @@ -0,0 +1,144 @@ +module Prx + module Api + class PrxHyperResource < HyperResource + def incoming_body_filter(hash) + super(hash.deep_transform_keys { |key| key.to_s.underscore }) + end + + def outgoing_body_filter(hash) + super(hash.deep_transform_keys { |key| key.to_s.camelize(:lower) }) + end + + def to_hash + attributes || {} + end + + class Link < HyperResource::Link + attr_accessor :type, :profile + + def initialize(resource, link_spec = {}) + super + self.type = link_spec["type"] + self.profile = link_spec["profile"] + end + + def where(params) + super.tap do |res| + res.type = type + res.profile = profile + end + end + + def headers(*args) + super.tap do |res| + if args.count > 0 + res.type = type + res.profile = profile + end + end + end + + def post_response(attrs = nil) + attrs ||= resource.attributes + attrs = (resource.default_attributes || {}).merge(attrs) + + # adding this line to call outgoing_body_filter + attrs = resource.outgoing_body_filter(attrs) + + faraday_connection.post do |req| + req.body = resource.adapter.serialize(attrs) + end + end + + def put_response(attrs = nil) + attrs ||= resource.attributes + attrs = (resource.default_attributes || {}).merge(attrs) + + # adding this line to call outgoing_body_filter + attrs = resource.outgoing_body_filter(attrs) + + faraday_connection.put do |req| + req.body = resource.adapter.serialize(attrs) + end + end + + def patch_response(attrs = nil) + attrs ||= resource.attributes.changed_attributes + attrs = (resource.default_attributes || {}).merge(attrs) + + # adding this line to call outgoing_body_filter + attrs = resource.outgoing_body_filter(attrs) + + faraday_connection.patch do |req| + req.body = resource.adapter.serialize(attrs) + end + end + end + end + + def default_headers + { + "Content-Type" => "application/json", + "Accept" => "application/json" + } + end + + def api(options = {}) + 
opts = {root: id_root, headers: default_headers}.merge(options) + if (account = opts.delete(:account)) + token = get_account_token(account) + opts[:headers]["Authorization"] = "Bearer #{token}" + end + + PrxHyperResource.new(opts) + end + + def api_resource(body, root = id_root) + href = body.dig(:_links, :self, :href) + resource = api(root: root) + link = PrxHyperResource::Link.new(resource, href: href) + PrxHyperResource.new_from(body: body, resource: resource, link: link) + end + + def get_account_token(account) + id = ENV["PRX_CLIENT_ID"] + se = ENV["PRX_SECRET"] + oauth_options = {site: id_root, token_url: "/token"} + client = OAuth2::Client.new(id, se, oauth_options) do |faraday| + faraday.request :url_encoded + faraday.adapter :excon + end + client.client_credentials.get_token(account: account).token + end + + def id_root + root_uri ENV["ID_HOST"] + end + + def play_root + root_uri ENV["PLAY_HOST"] + end + + private + + def method_missing(method, *args) + if /_root$/.match?(method) + root_uri ENV[method.to_s.sub(/_root$/, "_HOST").upcase], "/api/v1" + else + super + end + end + + def respond_to_missing?(method, include_private = false) + method.to_s.ends_with?("_root") || super + end + + def root_uri(host, path = "") + if /\.org|\.tech/.match?(host) + URI::HTTPS.build(host: host, path: path).to_s + else + URI::HTTP.build(host: host, path: path).to_s + end + end + end +end diff --git a/app/models/prx/augury.rb b/app/models/prx/augury.rb new file mode 100644 index 000000000..10376bd49 --- /dev/null +++ b/app/models/prx/augury.rb @@ -0,0 +1,27 @@ +module Prx + class Augury + include Prx::Api + API_PATH = "/api/v1" + + attr_accessor :enabled, :root, :expiration + + def initialize(options = {}) + @expiration = (options[:expiration] || 1.minute).to_i + @root = options[:root] || augury_root + @enabled = @root.present? + end + + def placements(podcast_id, options = {}) + path = "#{API_PATH}/podcasts/#{podcast_id}/placements" + expires = (options[:expiration] || expiration).to_i + Rails.cache.fetch(path, expires_in: expires) { get(root, path) } + end + + def get(root, path) + api(root: root, account: "*").tap { |a| a.href = path }.get + rescue HyperResource::ClientError, HyperResource::ServerError, NotImplementedError => e + Rails.logger.error("Error: GET #{path}", error: e.message) + nil + end + end +end diff --git a/app/models/publishing_pipeline_state.rb b/app/models/publishing_pipeline_state.rb index 3fcb5baf7..b7d38ebff 100644 --- a/app/models/publishing_pipeline_state.rb +++ b/app/models/publishing_pipeline_state.rb @@ -48,11 +48,11 @@ class PublishingPipelineState < ApplicationRecord :created, :started, :published_rss, - :published_apple, + :published_integration, :complete, :error, :expired, - :error_apple, + :error_integration, :error_rss ] @@ -146,16 +146,17 @@ def self.publish_rss!(podcast) state_transition(podcast, :published_rss) end - def self.publish_apple!(podcast) - state_transition(podcast, :published_apple) + def self.error_rss!(podcast) + state_transition(podcast, :error_rss) end - def self.error_apple!(podcast) - state_transition(podcast, :error_apple) + # TODO: do something with the integration type? 
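+  # e.g. an integration publisher would presumably call
+  # PublishingPipelineState.publish_integration!(podcast) on success and
+  # PublishingPipelineState.error_integration!(podcast) on failure, mirroring publish_rss! above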
+ def self.publish_integration!(podcast) + state_transition(podcast, :published_integration) end - def self.error_rss!(podcast) - state_transition(podcast, :error_rss) + def self.error_integration!(podcast) + state_transition(podcast, :error_integration) end def self.complete!(podcast) diff --git a/app/models/sync_log.rb b/app/models/sync_log.rb index e443b1619..ede1522a9 100644 --- a/app/models/sync_log.rb +++ b/app/models/sync_log.rb @@ -1,9 +1,12 @@ -# frozen_string_literal: true - class SyncLog < ApplicationRecord + enum :integration, Integrations::INTEGRATIONS + + # kinda like an AR polymorphic relation, but not using that enum :feeder_type, { + # common feeds: "feeds", episodes: "episodes", + # apple podcast_containers: "containers", podcast_deliveries: "deliveries", podcast_delivery_files: "delivery_files" @@ -12,7 +15,7 @@ class SyncLog < ApplicationRecord scope :latest, -> do joins("JOIN LATERAL ( SELECT max(id) as max_id FROM sync_logs - GROUP BY feeder_type, feeder_id, external_id ) q + GROUP BY integration, feeder_type, feeder_id, external_id ) q ON id = max_id") end @@ -23,12 +26,18 @@ def complete? end def self.log!(attrs) + integration = attrs.delete(:integration) feeder_type = attrs.delete(:feeder_type) feeder_id = attrs.delete(:feeder_id) external_id = attrs.delete(:external_id) api_response = attrs.delete(:api_response) - sync_log = SyncLog.find_or_initialize_by(feeder_type: feeder_type, feeder_id: feeder_id, external_id: external_id) + sync_log = SyncLog.find_or_initialize_by( + integration: integration, + feeder_type: feeder_type, + feeder_id: feeder_id, + external_id: external_id + ) sync_log.update!(api_response: api_response) sync_log end diff --git a/app/models/task.rb b/app/models/task.rb index fdc14b2a1..074eef8f4 100644 --- a/app/models/task.rb +++ b/app/models/task.rb @@ -1,5 +1,5 @@ require "hash_serializer" -require "prx_access" +require "prx/api" class Task < ApplicationRecord include PorterCallback diff --git a/app/policies/feed_policy.rb b/app/policies/feed_policy.rb index 5b60bf395..34552abda 100644 --- a/app/policies/feed_policy.rb +++ b/app/policies/feed_policy.rb @@ -15,6 +15,10 @@ def new_apple? update? end + def new_megaphone? + update? + end + def update? PodcastPolicy.new(token, resource.podcast).update? && !resource.edit_locked? end diff --git a/app/policies/megaphone/config_policy.rb b/app/policies/megaphone/config_policy.rb new file mode 100644 index 000000000..1ab3e04b5 --- /dev/null +++ b/app/policies/megaphone/config_policy.rb @@ -0,0 +1,17 @@ +class Megaphone::ConfigPolicy < ApplicationPolicy + def new? + create? + end + + def show? + FeedPolicy.new(token, resource.feed).show? + end + + def create? + FeedPolicy.new(token, resource.feed).create? + end + + def update? + FeedPolicy.new(token, resource.feed).update? 
+ end +end diff --git a/app/representers/api/base_representer.rb b/app/representers/api/base_representer.rb index f221ac120..0d9a3aa0e 100644 --- a/app/representers/api/base_representer.rb +++ b/app/representers/api/base_representer.rb @@ -1,10 +1,10 @@ require "api" require "hal_api/representer" -require "prx_access" +require "prx/api" require "text_sanitizer" class Api::BaseRepresenter < HalApi::Representer - include PrxAccess + include Prx::Api include TextSanitizer self.alternate_host = ENV["PRX_HOST"] || "www.prx.org" diff --git a/app/views/episodes/_form_status.html.erb b/app/views/episodes/_form_status.html.erb index d29098d70..1f01c9817 100644 --- a/app/views/episodes/_form_status.html.erb +++ b/app/views/episodes/_form_status.html.erb @@ -47,6 +47,22 @@
 <% end %>
+ <% if episode.persisted? && episode.publish_to_integration?(:megaphone) %>
+ <% integration = :megaphone %>
+ <% integration_status = episode_integration_status(integration, episode) %>
+
+ <%= "#{integration.to_s.titleize} Status" %>:
+
+ <%= t("helpers.label.episode.media_statuses.#{integration_status}") %>
+
+
+ <%= t(".last_updated") %>
+
+ <%= local_time_ago(episode_integration_updated_at(integration, episode)) %>
+
<%= t(".description") %>
+ <%= t(".guide_link").html_safe %> +
+ <%= "#{integration.to_s.titleize} Status" %>:
+
+ <%= t("helpers.label.episode.media_statuses.#{integration_status}") %>
+
+
+ <%= t(".last_updated") %>
+
+ <%= local_time_ago(podcast_integration_updated_at(integration, podcast)) %>
+