From 0afc52221ec70062b34ad4a53e9ce02f3265e1ae Mon Sep 17 00:00:00 2001 From: Maciej Mensfeld Date: Thu, 7 Dec 2023 15:40:51 +0100 Subject: [PATCH] [In progress] 0.8.0 (#168) --- CHANGELOG.md | 36 + Gemfile.lock | 18 +- karafka-web.gemspec | 2 +- lib/karafka/web.rb | 8 +- lib/karafka/web/config.rb | 14 +- lib/karafka/web/contracts/config.rb | 7 +- lib/karafka/web/errors.rb | 12 + lib/karafka/web/inflector.rb | 33 + lib/karafka/web/installer.rb | 31 +- lib/karafka/web/management/actions/base.rb | 36 + .../web/management/actions/clean_boot_file.rb | 33 + .../actions/create_initial_states.rb | 77 + .../web/management/actions/create_topics.rb | 137 + .../web/management/actions/delete_topics.rb | 30 + lib/karafka/web/management/actions/enable.rb | 105 + .../management/actions/extend_boot_file.rb | 39 + .../management/actions/migrate_states_data.rb | 18 + lib/karafka/web/management/base.rb | 34 - lib/karafka/web/management/clean_boot_file.rb | 31 - .../web/management/create_initial_states.rb | 101 - lib/karafka/web/management/create_topics.rb | 133 - lib/karafka/web/management/delete_topics.rb | 28 - lib/karafka/web/management/enable.rb | 102 - .../web/management/extend_boot_file.rb | 37 - .../web/management/migrations/0_base.rb | 58 + .../0_set_initial_consumers_metrics.rb | 36 + .../0_set_initial_consumers_state.rb | 43 + ...ved_and_sent_bytes_in_consumers_metrics.rb | 26 + ...eived_and_sent_bytes_in_consumers_state.rb | 23 + ..._introduce_waiting_in_consumers_metrics.rb | 24 + ...22_introduce_waiting_in_consumers_state.rb | 20 + ...emove_processing_from_consumers_metrics.rb | 24 + ..._remove_processing_from_consumers_state.rb | 20 + lib/karafka/web/management/migrator.rb | 117 + lib/karafka/web/processing/consumer.rb | 77 +- .../consumers/aggregators/metrics.rb | 5 +- .../processing/consumers/aggregators/state.rb | 6 +- lib/karafka/web/processing/publisher.rb | 59 + .../web/tracking/consumers/contracts/job.rb | 3 +- .../tracking/consumers/contracts/partition.rb | 1 + 
.../tracking/consumers/contracts/report.rb | 1 + .../consumers/contracts/subscription_group.rb | 11 +- .../consumers/listeners/connections.rb | 34 + .../tracking/consumers/listeners/pausing.rb | 11 +- .../consumers/listeners/processing.rb | 32 +- .../consumers/listeners/statistics.rb | 48 +- lib/karafka/web/tracking/consumers/sampler.rb | 88 +- .../web/tracking/helpers/ttls/array.rb | 72 + lib/karafka/web/tracking/helpers/ttls/hash.rb | 34 + .../web/tracking/helpers/ttls/stats.rb | 49 + .../web/tracking/helpers/ttls/windows.rb | 32 + lib/karafka/web/tracking/ttl_array.rb | 59 - lib/karafka/web/tracking/ttl_hash.rb | 16 - lib/karafka/web/ui/app.rb | 26 +- lib/karafka/web/ui/base.rb | 20 +- lib/karafka/web/ui/controllers/base.rb | 38 +- lib/karafka/web/ui/controllers/become_pro.rb | 2 +- lib/karafka/web/ui/controllers/cluster.rb | 42 +- lib/karafka/web/ui/controllers/consumers.rb | 10 +- lib/karafka/web/ui/controllers/dashboard.rb | 4 +- lib/karafka/web/ui/controllers/errors.rb | 4 +- lib/karafka/web/ui/controllers/jobs.rb | 60 +- .../web/ui/controllers/requests/params.rb | 5 + .../web/ui/controllers/responses/deny.rb | 15 + .../web/ui/controllers/responses/file.rb | 23 + .../responses/{data.rb => render.rb} | 6 +- lib/karafka/web/ui/controllers/routing.rb | 13 +- lib/karafka/web/ui/controllers/status.rb | 2 +- .../web/ui/helpers/application_helper.rb | 70 + lib/karafka/web/ui/lib/hash_proxy.rb | 43 +- lib/karafka/web/ui/lib/sorter.rb | 170 + lib/karafka/web/ui/models/counters.rb | 6 + lib/karafka/web/ui/models/health.rb | 25 +- lib/karafka/web/ui/models/jobs.rb | 48 + .../ui/models/metrics/charts/aggregated.rb | 19 + .../web/ui/models/metrics/charts/topics.rb | 2 +- lib/karafka/web/ui/models/process.rb | 3 +- lib/karafka/web/ui/models/status.rb | 30 +- lib/karafka/web/ui/models/topic.rb | 4 +- .../web/ui/models/visibility_filter.rb | 16 + lib/karafka/web/ui/pro/app.rb | 50 +- lib/karafka/web/ui/pro/controllers/cluster.rb | 1 + .../web/ui/pro/controllers/consumers.rb 
| 58 +- .../web/ui/pro/controllers/dashboard.rb | 2 +- lib/karafka/web/ui/pro/controllers/dlq.rb | 2 +- lib/karafka/web/ui/pro/controllers/errors.rb | 6 +- .../web/ui/pro/controllers/explorer.rb | 16 +- lib/karafka/web/ui/pro/controllers/health.rb | 36 +- lib/karafka/web/ui/pro/controllers/jobs.rb | 11 + .../web/ui/pro/controllers/messages.rb | 42 + lib/karafka/web/ui/pro/controllers/routing.rb | 13 +- .../ui/pro/views/consumers/_breadcrumbs.erb | 10 +- .../web/ui/pro/views/consumers/_counters.erb | 14 +- .../ui/pro/views/consumers/consumer/_job.erb | 5 +- .../pro/views/consumers/consumer/_no_jobs.erb | 2 +- .../views/consumers/consumer/_partition.erb | 4 +- .../consumer/_subscription_group.erb | 37 +- .../ui/pro/views/consumers/consumer/_tabs.erb | 13 +- .../web/ui/pro/views/consumers/index.erb | 6 +- .../ui/pro/views/consumers/pending_jobs.erb | 43 + .../consumers/{jobs.erb => running_jobs.erb} | 21 +- .../pro/views/dashboard/_ranges_selector.erb | 39 - .../web/ui/pro/views/dashboard/index.erb | 6 + .../explorer/message/_message_actions.erb | 18 + .../pro/views/explorer/message/_metadata.erb | 43 + .../pro/views/explorer/message/_payload.erb | 21 + .../explorer/message/_payload_actions.erb | 19 + .../web/ui/pro/views/explorer/show.erb | 93 +- .../web/ui/pro/views/health/_breadcrumbs.erb | 8 + .../web/ui/pro/views/health/_partition.erb | 4 +- .../ui/pro/views/health/_partition_offset.erb | 8 +- .../ui/pro/views/health/_partition_times.erb | 32 + lib/karafka/web/ui/pro/views/health/_tabs.erb | 9 + .../web/ui/pro/views/health/changes.erb | 66 + .../web/ui/pro/views/health/offsets.erb | 28 +- .../web/ui/pro/views/health/overview.erb | 22 +- lib/karafka/web/ui/pro/views/jobs/_job.erb | 2 +- .../web/ui/pro/views/jobs/_no_jobs.erb | 2 +- lib/karafka/web/ui/pro/views/jobs/pending.erb | 39 + lib/karafka/web/ui/pro/views/jobs/running.erb | 39 + .../ui/pro/views/routing/_consumer_group.erb | 4 +- .../web/ui/pro/views/shared/_navigation.erb | 2 +- 
.../web/ui/public/javascripts/application.js | 10 + .../web/ui/public/stylesheets/application.css | 4 + .../web/ui/views/cluster/_breadcrumbs.erb | 16 + lib/karafka/web/ui/views/cluster/_tabs.erb | 27 + lib/karafka/web/ui/views/cluster/brokers.erb | 27 + lib/karafka/web/ui/views/cluster/index.erb | 74 - lib/karafka/web/ui/views/cluster/topics.erb | 35 + .../web/ui/views/consumers/_counters.erb | 14 +- lib/karafka/web/ui/views/consumers/index.erb | 6 +- .../ui/views/dashboard/_ranges_selector.erb | 30 +- lib/karafka/web/ui/views/dashboard/index.erb | 27 +- lib/karafka/web/ui/views/errors/show.erb | 25 +- .../web/ui/views/jobs/_breadcrumbs.erb | 18 +- lib/karafka/web/ui/views/jobs/_job.erb | 2 +- lib/karafka/web/ui/views/jobs/_no_jobs.erb | 2 +- lib/karafka/web/ui/views/jobs/_tabs.erb | 27 + .../ui/views/jobs/{index.erb => pending.erb} | 16 +- .../jobs/index.erb => views/jobs/running.erb} | 20 +- .../web/ui/views/routing/_consumer_group.erb | 4 +- .../web/ui/views/shared/_navigation.erb | 2 +- .../web/ui/views/shared/_pagination.erb | 2 +- .../views/shared/exceptions/not_allowed.erb | 37 + lib/karafka/web/ui/views/status/show.erb | 19 +- .../warnings/_routing_topics_presence.erb | 15 + lib/karafka/web/version.rb | 2 +- spec/fixtures/consumer_report.json | 23 +- spec/fixtures/consumers_metrics.json | 228 +- spec/fixtures/consumers_metrics_v1.0.0.json | 5864 +++++++++++++++++ spec/fixtures/consumers_single_metrics.json | 16 +- spec/fixtures/consumers_state.json | 2 +- spec/fixtures/consumers_state_v1.1.0.json | 33 + .../multi_partition_reports/process_1.json | 10 +- .../multi_partition_reports/process_2.json | 10 +- spec/lib/karafka/web/contracts/config_spec.rb | 18 +- spec/lib/karafka/web/installer_spec.rb | 62 +- .../{ => actions}/clean_boot_file_spec.rb | 0 .../create_initial_states_spec.rb | 48 +- .../{ => actions}/create_topics_spec.rb | 0 .../{ => actions}/delete_topics_spec.rb | 0 .../{ => actions}/extend_boot_file_spec.rb | 0 
.../actions/migrate_states_data_spec.rb | 17 + .../0_set_initial_consumers_metrics_spec.rb | 8 + .../0_set_initial_consumers_state_spec.rb | 8 + ...nd_sent_bytes_in_consumers_metrics_spec.rb | 31 + ..._and_sent_bytes_in_consumers_state_spec.rb | 17 + ...oduce_waiting_in_consumers_metrics_spec.rb | 21 + ...troduce_waiting_in_consumers_state_spec.rb | 14 + ..._processing_from_consumers_metrics_spec.rb | 21 + ...ve_processing_from_consumers_state_spec.rb | 14 + .../karafka/web/management/migrator_spec.rb | 67 + .../consumers/aggregators/metrics_spec.rb | 10 +- .../contracts/consumer_group_spec.rb | 4 +- .../tracking/consumers/contracts/job_spec.rb | 29 +- .../consumers/contracts/partition_spec.rb | 3 + .../consumers/contracts/report_spec.rb | 5 +- .../contracts/subscription_group_spec.rb | 55 +- .../consumers/contracts/topic_spec.rb | 1 + .../consumers/listeners/pausing_spec.rb | 13 +- .../consumers/listeners/processing_spec.rb | 22 +- .../consumers/listeners/statistics_spec.rb | 23 +- .../web/tracking/consumers/sampler_spec.rb | 7 +- .../ttls/array_spec.rb} | 0 .../ttls/hash_spec.rb} | 0 .../web/tracking/helpers/ttls/stats_spec.rb | 5 + .../web/tracking/helpers/ttls/windows_spec.rb | 5 + spec/lib/karafka/web/tracking/sampler_spec.rb | 4 +- .../web/ui/controllers/cluster_spec.rb | 30 +- .../web/ui/controllers/dashboard_spec.rb | 26 +- .../karafka/web/ui/controllers/errors_spec.rb | 3 +- .../karafka/web/ui/controllers/jobs_spec.rb | 216 +- spec/lib/karafka/web/ui/lib/sorter_spec.rb | 89 + .../karafka/web/ui/models/counters_spec.rb | 11 + spec/lib/karafka/web/ui/models/health_spec.rb | 2 +- spec/lib/karafka/web/ui/models/job_spec.rb | 2 +- spec/lib/karafka/web/ui/models/jobs_spec.rb | 34 + .../lib/karafka/web/ui/models/process_spec.rb | 2 +- spec/lib/karafka/web/ui/models/status_spec.rb | 61 +- .../web/ui/models/visibility_filter_spec.rb | 28 + .../web/ui/pro/controllers/cluster_spec.rb | 30 +- .../web/ui/pro/controllers/consumers_spec.rb | 150 +- 
.../web/ui/pro/controllers/dashboard_spec.rb | 26 +- .../web/ui/pro/controllers/explorer_spec.rb | 54 +- .../web/ui/pro/controllers/health_spec.rb | 83 + .../web/ui/pro/controllers/jobs_spec.rb | 223 +- .../web/ui/pro/controllers/messages_spec.rb | 94 + spec/spec_helper.rb | 3 + 208 files changed, 10697 insertions(+), 1382 deletions(-) create mode 100644 lib/karafka/web/inflector.rb create mode 100644 lib/karafka/web/management/actions/base.rb create mode 100644 lib/karafka/web/management/actions/clean_boot_file.rb create mode 100644 lib/karafka/web/management/actions/create_initial_states.rb create mode 100644 lib/karafka/web/management/actions/create_topics.rb create mode 100644 lib/karafka/web/management/actions/delete_topics.rb create mode 100644 lib/karafka/web/management/actions/enable.rb create mode 100644 lib/karafka/web/management/actions/extend_boot_file.rb create mode 100644 lib/karafka/web/management/actions/migrate_states_data.rb delete mode 100644 lib/karafka/web/management/base.rb delete mode 100644 lib/karafka/web/management/clean_boot_file.rb delete mode 100644 lib/karafka/web/management/create_initial_states.rb delete mode 100644 lib/karafka/web/management/create_topics.rb delete mode 100644 lib/karafka/web/management/delete_topics.rb delete mode 100644 lib/karafka/web/management/enable.rb delete mode 100644 lib/karafka/web/management/extend_boot_file.rb create mode 100644 lib/karafka/web/management/migrations/0_base.rb create mode 100644 lib/karafka/web/management/migrations/0_set_initial_consumers_metrics.rb create mode 100644 lib/karafka/web/management/migrations/0_set_initial_consumers_state.rb create mode 100644 lib/karafka/web/management/migrations/1699543515_fill_missing_received_and_sent_bytes_in_consumers_metrics.rb create mode 100644 lib/karafka/web/management/migrations/1699543515_fill_missing_received_and_sent_bytes_in_consumers_state.rb create mode 100644 
lib/karafka/web/management/migrations/1700234522_introduce_waiting_in_consumers_metrics.rb create mode 100644 lib/karafka/web/management/migrations/1700234522_introduce_waiting_in_consumers_state.rb create mode 100644 lib/karafka/web/management/migrations/1700234522_remove_processing_from_consumers_metrics.rb create mode 100644 lib/karafka/web/management/migrations/1700234522_remove_processing_from_consumers_state.rb create mode 100644 lib/karafka/web/management/migrator.rb create mode 100644 lib/karafka/web/processing/publisher.rb create mode 100644 lib/karafka/web/tracking/consumers/listeners/connections.rb create mode 100644 lib/karafka/web/tracking/helpers/ttls/array.rb create mode 100644 lib/karafka/web/tracking/helpers/ttls/hash.rb create mode 100644 lib/karafka/web/tracking/helpers/ttls/stats.rb create mode 100644 lib/karafka/web/tracking/helpers/ttls/windows.rb delete mode 100644 lib/karafka/web/tracking/ttl_array.rb delete mode 100644 lib/karafka/web/tracking/ttl_hash.rb create mode 100644 lib/karafka/web/ui/controllers/responses/deny.rb create mode 100644 lib/karafka/web/ui/controllers/responses/file.rb rename lib/karafka/web/ui/controllers/responses/{data.rb => render.rb} (80%) create mode 100644 lib/karafka/web/ui/lib/sorter.rb create mode 100644 lib/karafka/web/ui/models/jobs.rb create mode 100644 lib/karafka/web/ui/pro/views/consumers/pending_jobs.erb rename lib/karafka/web/ui/pro/views/consumers/{jobs.erb => running_jobs.erb} (56%) delete mode 100644 lib/karafka/web/ui/pro/views/dashboard/_ranges_selector.erb create mode 100644 lib/karafka/web/ui/pro/views/explorer/message/_message_actions.erb create mode 100644 lib/karafka/web/ui/pro/views/explorer/message/_metadata.erb create mode 100644 lib/karafka/web/ui/pro/views/explorer/message/_payload.erb create mode 100644 lib/karafka/web/ui/pro/views/explorer/message/_payload_actions.erb create mode 100644 lib/karafka/web/ui/pro/views/health/_partition_times.erb create mode 100644 
lib/karafka/web/ui/pro/views/health/changes.erb create mode 100644 lib/karafka/web/ui/pro/views/jobs/pending.erb create mode 100644 lib/karafka/web/ui/pro/views/jobs/running.erb create mode 100644 lib/karafka/web/ui/views/cluster/_tabs.erb create mode 100644 lib/karafka/web/ui/views/cluster/brokers.erb delete mode 100644 lib/karafka/web/ui/views/cluster/index.erb create mode 100644 lib/karafka/web/ui/views/cluster/topics.erb create mode 100644 lib/karafka/web/ui/views/jobs/_tabs.erb rename lib/karafka/web/ui/views/jobs/{index.erb => pending.erb} (60%) rename lib/karafka/web/ui/{pro/views/jobs/index.erb => views/jobs/running.erb} (60%) create mode 100644 lib/karafka/web/ui/views/shared/exceptions/not_allowed.erb create mode 100644 lib/karafka/web/ui/views/status/warnings/_routing_topics_presence.erb create mode 100644 spec/fixtures/consumers_metrics_v1.0.0.json create mode 100644 spec/fixtures/consumers_state_v1.1.0.json rename spec/lib/karafka/web/management/{ => actions}/clean_boot_file_spec.rb (100%) rename spec/lib/karafka/web/management/{ => actions}/create_initial_states_spec.rb (53%) rename spec/lib/karafka/web/management/{ => actions}/create_topics_spec.rb (100%) rename spec/lib/karafka/web/management/{ => actions}/delete_topics_spec.rb (100%) rename spec/lib/karafka/web/management/{ => actions}/extend_boot_file_spec.rb (100%) create mode 100644 spec/lib/karafka/web/management/actions/migrate_states_data_spec.rb create mode 100644 spec/lib/karafka/web/management/migrations/0_set_initial_consumers_metrics_spec.rb create mode 100644 spec/lib/karafka/web/management/migrations/0_set_initial_consumers_state_spec.rb create mode 100644 spec/lib/karafka/web/management/migrations/1699543515_fill_missing_received_and_sent_bytes_in_consumers_metrics_spec.rb create mode 100644 spec/lib/karafka/web/management/migrations/1699543515_fill_missing_received_and_sent_bytes_in_consumers_state_spec.rb create mode 100644 
spec/lib/karafka/web/management/migrations/1700234522_introduce_waiting_in_consumers_metrics_spec.rb create mode 100644 spec/lib/karafka/web/management/migrations/1700234522_introduce_waiting_in_consumers_state_spec.rb create mode 100644 spec/lib/karafka/web/management/migrations/1700234522_remove_processing_from_consumers_metrics_spec.rb create mode 100644 spec/lib/karafka/web/management/migrations/1700234522_remove_processing_from_consumers_state_spec.rb create mode 100644 spec/lib/karafka/web/management/migrator_spec.rb rename spec/lib/karafka/web/tracking/{ttl_array_spec.rb => helpers/ttls/array_spec.rb} (100%) rename spec/lib/karafka/web/tracking/{ttl_hash_spec.rb => helpers/ttls/hash_spec.rb} (100%) create mode 100644 spec/lib/karafka/web/tracking/helpers/ttls/stats_spec.rb create mode 100644 spec/lib/karafka/web/tracking/helpers/ttls/windows_spec.rb create mode 100644 spec/lib/karafka/web/ui/lib/sorter_spec.rb create mode 100644 spec/lib/karafka/web/ui/models/jobs_spec.rb diff --git a/CHANGELOG.md b/CHANGELOG.md index c9f808f9..98862ac2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,32 @@ # Karafka Web changelog +## 0.8.0 (Unreleased) +- **[Feature]** Provide ability to sort table data for part of the views (note: not all attributes can be sorted due to technical limitations of sub-components fetching from Kafka). +- **[Feature]** Track and report pause timeouts via "Changes" view in Health. +- **[Feature]** Introduce pending jobs visibility alongside of running jobs both in total and per process. +- **[Feature]** Introduce states migrations for seamless upgrades. +- **[Feature]** Introduce "Data transfers" chart with data received and data sent to the cluster. +- **[Feature]** Introduce ability to download raw payloads. +- **[Feature]** Introduce ability to download deserialized message payload as JSON. +- [Enhancement] Split cluster info into two tabs, one for brokers and one for topics with partitions. +- [Enhancement] Track pending jobs. 
Pending jobs are jobs that are not yet scheduled for execution by advanced schedulers. +- [Enhancement] Rename "Enqueued" to "Pending" to support jobs that are not yet enqueued but within a scheduler. +- [Enhancement] Make sure only running jobs are displayed in running jobs +- [Enhancement] Improve jobs related breadcrumbs +- [Enhancement] Display errors backtraces in OSS. +- [Enhancement] Display concurrency graph in OSS. +- [Enhancement] Support time ranges for graphs in OSS. +- [Enhancement] Report last poll time for each subscription group. +- [Enhancement] Show last poll time per consumer instance. +- [Enhancement] Display number of jobs in a particular process jobs view. +- [Enhancement] Promote "Batches" chart to OSS. +- [Enhancement] Promote "Utilization" chart to OSS. +- [Fix] Fix times precisions that could be incorrectly reported by 1 second in few places. +- [Fix] Fix random order in Consumers groups Health view. +- [Change] Rename "Busy" to "Running" to align with "Running Jobs". +- [Change] Rename "Active subscriptions" to "Subscriptions" as process subscriptions are always active. +- [Maintenance] Introduce granular subscription group contracts. + ## 0.7.10 (2023-10-31) - [Fix] Max LSO chart does not work as expected (#201) @@ -29,6 +56,15 @@ - [Fix] Cache assets for 1 year instead of 7 days. - [Fix] Remove source maps pointing to non-existing locations. - [Maintenance] Include license and copyrights notice for `timeago.js` that was missing in the JS min file. +- [Refactor] Rename `ui.show_internal_topics` to `ui.visibility.internal_topics_display` + +### Upgrade Notes + +**NO** rolling upgrade needed. Just configuration update. + +1. If you are using `ui.visibility_filter` this option is now `ui.visibility.filter` (yes, only `.` difference). +2. If you are using a custom visibility filter, it requires now two extra methods: `#download?` and `#export?`. The default visibility filter allows both actions unless message is encrypted. +3. 
`ui.show_internal_topics` config option has been moved and renamed to `ui.visibility.internal_topics`. ## 0.7.4 (2023-09-19) - [Improvement] Skip aggregations on older schemas during upgrades. This only skips process-reports (that are going to be rolled) on the 5s window in case of an upgrade that should not be a rolling one anyhow. This simplifies the operations and minimizes the risk on breaking upgrades. diff --git a/Gemfile.lock b/Gemfile.lock index 94d4cfcf..63ae0f52 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,9 +1,9 @@ PATH remote: . specs: - karafka-web (0.7.10) + karafka-web (0.8.0) erubi (~> 1.4) - karafka (>= 2.2.9, < 3.0.0) + karafka (>= 2.2.14, < 3.0.0) karafka-core (>= 2.2.4, < 3.0.0) roda (~> 3.68, >= 3.69) tilt (~> 2.0) @@ -36,14 +36,14 @@ GEM ffi (1.16.3) i18n (1.14.1) concurrent-ruby (~> 1.0) - karafka (2.2.9) - karafka-core (>= 2.2.2, < 2.3.0) - waterdrop (>= 2.6.10, < 3.0.0) + karafka (2.2.14) + karafka-core (>= 2.2.7, < 2.3.0) + waterdrop (>= 2.6.11, < 3.0.0) zeitwerk (~> 2.3) - karafka-core (2.2.4) + karafka-core (2.2.7) concurrent-ruby (>= 1.1) - karafka-rdkafka (>= 0.13.6, < 0.14.0) - karafka-rdkafka (0.13.6) + karafka-rdkafka (>= 0.13.9, < 0.15.0) + karafka-rdkafka (0.14.0) ffi (~> 1.15) mini_portile2 (~> 2.6) rake (> 12) @@ -82,7 +82,7 @@ GEM tilt (2.3.0) tzinfo (2.0.6) concurrent-ruby (~> 1.0) - waterdrop (2.6.10) + waterdrop (2.6.11) karafka-core (>= 2.2.3, < 3.0.0) zeitwerk (~> 2.3) webrick (1.8.1) diff --git a/karafka-web.gemspec b/karafka-web.gemspec index 0bd70967..ea2b5e7e 100644 --- a/karafka-web.gemspec +++ b/karafka-web.gemspec @@ -17,7 +17,7 @@ Gem::Specification.new do |spec| spec.licenses = %w[LGPL-3.0 Commercial] spec.add_dependency 'erubi', '~> 1.4' - spec.add_dependency 'karafka', '>= 2.2.9', '< 3.0.0' + spec.add_dependency 'karafka', '>= 2.2.14', '< 3.0.0' spec.add_dependency 'karafka-core', '>= 2.2.4', '< 3.0.0' spec.add_dependency 'roda', '~> 3.68', '>= 3.69' spec.add_dependency 'tilt', '~> 2.0' diff --git 
a/lib/karafka/web.rb b/lib/karafka/web.rb index 440a6f51..ca371109 100644 --- a/lib/karafka/web.rb +++ b/lib/karafka/web.rb @@ -53,7 +53,10 @@ def enable! end end +require_relative 'web/inflector' + loader = Zeitwerk::Loader.new + # Make sure pro is not loaded unless Pro loader.ignore(Karafka::Web.gem_root.join('lib/karafka/web/ui/pro')) @@ -62,9 +65,10 @@ def enable! loader = Zeitwerk::Loader.new end -root = File.expand_path('..', __dir__) loader.tag = 'karafka-web' -loader.inflector = Zeitwerk::GemInflector.new("#{root}/karafka/web.rb") +# Use our custom inflector to support migrations +root = File.expand_path('..', __dir__) +loader.inflector = Karafka::Web::Inflector.new("#{root}/karafka/web.rb") loader.push_dir(root) loader.setup diff --git a/lib/karafka/web/config.rb b/lib/karafka/web/config.rb index 66a05fbd..99075aa9 100644 --- a/lib/karafka/web/config.rb +++ b/lib/karafka/web/config.rb @@ -62,6 +62,7 @@ class Config setting :listeners, default: [ Tracking::Consumers::Listeners::Status.new, Tracking::Consumers::Listeners::Errors.new, + Tracking::Consumers::Listeners::Connections.new, Tracking::Consumers::Listeners::Statistics.new, Tracking::Consumers::Listeners::Pausing.new, Tracking::Consumers::Listeners::Processing.new, @@ -117,9 +118,16 @@ class Config Karafka.env.production? ? 60_000 * 5 : 5_000 ) - # Should we display internal topics of Kafka. The once starting with `__` - # By default we do not display them as they are not usable from regular users perspective - setting :show_internal_topics, default: false + setting :visibility do + # Allows to manage visibility of payload, headers and message key in the UI + # In some cases you may want to limit what is being displayed due to the type of data you + # are dealing with + setting :filter, default: Ui::Models::VisibilityFilter.new + + # Should we display internal topics of Kafka. 
The once starting with `__` + # By default we do not display them as they are not usable from regular users perspective + setting :internal_topics, default: false + end # How many elements should we display on pages that support pagination setting :per_page, default: 25 diff --git a/lib/karafka/web/contracts/config.rb b/lib/karafka/web/contracts/config.rb index 57f04fed..93e1c8c3 100644 --- a/lib/karafka/web/contracts/config.rb +++ b/lib/karafka/web/contracts/config.rb @@ -52,10 +52,13 @@ class Config < Web::Contracts::Base required(:secret) { |val| val.is_a?(String) && val.length >= 64 } end - required(:show_internal_topics) { |val| [true, false].include?(val) } required(:cache) { |val| !val.nil? } required(:per_page) { |val| val.is_a?(Integer) && val >= 1 && val <= 100 } - required(:visibility_filter) { |val| !val.nil? } + + nested(:visibility) do + required(:filter) { |val| !val.nil? } + required(:internal_topics) { |val| [true, false].include?(val) } + end end end end diff --git a/lib/karafka/web/errors.rb b/lib/karafka/web/errors.rb index 93d1f1ef..80fcfb49 100644 --- a/lib/karafka/web/errors.rb +++ b/lib/karafka/web/errors.rb @@ -11,6 +11,18 @@ module Errors # This should never happen and if you see this, please open an issue. ContractError = Class.new(BaseError) + # Errors specific to management + module Management + # Similar to processing error with the same name, it is raised when a critical + # incompatibility is detected. + # + # This error is raised when there was an attempt to operate on aggregated Web UI states + # that are already in a newer version that the one in the current process. We prevent + # this from happening not to corrupt the data. 
Please upgrade all the Web UI consumers to + # the same version + IncompatibleSchemaError = Class.new(BaseError) + end + # Processing related errors namespace module Processing # Raised when we try to process reports but we do not have the current state bootstrapped diff --git a/lib/karafka/web/inflector.rb b/lib/karafka/web/inflector.rb new file mode 100644 index 00000000..143c466c --- /dev/null +++ b/lib/karafka/web/inflector.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +module Karafka + module Web + # Web UI Zeitwerk Inflector that allows us to have time prefixed files with migrations, similar + # to how Rails does that. + class Inflector < Zeitwerk::GemInflector + # Checks if given path is a migration one + MIGRATION_ABSPATH_REGEXP = /migrations\/[0-9]+_(.*)/ + + # Checks if it is a migration file + MIGRATION_BASENAME_REGEXP = /\A[0-9]+_(.*)/ + + private_constant :MIGRATION_ABSPATH_REGEXP, :MIGRATION_BASENAME_REGEXP + + # @param [String] basename of the file to be loaded + # @param abspath [String] absolute path of the file to be loaded + # @return [String] Constant name to be used for given file + def camelize(basename, abspath) + # If not migration directory with proper migration files, use defaults + return super unless abspath.match?(MIGRATION_ABSPATH_REGEXP) + # If base name is not of a proper name in migrations, use defaults + return super unless basename.match?(MIGRATION_BASENAME_REGEXP) + + super( + # Extract only the name without the timestamp + basename.match(MIGRATION_BASENAME_REGEXP).to_a.last, + abspath + ) + end + end + end +end diff --git a/lib/karafka/web/installer.rb b/lib/karafka/web/installer.rb index b8cb70b5..381d7844 100644 --- a/lib/karafka/web/installer.rb +++ b/lib/karafka/web/installer.rb @@ -17,11 +17,14 @@ def install(replication_factor: 1) puts puts 'Creating necessary topics and populating state data...' 
puts - Management::CreateTopics.new.call(replication_factor) + Management::Actions::CreateTopics.new.call(replication_factor) wait_for_topics - Management::CreateInitialStates.new.call + Management::Actions::CreateInitialStates.new.call puts - Management::ExtendBootFile.new.call + puts 'Running data migrations...' + Management::Actions::MigrateStatesData.new.call + puts + Management::Actions::ExtendBootFile.new.call puts puts("Installation #{green('completed')}. Have fun!") puts @@ -35,9 +38,12 @@ def migrate(replication_factor: 1) puts puts 'Creating necessary topics and populating state data...' puts - Management::CreateTopics.new.call(replication_factor) + Management::Actions::CreateTopics.new.call(replication_factor) wait_for_topics - Management::CreateInitialStates.new.call + Management::Actions::CreateInitialStates.new.call + puts + puts 'Running data migrations...' + Management::Actions::MigrateStatesData.new.call puts puts("Migration #{green('completed')}. Have fun!") puts @@ -49,11 +55,14 @@ def reset(replication_factor: 1) puts puts 'Resetting Karafka Web UI...' puts - Management::DeleteTopics.new.call + Management::Actions::DeleteTopics.new.call puts - Management::CreateTopics.new.call(replication_factor) + Management::Actions::CreateTopics.new.call(replication_factor) wait_for_topics - Management::CreateInitialStates.new.call + Management::Actions::CreateInitialStates.new.call + puts + puts 'Running data migrations...' + Management::Actions::MigrateStatesData.new.call puts puts("Resetting #{green('completed')}. Have fun!") puts @@ -64,8 +73,8 @@ def uninstall puts puts 'Uninstalling Karafka Web UI...' puts - Management::DeleteTopics.new.call - Management::CleanBootFile.new.call + Management::Actions::DeleteTopics.new.call + Management::Actions::CleanBootFile.new.call puts puts("Uninstalling #{green('completed')}. Goodbye!") puts @@ -73,7 +82,7 @@ def uninstall # Enables the Web-UI in the karafka app. Sets up needed routes and listeners. def enable! 
- Management::Enable.new.call + Management::Actions::Enable.new.call end private diff --git a/lib/karafka/web/management/actions/base.rb b/lib/karafka/web/management/actions/base.rb new file mode 100644 index 00000000..550a84b6 --- /dev/null +++ b/lib/karafka/web/management/actions/base.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + # Namespace for all the commands related to management of the Web-UI in the context of + # Karafka. It includes things like installing, creating needed topics, etc. + module Actions + # Base class for all the commands that we use to manage + class Base + include ::Karafka::Helpers::Colorize + + private + + # @return [String] green colored word "successfully" + def successfully + green('successfully') + end + + # @return [String] green colored word "already" + def already + green('already') + end + + # @return [Array] topics available in the cluster + def existing_topics_names + @existing_topics_names ||= ::Karafka::Admin + .cluster_info + .topics + .map { |topic| topic[:topic_name] } + end + end + end + end + end +end diff --git a/lib/karafka/web/management/actions/clean_boot_file.rb b/lib/karafka/web/management/actions/clean_boot_file.rb new file mode 100644 index 00000000..86c0eec5 --- /dev/null +++ b/lib/karafka/web/management/actions/clean_boot_file.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Actions + # Cleans the boot file from Karafka Web-UI details. + class CleanBootFile < Base + # Web-UI enabled code + ENABLER_CODE = ExtendBootFile::ENABLER_CODE + + private_constant :ENABLER_CODE + + # Removes the Web-UI boot file data + def call + karafka_rb = File.readlines(Karafka.boot_file) + + if karafka_rb.any? { |line| line.include?(ENABLER_CODE) } + puts 'Updating the Karafka boot file...' 
+ karafka_rb.delete_if { |line| line.include?(ENABLER_CODE) } + + File.write(Karafka.boot_file, karafka_rb.join) + puts "Karafka boot file #{successfully} updated." + puts 'Make sure to remove configuration and other customizations as well.' + else + puts 'Karafka Web UI components not found in the boot file.' + end + end + end + end + end + end +end diff --git a/lib/karafka/web/management/actions/create_initial_states.rb b/lib/karafka/web/management/actions/create_initial_states.rb new file mode 100644 index 00000000..4318ad6f --- /dev/null +++ b/lib/karafka/web/management/actions/create_initial_states.rb @@ -0,0 +1,77 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Actions + # Creates the records needed for the Web-UI to operate. + # It creates "almost" empty states because the rest is handled via migrations + class CreateInitialStates < Base + # Whole default empty state + # This will be further migrated by the migrator + DEFAULT_STATE = { + schema_version: '0.0.0' + }.freeze + + # Default metrics state + DEFAULT_METRICS = { + schema_version: '0.0.0' + }.freeze + + # Creates the initial states for the Web-UI if needed (if they don't exist) + def call + if exists?(Karafka::Web.config.topics.consumers.states) + exists('consumers state') + else + creating('consumers state') + ::Karafka::Web.producer.produce_sync( + topic: Karafka::Web.config.topics.consumers.states, + key: Karafka::Web.config.topics.consumers.states, + payload: DEFAULT_STATE.to_json + ) + created('consumers state') + end + + if exists?(Karafka::Web.config.topics.consumers.metrics) + exists('consumers metrics') + else + creating('consumers metrics') + ::Karafka::Web.producer.produce_sync( + topic: Karafka::Web.config.topics.consumers.metrics, + key: Karafka::Web.config.topics.consumers.metrics, + payload: DEFAULT_METRICS.to_json + ) + created('consumers metrics') + end + end + + private + + # @param topic [String] topic name + # @return [Boolean] true if 
there is already an initial record in a given topic + def exists?(topic) + !::Karafka::Admin.read_topic(topic, 0, 5).last.nil? + end + + # @param type [String] type of state + # @return [String] exists message + def exists(type) + puts "Initial #{type} #{already} exists." + end + + # @param type [String] type of state + # @return [String] message that the state is being created + def creating(type) + puts "Creating #{type} initial record..." + end + + # @param type [String] type of state + # @return [String] message that the state was created + def created(type) + puts "Initial #{type} record #{successfully} created." + end + end + end + end + end +end diff --git a/lib/karafka/web/management/actions/create_topics.rb b/lib/karafka/web/management/actions/create_topics.rb new file mode 100644 index 00000000..bfe96822 --- /dev/null +++ b/lib/karafka/web/management/actions/create_topics.rb @@ -0,0 +1,137 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Actions + # Creates all the needed topics (if they don't exist). + # It does **not** populate data. + class CreateTopics < Base + # Runs the creation process + # + # @param replication_factor [Integer] replication factor for Web-UI topics + # + # @note The order of creation of those topics is important. In order to support the + # zero-downtime bootstrap, we use the presence of the states topic and its initial + # state existence as an indicator that the setup went as expected. It the consumers + # states topic exists and contains needed data, it means all went as expected and that + # topics created before it also exist (as no error). 
+ def call(replication_factor) + consumers_states_topic = ::Karafka::Web.config.topics.consumers.states + consumers_metrics_topic = ::Karafka::Web.config.topics.consumers.metrics + consumers_reports_topic = ::Karafka::Web.config.topics.consumers.reports + errors_topic = ::Karafka::Web.config.topics.errors + + if existing_topics_names.include?(errors_topic) + exists(errors_topic) + else + creating(errors_topic) + # All the errors will be dispatched here + # This topic can have multiple partitions but we go with one by default. A single + # Ruby process should not crash that often and if there is an expectation of a higher + # volume of errors, this can be changed by the end user + ::Karafka::Admin.create_topic( + errors_topic, + 1, + replication_factor, + # Remove really old errors (older than 3 months just to preserve space) + { + 'retention.ms': 3 * 31 * 24 * 60 * 60 * 1_000 # 3 months + } + ) + created(errors_topic) + end + + if existing_topics_names.include?(consumers_reports_topic) + exists(consumers_reports_topic) + else + creating(consumers_reports_topic) + # This topic needs to have one partition + ::Karafka::Admin.create_topic( + consumers_reports_topic, + 1, + replication_factor, + # We do not need to to store this data for longer than 1 day as this data is only + # used to materialize the end states + # On the other hand we do not want to have it really short-living because in case + # of a consumer crash, we may want to use this info to catch up and backfill the + # state. 
+ # + # In case its not consumed because no processes are running, it also usually means + # there's no data to consume because no karafka servers report + { + 'retention.ms': 24 * 60 * 60 * 1_000 # 1 day + } + ) + created(consumers_reports_topic) + end + + if existing_topics_names.include?(consumers_metrics_topic) + exists(consumers_metrics_topic) + else + creating(consumers_metrics_topic) + # This topic needs to have one partition + # Same as states - only most recent is relevant as it is a materialized state + ::Karafka::Admin.create_topic( + consumers_metrics_topic, + 1, + replication_factor, + { + 'cleanup.policy': 'compact', + 'retention.ms': 60 * 60 * 1_000, # 1h + 'segment.ms': 24 * 60 * 60 * 1_000, # 1 day + 'segment.bytes': 104_857_600 # 100MB + } + ) + created(consumers_metrics_topic) + end + + # Create only if needed + if existing_topics_names.include?(consumers_states_topic) + exists(consumers_states_topic) + else + creating(consumers_states_topic) + # This topic needs to have one partition + ::Karafka::Admin.create_topic( + consumers_states_topic, + 1, + replication_factor, + # We care only about the most recent state, previous are irrelevant. So we can + # easily compact after one minute. We do not use this beyond the most recent + # collective state, hence it all can easily go away. We also limit the segment + # size to at most 100MB not to use more space ever. 
+ { + 'cleanup.policy': 'compact', + 'retention.ms': 60 * 60 * 1_000, + 'segment.ms': 24 * 60 * 60 * 1_000, # 1 day + 'segment.bytes': 104_857_600 # 100MB + } + ) + created(consumers_states_topic) + end + end + + private + + # @param topic_name [String] name of the topic that exists + # @return [String] formatted message + def exists(topic_name) + puts("Topic #{topic_name} #{already} exists.") + end + + # @param topic_name [String] name of the topic that we are creating + # @return [String] formatted message + def creating(topic_name) + puts("Creating topic #{topic_name}...") + end + + # @param topic_name [String] name of the topic that we created + # @return [String] formatted message + def created(topic_name) + puts("Topic #{topic_name} #{successfully} created.") + end + end + end + end + end +end diff --git a/lib/karafka/web/management/actions/delete_topics.rb b/lib/karafka/web/management/actions/delete_topics.rb new file mode 100644 index 00000000..481e5fda --- /dev/null +++ b/lib/karafka/web/management/actions/delete_topics.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Actions + # Removes the Web-UI topics from Kafka + class DeleteTopics < Base + # Removes the Web-UI topics + def call + [ + ::Karafka::Web.config.topics.consumers.states, + ::Karafka::Web.config.topics.consumers.reports, + ::Karafka::Web.config.topics.consumers.metrics, + ::Karafka::Web.config.topics.errors + ].each do |topic_name| + if existing_topics_names.include?(topic_name.to_s) + puts "Removing #{topic_name}..." + ::Karafka::Admin.delete_topic(topic_name) + puts "Topic #{topic_name} #{successfully} deleted." + else + puts "Topic #{topic_name} not found." 
+ end + end + end + end + end + end + end +end diff --git a/lib/karafka/web/management/actions/enable.rb b/lib/karafka/web/management/actions/enable.rb new file mode 100644 index 00000000..511fa8ab --- /dev/null +++ b/lib/karafka/web/management/actions/enable.rb @@ -0,0 +1,105 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Actions + # @note This runs on each process start that has `karafka.rb`. It needs to be executed + # also in the context of other processes types and not only karafka server, because it + # installs producers instrumentation and routing as well. + class Enable < Base + # Enables routing consumer group and subscribes Web-UI listeners + def call + extend_routing + subscribe_to_monitor + subscribe_to_close_web_producer + end + + private + + # Enables all the needed routes + def extend_routing + ::Karafka::App.routes.draw do + web_deserializer = ::Karafka::Web::Deserializer.new + + consumer_group ::Karafka::Web.config.processing.consumer_group do + # Topic we listen on to materialize the states + topic ::Karafka::Web.config.topics.consumers.reports do + config(active: false) + active ::Karafka::Web.config.processing.active + # Since we materialize state in intervals, we can poll for half of this time + # without impacting the reporting responsiveness + max_wait_time ::Karafka::Web.config.processing.interval / 2 + max_messages 1_000 + consumer ::Karafka::Web::Processing::Consumer + # This needs to be true in order not to reload the consumer in dev. 
This consumer + # should not be affected by the end user development process + consumer_persistence true + deserializer web_deserializer + manual_offset_management true + # Start from the most recent data, do not materialize historical states + # This prevents us from dealing with cases, where client id would be changed and + # consumer group name would be renamed and we would start consuming all + # historical + initial_offset 'latest' + end + + # We define those three here without consumption, so Web understands how to + # deserialize them when used / viewed + topic ::Karafka::Web.config.topics.consumers.states do + config(active: false) + active false + deserializer web_deserializer + end + + topic ::Karafka::Web.config.topics.consumers.metrics do + config(active: false) + active false + deserializer web_deserializer + end + + topic ::Karafka::Web.config.topics.errors do + config(active: false) + active false + deserializer web_deserializer + end + end + end + end + + # Subscribes with all needed listeners + def subscribe_to_monitor + # Installs all the consumer related listeners + ::Karafka::Web.config.tracking.consumers.listeners.each do |listener| + ::Karafka.monitor.subscribe(listener) + end + + # Installs all the producer related listeners into Karafka default listener and + # into Karafka::Web listener in case it would be different than the Karafka one + ::Karafka::Web.config.tracking.producers.listeners.each do |listener| + ::Karafka.producer.monitor.subscribe(listener) + + # Do not instrument twice in case only one default producer is used + next if ::Karafka.producer == ::Karafka::Web.producer + + ::Karafka::Web.producer.monitor.subscribe(listener) + end + end + + # In most cases we want to close the producer if possible. + # While we cannot do it easily in user processes and we should rely on WaterDrop + # finalization logic, we can do it in `karafka server` on terminate + # + # In other places, this producer anyhow should not be used. 
+ def subscribe_to_close_web_producer + ::Karafka::App.monitor.subscribe('app.terminated') do + # If Web producer is the same as `Karafka.producer` it will do nothing as you can + # call `#close` multiple times without side effects + ::Karafka::Web.producer.close + end + end + end + end + end + end +end diff --git a/lib/karafka/web/management/actions/extend_boot_file.rb b/lib/karafka/web/management/actions/extend_boot_file.rb new file mode 100644 index 00000000..4ecb40a4 --- /dev/null +++ b/lib/karafka/web/management/actions/extend_boot_file.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Actions + # Extends the boot file with Web components + class ExtendBootFile < Base + # Code that is needed in the `karafka.rb` to connect Web UI to Karafka + ENABLER_CODE = 'Karafka::Web.enable!' + + # Template with initial Web UI configuration + # Session secret needs to be set per user and per env + SETUP_TEMPLATE = <<~CONFIG.freeze + Karafka::Web.setup do |config| + # You may want to set it per ENV. This value was randomly generated. + config.ui.sessions.secret = '#{SecureRandom.hex(32)}' + end + + #{ENABLER_CODE} + CONFIG + + # Adds needed code + def call + if File.read(Karafka.boot_file).include?(ENABLER_CODE) + puts "Web UI #{already} installed." + else + puts 'Updating the Karafka boot file...' + File.open(Karafka.boot_file, 'a') do |f| + f << "\n#{SETUP_TEMPLATE}\n" + end + puts "Karafka boot file #{successfully} updated." 
+ end + end + end + end + end + end +end diff --git a/lib/karafka/web/management/actions/migrate_states_data.rb b/lib/karafka/web/management/actions/migrate_states_data.rb new file mode 100644 index 00000000..a80cebd2 --- /dev/null +++ b/lib/karafka/web/management/actions/migrate_states_data.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Actions + # Command to migrate states data + # Useful when we have older schema and need to move forward + class MigrateStatesData < Base + # Runs needed migrations (if any) on the states topics + def call + Migrator.new.call + end + end + end + end + end +end diff --git a/lib/karafka/web/management/base.rb b/lib/karafka/web/management/base.rb deleted file mode 100644 index f8c34f92..00000000 --- a/lib/karafka/web/management/base.rb +++ /dev/null @@ -1,34 +0,0 @@ -# frozen_string_literal: true - -module Karafka - module Web - # Namespace for all the commands related to management of the Web-UI in the context of Karafka - # It includes things like installing, creating needed topics, etc. 
- module Management - # Base class for all the commands that we use to manage - class Base - include ::Karafka::Helpers::Colorize - - private - - # @return [String] green colored word "successfully" - def successfully - green('successfully') - end - - # @return [String] green colored word "already" - def already - green('already') - end - - # @return [Array] topics available in the cluster - def existing_topics_names - @existing_topics_names ||= ::Karafka::Admin - .cluster_info - .topics - .map { |topic| topic[:topic_name] } - end - end - end - end -end diff --git a/lib/karafka/web/management/clean_boot_file.rb b/lib/karafka/web/management/clean_boot_file.rb deleted file mode 100644 index ece3cfd0..00000000 --- a/lib/karafka/web/management/clean_boot_file.rb +++ /dev/null @@ -1,31 +0,0 @@ -# frozen_string_literal: true - -module Karafka - module Web - module Management - # Cleans the boot file from Karafka Web-UI details. - class CleanBootFile < Base - # Web-UI enabled code - ENABLER_CODE = ExtendBootFile::ENABLER_CODE - - private_constant :ENABLER_CODE - - # Removes the Web-UI boot file data - def call - karafka_rb = File.readlines(Karafka.boot_file) - - if karafka_rb.any? { |line| line.include?(ENABLER_CODE) } - puts 'Updating the Karafka boot file...' - karafka_rb.delete_if { |line| line.include?(ENABLER_CODE) } - - File.write(Karafka.boot_file, karafka_rb.join) - puts "Karafka boot file #{successfully} updated." - puts 'Make sure to remove configuration and other customizations as well.' - else - puts 'Karafka Web UI components not found in the boot file.' 
- end - end - end - end - end -end diff --git a/lib/karafka/web/management/create_initial_states.rb b/lib/karafka/web/management/create_initial_states.rb deleted file mode 100644 index 2257e60d..00000000 --- a/lib/karafka/web/management/create_initial_states.rb +++ /dev/null @@ -1,101 +0,0 @@ -# frozen_string_literal: true - -module Karafka - module Web - module Management - # Creates the records needed for the Web-UI to operate. - class CreateInitialStates < Base - # Defaults stats state that we create in Kafka - DEFAULT_STATS = { - batches: 0, - messages: 0, - retries: 0, - dead: 0, - busy: 0, - enqueued: 0, - processing: 0, - workers: 0, - processes: 0, - rss: 0, - listeners: 0, - utilization: 0, - lag_stored: 0, - errors: 0 - }.freeze - - # Default empty historicals for first record in Kafka - DEFAULT_AGGREGATED = Processing::TimeSeriesTracker::TIME_RANGES - .keys - .map { |range| [range, []] } - .to_h - .freeze - - # WHole default empty state (aside from dispatch time) - DEFAULT_STATE = { - processes: {}, - stats: DEFAULT_STATS, - schema_state: 'accepted', - schema_version: Processing::Consumers::Aggregators::State::SCHEMA_VERSION, - dispatched_at: Time.now.to_f - }.freeze - - # Default metrics state - DEFAULT_METRICS = { - aggregated: DEFAULT_AGGREGATED, - consumer_groups: DEFAULT_AGGREGATED, - dispatched_at: Time.now.to_f, - schema_version: Processing::Consumers::Aggregators::Metrics::SCHEMA_VERSION - }.freeze - - private_constant :DEFAULT_STATS, :DEFAULT_AGGREGATED - - # Creates the initial states for the Web-UI if needed (if they don't exist) - def call - if Ui::Models::ConsumersState.current - exists('consumers state') - else - creating('consumers state') - ::Karafka::Web.producer.produce_sync( - topic: Karafka::Web.config.topics.consumers.states, - key: Karafka::Web.config.topics.consumers.states, - payload: DEFAULT_STATE.to_json - ) - created('consumers state') - end - - if Ui::Models::ConsumersMetrics.current - exists('consumers metrics') - else - 
creating('consumers metrics') - ::Karafka::Web.producer.produce_sync( - topic: Karafka::Web.config.topics.consumers.metrics, - key: Karafka::Web.config.topics.consumers.metrics, - payload: DEFAULT_METRICS.merge(dispatched_at: Time.now.to_f).to_json - ) - created('consumers metrics') - end - end - - private - - # @param type [String] type of state - # @return [String] exists message - def exists(type) - puts "Initial #{type} #{already} exists." - end - - # @param type [String] type of state - # @return [String] message that the state is being created - def creating(type) - puts "Creating #{type} initial record..." - end - - # @param type [String] type of state - # @return [String] message that the state was created - def created(type) - puts "Initial #{type} record #{successfully} created." - end - end - end - end -end diff --git a/lib/karafka/web/management/create_topics.rb b/lib/karafka/web/management/create_topics.rb deleted file mode 100644 index 3e64a9f0..00000000 --- a/lib/karafka/web/management/create_topics.rb +++ /dev/null @@ -1,133 +0,0 @@ -# frozen_string_literal: true - -module Karafka - module Web - module Management - # Creates all the needed topics (if they don't exist). - # It does **not** populate data. - class CreateTopics < Base - # Runs the creation process - # - # @param replication_factor [Integer] replication factor for Web-UI topics - # - # @note The order of creation of those topics is important. In order to support the - # zero-downtime bootstrap, we use the presence of the states topic and its initial state - # existence as an indicator that the setup went as expected. It the consumers states - # topic exists and contains needed data, it means all went as expected and that - # topics created before it also exist (as no error). 
- def call(replication_factor) - consumers_states_topic = ::Karafka::Web.config.topics.consumers.states - consumers_metrics_topic = ::Karafka::Web.config.topics.consumers.metrics - consumers_reports_topic = ::Karafka::Web.config.topics.consumers.reports - errors_topic = ::Karafka::Web.config.topics.errors - - if existing_topics_names.include?(errors_topic) - exists(errors_topic) - else - creating(errors_topic) - # All the errors will be dispatched here - # This topic can have multiple partitions but we go with one by default. A single Ruby - # process should not crash that often and if there is an expectation of a higher volume - # of errors, this can be changed by the end user - ::Karafka::Admin.create_topic( - errors_topic, - 1, - replication_factor, - # Remove really old errors (older than 3 months just to preserve space) - { - 'retention.ms': 3 * 31 * 24 * 60 * 60 * 1_000 # 3 months - } - ) - created(errors_topic) - end - - if existing_topics_names.include?(consumers_reports_topic) - exists(consumers_reports_topic) - else - creating(consumers_reports_topic) - # This topic needs to have one partition - ::Karafka::Admin.create_topic( - consumers_reports_topic, - 1, - replication_factor, - # We do not need to to store this data for longer than 1 day as this data is only - # used to materialize the end states - # On the other hand we do not want to have it really short-living because in case of - # a consumer crash, we may want to use this info to catch up and backfill the state. 
- # In case its not consumed because no processes are running, it also usually means - # there's no data to consume because no karafka servers report - { - 'retention.ms': 24 * 60 * 60 * 1_000 # 1 day - } - ) - created(consumers_reports_topic) - end - - if existing_topics_names.include?(consumers_metrics_topic) - exists(consumers_metrics_topic) - else - creating(consumers_metrics_topic) - # This topic needs to have one partition - # Same as states - only most recent is relevant as it is a materialized state - ::Karafka::Admin.create_topic( - consumers_metrics_topic, - 1, - replication_factor, - { - 'cleanup.policy': 'compact', - 'retention.ms': 60 * 60 * 1_000, # 1h - 'segment.ms': 24 * 60 * 60 * 1_000, # 1 day - 'segment.bytes': 104_857_600 # 100MB - } - ) - created(consumers_metrics_topic) - end - - # Create only if needed - if existing_topics_names.include?(consumers_states_topic) - exists(consumers_states_topic) - else - creating(consumers_states_topic) - # This topic needs to have one partition - ::Karafka::Admin.create_topic( - consumers_states_topic, - 1, - replication_factor, - # We care only about the most recent state, previous are irrelevant. So we can easily - # compact after one minute. We do not use this beyond the most recent collective - # state, hence it all can easily go away. We also limit the segment size to at most - # 100MB not to use more space ever. 
- { - 'cleanup.policy': 'compact', - 'retention.ms': 60 * 60 * 1_000, - 'segment.ms': 24 * 60 * 60 * 1_000, # 1 day - 'segment.bytes': 104_857_600 # 100MB - } - ) - created(consumers_states_topic) - end - end - - private - - # @param topic_name [String] name of the topic that exists - # @return [String] formatted message - def exists(topic_name) - puts("Topic #{topic_name} #{already} exists.") - end - - # @param topic_name [String] name of the topic that we are creating - # @return [String] formatted message - def creating(topic_name) - puts("Creating topic #{topic_name}...") - end - - # @param topic_name [String] name of the topic that we created - # @return [String] formatted message - def created(topic_name) - puts("Topic #{topic_name} #{successfully} created.") - end - end - end - end -end diff --git a/lib/karafka/web/management/delete_topics.rb b/lib/karafka/web/management/delete_topics.rb deleted file mode 100644 index 908d62d8..00000000 --- a/lib/karafka/web/management/delete_topics.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true - -module Karafka - module Web - module Management - # Removes the Web-UI topics from Kafka - class DeleteTopics < Base - # Removes the Web-UI topics - def call - [ - ::Karafka::Web.config.topics.consumers.states, - ::Karafka::Web.config.topics.consumers.reports, - ::Karafka::Web.config.topics.consumers.metrics, - ::Karafka::Web.config.topics.errors - ].each do |topic_name| - if existing_topics_names.include?(topic_name.to_s) - puts "Removing #{topic_name}..." - ::Karafka::Admin.delete_topic(topic_name) - puts "Topic #{topic_name} #{successfully} deleted." - else - puts "Topic #{topic_name} not found." 
- end - end - end - end - end - end -end diff --git a/lib/karafka/web/management/enable.rb b/lib/karafka/web/management/enable.rb deleted file mode 100644 index 02e4387c..00000000 --- a/lib/karafka/web/management/enable.rb +++ /dev/null @@ -1,102 +0,0 @@ -# frozen_string_literal: true - -module Karafka - module Web - module Management - # @note This runs on each process start that has `karafka.rb`. It needs to be executed - # also in the context of other processes types and not only karafka server, because it - # installs producers instrumentation and routing as well. - class Enable < Base - # Enables routing consumer group and subscribes Web-UI listeners - def call - extend_routing - subscribe_to_monitor - subscribe_to_close_web_producer - end - - private - - # Enables all the needed routes - def extend_routing - ::Karafka::App.routes.draw do - web_deserializer = ::Karafka::Web::Deserializer.new - - consumer_group ::Karafka::Web.config.processing.consumer_group do - # Topic we listen on to materialize the states - topic ::Karafka::Web.config.topics.consumers.reports do - config(active: false) - active ::Karafka::Web.config.processing.active - # Since we materialize state in intervals, we can poll for half of this time without - # impacting the reporting responsiveness - max_wait_time ::Karafka::Web.config.processing.interval / 2 - max_messages 1_000 - consumer ::Karafka::Web::Processing::Consumer - # This needs to be true in order not to reload the consumer in dev. 
This consumer - # should not be affected by the end user development process - consumer_persistence true - deserializer web_deserializer - manual_offset_management true - # Start from the most recent data, do not materialize historical states - # This prevents us from dealing with cases, where client id would be changed and - # consumer group name would be renamed and we would start consuming all historical - initial_offset 'latest' - end - - # We define those three here without consumption, so Web understands how to deserialize - # them when used / viewed - topic ::Karafka::Web.config.topics.consumers.states do - config(active: false) - active false - deserializer web_deserializer - end - - topic ::Karafka::Web.config.topics.consumers.metrics do - config(active: false) - active false - deserializer web_deserializer - end - - topic ::Karafka::Web.config.topics.errors do - config(active: false) - active false - deserializer web_deserializer - end - end - end - end - - # Subscribes with all needed listeners - def subscribe_to_monitor - # Installs all the consumer related listeners - ::Karafka::Web.config.tracking.consumers.listeners.each do |listener| - ::Karafka.monitor.subscribe(listener) - end - - # Installs all the producer related listeners into Karafka default listener and - # into Karafka::Web listener in case it would be different than the Karafka one - ::Karafka::Web.config.tracking.producers.listeners.each do |listener| - ::Karafka.producer.monitor.subscribe(listener) - - # Do not instrument twice in case only one default producer is used - next if ::Karafka.producer == ::Karafka::Web.producer - - ::Karafka::Web.producer.monitor.subscribe(listener) - end - end - - # In most cases we want to close the producer if possible. - # While we cannot do it easily in user processes and we should rely on WaterDrop - # finalization logic, we can do it in `karafka server` on terminate - # - # In other places, this producer anyhow should not be used. 
- def subscribe_to_close_web_producer - ::Karafka::App.monitor.subscribe('app.terminated') do - # If Web producer is the same as `Karafka.producer` it will do nothing as you can - # call `#close` multiple times without side effects - ::Karafka::Web.producer.close - end - end - end - end - end -end diff --git a/lib/karafka/web/management/extend_boot_file.rb b/lib/karafka/web/management/extend_boot_file.rb deleted file mode 100644 index de36b5de..00000000 --- a/lib/karafka/web/management/extend_boot_file.rb +++ /dev/null @@ -1,37 +0,0 @@ -# frozen_string_literal: true - -module Karafka - module Web - module Management - # Extends the boot file with Web components - class ExtendBootFile < Base - # Code that is needed in the `karafka.rb` to connect Web UI to Karafka - ENABLER_CODE = 'Karafka::Web.enable!' - - # Template with initial Web UI configuration - # Session secret needs to be set per user and per env - SETUP_TEMPLATE = <<~CONFIG.freeze - Karafka::Web.setup do |config| - # You may want to set it per ENV. This value was randomly generated. - config.ui.sessions.secret = '#{SecureRandom.hex(32)}' - end - - #{ENABLER_CODE} - CONFIG - - # Adds needed code - def call - if File.read(Karafka.boot_file).include?(ENABLER_CODE) - puts "Web UI #{already} installed." - else - puts 'Updating the Karafka boot file...' - File.open(Karafka.boot_file, 'a') do |f| - f << "\n#{SETUP_TEMPLATE}\n" - end - puts "Karafka boot file #{successfully} updated." 
- end - end - end - end - end -end diff --git a/lib/karafka/web/management/migrations/0_base.rb b/lib/karafka/web/management/migrations/0_base.rb new file mode 100644 index 00000000..3bb20889 --- /dev/null +++ b/lib/karafka/web/management/migrations/0_base.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + # Namespace for storing migrations of our Web UI topics data + module Migrations + # Base for all our migrations + # + # Each migration **MUST** have a `#migrate` method defined + # Migrations are expected to modify the provided state **IN PLACE** + class Base + include Karafka::Core::Helpers::Time + + class << self + # First version that should **NOT** be affected by this migration + attr_accessor :versions_until + # What resource does it relate it + # One migration should modify only one resource type + attr_accessor :type + + # @param version [String] sem-ver version + # @return [Boolean] is the given migration applicable + def applicable?(version) + version < versions_until + end + + # @param state [Hash] deserialized state to be modified + def migrate(state) + raise NotImplementedError, 'Implement in a subclass' + end + + # @return [Integer] index for sorting. Older migrations are always applied first + def index + instance_method(:migrate) + .source_location + .first + .split('/') + .last + .split('_') + .first + .to_i + end + + # @return [Array] array with migrations sorted from oldest to latest. 
This is + # the order in which they need to be applied + def sorted_descendants + ObjectSpace + .each_object(Class) + .select { |klass| klass < self } + .sort_by(&:index) + end + end + end + end + end + end +end diff --git a/lib/karafka/web/management/migrations/0_set_initial_consumers_metrics.rb b/lib/karafka/web/management/migrations/0_set_initial_consumers_metrics.rb new file mode 100644 index 00000000..415a0ef1 --- /dev/null +++ b/lib/karafka/web/management/migrations/0_set_initial_consumers_metrics.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Migrations + # Initial migration that sets the consumers metrics initial first state. + # This is the basic of metrics as they were when they were introduced. + class SetInitialConsumersMetrics < Base + # Always migrate from empty up + self.versions_until = '0.0.1' + self.type = :consumers_metrics + + # @param state [Hash] initial empty state + def migrate(state) + state.merge!( + aggregated: { + days: [], + hours: [], + minutes: [], + seconds: [] + }, + consumer_groups: { + days: [], + hours: [], + minutes: [], + seconds: [] + }, + dispatched_at: float_now + ) + end + end + end + end + end +end diff --git a/lib/karafka/web/management/migrations/0_set_initial_consumers_state.rb b/lib/karafka/web/management/migrations/0_set_initial_consumers_state.rb new file mode 100644 index 00000000..de0a1dc3 --- /dev/null +++ b/lib/karafka/web/management/migrations/0_set_initial_consumers_state.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Migrations + # Initial migration that sets the consumers state initial first state. + # This is the basic of state as they were when they were introduced. 
+ class SetInitialConsumersState < Base + # Run this only on the first setup + self.versions_until = '0.0.1' + self.type = :consumers_state + + # @param state [Hash] + def migrate(state) + state.merge!( + processes: {}, + stats: { + batches: 0, + messages: 0, + retries: 0, + dead: 0, + busy: 0, + enqueued: 0, + processing: 0, + workers: 0, + processes: 0, + rss: 0, + listeners: 0, + utilization: 0, + errors: 0, + lag_stored: 0, + lag: 0 + }, + schema_state: 'accepted', + dispatched_at: float_now + ) + end + end + end + end + end +end diff --git a/lib/karafka/web/management/migrations/1699543515_fill_missing_received_and_sent_bytes_in_consumers_metrics.rb b/lib/karafka/web/management/migrations/1699543515_fill_missing_received_and_sent_bytes_in_consumers_metrics.rb new file mode 100644 index 00000000..7e8ab71b --- /dev/null +++ b/lib/karafka/web/management/migrations/1699543515_fill_missing_received_and_sent_bytes_in_consumers_metrics.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Migrations + # Adds bytes_sent and bytes_received to all the aggregated metrics samples, so we have + # charts that do not have to fill gaps or check anything + class FillMissingReceivedAndSentBytesInConsumersMetrics < Base + self.versions_until = '1.1.0' + self.type = :consumers_metrics + + # @param state [Hash] metrics state + def migrate(state) + state[:aggregated].each_value do |metrics| + metrics.each do |metric| + metric.last[:bytes_sent] = 0 + metric.last[:bytes_received] = 0 + end + end + end + end + end + end + end +end diff --git a/lib/karafka/web/management/migrations/1699543515_fill_missing_received_and_sent_bytes_in_consumers_state.rb b/lib/karafka/web/management/migrations/1699543515_fill_missing_received_and_sent_bytes_in_consumers_state.rb new file mode 100644 index 00000000..7625eada --- /dev/null +++ b/lib/karafka/web/management/migrations/1699543515_fill_missing_received_and_sent_bytes_in_consumers_state.rb 
@@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Migrations + # Similar to filling in consumers metrics, we initialize this with zeros so it is always + # present as expected + class FillMissingReceivedAndSentBytesInConsumersState < Base + # Network metrics were introduced with schema 1.2.0 + self.versions_until = '1.2.0' + self.type = :consumers_state + + # @param state [Hash] + def migrate(state) + state[:stats][:bytes_sent] = 0 + state[:stats][:bytes_received] = 0 + end + end + end + end + end +end diff --git a/lib/karafka/web/management/migrations/1700234522_introduce_waiting_in_consumers_metrics.rb b/lib/karafka/web/management/migrations/1700234522_introduce_waiting_in_consumers_metrics.rb new file mode 100644 index 00000000..7c224cb2 --- /dev/null +++ b/lib/karafka/web/management/migrations/1700234522_introduce_waiting_in_consumers_metrics.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Migrations + # Introduce waiting in consumers metrics to complement busy and enqueued for jobs metrics + class IntroduceWaitingInConsumersMetrics < Base + self.versions_until = '1.1.1' + self.type = :consumers_metrics + + # @param state [Hash] + def migrate(state) + state[:aggregated].each_value do |metrics| + metrics.each do |metric| + metric.last[:waiting] = 0 + end + end + end + end + end + end + end +end diff --git a/lib/karafka/web/management/migrations/1700234522_introduce_waiting_in_consumers_state.rb b/lib/karafka/web/management/migrations/1700234522_introduce_waiting_in_consumers_state.rb new file mode 100644 index 00000000..2dc16c2b --- /dev/null +++ b/lib/karafka/web/management/migrations/1700234522_introduce_waiting_in_consumers_state.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Migrations + # Introduce waiting in consumers state to complement busy and enqueued for jobs stats +
class IntroduceWaitingInConsumersState < Base + self.versions_until = '1.2.1' + self.type = :consumers_state + + # @param state [Hash] + def migrate(state) + state[:stats][:waiting] = 0 + end + end + end + end + end +end diff --git a/lib/karafka/web/management/migrations/1700234522_remove_processing_from_consumers_metrics.rb b/lib/karafka/web/management/migrations/1700234522_remove_processing_from_consumers_metrics.rb new file mode 100644 index 00000000..a03f00d2 --- /dev/null +++ b/lib/karafka/web/management/migrations/1700234522_remove_processing_from_consumers_metrics.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Migrations + # Moves unused "processing" that was used instead of "busy" in older versions + class RemoveProcessingFromConsumersMetrics < Base + self.versions_until = '1.1.1' + self.type = :consumers_metrics + + # @param state [Hash] + def migrate(state) + state[:aggregated].each_value do |metrics| + metrics.each do |metric| + metric.last.delete(:processing) + end + end + end + end + end + end + end +end diff --git a/lib/karafka/web/management/migrations/1700234522_remove_processing_from_consumers_state.rb b/lib/karafka/web/management/migrations/1700234522_remove_processing_from_consumers_state.rb new file mode 100644 index 00000000..a19c18d3 --- /dev/null +++ b/lib/karafka/web/management/migrations/1700234522_remove_processing_from_consumers_state.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Management + module Migrations + # Moves unused "processing" that was used instead of "busy" in older versions + class RemoveProcessingFromConsumersState < Base + self.versions_until = '1.2.1' + self.type = :consumers_state + + # @param state [Hash] + def migrate(state) + state[:stats].delete(:processing) + end + end + end + end + end +end diff --git a/lib/karafka/web/management/migrator.rb b/lib/karafka/web/management/migrator.rb new file mode 100644 
index 00000000..16ba0b31 --- /dev/null +++ b/lib/karafka/web/management/migrator.rb @@ -0,0 +1,117 @@ +# frozen_string_literal: true + +module Karafka + module Web + # Namespace for all cross-context management operations that are needed to make sure everything + # operates as expected. + module Management + # Migrator used to run migrations on the states topics + # There are cases during upgrades, where extra fields may be added and other data, so in + # order not to deal with cases of some information missing, we can just migrate the data + # and ensure all the fields that we require after upgrade are present + # + # Migrations are similar to the ones that are present in Ruby on Rails conceptually. + # + # We take our most recent state and we can alter it "in place". The altered result will be + # passed to the consecutive migrations and then republished back to Kafka. This allows us + # to manage Web UI aggregated data easily. + # + # @note We do not migrate the consumers reports for the following reasons: + # - it would be extremely hard to migrate them as they are being published and can still + # be published when the migrations are running + # - we would have to run migrations on each message + # - we already have a mechanism in the processing consumer that skips outdated records for + # rolling migrations + # - those records are short-lived and the expectation is for the user not to run old and + # new consumers together for an extensive period of time + # + # @note It will raise an error if we try to run migrations but the schemas we want to operate + # on are newer. This will prevent us from damaging the data and ensures that we only move + # forward with the migrations. This can happen in case of a rolling upgrade, where an old + # instance that is going to be terminated would get a temporary assignment with already + # migrated state.
+ class Migrator + # Include this so we can reference the schema versions easily + include Processing::Consumers::Aggregators + + # Picks needed data from Kafka, alters it with migrations and puts the updated data + # back into Kafka. This ensures that our Web UI topics that hold aggregated data are + # always aligned with the Web UI expectations + # + # @note To simplify things we always migrate and update all the topics data even if only + # part was migrated. That way we always ensure that all the elements are up to date + def call + ensure_migrable! + # If migrating returns `false` it means no migrations happened + migrate && publish + end + + private + + # Raise an exception if there would be an attempt to run migrations on a newer schema for + # any states we manage. We can only move forward, so attempt to migrate for example from + # 1.0.0 to 0.9.0 should be considered an error. + def ensure_migrable! + if consumers_state[:schema_version] > State::SCHEMA_VERSION + raise( + Errors::Management::IncompatibleSchemaError, + 'consumers state newer than supported' + ) + end + + if consumers_metrics[:schema_version] > Metrics::SCHEMA_VERSION + raise( + Errors::Management::IncompatibleSchemaError, + 'consumers metrics newer than supported' + ) + end + + true + end + + # Applies migrations if needed and mutates the in-memory data + # + # @return [Boolean] were there any migrations applied + def migrate + any_migrations = false + + Migrations::Base.sorted_descendants.each do |migration_class| + data = send(migration_class.type) + + next unless migration_class.applicable?(data[:schema_version]) + + migration_class.new.migrate(data) + + any_migrations = true + end + + any_migrations + end + + # Publishes all the migrated states records + def publish + consumers_state[:schema_version] = State::SCHEMA_VERSION + consumers_metrics[:schema_version] = Metrics::SCHEMA_VERSION + + # Migrator may run in the context of the processing consumer prior to any states + # fetching
related to processing. We use sync to make sure, that the following + # processing related states fetched fetch the new states + Processing::Publisher.publish!( + consumers_state, + consumers_metrics + ) + end + + # @return [Hash] current consumers states most recent state + def consumers_state + @consumers_state ||= Processing::Consumers::State.current! + end + + # @return [Hash] current consumers metrics most recent state + def consumers_metrics + @consumers_metrics ||= Processing::Consumers::Metrics.current! + end + end + end + end +end diff --git a/lib/karafka/web/processing/consumer.rb b/lib/karafka/web/processing/consumer.rb index 51280092..6acd2b42 100644 --- a/lib/karafka/web/processing/consumer.rb +++ b/lib/karafka/web/processing/consumer.rb @@ -11,35 +11,19 @@ module Processing class Consumer < Karafka::BaseConsumer include ::Karafka::Core::Helpers::Time - # @param args [Object] all the arguments `Karafka::BaseConsumer` accepts by default - def initialize(*args) - super - - @flush_interval = ::Karafka::Web.config.processing.interval - - @schema_manager = Consumers::SchemaManager.new - @state_aggregator = Consumers::Aggregators::State.new(@schema_manager) - @state_contract = Consumers::Contracts::State.new - - @metrics_aggregator = Consumers::Aggregators::Metrics.new - @metrics_contract = Consumers::Contracts::Metrics.new - - # We set this that way so we report with first batch and so we report as fast as possible - @flushed_at = monotonic_now - @flush_interval - @established = false - end - # Aggregates consumers state into a single current state representation def consume + bootstrap! + consumers_messages = messages.select { |message| message.payload[:type] == 'consumer' } # If there is even one incompatible message, we need to stop consumers_messages.each do |message| - case @schema_manager.call(message) + case @reports_schema_manager.call(message) when :current true when :newer - @schema_manager.invalidate! + @reports_schema_manager.invalidate! 
dispatch @@ -49,6 +33,9 @@ def consume # requests without significant or any impact on data quality but without having to # worry about backwards compatibility. Errors are tracked independently, so it should # not be a problem. + # + # In case user wants to do a rolling upgrade, the user docs state that this can happen + # and it is something user should be aware when :older next else @@ -83,6 +70,35 @@ def shutdown private + # Prepares all the initial objects and ensures all the needed states are as expected + # @note We do not run it in the `#initialize` anymore as `#initialize` happens before + # the work starts so errors there are handled differently. We want this initial setup + # to operate and fail (if needed) during messages consumption phase + def bootstrap! + return if @bootstrapped + + # Run the migrator on the assignment to make sure all our data is as expected + # While users may run the CLI command this is a fail-safe for zero downtime deployments + # It costs us two extra requests to Kafka topics as we migrate prior to fetching the + # states to the aggregators but this is done on purpose not to mix those two contexts. + Management::Migrator.new.call + + @flush_interval = ::Karafka::Web.config.processing.interval + + @reports_schema_manager = Consumers::SchemaManager.new + @state_aggregator = Consumers::Aggregators::State.new(@reports_schema_manager) + @state_contract = Consumers::Contracts::State.new + + @metrics_aggregator = Consumers::Aggregators::Metrics.new + @metrics_contract = Consumers::Contracts::Metrics.new + + # We set this that way so we report with first batch and so we report as fast as possible + @flushed_at = monotonic_now - @flush_interval + @established = false + + @bootstrapped = true + end + # Flushes the state of the Web-UI to the DB def dispatch return unless @established @@ -114,24 +130,9 @@ def validate! 
def flush @flushed_at = monotonic_now - ::Karafka::Web.producer.produce_many_async( - [ - { - topic: Karafka::Web.config.topics.consumers.states, - payload: Zlib::Deflate.deflate(@state.to_json), - # This will ensure that the consumer states are compacted - key: Karafka::Web.config.topics.consumers.states, - partition: 0, - headers: { 'zlib' => 'true' } - }, - { - topic: Karafka::Web.config.topics.consumers.metrics, - payload: Zlib::Deflate.deflate(@metrics.to_json), - key: Karafka::Web.config.topics.consumers.metrics, - partition: 0, - headers: { 'zlib' => 'true' } - } - ] + Publisher.publish( + @state, + @metrics ) end end diff --git a/lib/karafka/web/processing/consumers/aggregators/metrics.rb b/lib/karafka/web/processing/consumers/aggregators/metrics.rb index d9ee9c51..b1cfbe7f 100644 --- a/lib/karafka/web/processing/consumers/aggregators/metrics.rb +++ b/lib/karafka/web/processing/consumers/aggregators/metrics.rb @@ -10,9 +10,8 @@ module Aggregators # values for charts and metrics class Metrics < Base # Current schema version - # This can be used in the future for detecting incompatible changes and writing - # migrations - SCHEMA_VERSION = '1.0.0' + # This is used for detecting incompatible changes and writing migrations + SCHEMA_VERSION = '1.1.1' def initialize super diff --git a/lib/karafka/web/processing/consumers/aggregators/state.rb b/lib/karafka/web/processing/consumers/aggregators/state.rb index 1b542e0c..b4367ac6 100644 --- a/lib/karafka/web/processing/consumers/aggregators/state.rb +++ b/lib/karafka/web/processing/consumers/aggregators/state.rb @@ -20,7 +20,7 @@ class State < Base # Current schema version # This can be used in the future for detecting incompatible changes and writing # migrations - SCHEMA_VERSION = '1.1.0' + SCHEMA_VERSION = '1.2.1' # @param schema_manager [Karafka::Web::Processing::Consumers::SchemaManager] schema # manager that tracks the compatibility of schemas. 
@@ -129,6 +129,8 @@ def refresh_current_stats stats[:listeners] = 0 stats[:lag] = 0 stats[:lag_stored] = 0 + stats[:bytes_received] = 0 + stats[:bytes_sent] = 0 utilization = 0 @active_reports @@ -149,6 +151,8 @@ def refresh_current_stats stats[:busy] += report_stats[:busy] stats[:enqueued] += report_stats[:enqueued] stats[:workers] += report_process[:workers] || 0 + stats[:bytes_received] += report_process[:bytes_received] || 0 + stats[:bytes_sent] += report_process[:bytes_sent] || 0 stats[:listeners] += report_process[:listeners] || 0 stats[:processes] += 1 stats[:rss] += report_process[:memory_usage] diff --git a/lib/karafka/web/processing/publisher.rb b/lib/karafka/web/processing/publisher.rb new file mode 100644 index 00000000..37956317 --- /dev/null +++ b/lib/karafka/web/processing/publisher.rb @@ -0,0 +1,59 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Processing + # Object responsible for publishing states data back into Kafka so it can be used in the UI + class Publisher + class << self + # Publishes data back to Kafka in an async fashion + # + # @param consumers_state [Hash] consumers current state + # @param consumers_metrics [Hash] consumers current metrics + def publish(consumers_state, consumers_metrics) + ::Karafka::Web.producer.produce_many_async( + prepare_data(consumers_state, consumers_metrics) + ) + end + + # Publishes data back to Kafka in a sync fashion + # + # @param consumers_state [Hash] consumers current state + # @param consumers_metrics [Hash] consumers current metrics + def publish!(consumers_state, consumers_metrics) + ::Karafka::Web.producer.produce_many_sync( + prepare_data(consumers_state, consumers_metrics) + ) + end + + private + + # Converts the states into format that we can dispatch to Kafka + # + # @param consumers_state [Hash] consumers current state + # @param consumers_metrics [Hash] consumers current metrics + # @return [Array] + def prepare_data(consumers_state, consumers_metrics) + [ + { + 
topic: Karafka::Web.config.topics.consumers.states, + payload: Zlib::Deflate.deflate(consumers_state.to_json), + # This will ensure that the consumer states are compacted + key: Karafka::Web.config.topics.consumers.states, + partition: 0, + headers: { 'zlib' => 'true' } + }, + { + topic: Karafka::Web.config.topics.consumers.metrics, + payload: Zlib::Deflate.deflate(consumers_metrics.to_json), + key: Karafka::Web.config.topics.consumers.metrics, + partition: 0, + headers: { 'zlib' => 'true' } + } + ] + end + end + end + end + end +end diff --git a/lib/karafka/web/tracking/consumers/contracts/job.rb b/lib/karafka/web/tracking/consumers/contracts/job.rb index e036dd1d..2df7ba28 100644 --- a/lib/karafka/web/tracking/consumers/contracts/job.rb +++ b/lib/karafka/web/tracking/consumers/contracts/job.rb @@ -11,7 +11,7 @@ class Job < Web::Contracts::Base required(:consumer) { |val| val.is_a?(String) } required(:consumer_group) { |val| val.is_a?(String) } - required(:started_at) { |val| val.is_a?(Float) && val >= 0 } + required(:updated_at) { |val| val.is_a?(Float) && val >= 0 } required(:topic) { |val| val.is_a?(String) } required(:partition) { |val| val.is_a?(Integer) && val >= 0 } required(:first_offset) { |val| val.is_a?(Integer) && (val >= 0 || val == -1001) } @@ -23,6 +23,7 @@ class Job < Web::Contracts::Base # -1 can be here for workless flows required(:consumption_lag) { |val| val.is_a?(Integer) && (val >= 0 || val == -1) } required(:processing_lag) { |val| val.is_a?(Integer) && (val >= 0 || val == -1) } + required(:status) { |val| %w[running pending].include?(val) } end end end diff --git a/lib/karafka/web/tracking/consumers/contracts/partition.rb b/lib/karafka/web/tracking/consumers/contracts/partition.rb index 0fd4549f..9fd439df 100644 --- a/lib/karafka/web/tracking/consumers/contracts/partition.rb +++ b/lib/karafka/web/tracking/consumers/contracts/partition.rb @@ -20,6 +20,7 @@ class Partition < Web::Contracts::Base required(:stored_offset_fd) { |val| 
val.is_a?(Integer) && val >= 0 } required(:fetch_state) { |val| val.is_a?(String) && !val.empty? } required(:poll_state) { |val| val.is_a?(String) && !val.empty? } + required(:poll_state_ch) { |val| val.is_a?(Integer) && val >= 0 } required(:hi_offset) { |val| val.is_a?(Integer) } required(:hi_offset_fd) { |val| val.is_a?(Integer) && val >= 0 } required(:lo_offset) { |val| val.is_a?(Integer) } diff --git a/lib/karafka/web/tracking/consumers/contracts/report.rb b/lib/karafka/web/tracking/consumers/contracts/report.rb index 6f82b411..4c5d0c3e 100644 --- a/lib/karafka/web/tracking/consumers/contracts/report.rb +++ b/lib/karafka/web/tracking/consumers/contracts/report.rb @@ -52,6 +52,7 @@ class Report < Web::Contracts::Base nested(:stats) do required(:busy) { |val| val.is_a?(Integer) && val >= 0 } required(:enqueued) { |val| val.is_a?(Integer) && val >= 0 } + required(:waiting) { |val| val.is_a?(Integer) && val >= 0 } required(:utilization) { |val| val.is_a?(Numeric) && val >= 0 } nested(:total) do diff --git a/lib/karafka/web/tracking/consumers/contracts/subscription_group.rb b/lib/karafka/web/tracking/consumers/contracts/subscription_group.rb index 1b3e698c..f5322ddd 100644 --- a/lib/karafka/web/tracking/consumers/contracts/subscription_group.rb +++ b/lib/karafka/web/tracking/consumers/contracts/subscription_group.rb @@ -12,7 +12,16 @@ class SubscriptionGroup < Web::Contracts::Base required(:id) { |val| val.is_a?(String) && !val.empty? } required(:topics) { |val| val.is_a?(Hash) } - required(:state) { |val| val.is_a?(Hash) } + + nested(:state) do + required(:state) { |val| val.is_a?(String) && !val.empty? } + required(:join_state) { |val| val.is_a?(String) && !val.empty? } + required(:stateage) { |val| val.is_a?(Integer) && val >= 0 } + required(:rebalance_age) { |val| val.is_a?(Integer) && val >= 0 } + required(:rebalance_cnt) { |val| val.is_a?(Integer) && val >= 0 } + required(:rebalance_reason) { |val| val.is_a?(String) && !val.empty? 
} + required(:poll_age) { |val| val.is_a?(Numeric) && val >= 0 } + end virtual do |data, errors| next unless errors.empty? diff --git a/lib/karafka/web/tracking/consumers/listeners/connections.rb b/lib/karafka/web/tracking/consumers/listeners/connections.rb new file mode 100644 index 00000000..8cc3b129 --- /dev/null +++ b/lib/karafka/web/tracking/consumers/listeners/connections.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Tracking + module Consumers + module Listeners + # Listener for listening on connections related events like polling, etc + class Connections < Base + # Set first poll time before we start fetching so we always have a poll time + # and we don't have to worry about it being always available + # @param event [Karafka::Core::Monitoring::Event] + def on_connection_listener_before_fetch_loop(event) + on_connection_listener_fetch_loop_received(event) + end + + # Tracks the moment a poll happened on a given subscription group + # + # @param event [Karafka::Core::Monitoring::Event] + def on_connection_listener_fetch_loop_received(event) + sg_id = event[:subscription_group].id + + track do |sampler| + sampler.subscription_groups[sg_id] = { + polled_at: monotonic_now + } + end + end + end + end + end + end + end +end diff --git a/lib/karafka/web/tracking/consumers/listeners/pausing.rb b/lib/karafka/web/tracking/consumers/listeners/pausing.rb index 66d43a10..99529f6e 100644 --- a/lib/karafka/web/tracking/consumers/listeners/pausing.rb +++ b/lib/karafka/web/tracking/consumers/listeners/pausing.rb @@ -11,9 +11,12 @@ class Pausing < Base # Indicate pause # # @param event [Karafka::Core::Monitoring::Event] - def on_client_pause(event) + def on_consumer_consuming_pause(event) track do |sampler| - sampler.pauses << pause_id(event) + sampler.pauses[pause_id(event)] = { + timeout: event[:timeout], + paused_till: monotonic_now + event[:timeout] + } end end @@ -33,9 +36,9 @@ def on_client_resume(event) def pause_id(event) 
topic = event[:topic] partition = event[:partition] - consumer_group_id = event[:subscription_group].consumer_group.id + subscription_group_id = event[:subscription_group].id - [consumer_group_id, topic, partition].join('-') + [subscription_group_id, topic, partition].join('-') end end end diff --git a/lib/karafka/web/tracking/consumers/listeners/processing.rb b/lib/karafka/web/tracking/consumers/listeners/processing.rb index f83b9351..1af4978f 100644 --- a/lib/karafka/web/tracking/consumers/listeners/processing.rb +++ b/lib/karafka/web/tracking/consumers/listeners/processing.rb @@ -12,10 +12,32 @@ class Processing < Base # @param event [Karafka::Core::Monitoring::Event] def on_worker_processed(event) track do |sampler| - sampler.times[:total] << event[:time] + sampler.windows.m1[:processed_total_time] << event[:time] end end + # We do not track idle jobs here because they are internal and not user-facing + %i[ + consume + revoked + shutdown + ].each do |action| + # Tracks the job that is going to be scheduled so we can also display pending jobs + class_eval <<~RUBY, __FILE__, __LINE__ + 1 + # @param event [Karafka::Core::Monitoring::Event] + def on_consumer_before_schedule_#{action}(event) + consumer = event.payload[:caller] + jid = job_id(consumer, '#{action}') + job_details = job_details(consumer, '#{action}') + job_details[:status] = 'pending' + + track do |sampler| + sampler.jobs[jid] = job_details + end + end + RUBY + end + # Counts work execution and processing states in consumer instances # # @param event [Karafka::Core::Monitoring::Event] @@ -24,6 +46,7 @@ def on_consumer_consume(event) messages_count = consumer.messages.size jid = job_id(consumer, 'consume') job_details = job_details(consumer, 'consume') + job_details[:status] = 'running' track do |sampler| # We count batches and messages prior to the execution, so they are tracked even @@ -71,15 +94,10 @@ def on_error_occurred(event) # @param event [Karafka::Core::Monitoring::Event] def 
on_consumer_consumed(event) consumer = event.payload[:caller] - topic = consumer.topic - consumer_group_id = topic.consumer_group.id - messages_count = consumer.messages.size - time = event[:time] jid = job_id(consumer, 'consume') track do |sampler| sampler.jobs.delete(jid) - sampler.times[consumer_group_id] << [topic.name, time, messages_count] end end @@ -152,7 +170,7 @@ def job_id(consumer, type) # more details. def job_details(consumer, type) { - started_at: float_now, + updated_at: float_now, topic: consumer.topic.name, partition: consumer.partition, first_offset: consumer.messages.metadata.first_offset, diff --git a/lib/karafka/web/tracking/consumers/listeners/statistics.rb b/lib/karafka/web/tracking/consumers/listeners/statistics.rb index 8bd6f2b5..a2b794be 100644 --- a/lib/karafka/web/tracking/consumers/listeners/statistics.rb +++ b/lib/karafka/web/tracking/consumers/listeners/statistics.rb @@ -18,6 +18,8 @@ def on_statistics_emitted(event) sg_id = event[:subscription_group_id] sg_details = extract_sg_details(sg_id, cgrp) + track_transfers(statistics) + # More than one subscription group from the same consumer group may be reporting # almost the same time. To prevent corruption of partial data, we put everything here # in track as we merge data from multiple subscription groups @@ -42,10 +44,11 @@ def on_statistics_emitted(event) } topic_details[:partitions][pt_id] = metrics.merge( - id: pt_id, + id: pt_id + ).merge( # Pauses are stored on a consumer group since we do not process same topic # twice in the multiple subscription groups - poll_state: poll_state(cg_id, topic_name, pt_id) + poll_details(sg_id, topic_name, pt_id) ) end end @@ -61,6 +64,26 @@ def on_statistics_emitted(event) private + # Tracks network transfers from and to the client using a 1 minute rolling window + # + # @param statistics [Hash] statistics hash + def track_transfers(statistics) + brokers = statistics.fetch('brokers', {}) + + return if brokers.empty? 
+ + track do |sampler| + client_name = statistics.fetch('name') + + brokers.each do |broker_name, values| + scope_name = "#{client_name}-#{broker_name}" + + sampler.windows.m1["#{scope_name}-rxbytes"] << values.fetch('rxbytes', 0) + sampler.windows.m1["#{scope_name}-txbytes"] << values.fetch('txbytes', 0) + end + end + end + # Extracts basic consumer group related details # @param sg_id [String] # @param sg_stats [Hash] @@ -75,7 +98,7 @@ def extract_sg_details(sg_id, sg_stats) 'rebalance_age', 'rebalance_cnt', 'rebalance_reason' - ), + ).transform_keys(&:to_sym), topics: {} } end @@ -132,14 +155,23 @@ def extract_partition_metrics(pt_stats) metrics end - # @param cg_id [String] + # @param sg_id [String] subscription group id # @param topic_name [String] - # @param pt_id [Integer] + # @param pt_id [Integer] partition id # @return [String] poll state / is partition paused or not - def poll_state(cg_id, topic_name, pt_id) - pause_id = [cg_id, topic_name, pt_id].join('-') + def poll_details(sg_id, topic_name, pt_id) + pause_id = [sg_id, topic_name, pt_id].join('-') + + details = { poll_state: 'active', poll_state_ch: 0 } - sampler.pauses.include?(pause_id) ? 
'paused' : 'active' + pause_details = sampler.pauses[pause_id] + + return details unless pause_details + + { + poll_state: 'paused', + poll_state_ch: [(pause_details.fetch(:paused_till) - monotonic_now).round, 0].max + } end end end diff --git a/lib/karafka/web/tracking/consumers/sampler.rb b/lib/karafka/web/tracking/consumers/sampler.rb index a1c41ef3..857213bf 100644 --- a/lib/karafka/web/tracking/consumers/sampler.rb +++ b/lib/karafka/web/tracking/consumers/sampler.rb @@ -9,18 +9,13 @@ module Consumers class Sampler < Tracking::Sampler include ::Karafka::Core::Helpers::Time - attr_reader :counters, :consumer_groups, :errors, :times, :pauses, :jobs + attr_reader :counters, :consumer_groups, :subscription_groups, :errors, + :pauses, :jobs, :windows # Current schema version - # This can be used in the future for detecting incompatible changes and writing - # migrations - SCHEMA_VERSION = '1.2.3' - - # 60 seconds window for time tracked window-based metrics - TIMES_TTL = 60 - - # Times ttl in ms - TIMES_TTL_MS = TIMES_TTL * 1_000 + # This is used for detecting incompatible changes and not using outdated data during + # upgrades + SCHEMA_VERSION = '1.2.8' # Counters that count events occurrences during the given window COUNTERS_BASE = { @@ -36,17 +31,18 @@ class Sampler < Tracking::Sampler dead: 0 }.freeze - private_constant :TIMES_TTL, :TIMES_TTL_MS, :COUNTERS_BASE + private_constant :COUNTERS_BASE def initialize super + @windows = Helpers::Ttls::Windows.new @counters = COUNTERS_BASE.dup - @times = TtlHash.new(TIMES_TTL_MS) @consumer_groups = {} + @subscription_groups = {} @errors = [] @started_at = float_now - @pauses = Set.new + @pauses = {} @jobs = {} @shell = MemoizedShell.new @memory_total_usage = 0 @@ -81,7 +77,9 @@ def to_report cpus: cpus, threads: threads, cpu_usage: @cpu_usage, - tags: Karafka::Process.tags + tags: Karafka::Process.tags, + bytes_received: bytes_received, + bytes_sent: bytes_sent }, versions: { @@ -98,7 +96,7 @@ def to_report utilization: 
utilization ).merge(total: @counters), - consumer_groups: @consumer_groups, + consumer_groups: enriched_consumer_groups, jobs: jobs.values } end @@ -130,15 +128,16 @@ def sample # utilized all the time within the given time window. 0% means, nothing is happening # most if not all the time. def utilization - return 0 if times[:total].empty? + totals = windows.m1[:processed_total_time] + + return 0 if totals.empty? - # Max times ttl timefactor = float_now - @started_at - timefactor = timefactor > TIMES_TTL ? TIMES_TTL : timefactor + timefactor = timefactor > 60 ? 60 : timefactor # We divide by 1_000 to convert from milliseconds # We multiply by 100 to have it in % scale - times[:total].sum / 1_000 / workers / timefactor * 100 + totals.sum / 1_000 / workers / timefactor * 100 end # @return [Integer] number of listeners @@ -175,9 +174,14 @@ def memory_usage # @return [Hash] job queue statistics def jobs_queue_statistics # We return empty stats in case jobs queue is not yet initialized + base = Karafka::Server.jobs_queue&.statistics || { busy: 0, enqueued: 0 } + stats = base.slice(:busy, :enqueued, :waiting) + stats[:waiting] ||= 0 # busy - represents number of jobs that are being executed currently - # enqueued - represents number of jobs that are enqueued to be processed - Karafka::Server.jobs_queue&.statistics || { busy: 0, enqueued: 0 } + # enqueued - jobs that are in the queue but not being picked up yet + # waiting - jobs that are not scheduled on the queue but will be + # be enqueued in case of advanced schedulers + stats end # Total memory used in the OS @@ -265,6 +269,48 @@ def memory_threads_ps @memory_threads_ps = false end end + + # Consumer group details need to be enriched with details about polling that comes from + # Karafka level. It is also time based, hence we need to materialize it only at the + # moment of message dispatch to have it accurate. 
+ def enriched_consumer_groups + @consumer_groups.each_value do |cg_details| + cg_details.each do + cg_details.fetch(:subscription_groups, {}).each do |sg_id, sg_details| + # This should be always available, since we subscription group polled at time + # is first initialized before we start polling, there should be no case where + # we have statistics about a given subscription group but we do not have the + # last polling time + polled_at = subscription_groups.fetch(sg_id).fetch(:polled_at) + sg_details[:state][:poll_age] = monotonic_now - polled_at + end + end + end + + @consumer_groups + end + + # @return [Integer] number of bytes received per second out of a one minute time window + # by all the consumers + # @note We use one minute window to compensate for cases where metrics would be reported + # or recorded faster or slower. This normalizes data + def bytes_received + @windows + .m1 + .stats_from { |k, _v| k.end_with?('rxbytes') } + .rps + .round + end + + # @return [Integer] number of bytes sent per second out of a one minute time window by + # all the consumers + def bytes_sent + @windows + .m1 + .stats_from { |k, _v| k.end_with?('txbytes') } + .rps + .round + end end end end diff --git a/lib/karafka/web/tracking/helpers/ttls/array.rb b/lib/karafka/web/tracking/helpers/ttls/array.rb new file mode 100644 index 00000000..6d6097d8 --- /dev/null +++ b/lib/karafka/web/tracking/helpers/ttls/array.rb @@ -0,0 +1,72 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Tracking + module Helpers + # Namespace for time sensitive related buffers and operators + module Ttls + # Array that allows us to store data points that expire over time automatically. 
+ class Array + include ::Karafka::Core::Helpers::Time + include Enumerable + + # @param ttl [Integer] milliseconds ttl + def initialize(ttl) + @ttl = ttl + @accu = [] + end + + # Iterates over only active elements + def each + clear + + @accu.each do |sample| + yield sample[:value] + end + end + + # @param value [Object] adds value to the array + # @return [Object] added element + def <<(value) + @accu << { value: value, added_at: monotonic_now } + + clear + + value + end + + # @return [Boolean] is the array empty + def empty? + clear + @accu.empty? + end + + # Samples that are within our TTL time window with the times + # + # @return [Hash] + def samples + clear + @accu + end + + # @return [::Array] pure array version with only active elements + def to_a + clear + super + end + + private + + # Evicts outdated samples + def clear + @accu.delete_if do |sample| + monotonic_now - sample[:added_at] > @ttl + end + end + end + end + end + end + end +end diff --git a/lib/karafka/web/tracking/helpers/ttls/hash.rb b/lib/karafka/web/tracking/helpers/ttls/hash.rb new file mode 100644 index 00000000..09796e0d --- /dev/null +++ b/lib/karafka/web/tracking/helpers/ttls/hash.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Tracking + module Helpers + module Ttls + # Hash that accumulates data that has an expiration date (ttl) + # Used to keep track of metrics in a window + class Hash < Hash + # @param ttl [Integer] milliseconds ttl + def initialize(ttl) + super() { |k, v| k[v] = Ttls::Array.new(ttl) } + end + + # Takes a block where we provide a hash select filtering to select keys we are + # interested in using for aggregated stats. 
Once filtered, builds a Stats object out + # of the candidates + # + # @param block [Proc] block for selection of elements for stats + # @yieldparam [String] key + # @yieldparam [Ttls::Array] samples + # @return [Stats] + def stats_from(&block) + Stats.new( + select(&block) + ) + end + end + end + end + end + end +end diff --git a/lib/karafka/web/tracking/helpers/ttls/stats.rb b/lib/karafka/web/tracking/helpers/ttls/stats.rb new file mode 100644 index 00000000..3c816f47 --- /dev/null +++ b/lib/karafka/web/tracking/helpers/ttls/stats.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Tracking + module Helpers + module Ttls + # Object that simplifies computing aggregated statistics out of ttl data + # For TTL based operations we may collect samples from multiple consumers/producers etc + # but in the end we are interested in the collective result of the whole process. + # + # For example when we talk about data received from Kafka, we want to materialize total + # number of bytes and not bytes per given client connection. This layer simplifies this + # by doing necessary aggregations and providing the final results + class Stats + # @param ttls_hash [Ttls::Hash, Hash] hash with window based samples + def initialize(ttls_hash) + @data = ttls_hash + .values + .map(&:samples) + .map(&:to_a) + .delete_if { |samples| samples.size < 2 } + .map { |samples| samples.map(&:values) } + end + + # Computes the rate out of the samples provided on a per second basis. 
The samples need + # to come from the window aggregations + # + # @return [Float] per second rate value + def rps + sub_results = @data.map do |samples| + oldest = samples.first + newest = samples.last + + value = oldest[0] - newest[0] + # Convert to seconds as we want to have it in a 1 sec pace + time = (oldest[1] - newest[1]) / 1_000 + + value / time.to_f + end + + sub_results.flatten.sum + end + end + end + end + end + end +end diff --git a/lib/karafka/web/tracking/helpers/ttls/windows.rb b/lib/karafka/web/tracking/helpers/ttls/windows.rb new file mode 100644 index 00000000..eca45e6c --- /dev/null +++ b/lib/karafka/web/tracking/helpers/ttls/windows.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Tracking + module Helpers + module Ttls + # Object used to track process metrics in time windows. Those are shared, meaning they do + # not refer to particular metric type but allow us to store whatever we want. + # + # We have following time windows: + # - m1 - one minute big + # - m5 - five minute big + Windows = Struct.new(:m1, :m5) do + # @return [Ttls::Windows] + def initialize + super( + Ttls::Hash.new(60 * 1_000), + Ttls::Hash.new(5 * 60 * 1_000) + ) + end + + # Clears the TTLs windows + def clear + values.each(&:clear) + end + end + end + end + end + end +end diff --git a/lib/karafka/web/tracking/ttl_array.rb b/lib/karafka/web/tracking/ttl_array.rb deleted file mode 100644 index 87523300..00000000 --- a/lib/karafka/web/tracking/ttl_array.rb +++ /dev/null @@ -1,59 +0,0 @@ -# frozen_string_literal: true - -module Karafka - module Web - module Tracking - # Array that allows us to store data points that expire over time automatically. 
- class TtlArray - include ::Karafka::Core::Helpers::Time - include Enumerable - - # @param ttl [Integer] milliseconds ttl - def initialize(ttl) - @ttl = ttl - @accu = [] - end - - # Iterates over only active elements - def each - clear - - @accu.each do |sample| - yield sample[:value] - end - end - - # @param value [Object] adds value to the array - # @return [Object] added element - def <<(value) - @accu << { value: value, added_at: monotonic_now } - - clear - - value - end - - # @return [Boolean] is the array empty - def empty? - clear - @accu.empty? - end - - # @return [Array] pure array version with only active elements - def to_a - clear - super - end - - private - - # Evicts outdated samples - def clear - @accu.delete_if do |sample| - monotonic_now - sample[:added_at] > @ttl - end - end - end - end - end -end diff --git a/lib/karafka/web/tracking/ttl_hash.rb b/lib/karafka/web/tracking/ttl_hash.rb deleted file mode 100644 index 23c8c744..00000000 --- a/lib/karafka/web/tracking/ttl_hash.rb +++ /dev/null @@ -1,16 +0,0 @@ -# frozen_string_literal: true - -module Karafka - module Web - module Tracking - # Hash that accumulates data that has an expiration date (ttl) - # Used to keep track of metrics in a window - class TtlHash < Hash - # @param ttl [Integer] milliseconds ttl - def initialize(ttl) - super() { |k, v| k[v] = TtlArray.new(ttl) } - end - end - end - end -end diff --git a/lib/karafka/web/ui/app.rb b/lib/karafka/web/ui/app.rb index d9b9a4ff..57bdf4d6 100644 --- a/lib/karafka/web/ui/app.rb +++ b/lib/karafka/web/ui/app.rb @@ -48,9 +48,18 @@ class App < Base end end - r.get 'jobs' do + r.on 'jobs' do controller = Controllers::Jobs.new(params) - controller.index + + r.get 'running' do + controller.running + end + + r.get 'pending' do + controller.pending + end + + r.redirect root_path('jobs/running') end r.on 'routing' do @@ -65,9 +74,18 @@ class App < Base end end - r.get 'cluster' do + r.on 'cluster' do controller = Controllers::Cluster.new(params) - 
controller.index + + r.get 'brokers' do + controller.brokers + end + + r.get 'topics' do + controller.topics + end + + r.redirect root_path('cluster/brokers') end r.on 'errors' do diff --git a/lib/karafka/web/ui/base.rb b/lib/karafka/web/ui/base.rb index cfb71fe3..90182be6 100644 --- a/lib/karafka/web/ui/base.rb +++ b/lib/karafka/web/ui/base.rb @@ -55,10 +55,16 @@ class Base < Roda plugin :custom_block_results - handle_block_result Controllers::Responses::Data do |result| + handle_block_result Controllers::Responses::Render do |result| render_response(result) end + handle_block_result Controllers::Responses::Deny do + @error = true + response.status = 403 + view 'shared/exceptions/not_allowed' + end + # Redirect either to referer back or to the desired path handle_block_result Controllers::Responses::Redirect do |result| # Map redirect flashes (if any) to Roda flash messages @@ -67,6 +73,12 @@ class Base < Roda response.redirect result.back? ? request.referer : root_path(result.path) end + handle_block_result Controllers::Responses::File do |result| + response.headers['Content-Type'] = 'application/octet-stream' + response.headers['Content-Disposition'] = "attachment; filename=\"#{result.file_name}\"" + response.write result.content + end + # Display appropriate error specific to a given error type plugin :error_handler, classes: [ ::Rdkafka::RdkafkaError, @@ -111,10 +123,14 @@ class Base < Roda raise Errors::Ui::NotFoundError end - # Allows us to build current path with additional params + # Allows us to build current path with additional params + it merges existing params into + # the query data. Query data takes priority over request params. 
# @param query_data [Hash] query params we want to add to the current path path :current do |query_data = {}| q = query_data + .transform_values(&:to_s) + .transform_keys(&:to_s) + .then { |candidates| request.params.merge(candidates) } .select { |_, v| v } .map { |k, v| "#{k}=#{CGI.escape(v.to_s)}" } .join('&') diff --git a/lib/karafka/web/ui/controllers/base.rb b/lib/karafka/web/ui/controllers/base.rb index b8dfb509..87da2a0a 100644 --- a/lib/karafka/web/ui/controllers/base.rb +++ b/lib/karafka/web/ui/controllers/base.rb @@ -7,6 +7,13 @@ module Ui module Controllers # Base controller from which all the controllers should inherit. class Base + class << self + # Attributes on which we can sort in a given controller + attr_accessor :sortable_attributes + end + + self.sortable_attributes = [] + # @param params [Karafka::Web::Ui::Controllers::Requests::Params] request parameters def initialize(params) @params = params @@ -14,10 +21,10 @@ def initialize(params) private - # Builds the respond data object with assigned attributes based on instance variables. + # Builds the render data object with assigned attributes based on instance variables. 
# - # @return [Responses::Data] data that should be used to render appropriate view - def respond + # @return [Responses::Render] data that should be used to render appropriate view + def render attributes = {} scope = self.class.to_s.split('::').last.gsub(/(.)([A-Z])/, '\1_\2').downcase @@ -30,7 +37,7 @@ def respond attributes[iv.to_s.delete('@').to_sym] = instance_variable_get(iv) end - Responses::Data.new( + Responses::Render.new( "#{scope}/#{action}", attributes ) @@ -45,6 +52,29 @@ def redirect(path = :back, flashes = {}) Responses::Redirect.new(path, flashes) end + # Builds a file response object that will be used as a base to dispatch the file + # + # @param content [String] Payload we want to dispatch as a file + # @param file_name [String] name under which the browser is suppose to save the file + # @return [Responses::File] file response result + def file(content, file_name) + Responses::File.new(content, file_name) + end + + # Builds a halt 403 response + def deny + Responses::Deny.new + end + + # @param resources [Hash, Array, Lib::HashProxy] object for sorting + # @return [Hash, Array, Lib::HashProxy] sorted results + def refine(resources) + Lib::Sorter.new( + @params.sort, + allowed_attributes: self.class.sortable_attributes + ).call(resources) + end + # Initializes the expected pagination engine and assigns expected arguments # @param args Any arguments accepted by the selected pagination engine def paginate(*args) diff --git a/lib/karafka/web/ui/controllers/become_pro.rb b/lib/karafka/web/ui/controllers/become_pro.rb index 222238ef..4cf51378 100644 --- a/lib/karafka/web/ui/controllers/become_pro.rb +++ b/lib/karafka/web/ui/controllers/become_pro.rb @@ -8,7 +8,7 @@ module Controllers class BecomePro < Base # Display a message, that a given feature is available only in Pro def show - respond + render end end end diff --git a/lib/karafka/web/ui/controllers/cluster.rb b/lib/karafka/web/ui/controllers/cluster.rb index d173c0d3..df137f13 100644 --- 
a/lib/karafka/web/ui/controllers/cluster.rb +++ b/lib/karafka/web/ui/controllers/cluster.rb @@ -6,31 +6,55 @@ module Ui module Controllers # Selects cluster info and topics basic info class Cluster < Base - # List cluster info data - def index - # Make sure, that for the cluster view we always get the most recent cluster state - @cluster_info = Models::ClusterInfo.fetch(cached: false) + self.sortable_attributes = %w[ + broker_id + broker_name + broker_port + topic_name + partition_id + leader + replica_count + in_sync_replica_brokers + ].freeze + # Lists available brokers in the cluster + def brokers + @brokers = refine(cluster_info.brokers) + + render + end + + # List topics and partitions with details + def topics partitions_total = [] - displayable_topics(@cluster_info).each do |topic| + displayable_topics(cluster_info).each do |topic| topic[:partitions].each do |partition| - partitions_total << partition.merge(topic: topic) + partitions_total << partition.merge( + topic: topic, + # Will allow sorting by name + topic_name: topic.fetch(:topic_name) + ) end end @partitions, last_page = Ui::Lib::Paginations::Paginators::Arrays.call( - partitions_total, + refine(partitions_total), @params.current_page ) paginate(@params.current_page, !last_page) - respond + render end private + # Make sure, that for the cluster view we always get the most recent cluster state + def cluster_info + @cluster_info ||= Models::ClusterInfo.fetch(cached: false) + end + # @param cluster_info [Rdkafka::Metadata] cluster metadata # @return [Array] array with topics to be displayed sorted in an alphabetical # order @@ -39,7 +63,7 @@ def displayable_topics(cluster_info) .topics .sort_by { |topic| topic[:topic_name] } - return all if ::Karafka::Web.config.ui.show_internal_topics + return all if ::Karafka::Web.config.ui.visibility.internal_topics all.reject { |topic| topic[:topic_name].start_with?('__') } end diff --git a/lib/karafka/web/ui/controllers/consumers.rb 
b/lib/karafka/web/ui/controllers/consumers.rb index 277e6265..52b1c6e7 100644 --- a/lib/karafka/web/ui/controllers/consumers.rb +++ b/lib/karafka/web/ui/controllers/consumers.rb @@ -6,19 +6,25 @@ module Ui module Controllers # Consumers (consuming processes - `karafka server`) processes display consumer class Consumers < Base + self.sortable_attributes = %w[ + name + started_at + lag_stored + ].freeze + # List page with consumers # @note For now we load all and paginate over the squashed data. def index @current_state = Models::ConsumersState.current! @counters = Models::Counters.new(@current_state) @processes, last_page = Ui::Lib::Paginations::Paginators::Arrays.call( - Models::Processes.active(@current_state), + refine(Models::Processes.active(@current_state)), @params.current_page ) paginate(@params.current_page, !last_page) - respond + render end end end diff --git a/lib/karafka/web/ui/controllers/dashboard.rb b/lib/karafka/web/ui/controllers/dashboard.rb index 5779d617..5292d96d 100644 --- a/lib/karafka/web/ui/controllers/dashboard.rb +++ b/lib/karafka/web/ui/controllers/dashboard.rb @@ -20,10 +20,10 @@ def index # Load only historicals for the selected range @aggregated_charts = Models::Metrics::Charts::Aggregated.new( - @aggregated, :seconds + @aggregated, @params.current_range ) - respond + render end end end diff --git a/lib/karafka/web/ui/controllers/errors.rb b/lib/karafka/web/ui/controllers/errors.rb index 6afe046e..ecb5c276 100644 --- a/lib/karafka/web/ui/controllers/errors.rb +++ b/lib/karafka/web/ui/controllers/errors.rb @@ -20,7 +20,7 @@ def index @error_messages.map(&:offset) ) - respond + render end # @param offset [Integer] given error message offset @@ -31,7 +31,7 @@ def show(offset) offset ) - respond + render end private diff --git a/lib/karafka/web/ui/controllers/jobs.rb b/lib/karafka/web/ui/controllers/jobs.rb index dbade929..c30525a7 100644 --- a/lib/karafka/web/ui/controllers/jobs.rb +++ b/lib/karafka/web/ui/controllers/jobs.rb @@ -6,27 
+6,77 @@ module Ui module Controllers # Active jobs (work) reporting controller class Jobs < Base - # Lists jobs - def index + self.sortable_attributes = %w[ + name + topic + consumer + type + updated_at + ].freeze + + # Lists running jobs + def running current_state = Models::ConsumersState.current! processes = Models::Processes.active(current_state) + @jobs_counters = count_jobs_types(processes) + # Aggregate jobs and inject the process info into them for better reporting jobs_total = processes.flat_map do |process| - process.jobs.map do |job| + process.jobs.running.map do |job| job.to_h[:process] = process job end end @jobs, last_page = Ui::Lib::Paginations::Paginators::Arrays.call( - jobs_total, + refine(jobs_total), @params.current_page ) paginate(@params.current_page, !last_page) - respond + render + end + + # Lists pending jobs + def pending + current_state = Models::ConsumersState.current! + processes = Models::Processes.active(current_state) + + @jobs_counters = count_jobs_types(processes) + + # Aggregate jobs and inject the process info into them for better reporting + jobs_total = processes.flat_map do |process| + process.jobs.pending.map do |job| + job.to_h[:process] = process + job + end + end + + @jobs, last_page = Ui::Lib::Paginations::Paginators::Arrays.call( + refine(jobs_total), + @params.current_page + ) + + paginate(@params.current_page, !last_page) + + render + end + + private + + # @param processes [Array] + # @return [Lib::HashProxy] particular type jobs count + def count_jobs_types(processes) + counts = { running: 0, pending: 0 } + + processes.flat_map do |process| + counts[:running] += process.jobs.running.size + counts[:pending] += process.jobs.pending.size + end + + Lib::HashProxy.new(counts) end end end diff --git a/lib/karafka/web/ui/controllers/requests/params.rb b/lib/karafka/web/ui/controllers/requests/params.rb index 9f9dabc7..96ff3cef 100644 --- a/lib/karafka/web/ui/controllers/requests/params.rb +++ 
b/lib/karafka/web/ui/controllers/requests/params.rb @@ -24,6 +24,11 @@ def initialize(request_params) @request_params = request_params end + # @return [String] sort query value + def sort + @sort ||= @request_params['sort'].to_s.downcase + end + # @return [Integer] current page for paginated views # @note It does basic sanitization def current_page diff --git a/lib/karafka/web/ui/controllers/responses/deny.rb b/lib/karafka/web/ui/controllers/responses/deny.rb new file mode 100644 index 00000000..15482604 --- /dev/null +++ b/lib/karafka/web/ui/controllers/responses/deny.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Ui + module Controllers + module Responses + # Response that will make Roda render 403 deny + class Deny + end + end + end + end + end +end diff --git a/lib/karafka/web/ui/controllers/responses/file.rb b/lib/karafka/web/ui/controllers/responses/file.rb new file mode 100644 index 00000000..473c31db --- /dev/null +++ b/lib/karafka/web/ui/controllers/responses/file.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Ui + module Controllers + module Responses + # Response that tells Roda to ship the content under a file name + class File + attr_reader :content, :file_name + + # @param content [String] data we want to send + # @param file_name [String] name under which we want to send it + def initialize(content, file_name) + @content = content + @file_name = file_name + end + end + end + end + end + end +end diff --git a/lib/karafka/web/ui/controllers/responses/data.rb b/lib/karafka/web/ui/controllers/responses/render.rb similarity index 80% rename from lib/karafka/web/ui/controllers/responses/data.rb rename to lib/karafka/web/ui/controllers/responses/render.rb index 2eebd5e9..6374ba6b 100644 --- a/lib/karafka/web/ui/controllers/responses/data.rb +++ b/lib/karafka/web/ui/controllers/responses/render.rb @@ -6,10 +6,10 @@ module Ui module Controllers # Response related 
components module Responses - # Response data object. It is used to transfer attributes assigned in controllers into - # views + # Response render data object. It is used to transfer attributes assigned in controllers + # into views # It acts as a simplification / transport layer for assigned attributes - class Data + class Render attr_reader :path, :attributes # @param path [String] render path diff --git a/lib/karafka/web/ui/controllers/routing.rb b/lib/karafka/web/ui/controllers/routing.rb index 93e7480a..44a28bd1 100644 --- a/lib/karafka/web/ui/controllers/routing.rb +++ b/lib/karafka/web/ui/controllers/routing.rb @@ -6,11 +6,20 @@ module Ui module Controllers # Routing presentation controller class Routing < Base + self.sortable_attributes = %w[ + name + active? + ].freeze + # Routing list def index @routes = Karafka::App.routes - respond + @routes.each do |consumer_group| + refine(consumer_group.topics) + end + + render end # Given route details @@ -21,7 +30,7 @@ def show(topic_id) @topic || raise(::Karafka::Web::Errors::Ui::NotFoundError, topic_id) - respond + render end end end diff --git a/lib/karafka/web/ui/controllers/status.rb b/lib/karafka/web/ui/controllers/status.rb index ea88ec36..42b21797 100644 --- a/lib/karafka/web/ui/controllers/status.rb +++ b/lib/karafka/web/ui/controllers/status.rb @@ -15,7 +15,7 @@ def show @status = Models::Status.new @sampler = Tracking::Sampler.new - respond + render end end end diff --git a/lib/karafka/web/ui/helpers/application_helper.rb b/lib/karafka/web/ui/helpers/application_helper.rb index a4ad9c9c..e36bc5ab 100644 --- a/lib/karafka/web/ui/helpers/application_helper.rb +++ b/lib/karafka/web/ui/helpers/application_helper.rb @@ -126,6 +126,40 @@ def time_with_label(time) %(#{time}) end + # @param state [String] poll state + # @param state_ch [Integer] time until next change of the poll state + # (from paused to active) + # @return [String] span tag with label and title with change time if present + def 
poll_state_with_change_time_label(state, state_ch) + year_in_seconds = 131_556_926 + state_ch_in_seconds = state_ch / 1_000.0 + + # If state is active, there is no date of change + if state == 'active' + %( + #{state} + ) + elsif state_ch_in_seconds > year_in_seconds + %( + + #{state} + + ) + else + %( + + #{state} + + ) + end + end + # @param lag [Integer] lag # @return [String] lag if correct or `N/A` with labeled explanation # @see #offset_with_label @@ -212,6 +246,42 @@ def flat_hash(hash, parent_key = nil, result = {}) result end + + # @param name [String] link value + # @param attribute [Symbol, nil] sorting attribute or nil if we provide only symbol name + # @param rev [Boolean] when set to true, arrows will be in the reverse position. This is + # used when the description in the link is reverse to data we sort. For example we have + # order on when processes were started and we display "x hours" ago but we sort on + # their age, meaning that it looks like it is the other way around. This flag allows + # us to reverse just he arrow making it look consistent with the presented data order + # @return [String] html link for sorting with arrow when attribute sort enabled + def sort_link(name, attribute = nil, rev: false) + unless attribute + attribute = name + name = attribute.to_s.tr('_', ' ').capitalize + end + + arrow_both = '⇕' + arrow_down = '▾' + arrow_up = '▴' + + desc = "#{attribute} desc" + asc = "#{attribute} asc" + path = current_path(sort: desc) + full_name = "#{name} #{arrow_both}" + + if params.sort == desc + path = current_path(sort: asc) + full_name = "#{name} #{rev ? arrow_up : arrow_down}" + end + + if params.sort == asc + path = current_path(sort: desc) + full_name = "#{name} #{rev ? 
arrow_down : arrow_up}" + end + + "#{full_name}" + end end end end diff --git a/lib/karafka/web/ui/lib/hash_proxy.rb b/lib/karafka/web/ui/lib/hash_proxy.rb index 0ff88838..a3503f09 100644 --- a/lib/karafka/web/ui/lib/hash_proxy.rb +++ b/lib/karafka/web/ui/lib/hash_proxy.rb @@ -17,12 +17,17 @@ module Lib class HashProxy extend Forwardable - def_delegators :@hash, :[], :[]=, :key?, :each, :find + def_delegators :@hash, :[], :[]=, :key?, :each, :find, :values, :keys, :select # @param hash [Hash] hash we want to convert to a proxy def initialize(hash) @hash = hash - @visited = [] + # Nodes we already visited in the context of a given attribute lookup + # We cache them not to look for them over and over again if they are used more than + # once + @visited = Hash.new { |h, k| h[k] = {} } + # Methods invocations cache + @results = {} end # @return [Original hash] @@ -34,22 +39,32 @@ def to_h # @param args [Object] all the args of the method # @param block [Proc] block for the method def method_missing(method_name, *args, &block) - return super unless args.empty? && block.nil? + method_name = method_name.to_sym - @visited.clear + return super unless args.empty? && block.nil? + return @results[method_name] if @results.key?(method_name) - result = deep_find(@hash, method_name.to_sym) + result = deep_find(@hash, method_name) - @visited.clear + return super if result.nil? - result.nil? ? super : result + @results[method_name] = result end # @param method_name [String] method name # @param include_private [Boolean] def respond_to_missing?(method_name, include_private = false) - result = deep_find(@hash, method_name.to_sym) - result.nil? ? super : true + method_name = method_name.to_sym + + return true if @results.key?(method_name) + + result = deep_find(@hash, method_name) + + return super if result.nil? 
+ + @results[method_name] = result + + true end private @@ -59,16 +74,16 @@ def respond_to_missing?(method_name, include_private = false) def deep_find(obj, key) # Prevent circular dependency lookups by making sure we do not check the same object # multiple times - return nil if @visited.include?(obj) + return nil if @visited[key].key?(obj) - @visited << obj + @visited[key][obj] = nil if obj.respond_to?(:key?) && obj.key?(key) obj[key] elsif obj.respond_to?(:each) - r = nil - obj.find { |*a| r = deep_find(a.last, key) } - r + result = nil + obj.find { |*a| result = deep_find(a.last, key) } + result end end end diff --git a/lib/karafka/web/ui/lib/sorter.rb b/lib/karafka/web/ui/lib/sorter.rb new file mode 100644 index 00000000..3bc2bc9d --- /dev/null +++ b/lib/karafka/web/ui/lib/sorter.rb @@ -0,0 +1,170 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Ui + module Lib + # Sorting engine for deep in-memory structures + # It supports hashes, arrays and hash proxies. + # + # @note It handles sorting in place by mutating appropriate resources and sub-components + class Sorter + # We can support only two order types + ALLOWED_ORDERS = %w[asc desc].freeze + + # Max depth for nested sorting + MAX_DEPTH = 8 + + private_constant :ALLOWED_ORDERS, :MAX_DEPTH + + # @param sort_query [String] query for sorting or empty string if no sorting needed + # @param allowed_attributes [Array] attributes on which we allow to sort. Since + # we can sort on method invocations, this needs to be limited and provided on a per + # controller basis. + def initialize(sort_query, allowed_attributes:) + field, order = sort_query.split(' ') + + @order = order.to_s.downcase + @order = ALLOWED_ORDERS.first unless ALLOWED_ORDERS.include?(@order) + + # Normalize the key since we do not operate on capitalized values + @field = field.to_s.downcase + + @field = '' unless allowed_attributes.include?(@field) + + # Things we have already seen and sorted. 
Prevents crashing on the circular + # dependencies sorting when same resources are present in different parts of the three + @seen = {} + end + + # Sorts the structure and returns it sorted. + # + # @param resource [Hash, Array, Lib::HashProxy] structure we want to sort + # @param current_depth [] + def call(resource, current_depth = 0) + # Skip if there is no sort field at all + return resource if @field.empty? + # Skip if we've already seen this resource + # We use object id instead of full object as the objects can get big + return resource if @seen.key?(resource.object_id) + # Skip if we are too deep + return resource if current_depth > MAX_DEPTH + + @seen[resource.object_id] = nil + + case resource + when Array + sort_array!(resource, current_depth) + when Hash + sort_hash!(resource, current_depth) + when Lib::HashProxy + # We can short hash in place here, because it will be still references (the same) + # in the hash proxy object, so we can do it that way + sort_hash!(resource.to_h, current_depth) + when Enumerable + sort_array!(resource, current_depth) + end + + resource + end + + private + + # Sorts the hash in place + # + # @param hash [Hash] hash we want to sort + # @param current_depth [Integer] current depth of sorting from root + def sort_hash!(hash, current_depth) + # Run sorting on each value, since we may have nested hashes and arrays + hash.each do |key, value| + previous_key = @parent_key + @parent_key = key.to_s.downcase + call(value, current_depth + 1) + @parent_key = previous_key + end + + # We cannot short hashes that are not type aligned. That is, we cannot compare + # nested hashes with integers, etc. 
In some cases we could (Float vs Integer), however + # for the same of simplicity, we do not to that + return unless hash.values.map(&:class).uniq.size == 1 + + # Allows sorting based on parent key when hash contains another hash where we want to + # sort based on the keys and not based on the value + if @parent_key == @field + # We also should not modify hashes that do not have values that are sortable + # false is sortable but nil is not + sorted = hash.sort_by { |key, _| key.to_s } + else + values = hash.values.map { |value| sortable_value(value) } + + return if values.any?(&:nil?) + return unless values.map(&:class).uniq.size == 1 + + # Generate new hash that will have things in our desired order + sorted = hash.sort_by { |_, value| sortable_value(value) } + end + + sorted.reverse! if desc? + + # Clear our hash and inject the new values in the order in which we want to have them + # Such clear and merge will ensure things are in the order we desired them + hash.clear + hash.merge!(sorted.to_h) + end + + # Sorts an array in-place based on a specified attribute. + # + # The method iterates over each element in the array and applies the transformation. + # + # @param array [Array] The array of elements to be sorted + # @param current_depth [Integer] The current depth of the sorting operation, + # used in the `call` method to handle nested structures or recursion. + # @note This method modifies the array in place (mutates the caller). + def sort_array!(array, current_depth) + # Sort arrays containing hashes by a specific attribute + array.map! { |element| call(element, current_depth + 1) } + + values = array.map { |element| sortable_value(element) } + + return if values.any?(&:nil?) + return unless values.map(&:class).uniq.size == 1 + + array.sort_by! { |element| sortable_value(element) } + array.reverse! if desc? + end + + # @return [Boolean] true if we sort in desc, otherwise false + def desc? 
+ @order == 'desc' + end + + # Extracts the attribute based on which we should sort (if present) + # + # @param element [Object] takes the element object and depending on its type, tries to + # figure out the value based on which we may sort + # @return [Object, nil] sortable value or nil if nothing to sort + def sortable_value(element) + result = nil + result = element[@field] || element[@field.to_sym] if element.is_a?(Hash) + result = element.public_send(@field) if element.respond_to?(@field) + + # We cannot sort on some of the types and some require mapping, thus we convert + # types here when needed + case result + when Hash + nil + when Lib::HashProxy + nil + when true + 1 + when false + 0 + else + result + end + end + end + end + end + end +end diff --git a/lib/karafka/web/ui/models/counters.rb b/lib/karafka/web/ui/models/counters.rb index 8e7e8b64..47df48cd 100644 --- a/lib/karafka/web/ui/models/counters.rb +++ b/lib/karafka/web/ui/models/counters.rb @@ -17,6 +17,12 @@ def initialize(state) @hash[:errors] = estimate_errors_count end + # @return [Integer] number of jobs that are not yet running. This includes jobs on the + # workers queue as well as jobs in the scheduling + def pending + enqueued + waiting + end + private # Estimates the number of errors present in the errors topic. 
diff --git a/lib/karafka/web/ui/models/health.rb b/lib/karafka/web/ui/models/health.rb index fe9be0de..b51111c1 100644 --- a/lib/karafka/web/ui/models/health.rb +++ b/lib/karafka/web/ui/models/health.rb @@ -15,7 +15,7 @@ def current(state) fetch_topics_data(state, stats) fetch_rebalance_ages(state, stats) - stats + sort_structure(stats) end private @@ -49,7 +49,7 @@ def fetch_rebalance_ages(state, stats) dispatched_at - rebalance_age_ms / 1_000 end - stats[cg_name][:rebalance_ages] ||= [] + stats[cg_name][:rebalance_ages] ||= Set.new stats[cg_name][:rebalance_ages] += ages end @@ -80,6 +80,27 @@ def iterate_partitions(state) end end end + + # Sorts data so we always present it in an alphabetical order + # + # @param stats [Hash] stats hash + # @return [Hash] sorted data + def sort_structure(stats) + # Ensure that partitions for all topics are in correct order + # Ensure topics are in alphabetical order always + stats.each_value do |cg_data| + topics = cg_data[:topics] + + topics.each do |topic_name, t_data| + topics[topic_name] = Hash[t_data.sort_by { |key, _| key }] + end + + cg_data[:topics] = Hash[topics.sort_by { |key, _| key }] + end + + # Ensure that all consumer groups are always in the same order + Hash[stats.sort_by { |key, _| key }] + end end end end diff --git a/lib/karafka/web/ui/models/jobs.rb b/lib/karafka/web/ui/models/jobs.rb new file mode 100644 index 00000000..bb9ca1d5 --- /dev/null +++ b/lib/karafka/web/ui/models/jobs.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +module Karafka + module Web + module Ui + module Models + # Model representing group of jobs + # + # It simplifies filtering on running jobs and others, etc + class Jobs + include Enumerable + extend Forwardable + + # Last three methods are needed to provide sorting + def_delegators :@jobs_array, :empty?, :size, :map!, :sort_by!, :reverse! 
+ + # @param jobs_array [Array] all jobs we want to enclose + def initialize(jobs_array) + @jobs_array = jobs_array + end + + # @return [Jobs] running jobs + def running + select { |job| job.status == 'running' } + end + + # @return [Jobs] pending jobs + def pending + select { |job| job.status == 'pending' } + end + + # Creates a new Jobs object with selected jobs + # @param block [Proc] select proc + # @return [Jobs] selected jobs enclosed with the Jobs object + def select(&block) + self.class.new(super(&block)) + end + + # Allows for iteration over jobs + # @param block [Proc] block to call for each job + def each(&block) + @jobs_array.each(&block) + end + end + end + end + end +end diff --git a/lib/karafka/web/ui/models/metrics/charts/aggregated.rb b/lib/karafka/web/ui/models/metrics/charts/aggregated.rb index 5e0c366a..063fe089 100644 --- a/lib/karafka/web/ui/models/metrics/charts/aggregated.rb +++ b/lib/karafka/web/ui/models/metrics/charts/aggregated.rb @@ -12,9 +12,28 @@ class Aggregated < Lib::HashProxy # @param aggregated [Hash] all aggregated for all periods # @param period [Symbol] period that we are interested in def initialize(aggregated, period) + @period = period @data = aggregated.to_h.fetch(period) end + # @return [String] JSON with bytes sent and bytes received metrics + def data_transfers + scale_factor = Processing::TimeSeriesTracker::TIME_RANGES + .fetch(@period) + .fetch(:resolution) + .then { |factor| factor / 1_024.to_f } + + received = bytes_received.map do |element| + [element[0], element[1] * scale_factor] + end + + sent = bytes_sent.map do |element| + [element[0], element[1] * scale_factor] + end + + { received: received, sent: sent }.to_json + end + # @param args [Array] names of aggregated we want to show # @return [String] JSON with data about all the charts we were interested in def with(*args) diff --git a/lib/karafka/web/ui/models/metrics/charts/topics.rb b/lib/karafka/web/ui/models/metrics/charts/topics.rb index 79d593ff..ced6d1a8 
100644 --- a/lib/karafka/web/ui/models/metrics/charts/topics.rb +++ b/lib/karafka/web/ui/models/metrics/charts/topics.rb @@ -80,7 +80,7 @@ def max_lso_time # We convert this to seconds from milliseconds due to our Web UI precision # Reporting is in ms for consistency - normalized_fd = (ls_offset_fd / 1_000).round + normalized_fd = (ls_offset_fd / 1_000.0).round topics[topic_without_cg][current.first] << normalized_fd end diff --git a/lib/karafka/web/ui/models/process.rb b/lib/karafka/web/ui/models/process.rb index a509d7ee..e9635f35 100644 --- a/lib/karafka/web/ui/models/process.rb +++ b/lib/karafka/web/ui/models/process.rb @@ -37,7 +37,8 @@ def consumer_groups def jobs super .map { |job| Job.new(job) } - .sort_by(&:started_at) + .sort_by(&:updated_at) + .then { |jobs| Jobs.new(jobs) } end # @return [Integer] collective stored lag on this process diff --git a/lib/karafka/web/ui/models/status.rb b/lib/karafka/web/ui/models/status.rb index 2f12ccbe..cc80776f 100644 --- a/lib/karafka/web/ui/models/status.rb +++ b/lib/karafka/web/ui/models/status.rb @@ -244,18 +244,34 @@ def consumers_reports_schema_state ) end + # @return [Status::Step] are there any active topics in the routing that are not present + # in the cluster (does not apply to patterns) + def routing_topics_presence + if consumers_reports_schema_state.success? + existing = @cluster_info.topics.map { |topic| topic[:topic_name] } + + missing = ::Karafka::App + .routes + .flat_map(&:topics) + .flat_map { |topics| topics.map(&:itself) } + .select(&:active?) + .reject { |topic| topic.respond_to?(:patterns?) ? topic.patterns? : false } + .map(&:name) + .uniq + .then { |routed_topics| routed_topics - existing } + + Step.new(missing.empty? ? :success : :warning, missing) + else + Step.new(:halted, []) + end + end + # @return [Status::Step] is Pro enabled with all of its features. # @note It's not an error not to have it but we want to warn, that some of the features # may not work without Pro. 
def pro_subscription - status = if consumers_reports_schema_state.success? - ::Karafka.pro? ? :success : :warning - else - :halted - end - Step.new( - status, + ::Karafka.pro? ? :success : :warning, nil ) end diff --git a/lib/karafka/web/ui/models/topic.rb b/lib/karafka/web/ui/models/topic.rb index b62912b3..c7ba203d 100644 --- a/lib/karafka/web/ui/models/topic.rb +++ b/lib/karafka/web/ui/models/topic.rb @@ -8,7 +8,9 @@ module Models class Topic < Lib::HashProxy # @return [Array] All topic partitions data def partitions - super.values.map do |partition_hash| + super.map do |partition_id, partition_hash| + partition_hash[:partition_id] = partition_id + Partition.new(partition_hash) end end diff --git a/lib/karafka/web/ui/models/visibility_filter.rb b/lib/karafka/web/ui/models/visibility_filter.rb index e91dfda4..1810e76f 100644 --- a/lib/karafka/web/ui/models/visibility_filter.rb +++ b/lib/karafka/web/ui/models/visibility_filter.rb @@ -26,6 +26,22 @@ def headers?(_message) def payload?(message) !message.headers.key?('encryption') end + + # Should it be allowed to download this message raw payload + # + # @param message [::Karafka::Messages::Message] + # @return [Boolean] true if downloads allowed + def download?(message) + payload?(message) + end + + # Should it be allowed to download the deserialized and sanitized payload as JSON + # + # @param message [::Karafka::Messages::Message] + # @return [Boolean] true if exports allowed + def export?(message) + payload?(message) + end end end end diff --git a/lib/karafka/web/ui/pro/app.rb b/lib/karafka/web/ui/pro/app.rb index b9512442..89515fd2 100644 --- a/lib/karafka/web/ui/pro/app.rb +++ b/lib/karafka/web/ui/pro/app.rb @@ -49,8 +49,16 @@ class App < Ui::Base r.on 'consumers' do controller = Controllers::Consumers.new(params) - r.get String, 'jobs' do |process_id| - controller.jobs(process_id) + r.on String, 'jobs' do |process_id| + r.get 'running' do + controller.running_jobs(process_id) + end + + r.get 'pending' do + 
controller.pending_jobs(process_id) + end + + r.redirect root_path("consumers/#{process_id}/jobs/running") end r.get String, 'subscriptions' do |process_id| @@ -67,9 +75,18 @@ class App < Ui::Base end end - r.get 'jobs' do + r.on 'jobs' do controller = Controllers::Jobs.new(params) - controller.index + + r.get 'running' do + controller.running + end + + r.get 'pending' do + controller.pending + end + + r.redirect root_path('jobs/running') end r.on 'routing' do @@ -133,6 +150,14 @@ class App < Ui::Base r.post String, Integer, Integer, 'republish' do |topic_id, partition_id, offset| controller.republish(topic_id, partition_id, offset) end + + r.get String, Integer, Integer, 'download' do |topic_id, partition_id, offset| + controller.download(topic_id, partition_id, offset) + end + + r.get String, Integer, Integer, 'export' do |topic_id, partition_id, offset| + controller.export(topic_id, partition_id, offset) + end end r.on 'health' do @@ -146,14 +171,27 @@ class App < Ui::Base controller.overview end + r.get 'changes' do + controller.changes + end + r.get do r.redirect root_path('health/overview') end end - r.get 'cluster' do + r.on 'cluster' do controller = Controllers::Cluster.new(params) - controller.index + + r.get 'brokers' do + controller.brokers + end + + r.get 'topics' do + controller.topics + end + + r.redirect root_path('cluster/brokers') end r.on 'errors' do diff --git a/lib/karafka/web/ui/pro/controllers/cluster.rb b/lib/karafka/web/ui/pro/controllers/cluster.rb index 5859432e..eba48cb0 100644 --- a/lib/karafka/web/ui/pro/controllers/cluster.rb +++ b/lib/karafka/web/ui/pro/controllers/cluster.rb @@ -19,6 +19,7 @@ module Pro module Controllers # Cluster controller class Cluster < Ui::Controllers::Cluster + self.sortable_attributes = Ui::Controllers::Cluster.sortable_attributes end end end diff --git a/lib/karafka/web/ui/pro/controllers/consumers.rb b/lib/karafka/web/ui/pro/controllers/consumers.rb index cfd4a6f4..2b034322 100644 --- 
a/lib/karafka/web/ui/pro/controllers/consumers.rb +++ b/lib/karafka/web/ui/pro/controllers/consumers.rb @@ -18,18 +18,39 @@ module Pro module Controllers # Controller for displaying consumers states and details about them class Consumers < Ui::Controllers::Base + self.sortable_attributes = %w[ + name + started_at + lag_stored + id + lag_stored_d + committed_offset + stored_offset + fetch_state + poll_state + lso_risk_state + topic + consumer + type + messages + first_offset + last_offset + updated_at + ].freeze + # Consumers list def index @current_state = Models::ConsumersState.current! @counters = Models::Counters.new(@current_state) + @processes, last_page = Lib::Paginations::Paginators::Arrays.call( - Models::Processes.active(@current_state), + refine(Models::Processes.active(@current_state)), @params.current_page ) paginate(@params.current_page, !last_page) - respond + render end # @param process_id [String] id of the process we're interested in @@ -37,19 +58,44 @@ def details(process_id) current_state = Models::ConsumersState.current! 
@process = Models::Process.find(current_state, process_id) - respond + render + end + + # Renders details about running jobs + # + # @param process_id [String] id of the process we're interested in + def running_jobs(process_id) + details(process_id) + + @running_jobs = @process.jobs.running + + refine(@running_jobs) + + render end + # Renders details about pending jobs + # # @param process_id [String] id of the process we're interested in - def jobs(process_id) + def pending_jobs(process_id) details(process_id) - respond + + @pending_jobs = @process.jobs.pending + + refine(@pending_jobs) + + render end # @param process_id [String] id of the process we're interested in def subscriptions(process_id) details(process_id) - respond + + # We want to have sorting but on a per subscription group basis and not to sort + # everything + @process.consumer_groups.each { |subscription_group| refine(subscription_group) } + + render end end end diff --git a/lib/karafka/web/ui/pro/controllers/dashboard.rb b/lib/karafka/web/ui/pro/controllers/dashboard.rb index b3e31417..d81bc8bf 100644 --- a/lib/karafka/web/ui/pro/controllers/dashboard.rb +++ b/lib/karafka/web/ui/pro/controllers/dashboard.rb @@ -44,7 +44,7 @@ def index @topics, @params.current_range ) - respond + render end end end diff --git a/lib/karafka/web/ui/pro/controllers/dlq.rb b/lib/karafka/web/ui/pro/controllers/dlq.rb index aa614631..a9d87ced 100644 --- a/lib/karafka/web/ui/pro/controllers/dlq.rb +++ b/lib/karafka/web/ui/pro/controllers/dlq.rb @@ -33,7 +33,7 @@ def index .select { |topic| dlq_topic_names.include?(topic[:topic_name]) } .sort_by { |topic| topic[:topic_name] } - respond + render end end end diff --git a/lib/karafka/web/ui/pro/controllers/errors.rb b/lib/karafka/web/ui/pro/controllers/errors.rb index a2e1ebe9..147c474a 100644 --- a/lib/karafka/web/ui/pro/controllers/errors.rb +++ b/lib/karafka/web/ui/pro/controllers/errors.rb @@ -35,7 +35,7 @@ def index paginate(@params.current_page, next_page) - respond + 
render end # @param partition_id [Integer] id of the partition of errors we are interested in @@ -58,7 +58,7 @@ def partition(partition_id) @error_messages.map(&:offset) ) - respond + render end # Shows given error details @@ -77,7 +77,7 @@ def show(partition_id, offset) watermark_offsets = Ui::Models::WatermarkOffsets.find(errors_topic, partition_id) paginate(offset, watermark_offsets.low, watermark_offsets.high) - respond + render end private diff --git a/lib/karafka/web/ui/pro/controllers/explorer.rb b/lib/karafka/web/ui/pro/controllers/explorer.rb index 2fb10c72..fd33e9ee 100644 --- a/lib/karafka/web/ui/pro/controllers/explorer.rb +++ b/lib/karafka/web/ui/pro/controllers/explorer.rb @@ -26,11 +26,11 @@ def index .topics .sort_by { |topic| topic[:topic_name] } - unless ::Karafka::Web.config.ui.show_internal_topics + unless ::Karafka::Web.config.ui.visibility.internal_topics @topics.reject! { |topic| topic[:topic_name].start_with?('__') } end - respond + render end # Displays aggregated messages from (potentially) all partitions of a topic @@ -45,7 +45,7 @@ def index # @note We cannot use offset references here because each of the partitions may have # completely different values def topic(topic_id) - @visibility_filter = ::Karafka::Web.config.ui.visibility_filter + @visibility_filter = ::Karafka::Web.config.ui.visibility.filter @topic_id = topic_id @partitions_count = Models::ClusterInfo.partitions_count(topic_id) @@ -60,7 +60,7 @@ def topic(topic_id) paginate(@params.current_page, next_page) - respond + render end # Shows messages available in a given partition @@ -68,7 +68,7 @@ def topic(topic_id) # @param topic_id [String] # @param partition_id [Integer] def partition(topic_id, partition_id) - @visibility_filter = ::Karafka::Web.config.ui.visibility_filter + @visibility_filter = ::Karafka::Web.config.ui.visibility.filter @topic_id = topic_id @partition_id = partition_id @watermark_offsets = Ui::Models::WatermarkOffsets.find(topic_id, partition_id) @@ -84,7 
+84,7 @@ def partition(topic_id, partition_id) @messages.map { |message| message.is_a?(Array) ? message.last : message.offset } ) - respond + render end # Displays given message @@ -94,7 +94,7 @@ def partition(topic_id, partition_id) # @param offset [Integer] offset of the message we want to display # @param paginate [Boolean] do we want to have pagination def show(topic_id, partition_id, offset, paginate: true) - @visibility_filter = ::Karafka::Web.config.ui.visibility_filter + @visibility_filter = ::Karafka::Web.config.ui.visibility.filter @topic_id = topic_id @partition_id = partition_id @offset = offset @@ -116,7 +116,7 @@ def show(topic_id, partition_id, offset, paginate: true) paginate(offset, watermark_offsets.low, watermark_offsets.high) end - respond + render end # Displays the most recent message on a topic/partition diff --git a/lib/karafka/web/ui/pro/controllers/health.rb b/lib/karafka/web/ui/pro/controllers/health.rb index 5466b0df..621d3350 100644 --- a/lib/karafka/web/ui/pro/controllers/health.rb +++ b/lib/karafka/web/ui/pro/controllers/health.rb @@ -18,12 +18,36 @@ module Pro module Controllers # Health state controller class Health < Ui::Controllers::Base + self.sortable_attributes = %w[ + id + lag_stored + lag_stored_d + committed_offset + committed_offset_fd + stored_offset + stored_offset_fd + hi_offset + hi_offset_fd + ls_offset + ls_offset_fd + fetch_state + poll_state + lso_risk_state + name + poll_state_ch + ].freeze + # Displays the current system state def overview current_state = Models::ConsumersState.current! 
@stats = Models::Health.current(current_state) - respond + # Refine only on a per topic basis not to resort higher levels + @stats.each_value do |cg_details| + cg_details.each_value { |topic_details| refine(topic_details) } + end + + render end # Displays details about offsets and their progression/statuses @@ -31,7 +55,15 @@ def offsets # Same data as overview but presented differently overview - respond + render + end + + # Displays information related to time of changes of particular attributes + def changes + # Same data as overview but presented differently + overview + + render end end end diff --git a/lib/karafka/web/ui/pro/controllers/jobs.rb b/lib/karafka/web/ui/pro/controllers/jobs.rb index 53fe9ce7..773f46fd 100644 --- a/lib/karafka/web/ui/pro/controllers/jobs.rb +++ b/lib/karafka/web/ui/pro/controllers/jobs.rb @@ -18,6 +18,17 @@ module Pro module Controllers # Displays list of active jobs class Jobs < Ui::Controllers::Jobs + self.sortable_attributes = %w[ + name + topic + consumer + type + messages + first_offset + last_offset + committed_offset + updated_at + ].freeze end end end diff --git a/lib/karafka/web/ui/pro/controllers/messages.rb b/lib/karafka/web/ui/pro/controllers/messages.rb index de5e639f..f6015c8d 100644 --- a/lib/karafka/web/ui/pro/controllers/messages.rb +++ b/lib/karafka/web/ui/pro/controllers/messages.rb @@ -42,6 +42,43 @@ def republish(topic_id, partition_id, offset) ) end + # Dispatches the message raw payload to the browser as a file + # + # @param topic_id [String] + # @param partition_id [Integer] + # @param offset [Integer] offset of the message we want to download + def download(topic_id, partition_id, offset) + message = Ui::Models::Message.find(topic_id, partition_id, offset) + + # Check if downloads are allowed + return deny unless visibility_filter.download?(message) + + file( + message.raw_payload, + "#{topic_id}_#{partition_id}_#{offset}_payload.msg" + ) + end + + # Dispatches the message payload first deserialized and 
then serialized to JSON + # It differs from the raw payload in cases where raw payload is compressed or binary + # or contains data that the Web UI user should not see that was altered on the Web UI + # with the visibility filter. + # + # @param topic_id [String] + # @param partition_id [Integer] + # @param offset [Integer] offset of the message we want to export + def export(topic_id, partition_id, offset) + message = Ui::Models::Message.find(topic_id, partition_id, offset) + + # Check if exports are allowed + return deny unless visibility_filter.export?(message) + + file( + message.payload.to_json, + "#{topic_id}_#{partition_id}_#{offset}_payload.json" + ) + end + private # @param message [Karafka::Messages::Message] @@ -54,6 +91,11 @@ def reproduced(message, delivery) and received offset #{delivery.offset}. MSG end + + # @return [Object] visibility filter. Either default or user-based + def visibility_filter + ::Karafka::Web.config.ui.visibility.filter + end end end end diff --git a/lib/karafka/web/ui/pro/controllers/routing.rb b/lib/karafka/web/ui/pro/controllers/routing.rb index 8b3ad02c..d342f030 100644 --- a/lib/karafka/web/ui/pro/controllers/routing.rb +++ b/lib/karafka/web/ui/pro/controllers/routing.rb @@ -18,13 +18,22 @@ module Pro module Controllers # Routing details - same as in OSS class Routing < Ui::Controllers::Routing + self.sortable_attributes = %w[ + name + active? 
+ ].freeze + # Routing list def index detect_patterns_routes @routes = Karafka::App.routes - respond + @routes.each do |consumer_group| + refine(consumer_group.topics) + end + + render end # Given route details @@ -37,7 +46,7 @@ def show(topic_id) @topic || raise(::Karafka::Web::Errors::Ui::NotFoundError, topic_id) - respond + render end private diff --git a/lib/karafka/web/ui/pro/views/consumers/_breadcrumbs.erb b/lib/karafka/web/ui/pro/views/consumers/_breadcrumbs.erb index 208a8aac..7e3709ef 100644 --- a/lib/karafka/web/ui/pro/views/consumers/_breadcrumbs.erb +++ b/lib/karafka/web/ui/pro/views/consumers/_breadcrumbs.erb @@ -14,13 +14,19 @@ <% if current_path.include?('/jobs') %> + + <% elsif current_path.include?('/subscriptions') %> <% else %> diff --git a/lib/karafka/web/ui/pro/views/consumers/_counters.erb b/lib/karafka/web/ui/pro/views/consumers/_counters.erb index 1989e064..5c99c76d 100644 --- a/lib/karafka/web/ui/pro/views/consumers/_counters.erb +++ b/lib/karafka/web/ui/pro/views/consumers/_counters.erb @@ -24,18 +24,20 @@
Lag stored
  • - +
    <%= number_with_delimiter @counters.busy, ' ' %>
    -
    Busy
    +
    Running
  • -
    - <%= number_with_delimiter @counters.enqueued, ' ' %> -
    -
    Enqueued
    + +
    + <%= number_with_delimiter @counters.pending, ' ' %> +
    +
    Pending
    +
  • diff --git a/lib/karafka/web/ui/pro/views/consumers/consumer/_job.erb b/lib/karafka/web/ui/pro/views/consumers/consumer/_job.erb index f6524db9..bf683911 100644 --- a/lib/karafka/web/ui/pro/views/consumers/consumer/_job.erb +++ b/lib/karafka/web/ui/pro/views/consumers/consumer/_job.erb @@ -16,6 +16,9 @@ #<%= job.type %> + + <%= job.messages %> + <%== offset_with_label job.topic, job.partition, job.first_offset, explore: true %> @@ -26,6 +29,6 @@ <%== offset_with_label job.topic, job.partition, job.committed_offset, explore: true %> - <%== relative_time job.started_at %> + <%== relative_time job.updated_at %> diff --git a/lib/karafka/web/ui/pro/views/consumers/consumer/_no_jobs.erb b/lib/karafka/web/ui/pro/views/consumers/consumer/_no_jobs.erb index 82dc0ce6..d43dc6de 100644 --- a/lib/karafka/web/ui/pro/views/consumers/consumer/_no_jobs.erb +++ b/lib/karafka/web/ui/pro/views/consumers/consumer/_no_jobs.erb @@ -2,7 +2,7 @@
    diff --git a/lib/karafka/web/ui/pro/views/consumers/consumer/_partition.erb b/lib/karafka/web/ui/pro/views/consumers/consumer/_partition.erb index cf716f47..3cd09868 100644 --- a/lib/karafka/web/ui/pro/views/consumers/consumer/_partition.erb +++ b/lib/karafka/web/ui/pro/views/consumers/consumer/_partition.erb @@ -26,9 +26,7 @@ - - <%= partition.poll_state %> - + <%== poll_state_with_change_time_label(partition.poll_state, partition.poll_state_ch) %> diff --git a/lib/karafka/web/ui/pro/views/consumers/consumer/_subscription_group.erb b/lib/karafka/web/ui/pro/views/consumers/consumer/_subscription_group.erb index bd06a3cd..dd8e565c 100644 --- a/lib/karafka/web/ui/pro/views/consumers/consumer/_subscription_group.erb +++ b/lib/karafka/web/ui/pro/views/consumers/consumer/_subscription_group.erb @@ -14,6 +14,7 @@ +
    Join state:  @@ -22,30 +23,46 @@
    +
    State change:  <%== relative_time( - Time.at(@process.dispatched_at) - (subscription_group.stateage / 1_000) + Time.at(@process.dispatched_at) - (subscription_group.stateage / 1_000.0) ) %>
    + +
    +
    + Last Poll:  + + <%== + relative_time( + Time.at(@process.dispatched_at) - (subscription_group.poll_age / 1_000.0) + ) + %> + +
    +
    +
    Last rebalance:  <%== relative_time( - Time.at(@process.dispatched_at) - (subscription_group.rebalance_age / 1_000) + Time.at(@process.dispatched_at) - (subscription_group.rebalance_age / 1_000.0) ) %>
    +
    Rebalance count:  @@ -86,14 +103,14 @@ - Partition - Lag stored - Lag stored trend - Committed offset - Stored offset - Fetch state - Poll state - LSO state + <%== sort_link('Partition', :id) %> + <%== sort_link(:lag_stored) %> + <%== sort_link('Lag stored trend', :lag_stored_d) %> + <%== sort_link(:committed_offset) %> + <%== sort_link(:stored_offset) %> + <%== sort_link(:fetch_state) %> + <%== sort_link(:poll_state) %> + <%== sort_link('LSO state', :lso_risk_state) %> diff --git a/lib/karafka/web/ui/pro/views/consumers/consumer/_tabs.erb b/lib/karafka/web/ui/pro/views/consumers/consumer/_tabs.erb index f4fcb759..63479420 100644 --- a/lib/karafka/web/ui/pro/views/consumers/consumer/_tabs.erb +++ b/lib/karafka/web/ui/pro/views/consumers/consumer/_tabs.erb @@ -5,15 +5,22 @@
    +
    + +<% end %> diff --git a/lib/karafka/web/ui/pro/views/consumers/jobs.erb b/lib/karafka/web/ui/pro/views/consumers/running_jobs.erb similarity index 56% rename from lib/karafka/web/ui/pro/views/consumers/jobs.erb rename to lib/karafka/web/ui/pro/views/consumers/running_jobs.erb index 2ec9cbb1..44a897fa 100644 --- a/lib/karafka/web/ui/pro/views/consumers/jobs.erb +++ b/lib/karafka/web/ui/pro/views/consumers/running_jobs.erb @@ -8,8 +8,8 @@ <%== partial 'consumers/consumer/tabs' %> -<% if @process.jobs.empty? %> - <%== partial 'consumers/consumer/no_jobs' %> +<% if @running_jobs.empty? %> + <%== partial 'consumers/consumer/no_jobs', locals: { type: 'running' } %> <% else %>
    @@ -17,19 +17,20 @@ - - - - - - - + + + + + + + + <%== render_each( - @process.jobs, + @running_jobs, 'consumers/consumer/_job', local: :job ) diff --git a/lib/karafka/web/ui/pro/views/dashboard/_ranges_selector.erb b/lib/karafka/web/ui/pro/views/dashboard/_ranges_selector.erb deleted file mode 100644 index b5592443..00000000 --- a/lib/karafka/web/ui/pro/views/dashboard/_ranges_selector.erb +++ /dev/null @@ -1,39 +0,0 @@ -
    -
    -
    -
    - <% - path = root_path('dashboard?range=seconds') - active = params.current_range == :seconds ? 'active' : false - %> - - 5 minutes - - - <% - path = root_path('dashboard?range=minutes') - active = params.current_range == :minutes ? 'active' : false - %> - - 1 hour - - - <% - path = root_path('dashboard?range=hours') - active = params.current_range == :hours ? 'active' : false - %> - - 24 hours - - - <% - path = root_path('dashboard?range=days') - active = params.current_range == :days ? 'active' : false - %> - - 7 days - -
    -
    -
    -
    diff --git a/lib/karafka/web/ui/pro/views/dashboard/index.erb b/lib/karafka/web/ui/pro/views/dashboard/index.erb index ccb5ac1d..5a17dcbe 100644 --- a/lib/karafka/web/ui/pro/views/dashboard/index.erb +++ b/lib/karafka/web/ui/pro/views/dashboard/index.erb @@ -56,6 +56,7 @@ <%== partial 'shared/tab_nav', locals: { title: 'Utilization', id: 'utilization', active: true } %> <%== partial 'shared/tab_nav', locals: { title: 'RSS', id: 'rss' } %> <%== partial 'shared/tab_nav', locals: { title: 'Concurrency', id: 'concurrency' } %> + <%== partial 'shared/tab_nav', locals: { title: 'Data transfers', id: 'data-transfers' } %>
    @@ -73,6 +74,11 @@ <% data = @aggregated_charts.with(:processes, :workers, :listeners) %> <%== partial 'shared/chart', locals: { data: data, id: 'concurrency' } %>
    + +
    + <% data = @aggregated_charts.data_transfers %> + <%== partial 'shared/chart', locals: { data: data, id: 'data-transfers', label_type_y: 'memory' } %> +
    diff --git a/lib/karafka/web/ui/pro/views/explorer/message/_message_actions.erb b/lib/karafka/web/ui/pro/views/explorer/message/_message_actions.erb new file mode 100644 index 00000000..5cda4722 --- /dev/null +++ b/lib/karafka/web/ui/pro/views/explorer/message/_message_actions.erb @@ -0,0 +1,18 @@ +<% + republish_path = root_path('messages', @message.topic, @message.partition, @message.offset, 'republish') + surrounding_path = explorer_path(@message.topic, @message.partition, @message.offset, 'surrounding') +%> + +
    +
    + + ⇋ + Surrounding + + +
    + <%== csrf_tag(republish_path) %> + + +
    +
    diff --git a/lib/karafka/web/ui/pro/views/explorer/message/_metadata.erb b/lib/karafka/web/ui/pro/views/explorer/message/_metadata.erb new file mode 100644 index 00000000..9565ab48 --- /dev/null +++ b/lib/karafka/web/ui/pro/views/explorer/message/_metadata.erb @@ -0,0 +1,43 @@ +
    +
    +
    TopicConsumerTypeFirst offsetLast offsetCommitted offsetStarted at<%== sort_link(:topic) %><%== sort_link(:consumer) %><%== sort_link(:type) %><%== sort_link(:messages) %><%== sort_link(:first_offset) %><%== sort_link(:last_offset) %><%== sort_link(:committed_offset) %><%== sort_link('Started at', :updated_at, rev: true) %>
    + + <% @message.metadata.to_h.except(:received_at, :key, :headers).each do |k, v| %> + <%== + partial( + 'explorer/messages/detail', + locals: { + k: k, + v: v + } + ) + %> + <% end %> + + <%== + partial( + 'explorer/messages/detail', + locals: { + k: 'bytesize', + v: format_memory(((@message.raw_payload&.bytesize || 0) / 1024.to_f).round(4)) + } + ) + %> + + <%== + partial( + 'explorer/messages/key', + locals: { message: @message } + ) + %> + + <%== + partial( + 'explorer/messages/headers', + locals: { message: @message } + ) + %> + +
    +
    +
    diff --git a/lib/karafka/web/ui/pro/views/explorer/message/_payload.erb b/lib/karafka/web/ui/pro/views/explorer/message/_payload.erb new file mode 100644 index 00000000..bbb4ddce --- /dev/null +++ b/lib/karafka/web/ui/pro/views/explorer/message/_payload.erb @@ -0,0 +1,21 @@ +<% if @visibility_filter.payload?(@message) %> +
    +
    + <% if @payload_error %> + <%== partial 'explorer/failed_deserialization' %> + <% end %> + +
    +
    + <% if @payload_error %> +
    <%= @message.raw_payload %>
    + <% else %> +
    <%= @pretty_payload %>
    + <% end %> +
    +
    +
    +
    +<% else %> + <%== partial 'explorer/filtered' %> +<% end %> diff --git a/lib/karafka/web/ui/pro/views/explorer/message/_payload_actions.erb b/lib/karafka/web/ui/pro/views/explorer/message/_payload_actions.erb new file mode 100644 index 00000000..4fdfa0e6 --- /dev/null +++ b/lib/karafka/web/ui/pro/views/explorer/message/_payload_actions.erb @@ -0,0 +1,19 @@ +
    + <% if @visibility_filter.download?(@message) %> + + ⇓ Download raw + + <% end %> + + <% if @visibility_filter.export?(@message) && !@payload_error %> + + ⇓ Export as JSON + + <% end %> +
    diff --git a/lib/karafka/web/ui/pro/views/explorer/show.erb b/lib/karafka/web/ui/pro/views/explorer/show.erb index d24cb415..5113ffe2 100644 --- a/lib/karafka/web/ui/pro/views/explorer/show.erb +++ b/lib/karafka/web/ui/pro/views/explorer/show.erb @@ -1,22 +1,5 @@ -<% - republish_path = root_path('messages', @message.topic, @message.partition, @message.offset, 'republish') - surrounding_path = explorer_path(@message.topic, @message.partition, @message.offset, 'surrounding') -%> -
    -
    -
    - - ⇋ - Surrounding - - -
    - <%== csrf_tag(republish_path) %> - -
    -
    -
    + <%== partial 'explorer/message/message_actions' %>
    @@ -28,81 +11,23 @@
    -
    -
    - - - <% @message.metadata.to_h.except(:received_at, :key, :headers).each do |k, v| %> - <%== - partial( - 'explorer/messages/detail', - locals: { - k: k, - v: v - } - ) - %> - <% end %> - - <%== - partial( - 'explorer/messages/detail', - locals: { - k: 'bytesize', - v: format_memory(((@message.raw_payload&.bytesize || 0) / 1024.to_f).round(4)) - } - ) - %> - - <%== - partial( - 'explorer/messages/key', - locals: { message: @message } - ) - %> - - <%== - partial( - 'explorer/messages/headers', - locals: { message: @message } - ) - %> - -
    -
    -
    + <%== partial 'explorer/message/metadata' %>
    -
    +
    Payload
    -
    -
    -
    + <%== partial 'explorer/message/payload_actions' %> - <% if @visibility_filter.payload?(@message) %> -
    -
    - <% if @payload_error %> - <%== partial 'explorer/failed_deserialization' %> - <% end %> +
    -
    -
    - <% if @payload_error %> -
    <%= @message.raw_payload %>
    - <% else %> -
    <%= @pretty_payload %>
    - <% end %> -
    -
    -
    +
    - <% else %> - <%== partial 'explorer/filtered' %> - <% end %> +
    + + <%== partial 'explorer/message/payload' %> diff --git a/lib/karafka/web/ui/pro/views/health/_breadcrumbs.erb b/lib/karafka/web/ui/pro/views/health/_breadcrumbs.erb index f87f94e0..7dd91040 100644 --- a/lib/karafka/web/ui/pro/views/health/_breadcrumbs.erb +++ b/lib/karafka/web/ui/pro/views/health/_breadcrumbs.erb @@ -19,3 +19,11 @@
  • <% end %> + +<% if current_path.include?('/changes') %> + +<% end %> diff --git a/lib/karafka/web/ui/pro/views/health/_partition.erb b/lib/karafka/web/ui/pro/views/health/_partition.erb index d24dd691..533c244d 100644 --- a/lib/karafka/web/ui/pro/views/health/_partition.erb +++ b/lib/karafka/web/ui/pro/views/health/_partition.erb @@ -22,9 +22,7 @@ - - <%= details.poll_state %> - + <%== poll_state_with_change_time_label(details.poll_state, details.poll_state_ch) %> diff --git a/lib/karafka/web/ui/pro/views/health/_partition_offset.erb b/lib/karafka/web/ui/pro/views/health/_partition_offset.erb index a360ba63..2458db36 100644 --- a/lib/karafka/web/ui/pro/views/health/_partition_offset.erb +++ b/lib/karafka/web/ui/pro/views/health/_partition_offset.erb @@ -9,13 +9,13 @@ <%== offset_with_label topic_name, partition_id, details.committed_offset %> - <%== relative_time(Time.now - details.committed_offset_fd / 1_000) %> + <%== relative_time(Time.now - details.committed_offset_fd / 1_000.0) %> <%== offset_with_label topic_name, partition_id, details.stored_offset %> - <%== relative_time(Time.now - details.stored_offset_fd / 1_000) %> + <%== relative_time(Time.now - details.stored_offset_fd / 1_000.0) %> <%== offset_with_label topic_name, partition_id, details.lo_offset %> @@ -24,13 +24,13 @@ <%== offset_with_label topic_name, partition_id, details.hi_offset %> - <%== relative_time(Time.now - details.hi_offset_fd / 1_000) %> + <%== relative_time(Time.now - details.hi_offset_fd / 1_000.0) %> <%== offset_with_label topic_name, partition_id, details.ls_offset %> - <%== relative_time(Time.now - details.ls_offset_fd / 1_000) %> + <%== relative_time(Time.now - details.ls_offset_fd / 1_000.0) %> diff --git a/lib/karafka/web/ui/pro/views/health/_partition_times.erb b/lib/karafka/web/ui/pro/views/health/_partition_times.erb new file mode 100644 index 00000000..995a13d1 --- /dev/null +++ b/lib/karafka/web/ui/pro/views/health/_partition_times.erb @@ -0,0 +1,32 @@ + + + <%= 
partition_id %> + + + <%== relative_time(Time.now - details.committed_offset_fd / 1_000.0) %> + + + <%== relative_time(Time.now - details.stored_offset_fd / 1_000.0) %> + + + <%== relative_time(Time.now - details.hi_offset_fd / 1_000.0) %> + + + <%== relative_time(Time.now - details.ls_offset_fd / 1_000.0) %> + + + <% change_in_seconds = details.poll_state_ch / 1_000.0 %> + + <% if details.poll_state == 'active' %> + + N/A + + <% elsif change_in_seconds >= 60 * 60 * 31 * 12 %> + + Until manual resume + + <% else %> + <%== relative_time(Time.now + change_in_seconds) %> + <% end %> + + diff --git a/lib/karafka/web/ui/pro/views/health/_tabs.erb b/lib/karafka/web/ui/pro/views/health/_tabs.erb index 5e2598fb..2abeea58 100644 --- a/lib/karafka/web/ui/pro/views/health/_tabs.erb +++ b/lib/karafka/web/ui/pro/views/health/_tabs.erb @@ -20,6 +20,15 @@ Offsets + + diff --git a/lib/karafka/web/ui/pro/views/health/changes.erb b/lib/karafka/web/ui/pro/views/health/changes.erb new file mode 100644 index 00000000..75f0bb3e --- /dev/null +++ b/lib/karafka/web/ui/pro/views/health/changes.erb @@ -0,0 +1,66 @@ +<%== view_title('Consumers groups changes details') %> + +<% if @stats.empty? %> + <%== partial 'health/no_data' %> +<% else %> + <%== partial 'health/tabs' %> +<% end %> + + +<% @stats.each_with_index do |(cg_name, details), index| %> +
    +
    +
    +

    <%= cg_name %>

    +
    + +
    + + Last rebalance: + + <%== relative_time(details[:rebalanced_at]) %> + + +
    +
    + +
    +
    + <% topics = details[:topics] %> + <% topics.each_with_index do |(topic_name, partitions), index| %> + + + + + + + + + + + + + + + + <% partitions.each do |partition_id, details| %> + <%== + partial( + 'health/partition_times', + locals: { + topic_name: topic_name, + partition_id: partition_id, + details: details + } + ) + %> + <% end %> + +
    +
    <%= topic_name %>
    +
    <%== sort_link('Partition', :id) %><%== sort_link('Committed offset change', :committed_offset_fd) %><%== sort_link('Stored offset change', :stored_offset_fd) %><%== sort_link('High offset change', :hi_offset_fd) %><%== sort_link('Last stable offset state', :lso_risk_state) %><%== sort_link('Pause state change', :poll_state_ch) %>
    + <% end %> +
    +
    +
    +<% end %> diff --git a/lib/karafka/web/ui/pro/views/health/offsets.erb b/lib/karafka/web/ui/pro/views/health/offsets.erb index b6f7e597..bf331ea2 100644 --- a/lib/karafka/web/ui/pro/views/health/offsets.erb +++ b/lib/karafka/web/ui/pro/views/health/offsets.erb @@ -25,7 +25,7 @@
    - <% topics = details[:topics].sort_by(&:first) %> + <% topics = details[:topics] %> <% topics.each_with_index do |(topic_name, partitions), index| %> @@ -35,22 +35,22 @@ - - - - - - - - - - - - + + + + + + + + + + + + - <% partitions.sort_by(&:first).each do |partition_id, details| %> + <% partitions.each do |partition_id, details| %> <%== partial( 'health/partition_offset', diff --git a/lib/karafka/web/ui/pro/views/health/overview.erb b/lib/karafka/web/ui/pro/views/health/overview.erb index b135ccd3..6bde1162 100644 --- a/lib/karafka/web/ui/pro/views/health/overview.erb +++ b/lib/karafka/web/ui/pro/views/health/overview.erb @@ -25,7 +25,7 @@
    - <% topics = details[:topics].sort_by(&:first) %> + <% topics = details[:topics] %> <% topics.each_with_index do |(topic_name, partitions), index| %>
    PartitionLag storedCommitted offsetCommitted offset changeStored offsetStored offset changeLow offsetHigh offsetHigh offset changeLast stable offsetLast stable offset changeLast stable offset state<%== sort_link('Partition', :id) %><%== sort_link(:lag_stored) %><%== sort_link(:committed_offset) %><%== sort_link('Committed offset change', :committed_offset_fd) %><%== sort_link(:stored_offset) %><%== sort_link('Stored offset change', :stored_offset_fd) %><%== sort_link('Low offset', :lo_offset) %><%== sort_link('High offset', :hi_offset) %><%== sort_link('High offset change', :hi_offset_fd) %><%== sort_link('Last stable offset', :ls_offset) %><%== sort_link('Last stable offset change', :ls_offset_fd) %><%== sort_link('Last stable offset state', :lso_risk_state) %>
    @@ -35,19 +35,19 @@ - - - - - - - - - + + + + + + + + + - <% partitions.sort_by(&:first).each do |partition_id, details| %> + <% partitions.each do |partition_id, details| %> <%== partial( 'health/partition', diff --git a/lib/karafka/web/ui/pro/views/jobs/_job.erb b/lib/karafka/web/ui/pro/views/jobs/_job.erb index 0f004842..bc8e1fa5 100644 --- a/lib/karafka/web/ui/pro/views/jobs/_job.erb +++ b/lib/karafka/web/ui/pro/views/jobs/_job.erb @@ -34,6 +34,6 @@ <%== offset_with_label job.topic, job.partition, job.committed_offset %> diff --git a/lib/karafka/web/ui/pro/views/jobs/_no_jobs.erb b/lib/karafka/web/ui/pro/views/jobs/_no_jobs.erb index 49b0ce34..8fe00377 100644 --- a/lib/karafka/web/ui/pro/views/jobs/_no_jobs.erb +++ b/lib/karafka/web/ui/pro/views/jobs/_no_jobs.erb @@ -2,7 +2,7 @@
    diff --git a/lib/karafka/web/ui/pro/views/jobs/pending.erb b/lib/karafka/web/ui/pro/views/jobs/pending.erb new file mode 100644 index 00000000..c78caf2d --- /dev/null +++ b/lib/karafka/web/ui/pro/views/jobs/pending.erb @@ -0,0 +1,39 @@ +<%== view_title('Pending jobs overview', hr: false) %> + +<%== partial 'jobs/tabs' %> + +<% if @jobs.empty? && params.current_page <= 1 %> + <%== partial 'jobs/no_jobs', locals: { type: 'pending' } %> +<% elsif @jobs.empty? %> + <%== partial 'shared/no_paginated_data' %> +<% else %> +
    +
    +
    +
    PartitionLag storedLag stored trendCommitted offsetStored offsetFetch statePoll stateLSO stateProcess name<%== sort_link('Partition', :id) %><%== sort_link(:lag_stored) %><%== sort_link('Lag stored trend', :lag_stored_d) %><%== sort_link(:committed_offset) %><%== sort_link(:stored_offset) %><%== sort_link(:fetch_state) %><%== sort_link(:poll_state) %><%== sort_link('LSO state', :lso_risk_state) %><%== sort_link('Process name', :name) %>
    - <%== relative_time job.started_at %> + <%== relative_time job.updated_at %>
    + + + + + + + + + + + + + + + <%== + each_partial( + @jobs, + 'jobs/job' + ) + %> + +
    <%== sort_link('Process', :name) %><%== sort_link(:topic) %><%== sort_link(:consumer) %><%== sort_link(:type) %><%== sort_link(:messages) %><%== sort_link(:first_offset) %><%== sort_link(:last_offset) %><%== sort_link(:committed_offset) %><%== sort_link('Created at', :updated_at, rev: true) %>
    +
    +
    + +<% end %> diff --git a/lib/karafka/web/ui/pro/views/jobs/running.erb b/lib/karafka/web/ui/pro/views/jobs/running.erb new file mode 100644 index 00000000..d146429e --- /dev/null +++ b/lib/karafka/web/ui/pro/views/jobs/running.erb @@ -0,0 +1,39 @@ +<%== view_title('Running jobs overview', hr: false) %> + +<%== partial 'jobs/tabs' %> + +<% if @jobs.empty? && params.current_page <= 1 %> + <%== partial 'jobs/no_jobs', locals: { type: 'running' } %> +<% elsif @jobs.empty? %> + <%== partial 'shared/no_paginated_data' %> +<% else %> +
    +
    +
    + + + + + + + + + + + + + + + + <%== + each_partial( + @jobs, + 'jobs/job' + ) + %> + +
    <%== sort_link('Process', :name) %><%== sort_link(:topic) %><%== sort_link(:consumer) %><%== sort_link(:type) %><%== sort_link(:messages) %><%== sort_link(:first_offset) %><%== sort_link(:last_offset) %><%== sort_link(:committed_offset) %><%== sort_link('Started at', :updated_at, rev: true) %>
    +
    +
    +
    +<% end %> diff --git a/lib/karafka/web/ui/pro/views/routing/_consumer_group.erb b/lib/karafka/web/ui/pro/views/routing/_consumer_group.erb index 1f67a3bd..34e68afd 100644 --- a/lib/karafka/web/ui/pro/views/routing/_consumer_group.erb +++ b/lib/karafka/web/ui/pro/views/routing/_consumer_group.erb @@ -13,9 +13,9 @@ Subscription group - Topic + <%== sort_link('Topic', :name) %> Type - Active + <%== sort_link('Active', :active?) %> diff --git a/lib/karafka/web/ui/pro/views/shared/_navigation.erb b/lib/karafka/web/ui/pro/views/shared/_navigation.erb index 3826331c..f8c2f1be 100644 --- a/lib/karafka/web/ui/pro/views/shared/_navigation.erb +++ b/lib/karafka/web/ui/pro/views/shared/_navigation.erb @@ -15,7 +15,7 @@ diff --git a/lib/karafka/web/ui/public/javascripts/application.js b/lib/karafka/web/ui/public/javascripts/application.js index 8e9f479b..38897a40 100644 --- a/lib/karafka/web/ui/public/javascripts/application.js +++ b/lib/karafka/web/ui/public/javascripts/application.js @@ -6,6 +6,16 @@ function updateTimeAgo() { timeago.render(selection); timeago.cancel() } + + var selection = document.getElementsByClassName('time-title') + var title = null + + for (var i = 0; i < selection.length; i++) { + let element = selection[i] + + title = element.getAttribute('title') + element.setAttribute('title', timeago.format(title)) + } } // To prevent from flickering, the UI is initially hidden and visible when all the JS components diff --git a/lib/karafka/web/ui/public/stylesheets/application.css b/lib/karafka/web/ui/public/stylesheets/application.css index befeaec5..7236a4ac 100644 --- a/lib/karafka/web/ui/public/stylesheets/application.css +++ b/lib/karafka/web/ui/public/stylesheets/application.css @@ -123,3 +123,7 @@ main { .chartjs-wrapper { height: 250px; } + +a.sort { + color: inherit; +} diff --git a/lib/karafka/web/ui/views/cluster/_breadcrumbs.erb b/lib/karafka/web/ui/views/cluster/_breadcrumbs.erb index b3cd6f95..e2be8347 100644 --- 
a/lib/karafka/web/ui/views/cluster/_breadcrumbs.erb +++ b/lib/karafka/web/ui/views/cluster/_breadcrumbs.erb @@ -3,3 +3,19 @@ Cluster informations + +<% if current_path.include?('/brokers') %> + +<% end %> + +<% if current_path.include?('/topics') %> + +<% end %> diff --git a/lib/karafka/web/ui/views/cluster/_tabs.erb b/lib/karafka/web/ui/views/cluster/_tabs.erb new file mode 100644 index 00000000..37ed3573 --- /dev/null +++ b/lib/karafka/web/ui/views/cluster/_tabs.erb @@ -0,0 +1,27 @@ +
    +
    +
    + + + +
    +
    +
    diff --git a/lib/karafka/web/ui/views/cluster/brokers.erb b/lib/karafka/web/ui/views/cluster/brokers.erb new file mode 100644 index 00000000..ef1686e8 --- /dev/null +++ b/lib/karafka/web/ui/views/cluster/brokers.erb @@ -0,0 +1,27 @@ +<%== view_title('Cluster informations') %> + +<%== partial 'cluster/tabs' %> + +
    +
    +
    + + + + + + + + + + <%== + each_partial( + @brokers, + 'cluster/broker' + ) + %> + +
    <%== sort_link('Id', :broker_id) %><%== sort_link('Name', :broker_name) %><%== sort_link('Port', :broker_port) %>
    +
    +
    +
    diff --git a/lib/karafka/web/ui/views/cluster/index.erb b/lib/karafka/web/ui/views/cluster/index.erb deleted file mode 100644 index 27b793df..00000000 --- a/lib/karafka/web/ui/views/cluster/index.erb +++ /dev/null @@ -1,74 +0,0 @@ -<%== view_title('Cluster informations') %> - -
    -
    -
    -

    - Brokers -

    -
    -
    -
    - -
    -
    - - - - - - - - - - <%== - each_partial( - @cluster_info.brokers, - 'cluster/broker' - ) - %> - -
    Broker idNamePort
    -
    -
    -
    - -
    -
    -
    -

    - Topics and partitions -

    -
    -
    -
    -
    -
    - <% if @partitions.empty? && params.current_page <= 1 %> - <%== partial 'cluster/no_partitions' %> - <% elsif @partitions.empty? %> - <%== partial 'shared/no_paginated_data' %> - <% else %> - - - - - - - - - - - - <%== - each_partial( - @partitions, - 'cluster/partition' - ) - %> - -
    Topic namePartition idLeaderReplica countIn sync brokers
    - <% end %> -
    -
    -
    diff --git a/lib/karafka/web/ui/views/cluster/topics.erb b/lib/karafka/web/ui/views/cluster/topics.erb new file mode 100644 index 00000000..226eba97 --- /dev/null +++ b/lib/karafka/web/ui/views/cluster/topics.erb @@ -0,0 +1,35 @@ +<%== view_title('Cluster informations') %> + +<%== partial 'cluster/tabs' %> + +
    +
    +
    + <% if @partitions.empty? && params.current_page <= 1 %> + <%== partial 'cluster/no_partitions' %> + <% elsif @partitions.empty? %> + <%== partial 'shared/no_paginated_data' %> + <% else %> + + + + + + + + + + + + <%== + each_partial( + @partitions, + 'cluster/partition' + ) + %> + +
    <%== sort_link(:topic_name) %><%== sort_link(:partition_id) %><%== sort_link(:leader) %><%== sort_link(:replica_count) %><%== sort_link('In sync brokers', :in_sync_replica_brokers) %>
    + <% end %> +
    +
    +
    diff --git a/lib/karafka/web/ui/views/consumers/_counters.erb b/lib/karafka/web/ui/views/consumers/_counters.erb index 5c35409f..e165c643 100644 --- a/lib/karafka/web/ui/views/consumers/_counters.erb +++ b/lib/karafka/web/ui/views/consumers/_counters.erb @@ -24,18 +24,20 @@
    Lag stored
  • - +
    <%= number_with_delimiter @counters.busy, ' ' %>
    -
    Busy
    +
    Running
  • -
    - <%= number_with_delimiter @counters.enqueued, ' ' %> -
    -
    Enqueued
    + +
    + <%= number_with_delimiter @counters.pending, ' ' %> +
    +
    Pending
    +
  • diff --git a/lib/karafka/web/ui/views/consumers/index.erb b/lib/karafka/web/ui/views/consumers/index.erb index e9967c3f..0f2ffb7e 100644 --- a/lib/karafka/web/ui/views/consumers/index.erb +++ b/lib/karafka/web/ui/views/consumers/index.erb @@ -12,11 +12,11 @@ - - + + - + diff --git a/lib/karafka/web/ui/views/dashboard/_ranges_selector.erb b/lib/karafka/web/ui/views/dashboard/_ranges_selector.erb index 73794bef..b5592443 100644 --- a/lib/karafka/web/ui/views/dashboard/_ranges_selector.erb +++ b/lib/karafka/web/ui/views/dashboard/_ranges_selector.erb @@ -2,20 +2,36 @@
    diff --git a/lib/karafka/web/ui/views/dashboard/index.erb b/lib/karafka/web/ui/views/dashboard/index.erb index dcf55af0..502abc80 100644 --- a/lib/karafka/web/ui/views/dashboard/index.erb +++ b/lib/karafka/web/ui/views/dashboard/index.erb @@ -25,6 +25,7 @@ <%== partial 'shared/tab_nav', locals: { title: 'Batches', id: 'batches' } %> <%== partial 'shared/tab_nav', locals: { title: 'Lags stored', id: 'lags-stored' } %> <%== partial 'shared/tab_nav', locals: { title: 'Topics pace', id: 'topics-pace' } %> + <%== partial 'shared/tab_nav', locals: { title: 'Max LSO time', id: 'max-lso-time' } %>
    @@ -34,9 +35,8 @@
    - <%== partial 'dashboard/feature_pro' %> - <% data = { batches: set.call(20), errors: set.call, dead: set.call, retries: set.call }.to_json %> - <%== partial 'shared/chart', locals: { data: data, id: 'batches', blurred: true } %> + <% data = @aggregated_charts.with(:batches, :errors, :dead, :retries) %> + <%== partial 'shared/chart', locals: { data: data, id: 'batches' } %>
    @@ -50,6 +50,12 @@ <% data = { topic1: set.call(20), topic2: set.call(10), topic3: set.call(100) }.to_json %> <%== partial 'shared/chart', locals: { data: data, id: 'topics-pace', blurred: true } %>
    + +
    + <%== partial 'dashboard/feature_pro' %> + <% data = { max_lso: set.call(2, 0..5) }.to_json %> + <%== partial 'shared/chart', locals: { data: data, id: 'max-lso-time', blurred: true } %> +
    @@ -66,13 +72,13 @@ <%== partial 'shared/tab_nav', locals: { title: 'Utilization', id: 'utilization', active: true } %> <%== partial 'shared/tab_nav', locals: { title: 'RSS', id: 'rss' } %> <%== partial 'shared/tab_nav', locals: { title: 'Concurrency', id: 'concurrency' } %> + <%== partial 'shared/tab_nav', locals: { title: 'Data transfers', id: 'data-transfers' } %>
    - <%== partial 'dashboard/feature_pro' %> - <% data = { utilization: set.call(50) }.to_json %> - <%== partial 'shared/chart', locals: { data: data, id: 'utilization', blurred: true } %> + <% data = @aggregated_charts.with(:utilization) %> + <%== partial 'shared/chart', locals: { data: data, id: 'utilization', label_type_y: 'percentage' } %>
    @@ -82,9 +88,14 @@
    + <% data = @aggregated_charts.with(:processes, :workers, :listeners) %> + <%== partial 'shared/chart', locals: { data: data, id: 'concurrency' } %> +
    + +
    <%== partial 'dashboard/feature_pro' %> - <% data = { processes: set.call(2, 0..0), workers: set.call(4, 0..0), listeners: set.call(3, 0..0) }.to_json %> - <%== partial 'shared/chart', locals: { data: data, id: 'concurrency', blurred: true } %> + <% data = { bytes_received: set.call(4, 10..15), bytes_sent: set.call(2, 0..5) }.to_json %> + <%== partial 'shared/chart', locals: { data: data, id: 'data-transfers', blurred: true } %>
    diff --git a/lib/karafka/web/ui/views/errors/show.erb b/lib/karafka/web/ui/views/errors/show.erb index b05b8562..325e7838 100644 --- a/lib/karafka/web/ui/views/errors/show.erb +++ b/lib/karafka/web/ui/views/errors/show.erb @@ -36,7 +36,7 @@ -
    +
    Backtrace @@ -45,32 +45,11 @@
    -
    - <%== partial 'shared/feature_pro' %> -
    -
    -
    this is just an example backtrace
    -please subscribe to our Pro offering to be able to view the real one
    -gems/karafka-rdkafka/lib/rdkafka/consumer.rb:255:in `query_watermark_offsets'
    -gems/karafka/lib/karafka/admin.rb:56:in `block in read_topic'
    -gems/karafka/lib/karafka/admin.rb:184:in `with_consumer'
    -gems/karafka/lib/karafka/admin.rb:55:in `read_topic'
    -/mnt/software/Karafka/karafka-web/lib/karafka/web/processing/consumers/state.rb:19:in `current'
    -/mnt/software/Karafka/karafka-web/lib/karafka/web/processing/consumers/aggregator.rb:45:in `state'
    -/mnt/software/Karafka/karafka-web/lib/karafka/web/processing/consumers/aggregator.rb:38:in `to_json'
    -gems/karafka/lib/karafka/processing/strategies/default.rb:136:in `block in handle_shutdown'
    -gems/karafka-core/lib/karafka/core/monitoring/notifications.rb:118:in `measure_time_taken'
    -gems/karafka-core/lib/karafka/core/monitoring/notifications.rb:94:in `instrument'
    -gems/karafka-core/lib/karafka/core/monitoring/monitor.rb:34:in `instrument'
    -gems/karafka/lib/karafka/processing/strategies/default.rb:135:in `handle_shutdown'
    -gems/karafka/lib/karafka/base_consumer.rb:134:in `on_shutdown'
    -gems/karafka/lib/karafka/processing/executor.rb:123:in `shutdown'
    -gems/karafka/lib/karafka/processing/jobs/shutdown.rb:18:in `call'
    -gems/karafka/lib/karafka/helpers/async.rb:28:in `block in async_call'
    +
    <%= @error_message.payload[:backtrace] %>
    diff --git a/lib/karafka/web/ui/views/jobs/_breadcrumbs.erb b/lib/karafka/web/ui/views/jobs/_breadcrumbs.erb index d15aaa6f..a81adb95 100644 --- a/lib/karafka/web/ui/views/jobs/_breadcrumbs.erb +++ b/lib/karafka/web/ui/views/jobs/_breadcrumbs.erb @@ -1,5 +1,21 @@ + +<% if current_path.include?('/running') %> + +<% end %> + +<% if current_path.include?('/pending') %> + +<% end %> diff --git a/lib/karafka/web/ui/views/jobs/_job.erb b/lib/karafka/web/ui/views/jobs/_job.erb index 33002c20..33c83c45 100644 --- a/lib/karafka/web/ui/views/jobs/_job.erb +++ b/lib/karafka/web/ui/views/jobs/_job.erb @@ -22,6 +22,6 @@ #<%= job.type %>
    diff --git a/lib/karafka/web/ui/views/jobs/_no_jobs.erb b/lib/karafka/web/ui/views/jobs/_no_jobs.erb index 49b0ce34..8fe00377 100644 --- a/lib/karafka/web/ui/views/jobs/_no_jobs.erb +++ b/lib/karafka/web/ui/views/jobs/_no_jobs.erb @@ -2,7 +2,7 @@
    diff --git a/lib/karafka/web/ui/views/jobs/_tabs.erb b/lib/karafka/web/ui/views/jobs/_tabs.erb new file mode 100644 index 00000000..7224ef94 --- /dev/null +++ b/lib/karafka/web/ui/views/jobs/_tabs.erb @@ -0,0 +1,27 @@ + diff --git a/lib/karafka/web/ui/views/jobs/index.erb b/lib/karafka/web/ui/views/jobs/pending.erb similarity index 60% rename from lib/karafka/web/ui/views/jobs/index.erb rename to lib/karafka/web/ui/views/jobs/pending.erb index 5e47cb26..de88d5bb 100644 --- a/lib/karafka/web/ui/views/jobs/index.erb +++ b/lib/karafka/web/ui/views/jobs/pending.erb @@ -1,7 +1,9 @@ -<%== view_title('Running jobs', hr: true) %> +<%== view_title('Pending jobs', hr: false) %> + +<%== partial 'jobs/tabs' %> <% if @jobs.empty? && params.current_page <= 1 %> - <%== partial 'jobs/no_jobs' %> + <%== partial 'jobs/no_jobs', locals: { type: 'pending' } %> <% elsif @jobs.empty? %> <%== partial 'shared/no_paginated_data' %> <% else %> @@ -11,11 +13,11 @@
    NameStarted<%== sort_link(:name) %><%== sort_link('Started', :started_at, rev: true) %> Memory UtilizationTotal lag<%== sort_link(:lag_stored) %>
    - <%== relative_time job.started_at %> + <%== relative_time job.updated_at %>
    - - - - - + + + + + diff --git a/lib/karafka/web/ui/pro/views/jobs/index.erb b/lib/karafka/web/ui/views/jobs/running.erb similarity index 60% rename from lib/karafka/web/ui/pro/views/jobs/index.erb rename to lib/karafka/web/ui/views/jobs/running.erb index 28c5a4bc..f7d0e609 100644 --- a/lib/karafka/web/ui/pro/views/jobs/index.erb +++ b/lib/karafka/web/ui/views/jobs/running.erb @@ -1,7 +1,9 @@ -<%== view_title('Running jobs', hr: true) %> +<%== view_title('Running jobs', hr: false) %> + +<%== partial 'jobs/tabs' %> <% if @jobs.empty? && params.current_page <= 1 %> - <%== partial 'jobs/no_jobs' %> + <%== partial 'jobs/no_jobs', locals: { type: 'running' } %> <% elsif @jobs.empty? %> <%== partial 'shared/no_paginated_data' %> <% else %> @@ -11,15 +13,11 @@
    ProcessTopicConsumerTypeStarted at<%== sort_link('Process', :name) %><%== sort_link(:topic) %><%== sort_link(:consumer) %><%== sort_link(:type) %><%== sort_link('Created at', :updated_at, rev: true) %>
    - - - - - - - - - + + + + + diff --git a/lib/karafka/web/ui/views/routing/_consumer_group.erb b/lib/karafka/web/ui/views/routing/_consumer_group.erb index e5ba4bcb..a6d17224 100644 --- a/lib/karafka/web/ui/views/routing/_consumer_group.erb +++ b/lib/karafka/web/ui/views/routing/_consumer_group.erb @@ -13,8 +13,8 @@ - - + + diff --git a/lib/karafka/web/ui/views/shared/_navigation.erb b/lib/karafka/web/ui/views/shared/_navigation.erb index 0f75a60b..1d783509 100644 --- a/lib/karafka/web/ui/views/shared/_navigation.erb +++ b/lib/karafka/web/ui/views/shared/_navigation.erb @@ -15,7 +15,7 @@ diff --git a/lib/karafka/web/ui/views/shared/_pagination.erb b/lib/karafka/web/ui/views/shared/_pagination.erb index 43144285..fe38bf67 100644 --- a/lib/karafka/web/ui/views/shared/_pagination.erb +++ b/lib/karafka/web/ui/views/shared/_pagination.erb @@ -4,7 +4,7 @@
    ProcessTopicConsumerTypeMessagesFirst offsetLast offsetCommitted offsetStarted at<%== sort_link('Process', :name) %><%== sort_link(:topic) %><%== sort_link(:consumer) %><%== sort_link(:type) %><%== sort_link('Started at', :updated_at, rev: true) %>
    Subscription groupTopicActive<%== sort_link('Topic', :name) %><%== sort_link('Active', :active?) %>