diff --git a/config/config.exs b/config/config.exs
index d63cc9d2..f11974e1 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -16,3 +16,8 @@ if config_env() == :test do
end
config :phoenix, :json_library, if(Code.ensure_loaded?(JSON), do: JSON, else: Jason)
+
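+# Hook Sentry into the OpenTelemetry SDK: the span processor turns finished OTel spans
+# into Sentry transactions, and the sampler drops the noisy Oban.Stager spans.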
+config :opentelemetry, span_processor: {Sentry.OpenTelemetry.SpanProcessor, []}
+
+config :opentelemetry,
+ sampler: {Sentry.OpenTelemetry.Sampler, [drop: ["Elixir.Oban.Stager process"]]}
diff --git a/lib/sentry/application.ex b/lib/sentry/application.ex
index e592ae04..86e0a306 100644
--- a/lib/sentry/application.ex
+++ b/lib/sentry/application.ex
@@ -27,6 +27,7 @@ defmodule Sentry.Application do
Sentry.Sources,
Sentry.Dedupe,
Sentry.ClientReport.Sender,
+ Sentry.OpenTelemetry.SpanStorage,
{Sentry.Integrations.CheckInIDMappings,
[
max_expected_check_in_time:
diff --git a/lib/sentry/opentelemetry/sampler.ex b/lib/sentry/opentelemetry/sampler.ex
new file mode 100644
index 00000000..7581e090
--- /dev/null
+++ b/lib/sentry/opentelemetry/sampler.ex
@@ -0,0 +1,27 @@
+defmodule Sentry.OpenTelemetry.Sampler do
+ @moduledoc false
+
+ def setup(config) do
+ config
+ end
+
+ def description(_) do
+ "SentrySampler"
+ end
+
+ def should_sample(
+ _ctx,
+ _trace_id,
+ _links,
+ span_name,
+ _span_kind,
+ _attributes,
+ config
+ ) do
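+    # Drop spans whose name is in the configured :drop list; record and sample everything else.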
+ if span_name in config[:drop] do
+ {:drop, [], []}
+ else
+ {:record_and_sample, [], []}
+ end
+ end
+end
diff --git a/lib/sentry/opentelemetry/span_processor.ex b/lib/sentry/opentelemetry/span_processor.ex
new file mode 100644
index 00000000..ce55a9c3
--- /dev/null
+++ b/lib/sentry/opentelemetry/span_processor.ex
@@ -0,0 +1,194 @@
+defmodule Sentry.OpenTelemetry.SpanProcessor do
+ @moduledoc false
+
+ require OpenTelemetry.SemConv.ClientAttributes, as: ClientAttributes
+ require OpenTelemetry.SemConv.Incubating.DBAttributes, as: DBAttributes
+ require OpenTelemetry.SemConv.Incubating.HTTPAttributes, as: HTTPAttributes
+ require OpenTelemetry.SemConv.Incubating.URLAttributes, as: URLAttributes
+ require OpenTelemetry.SemConv.Incubating.MessagingAttributes, as: MessagingAttributes
+ @behaviour :otel_span_processor
+
+ require Logger
+
+ alias Sentry.{Transaction, OpenTelemetry.SpanStorage, OpenTelemetry.SpanRecord}
+ alias Sentry.Interfaces.Span
+
+ @impl true
+ def on_start(_ctx, otel_span, _config) do
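+    # Store the span as soon as it starts so that child spans can be grouped under it later.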
+ span_record = SpanRecord.new(otel_span)
+
+ SpanStorage.store_span(span_record)
+
+ otel_span
+ end
+
+ @impl true
+ def on_end(otel_span, _config) do
+ span_record = SpanRecord.new(otel_span)
+
+ SpanStorage.update_span(span_record)
+
+ if span_record.parent_span_id == nil do
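+      # A span without a parent is a root span: build a Sentry transaction from it and its children.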
+ root_span_record = SpanStorage.get_root_span(span_record.span_id)
+ child_span_records = SpanStorage.get_child_spans(span_record.span_id)
+ transaction = build_transaction(root_span_record, child_span_records)
+
+ result =
+ case Sentry.send_transaction(transaction) do
+ {:ok, _id} ->
+ true
+
+ :ignored ->
+ true
+
+ {:error, error} ->
+ Logger.error("Failed to send transaction to Sentry: #{inspect(error)}")
+ {:error, :invalid_span}
+ end
+
+ :ok = SpanStorage.remove_span(span_record.span_id)
+
+ result
+ else
+ true
+ end
+ end
+
+ @impl true
+ def force_flush(_config) do
+ :ok
+ end
+
+ defp build_transaction(root_span_record, child_span_records) do
+ root_span = build_span(root_span_record)
+ child_spans = Enum.map(child_span_records, &build_span(&1))
+
+ Transaction.new(%{
+ span_id: root_span.span_id,
+ transaction: transaction_name(root_span_record),
+ transaction_info: %{source: :custom},
+ start_timestamp: root_span_record.start_time,
+ timestamp: root_span_record.end_time,
+ contexts: %{
+ trace: build_trace_context(root_span_record),
+ otel: build_otel_context(root_span_record)
+ },
+ spans: child_spans
+ })
+ end
+
+ defp transaction_name(
+ %{attributes: %{unquote(to_string(MessagingAttributes.messaging_system())) => :oban}} =
+ span_record
+ ) do
+ span_record.attributes["oban.job.worker"]
+ end
+
+ defp transaction_name(span_record), do: span_record.name
+
+ defp build_trace_context(span_record) do
+ {op, description} = get_op_description(span_record)
+
+ %{
+ trace_id: span_record.trace_id,
+ span_id: span_record.span_id,
+ parent_span_id: span_record.parent_span_id,
+ op: op,
+ description: description,
+ origin: span_record.origin,
+ data: span_record.attributes
+ }
+ end
+
+ defp build_otel_context(span_record), do: span_record.attributes
+
+ defp get_op_description(
+ %{
+ attributes: %{
+ unquote(to_string(HTTPAttributes.http_request_method())) => http_request_method
+ }
+ } = span_record
+ ) do
+ op = "http.#{span_record.kind}"
+ client_address = Map.get(span_record.attributes, to_string(ClientAttributes.client_address()))
+ url_path = Map.get(span_record.attributes, to_string(URLAttributes.url_path()))
+
+ description =
+ to_string(http_request_method) <>
+ ((client_address && " from #{client_address}") || "") <>
+ ((url_path && " #{url_path}") || "")
+
+ {op, description}
+ end
+
+ defp get_op_description(
+ %{attributes: %{unquote(to_string(DBAttributes.db_system())) => _db_system}} =
+ span_record
+ ) do
+ db_query_text = Map.get(span_record.attributes, "db.statement")
+
+ {"db", db_query_text}
+ end
+
+ defp get_op_description(%{
+ attributes:
+ %{unquote(to_string(MessagingAttributes.messaging_system())) => :oban} = attributes
+ }) do
+ {"queue.process", attributes["oban.job.worker"]}
+ end
+
+ defp get_op_description(span_record) do
+ {span_record.name, span_record.name}
+ end
+
+ defp build_span(span_record) do
+ {op, description} = get_op_description(span_record)
+
+ %Span{
+ op: op,
+ description: description,
+ start_timestamp: span_record.start_time,
+ timestamp: span_record.end_time,
+ trace_id: span_record.trace_id,
+ span_id: span_record.span_id,
+ parent_span_id: span_record.parent_span_id,
+ origin: span_record.origin,
+ data: Map.put(span_record.attributes, "otel.kind", span_record.kind),
+ status: span_status(span_record)
+ }
+ end
+
+ defp span_status(%{
+ attributes: %{
+ unquote(to_string(HTTPAttributes.http_response_status_code())) =>
+ http_response_status_code
+ }
+ }) do
+ to_status(http_response_status_code)
+ end
+
+ defp span_status(_span_record), do: nil
+
+  # WebSocket upgrade spans don't have an HTTP status
+ defp to_status(nil), do: nil
+
+ defp to_status(status) when status in 200..299, do: "ok"
+
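+  # Generate a to_status/1 clause for each common HTTP status code at compile time.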
+ for {status, string} <- %{
+ 400 => "invalid_argument",
+ 401 => "unauthenticated",
+ 403 => "permission_denied",
+ 404 => "not_found",
+ 409 => "already_exists",
+ 429 => "resource_exhausted",
+ 499 => "cancelled",
+ 500 => "internal_error",
+ 501 => "unimplemented",
+ 503 => "unavailable",
+ 504 => "deadline_exceeded"
+ } do
+ defp to_status(unquote(status)), do: unquote(string)
+ end
+
+ defp to_status(_any), do: "unknown_error"
+end
diff --git a/lib/sentry/opentelemetry/span_record.ex b/lib/sentry/opentelemetry/span_record.ex
new file mode 100644
index 00000000..63988c4f
--- /dev/null
+++ b/lib/sentry/opentelemetry/span_record.ex
@@ -0,0 +1,70 @@
+defmodule Sentry.OpenTelemetry.SpanRecord do
+ require Record
+ require OpenTelemetry
+
+ @fields Record.extract(:span, from_lib: "opentelemetry/include/otel_span.hrl")
+ Record.defrecordp(:span, @fields)
+
+ defstruct @fields ++ [:origin]
+
+ def new(otel_span) do
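+    # Convert the OpenTelemetry :span record into a keyword list so individual fields can be read.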
+ otel_attrs = span(otel_span)
+
+ {:attributes, _, _, _, attributes} = otel_attrs[:attributes]
+
+ origin =
+ case otel_attrs[:instrumentation_scope] do
+ {:instrumentation_scope, origin, _version, _} ->
+ origin
+
+ _ ->
+ :undefined
+ end
+
+ attrs =
+ otel_attrs
+ |> Keyword.delete(:attributes)
+ |> Keyword.merge(
+ trace_id: cast_trace_id(otel_attrs[:trace_id]),
+ span_id: cast_span_id(otel_attrs[:span_id]),
+ parent_span_id: cast_span_id(otel_attrs[:parent_span_id]),
+ origin: origin,
+ start_time: cast_timestamp(otel_attrs[:start_time]),
+ end_time: cast_timestamp(otel_attrs[:end_time]),
+ attributes: normalize_attributes(attributes)
+ )
+ |> Map.new()
+
+ struct(__MODULE__, attrs)
+ end
+
+ defp normalize_attributes(attributes) do
+ Enum.map(attributes, fn {key, value} ->
+ {to_string(key), value}
+ end)
+ |> Map.new()
+ end
+
+ defp cast_span_id(nil), do: nil
+ defp cast_span_id(:undefined), do: nil
+ defp cast_span_id(span_id), do: bytes_to_hex(span_id, 16)
+
+ defp cast_trace_id(trace_id), do: bytes_to_hex(trace_id, 32)
+
+ defp cast_timestamp(:undefined), do: nil
+ defp cast_timestamp(nil), do: nil
+
+ defp cast_timestamp(timestamp) do
+ nano_timestamp = OpenTelemetry.timestamp_to_nano(timestamp)
+ {:ok, datetime} = DateTime.from_unix(div(nano_timestamp, 1_000_000), :millisecond)
+
+ DateTime.to_iso8601(datetime)
+ end
+
+ defp bytes_to_hex(bytes, length) do
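+    # OTel stores trace and span IDs as integers; render them as zero-padded lowercase hex strings.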
+    case :otel_utils.format_binary_string("~#{length}.16.0b", [bytes]) do
+ {:ok, result} -> result
+ {:error, _} -> raise "Failed to convert bytes to hex: #{inspect(bytes)}"
+ end
+ end
+end
diff --git a/lib/sentry/opentelemetry/span_storage.ex b/lib/sentry/opentelemetry/span_storage.ex
new file mode 100644
index 00000000..9542d502
--- /dev/null
+++ b/lib/sentry/opentelemetry/span_storage.ex
@@ -0,0 +1,137 @@
+defmodule Sentry.OpenTelemetry.SpanStorage do
+ @moduledoc false
+ use GenServer
+
+ require Logger
+
+ @table :span_storage
+ @cleanup_interval :timer.minutes(5)
+ @span_ttl :timer.minutes(30)
+
+ @spec start_link(keyword()) :: GenServer.on_start()
+ def start_link(opts \\ []) do
+ name = Keyword.get(opts, :name, __MODULE__)
+ GenServer.start_link(__MODULE__, opts, name: name)
+ end
+
+ @impl true
+ def init(opts) do
+ _table =
+ if :ets.whereis(@table) == :undefined do
+ :ets.new(@table, [:named_table, :public, :bag])
+ end
+
+ cleanup_interval = Keyword.get(opts, :cleanup_interval, @cleanup_interval)
+ schedule_cleanup(cleanup_interval)
+
+ {:ok, %{cleanup_interval: cleanup_interval}}
+ end
+
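+  # Root spans are keyed by {:root_span, span_id}; child spans are stored under their parent's
+  # span_id, so the :bag table can hold many children per parent.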
+ def store_span(span_data) when span_data.parent_span_id == nil do
+ stored_at = System.system_time(:second)
+
+ case :ets.lookup(@table, {:root_span, span_data.span_id}) do
+ [] -> :ets.insert(@table, {{:root_span, span_data.span_id}, {span_data, stored_at}})
+ _ -> :ok
+ end
+ end
+
+ def store_span(span_data) do
+ stored_at = System.system_time(:second)
+ _ = :ets.insert(@table, {span_data.parent_span_id, {span_data, stored_at}})
+ end
+
+ def get_root_span(span_id) do
+ case :ets.lookup(@table, {:root_span, span_id}) do
+ [{{:root_span, ^span_id}, {span, _stored_at}}] -> span
+ [] -> nil
+ end
+ end
+
+ def get_child_spans(parent_span_id) do
+ :ets.lookup(@table, parent_span_id)
+ |> Enum.map(fn {_parent_id, {span, _stored_at}} -> span end)
+ end
+
+ def update_span(span_data) do
+ stored_at = System.system_time(:second)
+
+ if span_data.parent_span_id == nil do
+ case :ets.lookup(@table, {:root_span, span_data.span_id}) do
+ [] ->
+ :ets.insert(@table, {{:root_span, span_data.span_id}, {span_data, stored_at}})
+
+ _ ->
+ :ets.delete(@table, {:root_span, span_data.span_id})
+ :ets.insert(@table, {{:root_span, span_data.span_id}, {span_data, stored_at}})
+ end
+ else
+ existing_spans = :ets.lookup(@table, span_data.parent_span_id)
+
+ Enum.each(existing_spans, fn {parent_id, {span, stored_at}} ->
+ if span.span_id == span_data.span_id do
+ :ets.delete_object(@table, {parent_id, {span, stored_at}})
+ :ets.insert(@table, {span_data.parent_span_id, {span_data, stored_at}})
+ end
+ end)
+ end
+
+ :ok
+ end
+
+ def remove_span(span_id) do
+ case get_root_span(span_id) do
+ nil ->
+ :ok
+
+ _root_span ->
+ :ets.delete(@table, {:root_span, span_id})
+ remove_child_spans(span_id)
+ end
+ end
+
+ def remove_child_spans(parent_span_id) do
+ :ets.delete(@table, parent_span_id)
+ :ok
+ end
+
+ @impl true
+ def handle_info(:cleanup_stale_spans, state) do
+ cleanup_stale_spans()
+ schedule_cleanup(state.cleanup_interval)
+ {:noreply, state}
+ end
+
+ defp schedule_cleanup(interval) do
+ Process.send_after(self(), :cleanup_stale_spans, interval)
+ end
+
+ defp cleanup_stale_spans do
+ now = System.system_time(:second)
+    # @span_ttl is in milliseconds (:timer.minutes/1) while stored_at is in seconds, so convert.
+    cutoff_time = now - div(@span_ttl, 1000)
+
+ :ets.match_object(@table, {{:root_span, :_}, {:_, :_}})
+ |> Enum.each(fn {{:root_span, span_id}, {_span, stored_at}} ->
+ if stored_at < cutoff_time do
+ Logger.debug("Cleaning up stale root span: #{span_id}")
+ remove_span(span_id)
+ end
+ end)
+
+ :ets.match_object(@table, {:_, {:_, :_}})
+ |> Enum.each(fn {parent_id, {span, stored_at}} = object ->
+ cond do
+ get_root_span(parent_id) != nil and stored_at < cutoff_time ->
+ Logger.debug("Cleaning up stale child span: #{span.span_id}")
+ :ets.delete_object(@table, object)
+
+ get_root_span(parent_id) == nil and stored_at < cutoff_time ->
+ Logger.debug("Cleaning up stale orphaned child span: #{span.span_id}")
+ :ets.delete_object(@table, object)
+
+ true ->
+ :ok
+ end
+ end)
+ end
+end
diff --git a/mix.exs b/mix.exs
index acfb35fa..a7337a23 100644
--- a/mix.exs
+++ b/mix.exs
@@ -111,7 +111,12 @@ defmodule Sentry.Mixfile do
# Required by Phoenix.LiveView's testing
{:floki, ">= 0.30.0", only: :test},
{:oban, "~> 2.17 and >= 2.17.6", only: [:test]},
- {:quantum, "~> 3.0", only: [:test]}
+ {:quantum, "~> 3.0", only: [:test]},
+
+ # Required by Tracing
+ {:opentelemetry, "~> 1.5"},
+ {:opentelemetry_api, "~> 1.4"},
+ {:opentelemetry_semantic_conventions, "~> 1.27"}
]
end
diff --git a/mix.lock b/mix.lock
index 4d7db4c2..522c8a6b 100644
--- a/mix.lock
+++ b/mix.lock
@@ -32,6 +32,9 @@
"nimble_ownership": {:hex, :nimble_ownership, "1.0.0", "3f87744d42c21b2042a0aa1d48c83c77e6dd9dd357e425a038dd4b49ba8b79a1", [:mix], [], "hexpm", "7c16cc74f4e952464220a73055b557a273e8b1b7ace8489ec9d86e9ad56cb2cc"},
"nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"},
"oban": {:hex, :oban, "2.18.3", "1608c04f8856c108555c379f2f56bc0759149d35fa9d3b825cb8a6769f8ae926", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "36ca6ca84ef6518f9c2c759ea88efd438a3c81d667ba23b02b062a0aa785475e"},
+ "opentelemetry": {:hex, :opentelemetry, "1.5.0", "7dda6551edfc3050ea4b0b40c0d2570423d6372b97e9c60793263ef62c53c3c2", [:rebar3], [{:opentelemetry_api, "~> 1.4", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}], "hexpm", "cdf4f51d17b592fc592b9a75f86a6f808c23044ba7cf7b9534debbcc5c23b0ee"},
+ "opentelemetry_api": {:hex, :opentelemetry_api, "1.4.0", "63ca1742f92f00059298f478048dfb826f4b20d49534493d6919a0db39b6db04", [:mix, :rebar3], [], "hexpm", "3dfbbfaa2c2ed3121c5c483162836c4f9027def469c41578af5ef32589fcfc58"},
+ "opentelemetry_semantic_conventions": {:hex, :opentelemetry_semantic_conventions, "1.27.0", "acd0194a94a1e57d63da982ee9f4a9f88834ae0b31b0bd850815fe9be4bbb45f", [:mix, :rebar3], [], "hexpm", "9681ccaa24fd3d810b4461581717661fd85ff7019b082c2dff89c7d5b1fc2864"},
"parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"},
"phoenix": {:hex, :phoenix, "1.7.17", "2fcdceecc6fb90bec26fab008f96abbd0fd93bc9956ec7985e5892cf545152ca", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "50e8ad537f3f7b0efb1509b2f75b5c918f697be6a45d48e49a30d3b7c0e464c9"},
"phoenix_html": {:hex, :phoenix_html, "4.1.1", "4c064fd3873d12ebb1388425a8f2a19348cef56e7289e1998e2d2fa758aa982e", [:mix], [], "hexpm", "f2f2df5a72bc9a2f510b21497fd7d2b86d932ec0598f0210fed4114adc546c6f"},
diff --git a/test/sentry/opentelemetry/sampler_test.exs b/test/sentry/opentelemetry/sampler_test.exs
new file mode 100644
index 00000000..026f432c
--- /dev/null
+++ b/test/sentry/opentelemetry/sampler_test.exs
@@ -0,0 +1,19 @@
+defmodule Sentry.Opentelemetry.SamplerTest do
+ use Sentry.Case, async: false
+
+ alias Sentry.OpenTelemetry.Sampler
+
+ test "drops spans with the given name" do
+ assert {:drop, [], []} =
+ Sampler.should_sample(nil, nil, nil, "Elixir.Oban.Stager process", nil, nil,
+ drop: ["Elixir.Oban.Stager process"]
+ )
+ end
+
+ test "records and samples spans with the given name" do
+ assert {:record_and_sample, [], []} =
+ Sampler.should_sample(nil, nil, nil, "Elixir.Oban.Worker process", nil, nil,
+ drop: []
+ )
+ end
+end
diff --git a/test/sentry/opentelemetry/span_processor_test.exs b/test/sentry/opentelemetry/span_processor_test.exs
new file mode 100644
index 00000000..5b170ca6
--- /dev/null
+++ b/test/sentry/opentelemetry/span_processor_test.exs
@@ -0,0 +1,126 @@
+defmodule Sentry.Opentelemetry.SpanProcessorTest do
+ use Sentry.Case, async: false
+
+ import Sentry.TestHelpers
+
+ alias Sentry.OpenTelemetry.SpanStorage
+
+ setup do
+ on_exit(fn ->
+ # Only try to clean up tables if they exist
+ if :ets.whereis(:span_storage) != :undefined do
+ :ets.delete_all_objects(:span_storage)
+ end
+ end)
+
+ :ok
+ end
+
+ defmodule TestEndpoint do
+ require OpenTelemetry.Tracer, as: Tracer
+
+ def instrumented_function do
+ Tracer.with_span "instrumented_function" do
+ :timer.sleep(100)
+
+ child_instrumented_function("one")
+ child_instrumented_function("two")
+ end
+ end
+
+ def child_instrumented_function(name) do
+ Tracer.with_span "child_instrumented_function_#{name}" do
+ :timer.sleep(140)
+ end
+ end
+ end
+
+ test "sends captured root spans as transactions" do
+ put_test_config(environment_name: "test")
+
+ Sentry.Test.start_collecting_sentry_reports()
+
+ TestEndpoint.child_instrumented_function("one")
+
+ assert [%Sentry.Transaction{} = transaction] = Sentry.Test.pop_sentry_transactions()
+
+ assert transaction.event_id
+ assert transaction.environment == "test"
+ assert transaction.transaction_info == %{source: :custom}
+ assert_valid_iso8601(transaction.timestamp)
+ assert_valid_iso8601(transaction.start_timestamp)
+ assert transaction.timestamp > transaction.start_timestamp
+ assert_valid_trace_id(transaction.contexts.trace.trace_id)
+ assert length(transaction.spans) == 0
+ end
+
+ test "sends captured spans as transactions with child spans" do
+ put_test_config(environment_name: "test")
+
+ Sentry.Test.start_collecting_sentry_reports()
+
+ TestEndpoint.instrumented_function()
+
+ assert [%Sentry.Transaction{} = transaction] = Sentry.Test.pop_sentry_transactions()
+
+ assert_valid_iso8601(transaction.timestamp)
+ assert_valid_iso8601(transaction.start_timestamp)
+ assert transaction.timestamp > transaction.start_timestamp
+ assert length(transaction.spans) == 2
+
+ [child_span_one, child_span_two] = transaction.spans
+ assert child_span_one.op == "child_instrumented_function_one"
+ assert child_span_two.op == "child_instrumented_function_two"
+ assert child_span_one.parent_span_id == transaction.contexts.trace.span_id
+ assert child_span_two.parent_span_id == transaction.contexts.trace.span_id
+
+ assert_valid_iso8601(child_span_one.timestamp)
+ assert_valid_iso8601(child_span_one.start_timestamp)
+ assert_valid_iso8601(child_span_two.timestamp)
+ assert_valid_iso8601(child_span_two.start_timestamp)
+
+ assert child_span_one.timestamp > child_span_one.start_timestamp
+ assert child_span_two.timestamp > child_span_two.start_timestamp
+ assert transaction.timestamp >= child_span_one.timestamp
+ assert transaction.timestamp >= child_span_two.timestamp
+ assert transaction.start_timestamp <= child_span_one.start_timestamp
+ assert transaction.start_timestamp <= child_span_two.start_timestamp
+
+ assert_valid_trace_id(transaction.contexts.trace.trace_id)
+ assert_valid_trace_id(child_span_one.trace_id)
+ assert_valid_trace_id(child_span_two.trace_id)
+ end
+
+ test "removes span records from storage after sending a transaction" do
+ put_test_config(environment_name: "test")
+
+ Sentry.Test.start_collecting_sentry_reports()
+
+ TestEndpoint.instrumented_function()
+
+ assert [%Sentry.Transaction{} = transaction] = Sentry.Test.pop_sentry_transactions()
+
+ assert nil == SpanStorage.get_root_span(transaction.contexts.trace.span_id)
+ assert [] == SpanStorage.get_child_spans(transaction.contexts.trace.span_id)
+ end
+
+ defp assert_valid_iso8601(timestamp) do
+ case DateTime.from_iso8601(timestamp) do
+ {:ok, datetime, _offset} ->
+ assert datetime.year >= 2023, "Expected year to be 2023 or later, got: #{datetime.year}"
+ assert is_binary(timestamp), "Expected timestamp to be a string"
+ assert String.ends_with?(timestamp, "Z"), "Expected timestamp to end with 'Z'"
+
+ {:error, reason} ->
+ flunk("Invalid ISO8601 timestamp: #{timestamp}, reason: #{inspect(reason)}")
+ end
+ end
+
+ defp assert_valid_trace_id(trace_id) do
+ assert is_binary(trace_id), "Expected trace_id to be a string"
+    assert String.length(trace_id) == 32, "Expected trace_id to be 32 characters long, got: #{trace_id}"
+
+ assert String.match?(trace_id, ~r/^[a-f0-9]{32}$/),
+ "Expected trace_id to be a lowercase hex string"
+ end
+end
diff --git a/test/sentry/opentelemetry/span_storage_test.exs b/test/sentry/opentelemetry/span_storage_test.exs
new file mode 100644
index 00000000..df9f3ec7
--- /dev/null
+++ b/test/sentry/opentelemetry/span_storage_test.exs
@@ -0,0 +1,361 @@
+defmodule Sentry.OpenTelemetry.SpanStorageTest do
+ use ExUnit.Case, async: false
+
+ alias Sentry.OpenTelemetry.{SpanStorage, SpanRecord}
+
+ setup do
+ if :ets.whereis(:span_storage) != :undefined do
+ :ets.delete_all_objects(:span_storage)
+ else
+ start_supervised!(SpanStorage)
+ end
+
+ on_exit(fn ->
+ if :ets.whereis(:span_storage) != :undefined do
+ :ets.delete_all_objects(:span_storage)
+ end
+ end)
+
+ :ok
+ end
+
+ describe "root spans" do
+ test "stores and retrieves a root span" do
+ root_span = %SpanRecord{
+ span_id: "abc123",
+ parent_span_id: nil,
+ trace_id: "trace123",
+ name: "root_span"
+ }
+
+ SpanStorage.store_span(root_span)
+
+ assert ^root_span = SpanStorage.get_root_span("abc123")
+ end
+
+ test "updates an existing root span" do
+ root_span = %SpanRecord{
+ span_id: "abc123",
+ parent_span_id: nil,
+ trace_id: "trace123",
+ name: "root_span"
+ }
+
+ updated_root_span = %SpanRecord{
+ span_id: "abc123",
+ parent_span_id: nil,
+ trace_id: "trace123",
+ name: "updated_root_span"
+ }
+
+ SpanStorage.store_span(root_span)
+ SpanStorage.update_span(updated_root_span)
+
+ assert ^updated_root_span = SpanStorage.get_root_span("abc123")
+ end
+
+ test "removes a root span" do
+ root_span = %SpanRecord{
+ span_id: "abc123",
+ parent_span_id: nil,
+ trace_id: "trace123",
+ name: "root_span"
+ }
+
+ SpanStorage.store_span(root_span)
+ assert root_span == SpanStorage.get_root_span("abc123")
+
+ SpanStorage.remove_span("abc123")
+ assert nil == SpanStorage.get_root_span("abc123")
+ end
+
+ test "removes root span and all its children" do
+ root_span = %SpanRecord{
+ span_id: "root123",
+ parent_span_id: nil,
+ trace_id: "trace123",
+ name: "root_span"
+ }
+
+ child_span1 = %SpanRecord{
+ span_id: "child1",
+ parent_span_id: "root123",
+ trace_id: "trace123",
+ name: "child_span_1"
+ }
+
+ child_span2 = %SpanRecord{
+ span_id: "child2",
+ parent_span_id: "root123",
+ trace_id: "trace123",
+ name: "child_span_2"
+ }
+
+ SpanStorage.store_span(root_span)
+ SpanStorage.store_span(child_span1)
+ SpanStorage.store_span(child_span2)
+
+ assert root_span == SpanStorage.get_root_span("root123")
+ assert length(SpanStorage.get_child_spans("root123")) == 2
+
+ SpanStorage.remove_span("root123")
+
+ assert nil == SpanStorage.get_root_span("root123")
+ assert [] == SpanStorage.get_child_spans("root123")
+ end
+ end
+
+ describe "child spans" do
+ test "stores and retrieves child spans" do
+ child_span1 = %SpanRecord{
+ span_id: "child1",
+ parent_span_id: "parent123",
+ trace_id: "trace123",
+ name: "child_span_1"
+ }
+
+ child_span2 = %SpanRecord{
+ span_id: "child2",
+ parent_span_id: "parent123",
+ trace_id: "trace123",
+ name: "child_span_2"
+ }
+
+ SpanStorage.store_span(child_span1)
+ SpanStorage.store_span(child_span2)
+
+ children = SpanStorage.get_child_spans("parent123")
+ assert length(children) == 2
+ assert child_span1 in children
+ assert child_span2 in children
+ end
+
+ test "updates an existing child span" do
+ child_span = %SpanRecord{
+ span_id: "child1",
+ parent_span_id: "parent123",
+ trace_id: "trace123",
+ name: "child_span"
+ }
+
+ updated_child_span = %SpanRecord{
+ span_id: "child1",
+ parent_span_id: "parent123",
+ trace_id: "trace123",
+ name: "updated_child_span"
+ }
+
+ SpanStorage.store_span(child_span)
+ SpanStorage.update_span(updated_child_span)
+
+ children = SpanStorage.get_child_spans("parent123")
+ assert [^updated_child_span] = children
+ end
+
+ test "removes child spans" do
+ child_span1 = %SpanRecord{
+ span_id: "child1",
+ parent_span_id: "parent123",
+ trace_id: "trace123",
+ name: "child_span_1"
+ }
+
+ child_span2 = %SpanRecord{
+ span_id: "child2",
+ parent_span_id: "parent123",
+ trace_id: "trace123",
+ name: "child_span_2"
+ }
+
+ SpanStorage.store_span(child_span1)
+ SpanStorage.store_span(child_span2)
+ assert length(SpanStorage.get_child_spans("parent123")) == 2
+
+ SpanStorage.remove_child_spans("parent123")
+ assert [] == SpanStorage.get_child_spans("parent123")
+ end
+ end
+
+ test "handles complete span hierarchy" do
+ root_span = %SpanRecord{
+ span_id: "root123",
+ parent_span_id: nil,
+ trace_id: "trace123",
+ name: "root_span"
+ }
+
+ child_span1 = %SpanRecord{
+ span_id: "child1",
+ parent_span_id: "root123",
+ trace_id: "trace123",
+ name: "child_span_1"
+ }
+
+ child_span2 = %SpanRecord{
+ span_id: "child2",
+ parent_span_id: "root123",
+ trace_id: "trace123",
+ name: "child_span_2"
+ }
+
+ SpanStorage.store_span(root_span)
+ SpanStorage.store_span(child_span1)
+ SpanStorage.store_span(child_span2)
+
+ assert ^root_span = SpanStorage.get_root_span("root123")
+
+ children = SpanStorage.get_child_spans("root123")
+ assert length(children) == 2
+ assert child_span1 in children
+ assert child_span2 in children
+
+ SpanStorage.remove_span("root123")
+ SpanStorage.remove_child_spans("root123")
+
+ assert nil == SpanStorage.get_root_span("root123")
+ assert [] == SpanStorage.get_child_spans("root123")
+ end
+
+ describe "stale span cleanup" do
+ test "cleans up stale spans" do
+ start_supervised!({SpanStorage, cleanup_interval: 100, name: :cleanup_test})
+
+ root_span = %SpanRecord{
+ span_id: "stale_root",
+ parent_span_id: nil,
+ trace_id: "trace123",
+ name: "stale_root_span"
+ }
+
+ child_span = %SpanRecord{
+ span_id: "stale_child",
+ parent_span_id: "stale_root",
+ trace_id: "trace123",
+ name: "stale_child_span"
+ }
+
+      old_time = System.system_time(:second) - div(:timer.minutes(31), 1000)
+ :ets.insert(:span_storage, {{:root_span, "stale_root"}, {root_span, old_time}})
+ :ets.insert(:span_storage, {"stale_root", {child_span, old_time}})
+
+ fresh_root_span = %SpanRecord{
+ span_id: "fresh_root",
+ parent_span_id: nil,
+ trace_id: "trace123",
+ name: "fresh_root_span"
+ }
+
+ SpanStorage.store_span(fresh_root_span)
+
+ Process.sleep(200)
+
+ assert nil == SpanStorage.get_root_span("stale_root")
+ assert [] == SpanStorage.get_child_spans("stale_root")
+
+ assert SpanStorage.get_root_span("fresh_root")
+ end
+
+ test "cleans up orphaned child spans" do
+ start_supervised!({SpanStorage, cleanup_interval: 100, name: :cleanup_test})
+
+ child_span = %SpanRecord{
+ span_id: "stale_child",
+ parent_span_id: "non_existent_parent",
+ trace_id: "trace123",
+ name: "stale_child_span"
+ }
+
+      old_time = System.system_time(:second) - div(:timer.minutes(31), 1000)
+ :ets.insert(:span_storage, {"non_existent_parent", {child_span, old_time}})
+
+ Process.sleep(200)
+
+ assert [] == SpanStorage.get_child_spans("non_existent_parent")
+ end
+
+ test "cleans up expired root spans with all their children regardless of child timestamps" do
+ start_supervised!({SpanStorage, cleanup_interval: 100, name: :cleanup_test})
+
+ root_span = %SpanRecord{
+ span_id: "root123",
+ parent_span_id: nil,
+ trace_id: "trace123",
+ name: "root_span"
+ }
+
+ old_child = %SpanRecord{
+ span_id: "old_child",
+ parent_span_id: "root123",
+ trace_id: "trace123",
+ name: "old_child_span"
+ }
+
+ fresh_child = %SpanRecord{
+ span_id: "fresh_child",
+ parent_span_id: "root123",
+ trace_id: "trace123",
+ name: "fresh_child_span"
+ }
+
+      old_time = System.system_time(:second) - div(:timer.minutes(31), 1000)
+ :ets.insert(:span_storage, {{:root_span, "root123"}, {root_span, old_time}})
+
+ :ets.insert(:span_storage, {"root123", {old_child, old_time}})
+ SpanStorage.store_span(fresh_child)
+
+ Process.sleep(200)
+
+ assert nil == SpanStorage.get_root_span("root123")
+ assert [] == SpanStorage.get_child_spans("root123")
+ end
+
+ test "handles mixed expiration times in child spans" do
+ start_supervised!({SpanStorage, cleanup_interval: 100, name: :cleanup_test})
+
+ root_span = %SpanRecord{
+ span_id: "root123",
+ parent_span_id: nil,
+ trace_id: "trace123",
+ name: "root_span"
+ }
+
+ old_child1 = %SpanRecord{
+ span_id: "old_child1",
+ parent_span_id: "root123",
+ trace_id: "trace123",
+ name: "old_child_span_1"
+ }
+
+ old_child2 = %SpanRecord{
+ span_id: "old_child2",
+ parent_span_id: "root123",
+ trace_id: "trace123",
+ name: "old_child_span_2"
+ }
+
+ fresh_child = %SpanRecord{
+ span_id: "fresh_child",
+ parent_span_id: "root123",
+ trace_id: "trace123",
+ name: "fresh_child_span"
+ }
+
+ SpanStorage.store_span(root_span)
+
+      old_time = System.system_time(:second) - div(:timer.minutes(31), 1000)
+ :ets.insert(:span_storage, {"root123", {old_child1, old_time}})
+ :ets.insert(:span_storage, {"root123", {old_child2, old_time}})
+
+ SpanStorage.store_span(fresh_child)
+
+ Process.sleep(200)
+
+ assert root_span == SpanStorage.get_root_span("root123")
+ children = SpanStorage.get_child_spans("root123")
+ assert length(children) == 1
+ assert fresh_child in children
+ refute old_child1 in children
+ refute old_child2 in children
+ end
+ end
+end
diff --git a/test_integrations/phoenix_app/config/config.exs b/test_integrations/phoenix_app/config/config.exs
index a0ce0afe..68901111 100644
--- a/test_integrations/phoenix_app/config/config.exs
+++ b/test_integrations/phoenix_app/config/config.exs
@@ -8,6 +8,7 @@
import Config
config :phoenix_app,
+ ecto_repos: [PhoenixApp.Repo],
generators: [timestamp_type: :utc_datetime]
# Configures the endpoint
@@ -59,6 +60,11 @@ config :logger, :console,
config :phoenix, :json_library, if(Code.ensure_loaded?(JSON), do: JSON, else: Jason)
+config :opentelemetry, span_processor: {Sentry.OpenTelemetry.SpanProcessor, []}
+
+config :opentelemetry,
+ sampler: {Sentry.OpenTelemetry.Sampler, [drop: ["Elixir.Oban.Stager process"]]}
+
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{config_env()}.exs"
diff --git a/test_integrations/phoenix_app/config/dev.exs b/test_integrations/phoenix_app/config/dev.exs
index 9506d05c..1897f688 100644
--- a/test_integrations/phoenix_app/config/dev.exs
+++ b/test_integrations/phoenix_app/config/dev.exs
@@ -1,5 +1,10 @@
import Config
+# Configure your database
+config :phoenix_app, PhoenixApp.Repo,
+ adapter: Ecto.Adapters.SQLite3,
+ database: "db/dev.sqlite3"
+
# For development, we disable any cache and enable
# debugging and code reloading.
#
@@ -73,3 +78,19 @@ config :phoenix_live_view,
# Disable swoosh api client as it is only required for production adapters.
config :swoosh, :api_client, false
+
+dsn =
+ if System.get_env("SENTRY_LOCAL"),
+ do: System.get_env("SENTRY_DSN_LOCAL"),
+ else: System.get_env("SENTRY_DSN")
+
+config :sentry,
+ dsn: dsn,
+ environment_name: :dev,
+ enable_source_code_context: true,
+ send_result: :sync
+
+config :phoenix_app, Oban,
+ repo: PhoenixApp.Repo,
+ engine: Oban.Engines.Lite,
+ queues: [default: 10, background: 5]
diff --git a/test_integrations/phoenix_app/config/test.exs b/test_integrations/phoenix_app/config/test.exs
index 207b9cf2..e19bd5ac 100644
--- a/test_integrations/phoenix_app/config/test.exs
+++ b/test_integrations/phoenix_app/config/test.exs
@@ -1,5 +1,11 @@
import Config
+# Configure your database
+config :phoenix_app, PhoenixApp.Repo,
+ adapter: Ecto.Adapters.SQLite3,
+ pool: Ecto.Adapters.SQL.Sandbox,
+ database: "db/test.sqlite3"
+
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :phoenix_app, PhoenixAppWeb.Endpoint,
@@ -24,9 +30,13 @@ config :phoenix_live_view,
enable_expensive_runtime_checks: true
config :sentry,
- dsn: "http://public:secret@localhost:8080/1",
- environment_name: Mix.env(),
+ dsn: nil,
+ environment_name: :dev,
enable_source_code_context: true,
- root_source_code_paths: [File.cwd!()],
- test_mode: true,
- send_result: :sync
+ send_result: :sync,
+ test_mode: true
+
+config :phoenix_app, Oban,
+ repo: PhoenixApp.Repo,
+ engine: Oban.Engines.Lite,
+ queues: [default: 10, background: 5]
diff --git a/test_integrations/phoenix_app/lib/phoenix_app/accounts.ex b/test_integrations/phoenix_app/lib/phoenix_app/accounts.ex
new file mode 100644
index 00000000..2b626dad
--- /dev/null
+++ b/test_integrations/phoenix_app/lib/phoenix_app/accounts.ex
@@ -0,0 +1,104 @@
+defmodule PhoenixApp.Accounts do
+ @moduledoc """
+ The Accounts context.
+ """
+
+ import Ecto.Query, warn: false
+ alias PhoenixApp.Repo
+
+ alias PhoenixApp.Accounts.User
+
+ @doc """
+ Returns the list of users.
+
+ ## Examples
+
+ iex> list_users()
+ [%User{}, ...]
+
+ """
+ def list_users do
+ Repo.all(User)
+ end
+
+ @doc """
+ Gets a single user.
+
+ Raises `Ecto.NoResultsError` if the User does not exist.
+
+ ## Examples
+
+ iex> get_user!(123)
+ %User{}
+
+ iex> get_user!(456)
+ ** (Ecto.NoResultsError)
+
+ """
+ def get_user!(id), do: Repo.get!(User, id)
+
+ @doc """
+ Creates a user.
+
+ ## Examples
+
+ iex> create_user(%{field: value})
+ {:ok, %User{}}
+
+ iex> create_user(%{field: bad_value})
+ {:error, %Ecto.Changeset{}}
+
+ """
+ def create_user(attrs \\ %{}) do
+ %User{}
+ |> User.changeset(attrs)
+ |> Repo.insert()
+ end
+
+ @doc """
+ Updates a user.
+
+ ## Examples
+
+ iex> update_user(user, %{field: new_value})
+ {:ok, %User{}}
+
+ iex> update_user(user, %{field: bad_value})
+ {:error, %Ecto.Changeset{}}
+
+ """
+ def update_user(%User{} = user, attrs) do
+ user
+ |> User.changeset(attrs)
+ |> Repo.update()
+ end
+
+ @doc """
+ Deletes a user.
+
+ ## Examples
+
+ iex> delete_user(user)
+ {:ok, %User{}}
+
+ iex> delete_user(user)
+ {:error, %Ecto.Changeset{}}
+
+ """
+ def delete_user(%User{} = user) do
+ Repo.delete(user)
+ end
+
+ @doc """
+ Returns an `%Ecto.Changeset{}` for tracking user changes.
+
+ ## Examples
+
+ iex> change_user(user)
+ %Ecto.Changeset{data: %User{}}
+
+ """
+ def change_user(%User{} = user, attrs \\ %{}) do
+ User.changeset(user, attrs)
+ end
+end
diff --git a/test_integrations/phoenix_app/lib/phoenix_app/accounts/user.ex b/test_integrations/phoenix_app/lib/phoenix_app/accounts/user.ex
new file mode 100644
index 00000000..21fc3552
--- /dev/null
+++ b/test_integrations/phoenix_app/lib/phoenix_app/accounts/user.ex
@@ -0,0 +1,18 @@
+defmodule PhoenixApp.Accounts.User do
+ use Ecto.Schema
+ import Ecto.Changeset
+
+ schema "users" do
+ field :name, :string
+ field :age, :integer
+
+ timestamps(type: :utc_datetime)
+ end
+
+ @doc false
+ def changeset(user, attrs) do
+ user
+ |> cast(attrs, [:name, :age])
+ |> validate_required([:name, :age])
+ end
+end
diff --git a/test_integrations/phoenix_app/lib/phoenix_app/application.ex b/test_integrations/phoenix_app/lib/phoenix_app/application.ex
index b97f81ba..442c43e2 100644
--- a/test_integrations/phoenix_app/lib/phoenix_app/application.ex
+++ b/test_integrations/phoenix_app/lib/phoenix_app/application.ex
@@ -7,14 +7,28 @@ defmodule PhoenixApp.Application do
@impl true
def start(_type, _args) do
+ :ok = Application.ensure_started(:inets)
+
+ :logger.add_handler(:my_sentry_handler, Sentry.LoggerHandler, %{
+ config: %{metadata: [:file, :line]}
+ })
+
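+    # Attach OpenTelemetry instrumentation for Phoenix, Oban, and Ecto so their spans reach
+    # Sentry's span processor.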
+ # OpentelemetryBandit.setup()
+ OpentelemetryPhoenix.setup(adapter: :bandit)
+ OpentelemetryOban.setup()
+ OpentelemetryEcto.setup([:phoenix_app, :repo], db_statement: :enabled)
+
children = [
PhoenixAppWeb.Telemetry,
+ PhoenixApp.Repo,
+ {Ecto.Migrator,
+ repos: Application.fetch_env!(:phoenix_app, :ecto_repos), skip: skip_migrations?()},
{DNSCluster, query: Application.get_env(:phoenix_app, :dns_cluster_query) || :ignore},
{Phoenix.PubSub, name: PhoenixApp.PubSub},
# Start the Finch HTTP client for sending emails
{Finch, name: PhoenixApp.Finch},
- # Start a worker by calling: PhoenixApp.Worker.start_link(arg)
- # {PhoenixApp.Worker, arg},
+ # Start Oban
+ {Oban, Application.fetch_env!(:phoenix_app, Oban)},
# Start to serve requests, typically the last entry
PhoenixAppWeb.Endpoint
]
@@ -25,12 +39,15 @@ defmodule PhoenixApp.Application do
Supervisor.start_link(children, opts)
end
- # TODO: Uncomment if we ever move the endpoint from test/support to the phoenix_app dir
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
- # @impl true
- # def config_change(changed, _new, removed) do
- # PhoenixAppWeb.Endpoint.config_change(changed, removed)
- # :ok
- # end
+ @impl true
+ def config_change(changed, _new, removed) do
+ PhoenixAppWeb.Endpoint.config_change(changed, removed)
+ :ok
+ end
+
+ defp skip_migrations?() do
+ System.get_env("RELEASE_NAME") != nil
+ end
end
diff --git a/test_integrations/phoenix_app/lib/phoenix_app/repo.ex b/test_integrations/phoenix_app/lib/phoenix_app/repo.ex
new file mode 100644
index 00000000..3976eb3b
--- /dev/null
+++ b/test_integrations/phoenix_app/lib/phoenix_app/repo.ex
@@ -0,0 +1,5 @@
+defmodule PhoenixApp.Repo do
+ use Ecto.Repo,
+ otp_app: :phoenix_app,
+ adapter: Ecto.Adapters.SQLite3
+end
diff --git a/test_integrations/phoenix_app/lib/phoenix_app/workers/test_worker.ex b/test_integrations/phoenix_app/lib/phoenix_app/workers/test_worker.ex
new file mode 100644
index 00000000..be57ffaf
--- /dev/null
+++ b/test_integrations/phoenix_app/lib/phoenix_app/workers/test_worker.ex
@@ -0,0 +1,21 @@
+defmodule PhoenixApp.Workers.TestWorker do
+ use Oban.Worker
+
+ @impl Oban.Worker
+ def perform(%Oban.Job{args: %{"sleep_time" => sleep_time, "should_fail" => should_fail}}) do
+ # Simulate some work
+ Process.sleep(sleep_time)
+
+ if should_fail do
+ raise "Simulated failure in test worker"
+ else
+ :ok
+ end
+ end
+
+ def perform(%Oban.Job{args: %{"sleep_time" => sleep_time}}) do
+ # Simulate some work
+ Process.sleep(sleep_time)
+ :ok
+ end
+end
diff --git a/test_integrations/phoenix_app/lib/phoenix_app_web/controllers/page_controller.ex b/test_integrations/phoenix_app/lib/phoenix_app_web/controllers/page_controller.ex
index b51d6b3c..dbc7812b 100644
--- a/test_integrations/phoenix_app/lib/phoenix_app_web/controllers/page_controller.ex
+++ b/test_integrations/phoenix_app/lib/phoenix_app_web/controllers/page_controller.ex
@@ -1,13 +1,29 @@
defmodule PhoenixAppWeb.PageController do
use PhoenixAppWeb, :controller
+ require OpenTelemetry.Tracer, as: Tracer
+
+  alias PhoenixApp.Repo
+  alias PhoenixApp.Accounts.User
+
def home(conn, _params) do
- # The home page is often custom made,
- # so skip the default app layout.
render(conn, :home, layout: false)
end
def exception(_conn, _params) do
raise "Test exception"
end
+
+ def transaction(conn, _params) do
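+    # Create a manual OTel span; Sentry's span processor reports it as a transaction.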
+ Tracer.with_span "test_span" do
+ :timer.sleep(100)
+ end
+
+ render(conn, :home, layout: false)
+ end
+
+ def users(conn, _params) do
+ Repo.all(User) |> Enum.map(& &1.name)
+
+ render(conn, :home, layout: false)
+ end
end
diff --git a/test_integrations/phoenix_app/lib/phoenix_app_web/endpoint.ex b/test_integrations/phoenix_app/lib/phoenix_app_web/endpoint.ex
index c1817a4e..cbc6c40a 100644
--- a/test_integrations/phoenix_app/lib/phoenix_app_web/endpoint.ex
+++ b/test_integrations/phoenix_app/lib/phoenix_app_web/endpoint.ex
@@ -35,7 +35,6 @@ defmodule PhoenixAppWeb.Endpoint do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
- plug Phoenix.Ecto.CheckRepoStatus, otp_app: :phoenix_app
end
plug Phoenix.LiveDashboard.RequestLogger,
diff --git a/test_integrations/phoenix_app/lib/phoenix_app_web/live/test_worker_live.ex b/test_integrations/phoenix_app/lib/phoenix_app_web/live/test_worker_live.ex
new file mode 100644
index 00000000..0ba8562a
--- /dev/null
+++ b/test_integrations/phoenix_app/lib/phoenix_app_web/live/test_worker_live.ex
@@ -0,0 +1,94 @@
+defmodule PhoenixAppWeb.TestWorkerLive do
+ use PhoenixAppWeb, :live_view
+
+ alias PhoenixApp.Workers.TestWorker
+
+ @impl true
+ def mount(_params, _session, socket) do
+ socket =
+ assign(socket,
+ form: to_form(%{"sleep_time" => 1000, "should_fail" => false, "queue" => "default"}),
+ auto_form: to_form(%{"job_count" => 5}),
+ jobs: list_jobs()
+ )
+
+ if connected?(socket) do
+ # Poll for job updates every second
+ :timer.send_interval(1000, self(), :update_jobs)
+ end
+
+ {:ok, socket}
+ end
+
+ @impl true
+ def handle_event("schedule", %{"test_job" => params}, socket) do
+ sleep_time = String.to_integer(params["sleep_time"])
+ should_fail = params["should_fail"] == "true"
+ queue = params["queue"]
+
+ case schedule_job(sleep_time, should_fail, queue) do
+ {:ok, _job} ->
+ {:noreply,
+ socket
+ |> put_flash(:info, "Job scheduled successfully!")
+ |> assign(jobs: list_jobs())}
+
+ {:error, changeset} ->
+ {:noreply,
+ socket
+ |> put_flash(:error, "Error scheduling job: #{inspect(changeset.errors)}")}
+ end
+ end
+
+ @impl true
+ def handle_event("auto_schedule", %{"auto" => %{"job_count" => count}}, socket) do
+ job_count = String.to_integer(count)
+
+ results =
+ Enum.map(1..job_count, fn _ ->
+ sleep_time = Enum.random(500..5000)
+ should_fail = Enum.random([true, false])
+ queue = Enum.random(["default", "background"])
+
+ schedule_job(sleep_time, should_fail, queue)
+ end)
+
+ failed_count = Enum.count(results, &match?({:error, _}, &1))
+ success_count = job_count - failed_count
+
+ socket =
+ socket
+ |> put_flash(:info, "Scheduled #{success_count} jobs successfully!")
+ |> assign(jobs: list_jobs())
+
+ if failed_count > 0 do
+ socket = put_flash(socket, :error, "Failed to schedule #{failed_count} jobs")
+ {:noreply, socket}
+ else
+ {:noreply, socket}
+ end
+ end
+
+ @impl true
+ def handle_info(:update_jobs, socket) do
+ {:noreply, assign(socket, jobs: list_jobs())}
+ end
+
+ defp schedule_job(sleep_time, should_fail, queue) do
+ TestWorker.new(
+ %{"sleep_time" => sleep_time, "should_fail" => should_fail},
+ queue: queue
+ )
+ |> Oban.insert()
+ end
+
+ defp list_jobs do
+ import Ecto.Query
+
+ Oban.Job
+ |> where([j], j.worker == "PhoenixApp.Workers.TestWorker")
+ |> order_by([j], desc: j.inserted_at)
+ |> limit(10)
+ |> PhoenixApp.Repo.all()
+ end
+end
diff --git a/test_integrations/phoenix_app/lib/phoenix_app_web/live/test_worker_live.html.heex b/test_integrations/phoenix_app/lib/phoenix_app_web/live/test_worker_live.html.heex
new file mode 100644
index 00000000..d4f75595
--- /dev/null
+++ b/test_integrations/phoenix_app/lib/phoenix_app_web/live/test_worker_live.html.heex
@@ -0,0 +1,103 @@
+<div class="mx-auto max-w-2xl space-y-12">
+  <section>
+    <.header>Schedule Test Worker</.header>
+
+    <.form for={@form} as={:test_job} phx-submit="schedule" class="space-y-6">
+      <.input field={@form[:sleep_time]} type="number" label="Sleep time (ms)" />
+      <.input field={@form[:should_fail]} type="select" label="Should fail" options={["false", "true"]} />
+      <.input field={@form[:queue]} type="select" label="Queue" options={["default", "background"]} />
+      <.button>Schedule Job</.button>
+    </.form>
+  </section>
+
+  <section>
+    <.header>Auto Schedule Multiple Jobs</.header>
+
+    <.form for={@auto_form} as={:auto} phx-submit="auto_schedule" class="space-y-6">
+      <.input field={@auto_form[:job_count]} type="number" label="Job count" />
+      <p class="text-sm text-zinc-500">
+        Jobs will be created with random sleep times (500-5000ms), random queues, and random failure states.
+      </p>
+      <.button>Schedule Jobs</.button>
+    </.form>
+  </section>
+
+  <section>
+    <.header>Recent Jobs</.header>
+
+    <table class="w-full text-left text-sm">
+      <thead>
+        <tr>
+          <th>ID</th>
+          <th>Queue</th>
+          <th>State</th>
+          <th>Attempt</th>
+          <th>Args</th>
+        </tr>
+      </thead>
+      <tbody>
+        <%= for job <- @jobs do %>
+          <tr>
+            <td><%= job.id %></td>
+            <td><%= job.queue %></td>
+            <td><%= job.state %></td>
+            <td><%= job.attempt %></td>
+            <td><%= inspect(job.args) %></td>
+          </tr>
+        <% end %>
+      </tbody>
+    </table>
+  </section>
+</div>
diff --git a/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/form_component.ex b/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/form_component.ex
new file mode 100644
index 00000000..622a6b05
--- /dev/null
+++ b/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/form_component.ex
@@ -0,0 +1,83 @@
+defmodule PhoenixAppWeb.UserLive.FormComponent do
+ use PhoenixAppWeb, :live_component
+
+ alias PhoenixApp.Accounts
+
+ @impl true
+ def render(assigns) do
+ ~H"""
+    <div>
+      <.header>
+        <%= @title %>
+        <:subtitle>Use this form to manage user records in your database.</:subtitle>
+      </.header>
+
+      <.simple_form
+        for={@form}
+        id="user-form"
+        phx-target={@myself}
+        phx-change="validate"
+        phx-submit="save"
+      >
+        <.input field={@form[:name]} type="text" label="Name" />
+        <.input field={@form[:age]} type="number" label="Age" />
+        <:actions>
+          <.button phx-disable-with="Saving...">Save User</.button>
+        </:actions>
+      </.simple_form>
+    </div>
+ """
+ end
+
+ @impl true
+ def update(%{user: user} = assigns, socket) do
+ {:ok,
+ socket
+ |> assign(assigns)
+ |> assign_new(:form, fn ->
+ to_form(Accounts.change_user(user))
+ end)}
+ end
+
+ @impl true
+ def handle_event("validate", %{"user" => user_params}, socket) do
+ changeset = Accounts.change_user(socket.assigns.user, user_params)
+ {:noreply, assign(socket, form: to_form(changeset, action: :validate))}
+ end
+
+ def handle_event("save", %{"user" => user_params}, socket) do
+ save_user(socket, socket.assigns.action, user_params)
+ end
+
+ defp save_user(socket, :edit, user_params) do
+ case Accounts.update_user(socket.assigns.user, user_params) do
+ {:ok, user} ->
+ notify_parent({:saved, user})
+
+ {:noreply,
+ socket
+ |> put_flash(:info, "User updated successfully")
+ |> push_patch(to: socket.assigns.patch)}
+
+ {:error, %Ecto.Changeset{} = changeset} ->
+ {:noreply, assign(socket, form: to_form(changeset))}
+ end
+ end
+
+ defp save_user(socket, :new, user_params) do
+ case Accounts.create_user(user_params) do
+ {:ok, user} ->
+ notify_parent({:saved, user})
+
+ {:noreply,
+ socket
+ |> put_flash(:info, "User created successfully")
+ |> push_patch(to: socket.assigns.patch)}
+
+ {:error, %Ecto.Changeset{} = changeset} ->
+ {:noreply, assign(socket, form: to_form(changeset))}
+ end
+ end
+
+ defp notify_parent(msg), do: send(self(), {__MODULE__, msg})
+end
diff --git a/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/index.ex b/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/index.ex
new file mode 100644
index 00000000..4cbf8962
--- /dev/null
+++ b/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/index.ex
@@ -0,0 +1,47 @@
+defmodule PhoenixAppWeb.UserLive.Index do
+ use PhoenixAppWeb, :live_view
+
+ alias PhoenixApp.Accounts
+ alias PhoenixApp.Accounts.User
+
+ @impl true
+ def mount(_params, _session, socket) do
+ {:ok, stream(socket, :users, Accounts.list_users())}
+ end
+
+ @impl true
+ def handle_params(params, _url, socket) do
+ {:noreply, apply_action(socket, socket.assigns.live_action, params)}
+ end
+
+ defp apply_action(socket, :edit, %{"id" => id}) do
+ socket
+ |> assign(:page_title, "Edit User")
+ |> assign(:user, Accounts.get_user!(id))
+ end
+
+ defp apply_action(socket, :new, _params) do
+ socket
+ |> assign(:page_title, "New User")
+ |> assign(:user, %User{})
+ end
+
+ defp apply_action(socket, :index, _params) do
+ socket
+ |> assign(:page_title, "Listing Users")
+ |> assign(:user, nil)
+ end
+
+ @impl true
+ def handle_info({PhoenixAppWeb.UserLive.FormComponent, {:saved, user}}, socket) do
+ {:noreply, stream_insert(socket, :users, user)}
+ end
+
+ @impl true
+ def handle_event("delete", %{"id" => id}, socket) do
+ user = Accounts.get_user!(id)
+ {:ok, _} = Accounts.delete_user(user)
+
+ {:noreply, stream_delete(socket, :users, user)}
+ end
+end
diff --git a/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/index.html.heex b/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/index.html.heex
new file mode 100644
index 00000000..33a964df
--- /dev/null
+++ b/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/index.html.heex
@@ -0,0 +1,42 @@
+<.header>
+  Listing Users
+  <:actions>
+    <.link patch={~p"/users/new"}>
+      <.button>New User</.button>
+    </.link>
+  </:actions>
+</.header>
+
+<.table
+  id="users"
+  rows={@streams.users}
+  row_click={fn {_id, user} -> JS.navigate(~p"/users/#{user}") end}
+>
+  <:col :let={{_id, user}} label="Name"><%= user.name %></:col>
+  <:col :let={{_id, user}} label="Age"><%= user.age %></:col>
+  <:action :let={{_id, user}}>
+    <div class="sr-only">
+      <.link navigate={~p"/users/#{user}"}>Show</.link>
+    </div>
+    <.link patch={~p"/users/#{user}/edit"}>Edit</.link>
+  </:action>
+  <:action :let={{id, user}}>
+    <.link
+      phx-click={JS.push("delete", value: %{id: user.id}) |> hide("##{id}")}
+      data-confirm="Are you sure?"
+    >
+      Delete
+    </.link>
+  </:action>
+</.table>
+
+<.modal :if={@live_action in [:new, :edit]} id="user-modal" show on_cancel={JS.patch(~p"/users")}>
+  <.live_component
+    module={PhoenixAppWeb.UserLive.FormComponent}
+    id={@user.id || :new}
+    title={@page_title}
+    action={@live_action}
+    user={@user}
+    patch={~p"/users"}
+  />
+</.modal>
diff --git a/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/show.ex b/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/show.ex
new file mode 100644
index 00000000..eaa24470
--- /dev/null
+++ b/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/show.ex
@@ -0,0 +1,21 @@
+defmodule PhoenixAppWeb.UserLive.Show do
+ use PhoenixAppWeb, :live_view
+
+ alias PhoenixApp.Accounts
+
+ @impl true
+ def mount(_params, _session, socket) do
+ {:ok, socket}
+ end
+
+ @impl true
+ def handle_params(%{"id" => id}, _, socket) do
+ {:noreply,
+ socket
+ |> assign(:page_title, page_title(socket.assigns.live_action))
+ |> assign(:user, Accounts.get_user!(id))}
+ end
+
+ defp page_title(:show), do: "Show User"
+ defp page_title(:edit), do: "Edit User"
+end
diff --git a/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/show.html.heex b/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/show.html.heex
new file mode 100644
index 00000000..35b90bb2
--- /dev/null
+++ b/test_integrations/phoenix_app/lib/phoenix_app_web/live/user_live/show.html.heex
@@ -0,0 +1,27 @@
+<.header>
+  User <%= @user.id %>
+  <:subtitle>This is a user record from your database.</:subtitle>
+  <:actions>
+    <.link patch={~p"/users/#{@user}/show/edit"} phx-click={JS.push_focus()}>
+      <.button>Edit user</.button>
+    </.link>
+  </:actions>
+</.header>
+
+<.list>
+  <:item title="Name"><%= @user.name %></:item>
+  <:item title="Age"><%= @user.age %></:item>
+</.list>
+
+<.back navigate={~p"/users"}>Back to users</.back>
+
+<.modal :if={@live_action == :edit} id="user-modal" show on_cancel={JS.patch(~p"/users/#{@user}")}>
+  <.live_component
+    module={PhoenixAppWeb.UserLive.FormComponent}
+    id={@user.id}
+    title={@page_title}
+    action={@live_action}
+    user={@user}
+    patch={~p"/users/#{@user}"}
+  />
+</.modal>
diff --git a/test_integrations/phoenix_app/lib/phoenix_app_web/router.ex b/test_integrations/phoenix_app/lib/phoenix_app_web/router.ex
index 409aeb27..ddf33edf 100644
--- a/test_integrations/phoenix_app/lib/phoenix_app_web/router.ex
+++ b/test_integrations/phoenix_app/lib/phoenix_app_web/router.ex
@@ -19,6 +19,16 @@ defmodule PhoenixAppWeb.Router do
get "/", PageController, :home
get "/exception", PageController, :exception
+ get "/transaction", PageController, :transaction
+
+ live "/test-worker", TestWorkerLive
+
+ live "/users", UserLive.Index, :index
+ live "/users/new", UserLive.Index, :new
+ live "/users/:id/edit", UserLive.Index, :edit
+
+ live "/users/:id", UserLive.Show, :show
+ live "/users/:id/show/edit", UserLive.Show, :edit
end
# Other scopes may use custom stacks.
diff --git a/test_integrations/phoenix_app/mix.exs b/test_integrations/phoenix_app/mix.exs
index 2055e414..1ae5d0e6 100644
--- a/test_integrations/phoenix_app/mix.exs
+++ b/test_integrations/phoenix_app/mix.exs
@@ -36,10 +36,21 @@ defmodule PhoenixApp.MixProject do
{:nimble_ownership, "~> 0.3.0 or ~> 1.0"},
{:postgrex, ">= 0.0.0"},
+ {:ecto, "~> 3.12"},
+ {:ecto_sql, "~> 3.12"},
+ {:ecto_sqlite3, "~> 0.16"},
{:phoenix, "~> 1.7.14"},
{:phoenix_html, "~> 4.1"},
{:phoenix_live_view, "~> 1.0"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
+ {:phoenix_ecto, "~> 4.6", optional: true},
+ {:heroicons,
+ github: "tailwindlabs/heroicons",
+ tag: "v2.1.1",
+ sparse: "optimized",
+ app: false,
+ compile: false,
+ depth: 1},
{:floki, ">= 0.30.0", only: :test},
{:phoenix_live_dashboard, "~> 0.8.3"},
{:esbuild, "~> 0.8", runtime: Mix.env() == :dev},
@@ -53,9 +64,19 @@ defmodule PhoenixApp.MixProject do
{:dns_cluster, "~> 0.1.1"},
{:bandit, "~> 1.5"},
{:bypass, "~> 2.1", only: :test},
- {:hackney, "~> 1.18", only: :test},
-
- {:sentry, path: "../.."}
+ {:opentelemetry, "~> 1.5"},
+ {:opentelemetry_api, "~> 1.4"},
+ {:opentelemetry_phoenix, "~> 2.0"},
+ {:opentelemetry_semantic_conventions, "~> 1.27"},
+ # TODO: Update once merged
+ {:opentelemetry_oban, "~> 1.1",
+ github: "danschultzer/opentelemetry-erlang-contrib",
+ branch: "oban-v1.27-semantics",
+ sparse: "instrumentation/opentelemetry_oban"},
+ {:opentelemetry_ecto, "~> 1.2"},
+ {:sentry, path: "../.."},
+ {:hackney, "~> 1.18"},
+ {:oban, "~> 2.10"}
]
end
diff --git a/test_integrations/phoenix_app/mix.lock b/test_integrations/phoenix_app/mix.lock
index a14316ee..0891f5d7 100644
--- a/test_integrations/phoenix_app/mix.lock
+++ b/test_integrations/phoenix_app/mix.lock
@@ -2,20 +2,27 @@
"bandit": {:hex, :bandit, "1.6.1", "9e01b93d72ddc21d8c576a704949e86ee6cde7d11270a1d3073787876527a48f", [:mix], [{:hpax, "~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:thousand_island, "~> 1.0", [hex: :thousand_island, repo: "hexpm", optional: false]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "5a904bf010ea24b67979835e0507688e31ac873d4ffc8ed0e5413e8d77455031"},
"bypass": {:hex, :bypass, "2.1.0", "909782781bf8e20ee86a9cabde36b259d44af8b9f38756173e8f5e2e1fabb9b1", [:mix], [{:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.0", [hex: :plug_cowboy, repo: "hexpm", optional: false]}, {:ranch, "~> 1.3", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "d9b5df8fa5b7a6efa08384e9bbecfe4ce61c77d28a4282f79e02f1ef78d96b80"},
"castore": {:hex, :castore, "1.0.10", "43bbeeac820f16c89f79721af1b3e092399b3a1ecc8df1a472738fd853574911", [:mix], [], "hexpm", "1b0b7ea14d889d9ea21202c43a4fa015eb913021cb535e8ed91946f4b77a8848"},
+ "cc_precompiler": {:hex, :cc_precompiler, "0.1.10", "47c9c08d8869cf09b41da36538f62bc1abd3e19e41701c2cea2675b53c704258", [:mix], [{:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "f6e046254e53cd6b41c6bacd70ae728011aa82b2742a80d6e2214855c6e06b22"},
"certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"},
"cowboy": {:hex, :cowboy, "2.12.0", "f276d521a1ff88b2b9b4c54d0e753da6c66dd7be6c9fca3d9418b561828a3731", [:make, :rebar3], [{:cowlib, "2.13.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "8a7abe6d183372ceb21caa2709bec928ab2b72e18a3911aa1771639bef82651e"},
"cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"},
"cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"},
"db_connection": {:hex, :db_connection, "2.7.0", "b99faa9291bb09892c7da373bb82cba59aefa9b36300f6145c5f201c7adf48ec", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dcf08f31b2701f857dfc787fbad78223d61a32204f217f15e881dd93e4bdd3ff"},
- "decimal": {:hex, :decimal, "2.2.0", "df3d06bb9517e302b1bd265c1e7f16cda51547ad9d99892049340841f3e15836", [:mix], [], "hexpm", "af8daf87384b51b7e611fb1a1f2c4d4876b65ef968fa8bd3adf44cff401c7f21"},
+ "decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"},
"dns_cluster": {:hex, :dns_cluster, "0.1.3", "0bc20a2c88ed6cc494f2964075c359f8c2d00e1bf25518a6a6c7fd277c9b0c66", [:mix], [], "hexpm", "46cb7c4a1b3e52c7ad4cbe33ca5079fbde4840dedeafca2baf77996c2da1bc33"},
+ "ecto": {:hex, :ecto, "3.12.5", "4a312960ce612e17337e7cefcf9be45b95a3be6b36b6f94dfb3d8c361d631866", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6eb18e80bef8bb57e17f5a7f068a1719fbda384d40fc37acb8eb8aeca493b6ea"},
+ "ecto_sql": {:hex, :ecto_sql, "3.12.1", "c0d0d60e85d9ff4631f12bafa454bc392ce8b9ec83531a412c12a0d415a3a4d0", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.12", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "aff5b958a899762c5f09028c847569f7dfb9cc9d63bdb8133bff8a5546de6bf5"},
+ "ecto_sqlite3": {:hex, :ecto_sqlite3, "0.17.5", "fbee5c17ff6afd8e9ded519b0abb363926c65d30b27577232bb066b2a79957b8", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ecto, "~> 3.12", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.12", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:exqlite, "~> 0.22", [hex: :exqlite, repo: "hexpm", optional: false]}], "hexpm", "3b54734d998cbd032ac59403c36acf4e019670e8b6ceef9c6c33d8986c4e9704"},
+ "elixir_make": {:hex, :elixir_make, "0.9.0", "6484b3cd8c0cee58f09f05ecaf1a140a8c97670671a6a0e7ab4dc326c3109726", [:mix], [], "hexpm", "db23d4fd8b757462ad02f8aa73431a426fe6671c80b200d9710caf3d1dd0ffdb"},
"esbuild": {:hex, :esbuild, "0.8.1", "0cbf919f0eccb136d2eeef0df49c4acf55336de864e63594adcea3814f3edf41", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "25fc876a67c13cb0a776e7b5d7974851556baeda2085296c14ab48555ea7560f"},
"expo": {:hex, :expo, "1.1.0", "f7b9ed7fb5745ebe1eeedf3d6f29226c5dd52897ac67c0f8af62a07e661e5c75", [:mix], [], "hexpm", "fbadf93f4700fb44c331362177bdca9eeb8097e8b0ef525c9cc501cb9917c960"},
+ "exqlite": {:hex, :exqlite, "0.27.1", "73fc0b3dc3b058a77a2b3771f82a6af2ddcf370b069906968a34083d2ffd2884", [:make, :mix], [{:cc_precompiler, "~> 0.1", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.8", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "79ef5756451cfb022e8013e1ed00d0f8f7d1333c19502c394dc16b15cfb4e9b4"},
"file_system": {:hex, :file_system, "1.0.1", "79e8ceaddb0416f8b8cd02a0127bdbababe7bf4a23d2a395b983c1f8b3f73edd", [:mix], [], "hexpm", "4414d1f38863ddf9120720cd976fce5bdde8e91d8283353f0e31850fa89feb9e"},
"finch": {:hex, :finch, "0.19.0", "c644641491ea854fc5c1bbaef36bfc764e3f08e7185e1f084e35e0672241b76d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "fc5324ce209125d1e2fa0fcd2634601c52a787aff1cd33ee833664a5af4ea2b6"},
"floki": {:hex, :floki, "0.37.0", "b83e0280bbc6372f2a403b2848013650b16640cd2470aea6701f0632223d719e", [:mix], [], "hexpm", "516a0c15a69f78c47dc8e0b9b3724b29608aa6619379f91b1ffa47109b5d0dd3"},
"gettext": {:hex, :gettext, "0.26.1", "38e14ea5dcf962d1fc9f361b63ea07c0ce715a8ef1f9e82d3dfb8e67e0416715", [:mix], [{:expo, "~> 0.5.1 or ~> 1.0", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "01ce56f188b9dc28780a52783d6529ad2bc7124f9744e571e1ee4ea88bf08734"},
"hackney": {:hex, :hackney, "1.20.1", "8d97aec62ddddd757d128bfd1df6c5861093419f8f7a4223823537bad5d064e2", [:rebar3], [{:certifi, "~> 2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "fe9094e5f1a2a2c0a7d10918fee36bfec0ec2a979994cff8cfe8058cd9af38e3"},
+ "heroicons": {:git, "https://github.com/tailwindlabs/heroicons.git", "88ab3a0d790e6a47404cba02800a6b25d2afae50", [tag: "v2.1.1", sparse: "optimized", depth: 1]},
"hpax": {:hex, :hpax, "1.0.1", "c857057f89e8bd71d97d9042e009df2a42705d6d690d54eca84c8b29af0787b0", [:mix], [], "hexpm", "4e2d5a4f76ae1e3048f35ae7adb1641c36265510a2d4638157fbcb53dda38445"},
"idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
"jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"},
@@ -26,8 +33,20 @@
"nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
"nimble_ownership": {:hex, :nimble_ownership, "1.0.0", "3f87744d42c21b2042a0aa1d48c83c77e6dd9dd357e425a038dd4b49ba8b79a1", [:mix], [], "hexpm", "7c16cc74f4e952464220a73055b557a273e8b1b7ace8489ec9d86e9ad56cb2cc"},
"nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"},
+ "oban": {:hex, :oban, "2.18.3", "1608c04f8856c108555c379f2f56bc0759149d35fa9d3b825cb8a6769f8ae926", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "36ca6ca84ef6518f9c2c759ea88efd438a3c81d667ba23b02b062a0aa785475e"},
+ "opentelemetry": {:hex, :opentelemetry, "1.5.0", "7dda6551edfc3050ea4b0b40c0d2570423d6372b97e9c60793263ef62c53c3c2", [:rebar3], [{:opentelemetry_api, "~> 1.4", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}], "hexpm", "cdf4f51d17b592fc592b9a75f86a6f808c23044ba7cf7b9534debbcc5c23b0ee"},
+ "opentelemetry_api": {:hex, :opentelemetry_api, "1.4.0", "63ca1742f92f00059298f478048dfb826f4b20d49534493d6919a0db39b6db04", [:mix, :rebar3], [], "hexpm", "3dfbbfaa2c2ed3121c5c483162836c4f9027def469c41578af5ef32589fcfc58"},
+ "opentelemetry_bandit": {:git, "https://github.com/solnic/opentelemetry-bandit.git", "1e00505fb3bb02001a3400f8a807cd1c7f7f957d", []},
+ "opentelemetry_ecto": {:hex, :opentelemetry_ecto, "1.2.0", "2382cb47ddc231f953d3b8263ed029d87fbf217915a1da82f49159d122b64865", [:mix], [{:opentelemetry_api, "~> 1.0", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}, {:opentelemetry_process_propagator, "~> 0.2", [hex: :opentelemetry_process_propagator, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "70dfa2e79932e86f209df00e36c980b17a32f82d175f0068bf7ef9a96cf080cf"},
+ "opentelemetry_oban": {:git, "https://github.com/danschultzer/opentelemetry-erlang-contrib.git", "fda7ab9acde6d845393f8bb4a9876ebb98aedd75", [branch: "oban-v1.27-semantics", sparse: "instrumentation/opentelemetry_oban"]},
+ "opentelemetry_phoenix": {:hex, :opentelemetry_phoenix, "2.0.0", "3a22f620a26613ba02e7289238da145c2ddcd58bd37b780b200080139d24b176", [:mix], [{:nimble_options, "~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:opentelemetry_api, "~> 1.4", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}, {:opentelemetry_process_propagator, "~> 0.3", [hex: :opentelemetry_process_propagator, repo: "hexpm", optional: false]}, {:opentelemetry_semantic_conventions, "~> 1.27", [hex: :opentelemetry_semantic_conventions, repo: "hexpm", optional: false]}, {:opentelemetry_telemetry, "~> 1.1", [hex: :opentelemetry_telemetry, repo: "hexpm", optional: false]}, {:otel_http, "~> 0.2", [hex: :otel_http, repo: "hexpm", optional: false]}, {:plug, ">= 1.11.0", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2c0969c561a87703cda64e9f0c37e9dec6dceee11c2d2eafef8d3f4138ec364"},
+ "opentelemetry_process_propagator": {:hex, :opentelemetry_process_propagator, "0.3.0", "ef5b2059403a1e2b2d2c65914e6962e56371570b8c3ab5323d7a8d3444fb7f84", [:mix, :rebar3], [{:opentelemetry_api, "~> 1.0", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}], "hexpm", "7243cb6de1523c473cba5b1aefa3f85e1ff8cc75d08f367104c1e11919c8c029"},
+ "opentelemetry_semantic_conventions": {:hex, :opentelemetry_semantic_conventions, "1.27.0", "acd0194a94a1e57d63da982ee9f4a9f88834ae0b31b0bd850815fe9be4bbb45f", [:mix, :rebar3], [], "hexpm", "9681ccaa24fd3d810b4461581717661fd85ff7019b082c2dff89c7d5b1fc2864"},
+ "opentelemetry_telemetry": {:hex, :opentelemetry_telemetry, "1.1.2", "410ab4d76b0921f42dbccbe5a7c831b8125282850be649ee1f70050d3961118a", [:mix, :rebar3], [{:opentelemetry_api, "~> 1.3", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "641ab469deb181957ac6d59bce6e1321d5fe2a56df444fc9c19afcad623ab253"},
+ "otel_http": {:hex, :otel_http, "0.2.0", "b17385986c7f1b862f5d577f72614ecaa29de40392b7618869999326b9a61d8a", [:rebar3], [], "hexpm", "f2beadf922c8cfeb0965488dd736c95cc6ea8b9efce89466b3904d317d7cc717"},
"parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"},
"phoenix": {:hex, :phoenix, "1.7.17", "2fcdceecc6fb90bec26fab008f96abbd0fd93bc9956ec7985e5892cf545152ca", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "50e8ad537f3f7b0efb1509b2f75b5c918f697be6a45d48e49a30d3b7c0e464c9"},
+ "phoenix_ecto": {:hex, :phoenix_ecto, "4.6.3", "f686701b0499a07f2e3b122d84d52ff8a31f5def386e03706c916f6feddf69ef", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "909502956916a657a197f94cc1206d9a65247538de8a5e186f7537c895d95764"},
"phoenix_html": {:hex, :phoenix_html, "4.1.1", "4c064fd3873d12ebb1388425a8f2a19348cef56e7289e1998e2d2fa758aa982e", [:mix], [], "hexpm", "f2f2df5a72bc9a2f510b21497fd7d2b86d932ec0598f0210fed4114adc546c6f"},
"phoenix_live_dashboard": {:hex, :phoenix_live_dashboard, "0.8.4", "4508e481f791ce62ec6a096e13b061387158cbeefacca68c6c1928e1305e23ed", [:mix], [{:ecto, "~> 3.6.2 or ~> 3.7", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_mysql_extras, "~> 0.5", [hex: :ecto_mysql_extras, repo: "hexpm", optional: true]}, {:ecto_psql_extras, "~> 0.7", [hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:ecto_sqlite3_extras, "~> 1.1.7 or ~> 1.2.0", [hex: :ecto_sqlite3_extras, repo: "hexpm", optional: true]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.19 or ~> 1.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "2984aae96994fbc5c61795a73b8fb58153b41ff934019cfb522343d2d3817d59"},
"phoenix_live_reload": {:hex, :phoenix_live_reload, "1.5.3", "f2161c207fda0e4fb55165f650f7f8db23f02b29e3bff00ff7ef161d6ac1f09d", [:mix], [{:file_system, "~> 0.3 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "b4ec9cd73cb01ff1bd1cac92e045d13e7030330b74164297d1aee3907b54803c"},
@@ -37,7 +56,7 @@
"plug": {:hex, :plug, "1.16.1", "40c74619c12f82736d2214557dedec2e9762029b2438d6d175c5074c933edc9d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a13ff6b9006b03d7e33874945b2755253841b238c34071ed85b0e86057f8cddc"},
"plug_cowboy": {:hex, :plug_cowboy, "2.7.2", "fdadb973799ae691bf9ecad99125b16625b1c6039999da5fe544d99218e662e4", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "245d8a11ee2306094840c000e8816f0cbed69a23fc0ac2bcf8d7835ae019bb2f"},
"plug_crypto": {:hex, :plug_crypto, "2.1.0", "f44309c2b06d249c27c8d3f65cfe08158ade08418cf540fd4f72d4d6863abb7b", [:mix], [], "hexpm", "131216a4b030b8f8ce0f26038bc4421ae60e4bb95c5cf5395e1421437824c4fa"},
- "postgrex": {:hex, :postgrex, "0.19.1", "73b498508b69aded53907fe48a1fee811be34cc720e69ef4ccd568c8715495ea", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "8bac7885a18f381e091ec6caf41bda7bb8c77912bb0e9285212829afe5d8a8f8"},
+ "postgrex": {:hex, :postgrex, "0.19.3", "a0bda6e3bc75ec07fca5b0a89bffd242ca209a4822a9533e7d3e84ee80707e19", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "d31c28053655b78f47f948c85bb1cf86a9c1f8ead346ba1aa0d0df017fa05b61"},
"ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"},
"swoosh": {:hex, :swoosh, "1.17.1", "01295a82bddd2c6cac1e65856e29444d7c23c4501e0ebc69cea8a82018227e25", [:mix], [{:bandit, ">= 1.0.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:cowboy, "~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:ex_aws, "~> 2.1", [hex: :ex_aws, repo: "hexpm", optional: true]}, {:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13 or ~> 1.0", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mua, "~> 0.2.3", [hex: :mua, repo: "hexpm", optional: true]}, {:multipart, "~> 0.4", [hex: :multipart, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:req, "~> 0.5 or ~> 1.0", [hex: :req, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3b20d25e580cb79af631335a1bdcfbffd835c08ebcdc16e98577223a241a18a1"},
diff --git a/test_integrations/phoenix_app/priv/repo/migrations/20240926155911_create_users.exs b/test_integrations/phoenix_app/priv/repo/migrations/20240926155911_create_users.exs
new file mode 100644
index 00000000..21f4a335
--- /dev/null
+++ b/test_integrations/phoenix_app/priv/repo/migrations/20240926155911_create_users.exs
@@ -0,0 +1,12 @@
+defmodule PhoenixApp.Repo.Migrations.CreateUsers do
+ use Ecto.Migration
+
+ def change do
+ create table(:users) do
+ add :name, :string
+ add :age, :integer
+
+ timestamps(type: :utc_datetime)
+ end
+ end
+end
diff --git a/test_integrations/phoenix_app/priv/repo/migrations/20241213222834_add_oban.exs b/test_integrations/phoenix_app/priv/repo/migrations/20241213222834_add_oban.exs
new file mode 100644
index 00000000..f7aa7789
--- /dev/null
+++ b/test_integrations/phoenix_app/priv/repo/migrations/20241213222834_add_oban.exs
@@ -0,0 +1,11 @@
+defmodule PhoenixApp.Repo.Migrations.AddOban do
+ use Ecto.Migration
+
+ def up do
+ Oban.Migration.up()
+ end
+
+ def down do
+ Oban.Migration.down()
+ end
+end
diff --git a/test_integrations/phoenix_app/test/phoenix_app/oban_test.exs b/test_integrations/phoenix_app/test/phoenix_app/oban_test.exs
new file mode 100644
index 00000000..f2cc97e2
--- /dev/null
+++ b/test_integrations/phoenix_app/test/phoenix_app/oban_test.exs
@@ -0,0 +1,43 @@
+defmodule Sentry.Integrations.Phoenix.ObanTest do
+ use PhoenixAppWeb.ConnCase, async: false
+ use Oban.Testing, repo: PhoenixApp.Repo
+
+ import Sentry.TestHelpers
+
+ setup do
+ put_test_config(dsn: "http://public:secret@localhost:8080/1")
+ Sentry.Test.start_collecting_sentry_reports()
+
+ :ok
+ end
+
+ defmodule TestWorker do
+ use Oban.Worker
+
+ @impl Oban.Worker
+ def perform(_job) do
+ :timer.sleep(100)
+ end
+ end
+
+ test "captures Oban worker execution as transaction" do
+ :ok = perform_job(TestWorker, %{test: "args"})
+
+ transactions = Sentry.Test.pop_sentry_transactions()
+ assert length(transactions) == 1
+
+ [transaction] = transactions
+
+ assert transaction.transaction == "Sentry.Integrations.Phoenix.ObanTest.TestWorker"
+ assert transaction.transaction_info == %{source: :custom}
+
+ trace = transaction.contexts.trace
+ assert trace.origin == "opentelemetry_oban"
+ assert trace.op == "queue.process"
+ assert trace.description == "Sentry.Integrations.Phoenix.ObanTest.TestWorker"
+ assert trace.data["oban.job.job_id"]
+ assert trace.data["messaging.destination"] == "default"
+ assert trace.data["oban.job.attempt"] == 1
+ assert [] = transaction.spans
+ end
+end
diff --git a/test_integrations/phoenix_app/test/phoenix_app/repo_test.exs b/test_integrations/phoenix_app/test/phoenix_app/repo_test.exs
new file mode 100644
index 00000000..7a61e524
--- /dev/null
+++ b/test_integrations/phoenix_app/test/phoenix_app/repo_test.exs
@@ -0,0 +1,28 @@
+defmodule PhoenixApp.RepoTest do
+ use PhoenixApp.DataCase
+
+ alias PhoenixApp.{Repo, Accounts.User}
+
+ import Sentry.TestHelpers
+
+ setup do
+ put_test_config(dsn: "http://public:secret@localhost:8080/1")
+
+ Sentry.Test.start_collecting_sentry_reports()
+ end
+
+ test "instrumented top-level ecto transaction span" do
+ User |> Repo.all() |> Enum.map(& &1.id)
+
+ transactions = Sentry.Test.pop_sentry_transactions()
+
+ assert length(transactions) == 1
+
+ assert [transaction] = transactions
+
+ assert transaction.transaction_info == %{source: :custom}
+ assert transaction.contexts.trace.op == "db"
+ assert String.starts_with?(transaction.contexts.trace.description, "SELECT")
+ assert transaction.contexts.trace.data["db.system"] == :sqlite
+ end
+end
diff --git a/test_integrations/phoenix_app/test/phoenix_app_web/controllers/exception_test.exs b/test_integrations/phoenix_app/test/phoenix_app_web/controllers/exception_test.exs
index b1e81b86..dffdd822 100644
--- a/test_integrations/phoenix_app/test/phoenix_app_web/controllers/exception_test.exs
+++ b/test_integrations/phoenix_app/test/phoenix_app_web/controllers/exception_test.exs
@@ -4,21 +4,12 @@ defmodule Sentry.Integrations.Phoenix.ExceptionTest do
import Sentry.TestHelpers
setup do
- bypass = Bypass.open()
- put_test_config(dsn: "http://public:secret@localhost:#{bypass.port}/1")
- %{bypass: bypass}
- end
+ put_test_config(dsn: "http://public:secret@localhost:8080/1")
- test "GET /exception sends exception to Sentry", %{conn: conn, bypass: bypass} do
- Bypass.expect(bypass, fn conn ->
- {:ok, body, conn} = Plug.Conn.read_body(conn)
- assert body =~ "RuntimeError"
- assert body =~ "Test exception"
- assert conn.request_path == "/api/1/envelope/"
- assert conn.method == "POST"
- Plug.Conn.resp(conn, 200, ~s<{"id": "340"}>)
- end)
+ Sentry.Test.start_collecting_sentry_reports()
+ end
+ test "GET /exception sends exception to Sentry", %{conn: conn} do
assert_raise RuntimeError, "Test exception", fn ->
get(conn, ~p"/exception")
end
diff --git a/test_integrations/phoenix_app/test/phoenix_app_web/controllers/transaction_test.exs b/test_integrations/phoenix_app/test/phoenix_app_web/controllers/transaction_test.exs
new file mode 100644
index 00000000..eb747ea2
--- /dev/null
+++ b/test_integrations/phoenix_app/test/phoenix_app_web/controllers/transaction_test.exs
@@ -0,0 +1,65 @@
+defmodule Sentry.Integrations.Phoenix.TransactionTest do
+ use PhoenixAppWeb.ConnCase, async: true
+
+ import Sentry.TestHelpers
+
+ setup do
+ put_test_config(dsn: "http://public:secret@localhost:8080/1")
+
+ Sentry.Test.start_collecting_sentry_reports()
+ end
+
+ test "GET /transaction", %{conn: conn} do
+ # TODO: Wrap this request in the transaction that the web server
+ # would normally create around it.
+ get(conn, ~p"/transaction")
+
+ transactions = Sentry.Test.pop_sentry_transactions()
+
+ assert length(transactions) == 1
+
+ assert [transaction] = transactions
+
+ assert transaction.transaction == "test_span"
+ assert transaction.transaction_info == %{source: :custom}
+
+ trace = transaction.contexts.trace
+ assert trace.origin == "phoenix_app"
+ assert trace.op == "test_span"
+ assert trace.data == %{}
+
+ assert [] = transaction.spans
+ end
+
+ test "GET /users", %{conn: conn} do
+ get(conn, ~p"/users")
+
+ transactions = Sentry.Test.pop_sentry_transactions()
+
+ assert length(transactions) == 2
+
+ assert [mount_transaction, handle_params_transaction] = transactions
+
+ assert mount_transaction.transaction == "PhoenixAppWeb.UserLive.Index.mount"
+ assert mount_transaction.transaction_info == %{source: :custom}
+
+ trace = mount_transaction.contexts.trace
+ assert trace.origin == "opentelemetry_phoenix"
+ assert trace.op == "PhoenixAppWeb.UserLive.Index.mount"
+ assert trace.data == %{}
+
+ assert [span_ecto] = mount_transaction.spans
+
+ assert span_ecto.op == "db"
+ assert span_ecto.description == "SELECT u0.\"id\", u0.\"name\", u0.\"age\", u0.\"inserted_at\", u0.\"updated_at\" FROM \"users\" AS u0"
+
+ assert handle_params_transaction.transaction == "PhoenixAppWeb.UserLive.Index.handle_params"
+ assert handle_params_transaction.transaction_info == %{source: :custom}
+
+ trace = handle_params_transaction.contexts.trace
+ assert trace.origin == "opentelemetry_phoenix"
+ assert trace.op == "PhoenixAppWeb.UserLive.Index.handle_params"
+ assert trace.data == %{}
+ assert [] = handle_params_transaction.spans
+ end
+end
diff --git a/test_integrations/phoenix_app/test/phoenix_app_web/live/user_live_test.exs b/test_integrations/phoenix_app/test/phoenix_app_web/live/user_live_test.exs
new file mode 100644
index 00000000..ad86afe5
--- /dev/null
+++ b/test_integrations/phoenix_app/test/phoenix_app_web/live/user_live_test.exs
@@ -0,0 +1,140 @@
+defmodule PhoenixAppWeb.UserLiveTest do
+ use PhoenixAppWeb.ConnCase
+
+ import Sentry.TestHelpers
+ import Phoenix.LiveViewTest
+ import PhoenixApp.AccountsFixtures
+
+ @create_attrs %{name: "some name", age: 42}
+ @update_attrs %{name: "some updated name", age: 43}
+ @invalid_attrs %{name: nil, age: nil}
+
+ setup do
+ put_test_config(dsn: "http://public:secret@localhost:8080/1")
+
+ Sentry.Test.start_collecting_sentry_reports()
+ end
+
+ defp create_user(_) do
+ user = user_fixture()
+ %{user: user}
+ end
+
+ describe "Index" do
+ setup [:create_user]
+
+ test "lists all users", %{conn: conn, user: user} do
+ {:ok, _index_live, html} = live(conn, ~p"/users")
+
+ assert html =~ "Listing Users"
+ assert html =~ user.name
+ end
+
+ test "saves new user", %{conn: conn} do
+ {:ok, index_live, _html} = live(conn, ~p"/users")
+
+ assert index_live |> element("a", "New User") |> render_click() =~
+ "New User"
+
+ assert_patch(index_live, ~p"/users/new")
+
+ assert index_live
+ |> form("#user-form", user: @invalid_attrs)
+ |> render_change() =~ "can't be blank"
+
+ assert index_live
+ |> form("#user-form", user: @create_attrs)
+ |> render_submit()
+
+ assert_patch(index_live, ~p"/users")
+
+ html = render(index_live)
+ assert html =~ "User created successfully"
+ assert html =~ "some name"
+
+ transactions = Sentry.Test.pop_sentry_transactions()
+
+ transaction_save =
+ Enum.find(transactions, fn transaction ->
+ transaction.transaction == "PhoenixAppWeb.UserLive.Index.handle_event#save"
+ end)
+
+ assert transaction_save.transaction == "PhoenixAppWeb.UserLive.Index.handle_event#save"
+ assert transaction_save.transaction_info.source == :custom
+ assert transaction_save.contexts.trace.op == "PhoenixAppWeb.UserLive.Index.handle_event#save"
+ assert transaction_save.contexts.trace.origin == "opentelemetry_phoenix"
+
+ assert length(transaction_save.spans) == 1
+ assert [span] = transaction_save.spans
+ assert span.op == "db"
+ assert span.description =~ "INSERT INTO \"users\""
+ assert span.data["db.system"] == :sqlite
+ assert span.data["db.type"] == :sql
+ assert span.origin == "opentelemetry_ecto"
+ end
+
+ test "updates user in listing", %{conn: conn, user: user} do
+ {:ok, index_live, _html} = live(conn, ~p"/users")
+
+ assert index_live |> element("#users-#{user.id} a", "Edit") |> render_click() =~
+ "Edit User"
+
+ assert_patch(index_live, ~p"/users/#{user}/edit")
+
+ assert index_live
+ |> form("#user-form", user: @invalid_attrs)
+ |> render_change() =~ "can't be blank"
+
+ assert index_live
+ |> form("#user-form", user: @update_attrs)
+ |> render_submit()
+
+ assert_patch(index_live, ~p"/users")
+
+ html = render(index_live)
+ assert html =~ "User updated successfully"
+ assert html =~ "some updated name"
+ end
+
+ test "deletes user in listing", %{conn: conn, user: user} do
+ {:ok, index_live, _html} = live(conn, ~p"/users")
+
+ assert index_live |> element("#users-#{user.id} a", "Delete") |> render_click()
+ refute has_element?(index_live, "#users-#{user.id}")
+ end
+ end
+
+ describe "Show" do
+ setup [:create_user]
+
+ test "displays user", %{conn: conn, user: user} do
+ {:ok, _show_live, html} = live(conn, ~p"/users/#{user}")
+
+ assert html =~ "Show User"
+ assert html =~ user.name
+ end
+
+ test "updates user within modal", %{conn: conn, user: user} do
+ {:ok, show_live, _html} = live(conn, ~p"/users/#{user}")
+
+ assert show_live |> element("a", "Edit") |> render_click() =~
+ "Edit User"
+
+ assert_patch(show_live, ~p"/users/#{user}/show/edit")
+
+ assert show_live
+ |> form("#user-form", user: @invalid_attrs)
+ |> render_change() =~ "can't be blank"
+
+ assert show_live
+ |> form("#user-form", user: @update_attrs)
+ |> render_submit()
+
+ assert_patch(show_live, ~p"/users/#{user}")
+
+ html = render(show_live)
+ assert html =~ "User updated successfully"
+ assert html =~ "some updated name"
+ end
+ end
+end
diff --git a/test_integrations/phoenix_app/test/support/data_case.ex b/test_integrations/phoenix_app/test/support/data_case.ex
index 648de1de..d58f0fe0 100644
--- a/test_integrations/phoenix_app/test/support/data_case.ex
+++ b/test_integrations/phoenix_app/test/support/data_case.ex
@@ -20,9 +20,9 @@ defmodule PhoenixApp.DataCase do
quote do
alias PhoenixApp.Repo
- # import Ecto
- # import Ecto.Changeset
- # import Ecto.Query
+ import Ecto
+ import Ecto.Changeset
+ import Ecto.Query
import PhoenixApp.DataCase
end
end
@@ -35,9 +35,9 @@ defmodule PhoenixApp.DataCase do
@doc """
Sets up the sandbox based on the test tags.
"""
- def setup_sandbox(_tags) do
- # pid = Ecto.Adapters.SQL.Sandbox.start_owner!(PhoenixApp.Repo, shared: not tags[:async])
- # on_exit(fn -> Ecto.Adapters.SQL.Sandbox.stop_owner(pid) end)
+ def setup_sandbox(tags) do
+ pid = Ecto.Adapters.SQL.Sandbox.start_owner!(PhoenixApp.Repo, shared: not tags[:async])
+ on_exit(fn -> Ecto.Adapters.SQL.Sandbox.stop_owner(pid) end)
end
@doc """
@@ -48,11 +48,11 @@ defmodule PhoenixApp.DataCase do
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
- # def errors_on(changeset) do
- # Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
- # Regex.replace(~r"%{(\w+)}", message, fn _, key ->
- # opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
- # end)
- # end)
- # end
+ def errors_on(changeset) do
+ Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
+ Regex.replace(~r"%{(\w+)}", message, fn _, key ->
+ opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
+ end)
+ end)
+ end
end
diff --git a/test_integrations/phoenix_app/test/support/fixtures/accounts_fixtures.ex b/test_integrations/phoenix_app/test/support/fixtures/accounts_fixtures.ex
new file mode 100644
index 00000000..eb0799e2
--- /dev/null
+++ b/test_integrations/phoenix_app/test/support/fixtures/accounts_fixtures.ex
@@ -0,0 +1,21 @@
+defmodule PhoenixApp.AccountsFixtures do
+ @moduledoc """
+ This module defines test helpers for creating
+ entities via the `PhoenixApp.Accounts` context.
+ """
+
+ @doc """
+ Generate a user.
+ """
+ def user_fixture(attrs \\ %{}) do
+ {:ok, user} =
+ attrs
+ |> Enum.into(%{
+ age: 42,
+ name: "some name"
+ })
+ |> PhoenixApp.Accounts.create_user()
+
+ user
+ end
+end
diff --git a/test_integrations/phoenix_app/test/test_helper.exs b/test_integrations/phoenix_app/test/test_helper.exs
index 97b7531c..8b917f93 100644
--- a/test_integrations/phoenix_app/test/test_helper.exs
+++ b/test_integrations/phoenix_app/test/test_helper.exs
@@ -1,2 +1,2 @@
ExUnit.start()
-# Ecto.Adapters.SQL.Sandbox.mode(PhoenixApp.Repo, :manual)
+Ecto.Adapters.SQL.Sandbox.mode(PhoenixApp.Repo, :manual)