diff --git a/.commit-me.json b/.commit-me.json new file mode 100644 index 0000000000..80224890c4 --- /dev/null +++ b/.commit-me.json @@ -0,0 +1,4 @@ +{ + "include-pull-requests": true, + "types": [ "chore", "ci", "docs", "feat", "fix", "perf", "refactor", "release", "revert", "squash", "style", "test" ] +} diff --git a/.github/workflows/conventional-commits.yml b/.github/workflows/conventional-commits.yml new file mode 100644 index 0000000000..26cd79d03a --- /dev/null +++ b/.github/workflows/conventional-commits.yml @@ -0,0 +1,32 @@ +name: Conventional Commits Validation + +on: + workflow_dispatch: + pull_request: + types: + - opened + - synchronize + - reopened + - edited + +permissions: + contents: read + pull-requests: read + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number }} # Ensure that only one instance of this workflow is running per Pull Request + cancel-in-progress: true # Cancel any previous runs of this workflow + +jobs: + validate-commits: + name: Conventional Commits Validation + runs-on: ubuntu-latest + steps: + - uses: dev-build-deploy/commit-me@v1.5.0 + env: + FORCE_COLOR: 3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + include-commits: false + update-labels: false + config: ".commit-me.json" diff --git a/README.md b/README.md index 76c7b82084..68d9faf8ff 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,6 @@ Approvers ([@open-telemetry/ruby-approvers](https://github.com/orgs/open-telemet - [Andrew Hayworth](https://github.com/ahayworth), Shopify - [Sam Handler](https://github.com/plantfansam), Shopify - [Robb Kidd](https://github.com/robbkidd), Honeycomb -- [Kayla Reopelle](https://github.com/kaylareopelle), New Relic *Find more about the approver role in [community repository](https://github.com/open-telemetry/community/blob/master/community-membership.md#approver).* @@ -43,6 +42,7 @@ Maintainers ([@open-telemetry/ruby-maintainers](https://github.com/orgs/open-tel - [Francis Bogsanyi](https://github.com/fbogsany), Shopify - [Matthew Wear](https://github.com/mwear), Lightstep - [Daniel Azuma](https://github.com/dazuma), Google +- [Kayla Reopelle](https://github.com/kaylareopelle), New Relic *Find more about the maintainer role in [community repository](https://github.com/open-telemetry/community/blob/master/community-membership.md#maintainer).* diff --git a/exporter/otlp-metrics/README.md b/exporter/otlp-metrics/README.md index 4d8986546f..ef91e019b7 100644 --- a/exporter/otlp-metrics/README.md +++ b/exporter/otlp-metrics/README.md @@ -101,12 +101,13 @@ The collector exporter can be configured explicitly in code, or via environment | Parameter | Environment variable | Default | | ------------------- | -------------------------------------------- | ----------------------------------- | | `endpoint:` | `OTEL_EXPORTER_OTLP_ENDPOINT` | `"http://localhost:4318/v1/metrics"` | -| `certificate_file:`| `OTEL_EXPORTER_OTLP_CERTIFICATE` | | +| `certificate_file: `| `OTEL_EXPORTER_OTLP_CERTIFICATE` | | +| `client_certificate_file: `| `OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE` | | +| `client_key_file:` | `OTEL_EXPORTER_OTLP_CLIENT_KEY` | | | `headers:` | `OTEL_EXPORTER_OTLP_HEADERS` | | | `compression:` | `OTEL_EXPORTER_OTLP_COMPRESSION` | `"gzip"` | | `timeout:` | `OTEL_EXPORTER_OTLP_TIMEOUT` | `10` | -| `ssl_verify_mode:` | `OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_PEER` or | `OpenSSL::SSL:VERIFY_PEER` | -| | `OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_NONE` | | +| `ssl_verify_mode:` | `OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_PEER` or 
`OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_NONE` | `OpenSSL::SSL:VERIFY_PEER` | `ssl_verify_mode:` parameter values should be flags for server certificate verification: `OpenSSL::SSL:VERIFY_PEER` and `OpenSSL::SSL:VERIFY_NONE` are acceptable. These values can also be set using the appropriately named environment variables as shown where `VERIFY_PEER` will take precedence over `VERIFY_NONE`. Please see [the Net::HTTP docs](https://ruby-doc.org/stdlib-2.7.6/libdoc/net/http/rdoc/Net/HTTP.html#verify_mode) for more information about these flags. diff --git a/exporter/otlp-metrics/lib/opentelemetry/exporter/otlp/metrics/metrics_exporter.rb b/exporter/otlp-metrics/lib/opentelemetry/exporter/otlp/metrics/metrics_exporter.rb index 44302e0e92..a6150fefb4 100644 --- a/exporter/otlp-metrics/lib/opentelemetry/exporter/otlp/metrics/metrics_exporter.rb +++ b/exporter/otlp-metrics/lib/opentelemetry/exporter/otlp/metrics/metrics_exporter.rb @@ -47,6 +47,8 @@ def self.ssl_verify_mode def initialize(endpoint: OpenTelemetry::Common::Utilities.config_opt('OTEL_EXPORTER_OTLP_METRICS_ENDPOINT', 'OTEL_EXPORTER_OTLP_ENDPOINT', default: 'http://localhost:4318/v1/metrics'), certificate_file: OpenTelemetry::Common::Utilities.config_opt('OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE', 'OTEL_EXPORTER_OTLP_CERTIFICATE'), + client_certificate_file: OpenTelemetry::Common::Utilities.config_opt('OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE', 'OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE'), + client_key_file: OpenTelemetry::Common::Utilities.config_opt('OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY', 'OTEL_EXPORTER_OTLP_CLIENT_KEY'), ssl_verify_mode: MetricsExporter.ssl_verify_mode, headers: OpenTelemetry::Common::Utilities.config_opt('OTEL_EXPORTER_OTLP_METRICS_HEADERS', 'OTEL_EXPORTER_OTLP_HEADERS', default: {}), compression: OpenTelemetry::Common::Utilities.config_opt('OTEL_EXPORTER_OTLP_METRICS_COMPRESSION', 'OTEL_EXPORTER_OTLP_COMPRESSION', default: 'gzip'), @@ -63,7 +65,7 @@ def initialize(endpoint: OpenTelemetry::Common::Utilities.config_opt('OTEL_EXPOR URI(endpoint) end - @http = http_connection(@uri, ssl_verify_mode, certificate_file) + @http = http_connection(@uri, ssl_verify_mode, certificate_file, client_certificate_file, client_key_file) @path = @uri.path @headers = prepare_headers(headers) diff --git a/exporter/otlp-metrics/lib/opentelemetry/exporter/otlp/metrics/util.rb b/exporter/otlp-metrics/lib/opentelemetry/exporter/otlp/metrics/util.rb index 37f1896da0..363fb91504 100644 --- a/exporter/otlp-metrics/lib/opentelemetry/exporter/otlp/metrics/util.rb +++ b/exporter/otlp-metrics/lib/opentelemetry/exporter/otlp/metrics/util.rb @@ -9,17 +9,19 @@ module Exporter module OTLP module Metrics # Util module provide essential functionality for exporter - module Util + module Util # rubocop:disable Metrics/ModuleLength KEEP_ALIVE_TIMEOUT = 30 RETRY_COUNT = 5 ERROR_MESSAGE_INVALID_HEADERS = 'headers must be a String with comma-separated URL Encoded UTF-8 k=v pairs or a Hash' DEFAULT_USER_AGENT = "OTel-OTLP-MetricsExporter-Ruby/#{OpenTelemetry::Exporter::OTLP::Metrics::VERSION} Ruby/#{RUBY_VERSION} (#{RUBY_PLATFORM}; #{RUBY_ENGINE}/#{RUBY_ENGINE_VERSION})".freeze - def http_connection(uri, ssl_verify_mode, certificate_file) + def http_connection(uri, ssl_verify_mode, certificate_file, client_certificate_file, client_key_file) http = Net::HTTP.new(uri.host, uri.port) http.use_ssl = uri.scheme == 'https' http.verify_mode = ssl_verify_mode http.ca_file = certificate_file unless certificate_file.nil? 
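+        # When a client certificate and key are configured, they are loaded from the
+        # given paths and attached to the connection for mutual TLS; the key is read
+        # as an unencrypted RSA key.
+        # A minimal usage sketch, assuming hypothetical endpoint and file paths:
+        #
+        #   OpenTelemetry::Exporter::OTLP::Metrics::MetricsExporter.new(
+        #     endpoint: 'https://collector.example.com:4318/v1/metrics',
+        #     certificate_file: '/etc/otel/ca.pem',
+        #     client_certificate_file: '/etc/otel/client.pem',
+        #     client_key_file: '/etc/otel/client-key.pem'
+        #   )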
+ http.cert = OpenSSL::X509::Certificate.new(File.read(client_certificate_file)) unless client_certificate_file.nil? + http.key = OpenSSL::PKey::RSA.new(File.read(client_key_file)) unless client_key_file.nil? http.keep_alive_timeout = KEEP_ALIVE_TIMEOUT http end diff --git a/exporter/otlp-metrics/test/opentelemetry/exporter/otlp/metrics/metrics_exporter_test.rb b/exporter/otlp-metrics/test/opentelemetry/exporter/otlp/metrics/metrics_exporter_test.rb index 5add4cbc55..bd9d458768 100644 --- a/exporter/otlp-metrics/test/opentelemetry/exporter/otlp/metrics/metrics_exporter_test.rb +++ b/exporter/otlp-metrics/test/opentelemetry/exporter/otlp/metrics/metrics_exporter_test.rb @@ -13,6 +13,12 @@ METRICS_FAILURE = OpenTelemetry::SDK::Metrics::Export::FAILURE METRICS_VERSION = OpenTelemetry::Exporter::OTLP::Metrics::VERSION METRICS_DEFAULT_USER_AGENT = OpenTelemetry::Exporter::OTLP::Metrics::Util::DEFAULT_USER_AGENT + METRICS_CLIENT_CERT_A_PATH = File.dirname(__FILE__) + '/mtls-client-a.pem' + METRICS_CLIENT_CERT_A = OpenSSL::X509::Certificate.new(File.read(METRICS_CLIENT_CERT_A_PATH)) + METRICS_CLIENT_KEY_A = OpenSSL::PKey::RSA.new(File.read(METRICS_CLIENT_CERT_A_PATH)) + METRICS_CLIENT_CERT_B_PATH = File.dirname(__FILE__) + '/mtls-client-b.pem' + METRICS_CLIENT_CERT_B = OpenSSL::X509::Certificate.new(File.read(METRICS_CLIENT_CERT_B_PATH)) + METRICS_CLIENT_KEY_B = OpenSSL::PKey::RSA.new(File.read(METRICS_CLIENT_CERT_B_PATH)) describe '#initialize' do it 'initializes with defaults' do @@ -24,6 +30,8 @@ _(exp.instance_variable_get(:@compression)).must_equal 'gzip' http = exp.instance_variable_get(:@http) _(http.ca_file).must_be_nil + _(http.cert).must_be_nil + _(http.key).must_be_nil _(http.use_ssl?).must_equal false _(http.address).must_equal 'localhost' _(http.verify_mode).must_equal OpenSSL::SSL::VERIFY_PEER @@ -76,6 +84,8 @@ it 'sets parameters from the environment' do exp = OpenTelemetry::TestHelpers.with_env('OTEL_EXPORTER_OTLP_ENDPOINT' => 'https://localhost:1234', 'OTEL_EXPORTER_OTLP_CERTIFICATE' => '/foo/bar', + 'OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE' => METRICS_CLIENT_CERT_A_PATH, + 'OTEL_EXPORTER_OTLP_CLIENT_KEY' => METRICS_CLIENT_CERT_A_PATH, 'OTEL_EXPORTER_OTLP_HEADERS' => 'a=b,c=d', 'OTEL_EXPORTER_OTLP_COMPRESSION' => 'gzip', 'OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_NONE' => 'true', @@ -88,6 +98,8 @@ _(exp.instance_variable_get(:@compression)).must_equal 'gzip' http = exp.instance_variable_get(:@http) _(http.ca_file).must_equal '/foo/bar' + _(http.cert).must_equal METRICS_CLIENT_CERT_A + _(http.key.params).must_equal METRICS_CLIENT_KEY_A.params _(http.use_ssl?).must_equal true _(http.address).must_equal 'localhost' _(http.verify_mode).must_equal OpenSSL::SSL::VERIFY_NONE @@ -97,12 +109,16 @@ it 'prefers explicit parameters rather than the environment' do exp = OpenTelemetry::TestHelpers.with_env('OTEL_EXPORTER_OTLP_ENDPOINT' => 'https://localhost:1234', 'OTEL_EXPORTER_OTLP_CERTIFICATE' => '/foo/bar', + 'OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE' => METRICS_CLIENT_CERT_A_PATH, + 'OTEL_EXPORTER_OTLP_CLIENT_KEY' => METRICS_CLIENT_CERT_A_PATH, 'OTEL_EXPORTER_OTLP_HEADERS' => 'a:b,c:d', 'OTEL_EXPORTER_OTLP_COMPRESSION' => 'flate', 'OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_PEER' => 'true', 'OTEL_EXPORTER_OTLP_TIMEOUT' => '11') do OpenTelemetry::Exporter::OTLP::Metrics::MetricsExporter.new(endpoint: 'http://localhost:4321', certificate_file: '/baz', + client_certificate_file: METRICS_CLIENT_CERT_B_PATH, + client_key_file: METRICS_CLIENT_CERT_B_PATH, headers: { 'x' => 'y' }, compression: 'gzip', 
ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE, @@ -114,6 +130,8 @@ _(exp.instance_variable_get(:@compression)).must_equal 'gzip' http = exp.instance_variable_get(:@http) _(http.ca_file).must_equal '/baz' + _(http.cert).must_equal METRICS_CLIENT_CERT_B + _(http.key.params).must_equal METRICS_CLIENT_KEY_B.params _(http.use_ssl?).must_equal false _(http.verify_mode).must_equal OpenSSL::SSL::VERIFY_NONE _(http.address).must_equal 'localhost' diff --git a/exporter/otlp-metrics/test/opentelemetry/exporter/otlp/metrics/mtls-client-a.pem b/exporter/otlp-metrics/test/opentelemetry/exporter/otlp/metrics/mtls-client-a.pem new file mode 100644 index 0000000000..b1ea23ebd9 --- /dev/null +++ b/exporter/otlp-metrics/test/opentelemetry/exporter/otlp/metrics/mtls-client-a.pem @@ -0,0 +1,50 @@ +-----BEGIN CERTIFICATE----- +MIIDmzCCAoOgAwIBAgIUGfmv/4kRRFbg319TIHkcwDMC3pUwDQYJKoZIhvcNAQEL +BQAwXTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEWMBQGA1UEAwwNYS5leGFtcGxlLnRs +ZDAeFw0yNDAzMDYyMjM4MzNaFw0yNDAzMDcyMjM4MzNaMF0xCzAJBgNVBAYTAkFV +MRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRz +IFB0eSBMdGQxFjAUBgNVBAMMDWEuZXhhbXBsZS50bGQwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQCaJG6fvdjFs9cnbF8i3wzO3VPUFH4hbAg5pV6rs81s +LuJnnlG3WX1sxYQGASqLKIzPiz3g4nKFaBfXvBYXo7M/AuJ2tEspedIcvdTqwx2k +owaaX7Y9lSx+h1OhovrviCqrqX5t9cIZWSeTU1bETcoTEd9/5usVe3XeaqmY8mAP +OA0dBKeotGIOxtTEP23CxW1AJwWPLZC1go8ycvsTQfmeif+g+6BOcKeZxkayhCvo +Ous+dt2dXa3x3yROe8ffZ5lAkPBLHfEUOSk/zpQnlkGzVrbXP4LCxLDQ8PlD3qEm +HbCK+c29mNeTjeoye5EVeQDO0ATiNh1/vSlMPpOY93IdAgMBAAGjUzBRMB0GA1Ud +DgQWBBQ5kq4dpjcQ8JSiyYrPswAoeltJJTAfBgNVHSMEGDAWgBQ5kq4dpjcQ8JSi +yYrPswAoeltJJTAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBt +E0JyqwqL63ZrlxiBnnYrk0LoCuXjss7B9p3+6F5v0qKxxC+fpAVoh3CTRkDJRerz +ORXECXJ4drBetvje/dTX+5pNSLOOyQfOCaSohO4S82xNLSpFd6LcjYsfN8he482w +E1wuLoi9abktDVmX+sNVeiUUeuDMyqm51NRAlmzDhxTOPvqljdFZXRQO3X00qiQV +YJs7e1xql0R7DbrwOG5J2lenCwfj51ngmIpGxAaU3eLMAqLT6AZHhZxiATzCtCL9 +2cAKOrW/O8dRVhuWbCjFCIJIRIPrThbMaaw6p4mMkED7dnbOZofXLDgM+pxT5/eU +8DnQZlgXAkIlZY+9r3oG +-----END CERTIFICATE----- +-----BEGIN PRIVATE KEY----- +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCaJG6fvdjFs9cn +bF8i3wzO3VPUFH4hbAg5pV6rs81sLuJnnlG3WX1sxYQGASqLKIzPiz3g4nKFaBfX +vBYXo7M/AuJ2tEspedIcvdTqwx2kowaaX7Y9lSx+h1OhovrviCqrqX5t9cIZWSeT +U1bETcoTEd9/5usVe3XeaqmY8mAPOA0dBKeotGIOxtTEP23CxW1AJwWPLZC1go8y +cvsTQfmeif+g+6BOcKeZxkayhCvoOus+dt2dXa3x3yROe8ffZ5lAkPBLHfEUOSk/ +zpQnlkGzVrbXP4LCxLDQ8PlD3qEmHbCK+c29mNeTjeoye5EVeQDO0ATiNh1/vSlM +PpOY93IdAgMBAAECggEAGE1LJaM8VHs0njIPU8Of1NN/Pm4WrrVGHLSOVvrLldVU +e6qxznrs8+O2G24+o176yFP3Jwf8rzzImYo9X2+/OF1/j+CAAyOCNWbWdUba2xSa +22bgqBfnQnGahV7ZOj+ZHqRj2vlGp1FvlGIsyVlMVTJZruQcxy/GVxEw+PypmWxp +u1MOjYEZWjvqJuxTjjnYDcQezfy9Qu0JNOF4+cVVKydewGApmcdDGThcDltxWizM +n144vgcfR97g2jDKs0GxSAuCbfYo2xoetei5iEVKmlXI/OuUIS78LSud9oBOoYPF ++5MVYOISTNcE+/GEp/BNTq6E8Kk4A1dhaYMvC2qXUQKBgQDKAtFfVP4DusMQd4PD +oixZ50GENohG4uGCDVBT/RJXUDueil5G4h9iSGxSt0nVOe3nBv5sHMgAARX3S+Pl +717tbzDgLXqfEqhCj9Kow7BtOussfSQ8hwxBazh9GEva48/Bx+OXFLcpPAcmc9WU +oQzCCb1yeZ7gsK6yNiLJTQGYCQKBgQDDVokcPHseqFJMOdd9Hr4Ci9gshbZj4htb +EXiUf50PP0Qco1VJ0MnPW9hateEdRYGVjLeCKHi1XHi8ARQQRJXVI4BI9yiIt8VO +snnFiEYJ/wgq4lyO8eWeNUaimFvhKHDBcz5tKwtmS4CGf7eAHdTB6R0izLtxkXcs +6+ZiO/bGdQKBgHdXVNPKBUq0wdpvkMM5gpQWP6lZAgdGr8zCCsujfXthpecSfYHI +wpuwh3YSXCcA0yAiDJpYInuGKLDw/5DuahlBEBHQLFnfjtHL37rd6NOO9DJTN94e +NkpLipK0kNOetDUZ3sV5cn+EvACme+4TetMDKA2B9i9tkbcsrj5YJPHpAoGAc5Gh +MTl/RlYjysF2AqrLlEoUrdK2ZEYEFU8y3fjafYjazW69KR0EKVCXoqN0+pKC5m4I 
+rFMxh2ucau7gZfeOBjoozgKc0raXX8YsUXgcqBFhTa37QP9Q8NdoYQ5vXblFbM64 +InKTHgSRmAG8GWqM0+UNvecPB1QfBE7VUU1U5XkCgYA34SlJVa1es7hifeAtb2XC +jVsHeEwcVnVq4S8aNo51taBJqPR9QIg4bssj4QmaMJntyQl94CE7NM11OssQ4rez +lY+BZGSmkuEFybqJ5CwHsKk+Cjdm4agqqU/uupOxFPxEzcD2YDgFto7RMPDP/Daf +iH9tE2qrnzQvE43+caLAuQ== +-----END PRIVATE KEY----- \ No newline at end of file diff --git a/exporter/otlp-metrics/test/opentelemetry/exporter/otlp/metrics/mtls-client-b.pem b/exporter/otlp-metrics/test/opentelemetry/exporter/otlp/metrics/mtls-client-b.pem new file mode 100644 index 0000000000..8701951523 --- /dev/null +++ b/exporter/otlp-metrics/test/opentelemetry/exporter/otlp/metrics/mtls-client-b.pem @@ -0,0 +1,50 @@ +-----BEGIN CERTIFICATE----- +MIIDmzCCAoOgAwIBAgIUALrFiUtkMZj2wNSxHvtR+KBoBfowDQYJKoZIhvcNAQEL +BQAwXTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEWMBQGA1UEAwwNYi5leGFtcGxlLnRs +ZDAeFw0yNDAzMDYyMjM5NDdaFw0yNDAzMDcyMjM5NDdaMF0xCzAJBgNVBAYTAkFV +MRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRz +IFB0eSBMdGQxFjAUBgNVBAMMDWIuZXhhbXBsZS50bGQwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQC78NVeBRf/WK39UM1BpiQROA8mV8YXB8ugGB7AlOC2 +uLmVdWknMoS155afr4u0DICyNGDkxhWxyqG9UEsBOBmlNZSg85ewjdSMSBRh6Lxk +uWP9mVS0f1nRVMMBYNbkEPIJ5T2IRCwHP6/gpNKO9prH06atLCs6HP8y0cLKCWNJ +WixJJpT5goRldEBKTIUtOfM8Sa7ktoYeEvmGmjXjgP9pcdlmC3pjfzTk4+HH3cKL +RGG4dlhSTrOjVNBL30GWQjiNCM2fAHugUcrcGsmXhbBzmkBa8Rs6mI0ZJQIa/bWv +6KfWJ1eDF+VIVyhaQPeEkLgatP5NyuaqafvBrTMT1/GHAgMBAAGjUzBRMB0GA1Ud +DgQWBBQPlAhA673kZnZ9rvRwDejlc2kcjDAfBgNVHSMEGDAWgBQPlAhA673kZnZ9 +rvRwDejlc2kcjDAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBx +OOFo6UxqdtFrN7BZPV00e2tlz3on21JlTxlk92fL0lHL86XJPMr1znBg0OkD1riw +OYJQ47wLZAgYIhs8UTrcZIZd1xwbJ4fjSDRFHOnI2BHJ/5pR9/NFsrOgeOorLbZf +x6aa+mQt1qYltTsH8gA1PP5syUcHmlSVLk5NWreMaHEEU8THVsyhxlv8yE+Zuh5y +JX1KpH4Eo3ekM4RwGuqjbtMgumD8gQf4lHysMEyemQOnebKxViz0bfLfEOMYIXBf +DaavjgfinXAQnOhItlHXisuAIDxSajnyR0kvTDdRbZBruRpUeKBcENQO95D4b5uA +BNnkf1CmWZoTwwqvDdNZ +-----END CERTIFICATE----- +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC78NVeBRf/WK39 +UM1BpiQROA8mV8YXB8ugGB7AlOC2uLmVdWknMoS155afr4u0DICyNGDkxhWxyqG9 +UEsBOBmlNZSg85ewjdSMSBRh6LxkuWP9mVS0f1nRVMMBYNbkEPIJ5T2IRCwHP6/g +pNKO9prH06atLCs6HP8y0cLKCWNJWixJJpT5goRldEBKTIUtOfM8Sa7ktoYeEvmG +mjXjgP9pcdlmC3pjfzTk4+HH3cKLRGG4dlhSTrOjVNBL30GWQjiNCM2fAHugUcrc +GsmXhbBzmkBa8Rs6mI0ZJQIa/bWv6KfWJ1eDF+VIVyhaQPeEkLgatP5NyuaqafvB +rTMT1/GHAgMBAAECggEACftFJGnaLReaoK+KlzKptmMR0D7X0x/43UHVWvO36Oca +rtNo3Z9ok6NUc/AGLW5/Ofe02wb3iVUdDfRBEjQr4y2wp8kIL2T4+ueryB9s518+ +mO4XJOxL6uWOOHkvheFYhCoGq0FjopGWrQmpR9UqbJ59uwjI1Zygo878LMV0M9Bc +5ABaF06yRVyZRH5lAC8zihBUsleE5jUM4FULHRUZFUVJx8ivFtJnLqLuywMOlkq4 +yY4Vf0M77dwVVY5enbnTbKEI25Csf47Gp9i5JS2jXY9ORPNjpYb8LHvp8NZTNczd +mGxQsWd6pWohnGkZdbJqQoQxMfCNYev5ejf7bJ1AoQKBgQDkpFtsIA0ExmcefIsE +ChuEpFUXko8wjNeCWFOjJx+Sed9RaRaQiSYOKmWwDjaP7dlJwUC0RIRy65gOGGnf +PUHOd7nEBtyDIjpSsjWWBjfsUEAqrhze0qpXSPmBzhNlOdsN01VzTgTNiTpea7hi +izgYjd1kLvJ5p+CV5NIxNhp+qQKBgQDSbbyKf6vPDNIZb9V/ef10Co31ahDVwlFa +P3FrZv+9eDMJQfXtkRTlu5Auo+TDaFFSMjrb5rTJcEuzBwHyyodqPvPsYC33/DVQ +jGYVqjQuG5q473DNebtinn5JR40ZfHiJlpx2Ms5xdbPIhN1efTRXmYIov0AIaKuR +on9LE8X8rwKBgFsyIzTxY7/v0tmaG2i1D1zMnxQT5QEcbCkVSebdh/5IlgZGwDVO +PtuPlZevU5v85ppAdqpwWdPsnG2i1zevmzvbDUFe6z1yvYiWhEEeodej+rQLVoCZ +zk+aT8qyg5HwjarqDD89czT380wN8zF7DhjdHN0EzLoxd6bR6fSu+8phAoGAV4v0 +PyLy1gedeZu/lXOpcRfbC9l++5AGzKdMhsSpbaiOgzGAIcCUkye/ysfBK1NBUhM3 +zblkSdKAjBFETEDaqedbEGLLfTre644eArF3WB9/9aUYp0QYI+WQ4Of12j6g341b 
+twlYPngbvjcY6nDoz/E757v55gW2K7cRgqjNXF0CgYEAxNfclcdKbUtGAsttZdY3 ++dcdBtqcLvpYlMsZPQaxNppyKBI5svtK715FsVbmLhINqiNo1aKIA5M3E2P88Fa2 +nqVrKsBOn3gCe+GFlFeWwNAfRlfmP1ZUHDp07zvNtRm4ZR/3hdAze4DbyWv58LfL +WSjqCjjBeurblkRv2QTXu2k= +-----END PRIVATE KEY----- \ No newline at end of file diff --git a/exporter/otlp-metrics/test/test_helper.rb b/exporter/otlp-metrics/test/test_helper.rb index 461b180653..9fc4466458 100644 --- a/exporter/otlp-metrics/test/test_helper.rb +++ b/exporter/otlp-metrics/test/test_helper.rb @@ -17,7 +17,7 @@ OpenTelemetry.logger = Logger.new(File::NULL) module MockSum - def collect(start_time, end_time) + def collect(start_time, end_time, data_points) start_time = 1_699_593_427_329_946_585 # rubocop:disable Lint/ShadowedArgument end_time = 1_699_593_427_329_946_586 # rubocop:disable Lint/ShadowedArgument super diff --git a/logs_sdk/lib/opentelemetry/sdk/logs.rb b/logs_sdk/lib/opentelemetry/sdk/logs.rb index fbd978c68c..6003e3228b 100644 --- a/logs_sdk/lib/opentelemetry/sdk/logs.rb +++ b/logs_sdk/lib/opentelemetry/sdk/logs.rb @@ -11,6 +11,7 @@ require_relative 'logs/log_record_data' require_relative 'logs/log_record_processor' require_relative 'logs/export' +require_relative 'logs/log_record_limits' module OpenTelemetry module SDK diff --git a/logs_sdk/lib/opentelemetry/sdk/logs/log_record.rb b/logs_sdk/lib/opentelemetry/sdk/logs/log_record.rb index 10d1dcb7ab..dc7cb83373 100644 --- a/logs_sdk/lib/opentelemetry/sdk/logs/log_record.rb +++ b/logs_sdk/lib/opentelemetry/sdk/logs/log_record.rb @@ -9,6 +9,10 @@ module SDK module Logs # Implementation of OpenTelemetry::Logs::LogRecord that records log events. class LogRecord < OpenTelemetry::Logs::LogRecord + EMPTY_ATTRIBUTES = {}.freeze + + private_constant :EMPTY_ATTRIBUTES + attr_accessor :timestamp, :observed_timestamp, :severity_text, @@ -49,6 +53,8 @@ class LogRecord < OpenTelemetry::Logs::LogRecord # source of the log, desrived from the LoggerProvider. # @param [optional OpenTelemetry::SDK::InstrumentationScope] instrumentation_scope # The instrumentation scope, derived from the emitting Logger + # @param [optional] OpenTelemetry::SDK::LogRecordLimits] log_record_limits + # Attribute limits # # # @return [LogRecord] @@ -63,7 +69,8 @@ def initialize( span_id: nil, trace_flags: nil, resource: nil, - instrumentation_scope: nil + instrumentation_scope: nil, + log_record_limits: nil ) @timestamp = timestamp @observed_timestamp = observed_timestamp || timestamp || Time.now @@ -76,7 +83,10 @@ def initialize( @trace_flags = trace_flags @resource = resource @instrumentation_scope = instrumentation_scope + @log_record_limits = log_record_limits || LogRecordLimits::DEFAULT @total_recorded_attributes = @attributes&.size || 0 + + trim_attributes(@attributes) end def to_log_record_data @@ -103,6 +113,51 @@ def to_integer_nanoseconds(timestamp) (timestamp.to_r * 10**9).to_i end + + def trim_attributes(attributes) + return if attributes.nil? + + # truncate total attributes + truncate_attributes(attributes, @log_record_limits.attribute_count_limit) + + # truncate attribute values + truncate_attribute_values(attributes, @log_record_limits.attribute_length_limit) + + # validate attributes + validate_attributes(attributes) + + nil + end + + def truncate_attributes(attributes, attribute_limit) + excess = attributes.size - attribute_limit + excess.times { attributes.shift } if excess.positive? 
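+          # Ruby hashes preserve insertion order and Hash#shift removes the first
+          # entry, so exceeding the count limit drops the oldest attributes first;
+          # e.g. with a limit of 1, { 'old' => 'old', 'new' => 'new' } is trimmed
+          # to { 'new' => 'new' }.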
+ end + + def validate_attributes(attrs) + # Similar to Internal.valid_attributes?, but with different messages + # Future refactor opportunity: https://github.com/open-telemetry/opentelemetry-ruby/issues/1739 + attrs.keep_if do |k, v| + if !Internal.valid_key?(k) + OpenTelemetry.handle_error(message: "invalid log record attribute key type #{k.class} on record: '#{body}'") + return false + elsif !Internal.valid_value?(v) + OpenTelemetry.handle_error(message: "invalid log record attribute value type #{v.class} for key '#{k}' on record: '#{body}'") + return false + end + + true + end + end + + def truncate_attribute_values(attributes, attribute_length_limit) + return EMPTY_ATTRIBUTES if attributes.nil? + return attributes if attribute_length_limit.nil? + + attributes.transform_values! { |value| OpenTelemetry::Common::Utilities.truncate_attribute_value(value, attribute_length_limit) } + + attributes + end end end end diff --git a/logs_sdk/lib/opentelemetry/sdk/logs/log_record_limits.rb b/logs_sdk/lib/opentelemetry/sdk/logs/log_record_limits.rb new file mode 100644 index 0000000000..6440b7a4aa --- /dev/null +++ b/logs_sdk/lib/opentelemetry/sdk/logs/log_record_limits.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +# Copyright The OpenTelemetry Authors +# +# SPDX-License-Identifier: Apache-2.0 + +module OpenTelemetry + module SDK + module Logs + # Class that holds log record attribute limit parameters. + class LogRecordLimits + # The global default max number of attributes per {LogRecord}. + attr_reader :attribute_count_limit + + # The global default max length of attribute value per {LogRecord}. + attr_reader :attribute_length_limit + + # Returns a {LogRecordLimits} with the desired values. + # + # @return [LogRecordLimits] with the desired values. + # @raise [ArgumentError] if any of the max numbers are not positive. + def initialize(attribute_count_limit: Integer(OpenTelemetry::Common::Utilities.config_opt( + 'OTEL_LOG_RECORD_ATTRIBUTE_COUNT_LIMIT', + 'OTEL_ATTRIBUTE_COUNT_LIMIT', + default: 128 + )), + attribute_length_limit: OpenTelemetry::Common::Utilities.config_opt( + 'OTEL_LOG_RECORD_ATTRIBUTE_VALUE_LENGTH_LIMIT', + 'OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT' + )) + raise ArgumentError, 'attribute_count_limit must be positive' unless attribute_count_limit.positive? + raise ArgumentError, 'attribute_length_limit must not be less than 32' unless attribute_length_limit.nil? || Integer(attribute_length_limit) >= 32 + + @attribute_count_limit = attribute_count_limit + @attribute_length_limit = attribute_length_limit.nil? ? nil : Integer(attribute_length_limit) + end + + # The default {LogRecordLimits}. + DEFAULT = new + end + end + end +end diff --git a/logs_sdk/lib/opentelemetry/sdk/logs/logger_provider.rb b/logs_sdk/lib/opentelemetry/sdk/logs/logger_provider.rb index 99e7df10e5..2f28e24800 100644 --- a/logs_sdk/lib/opentelemetry/sdk/logs/logger_provider.rb +++ b/logs_sdk/lib/opentelemetry/sdk/logs/logger_provider.rb @@ -18,10 +18,13 @@ class LoggerProvider < OpenTelemetry::Logs::LoggerProvider # # @param [optional Resource] resource The resource to associate with # new LogRecords created by {Logger}s created by this LoggerProvider. + # @param [optional LogRecordLimits] log_record_limits The limits for + # attributes count and attribute length for LogRecords. 
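+    #
+    # @example Cap LogRecord attributes (a minimal sketch; names and values are illustrative)
+    #   limits = OpenTelemetry::SDK::Logs::LogRecordLimits.new(attribute_count_limit: 1)
+    #   provider = OpenTelemetry::SDK::Logs::LoggerProvider.new(log_record_limits: limits)
+    #   logger = provider.logger(name: 'my_app')
+    #   logger.on_emit(attributes: { 'a' => 'a', 'b' => 'b' }) # only 'b' => 'b' is kept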
# # @return [OpenTelemetry::SDK::Logs::LoggerProvider] - def initialize(resource: OpenTelemetry::SDK::Resources::Resource.create) + def initialize(resource: OpenTelemetry::SDK::Resources::Resource.create, log_record_limits: LogRecordLimits::DEFAULT) @log_record_processors = [] + @log_record_limits = log_record_limits @mutex = Mutex.new @resource = resource @stopped = false @@ -142,7 +145,8 @@ def on_emit(timestamp: nil, span_id: span_id, trace_flags: trace_flags, resource: @resource, - instrumentation_scope: instrumentation_scope) + instrumentation_scope: instrumentation_scope, + log_record_limits: @log_record_limits) @log_record_processors.each { |processor| processor.on_emit(log_record, context) } end diff --git a/logs_sdk/test/opentelemetry/sdk/logs/log_record_limits_test.rb b/logs_sdk/test/opentelemetry/sdk/logs/log_record_limits_test.rb new file mode 100644 index 0000000000..04c6238276 --- /dev/null +++ b/logs_sdk/test/opentelemetry/sdk/logs/log_record_limits_test.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true + +# Copyright The OpenTelemetry Authors +# +# SPDX-License-Identifier: Apache-2.0 + +require 'test_helper' + +describe OpenTelemetry::SDK::Logs::LogRecordLimits do + let(:log_record_limits) { OpenTelemetry::SDK::Logs::LogRecordLimits.new } + + describe '#initialize' do + it 'provides defaults' do + _(log_record_limits.attribute_count_limit).must_equal 128 + _(log_record_limits.attribute_length_limit).must_be_nil + end + + it 'prioritizes specific environment varibles for attribute value length limits' do + OpenTelemetry::TestHelpers.with_env('OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT' => '35', + 'OTEL_LOG_RECORD_ATTRIBUTE_VALUE_LENGTH_LIMIT' => '33') do + _(log_record_limits.attribute_length_limit).must_equal 33 + end + end + + it 'uses general attribute value length limits in the absence of more specific ones' do + OpenTelemetry::TestHelpers.with_env('OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT' => '35') do + _(log_record_limits.attribute_length_limit).must_equal 35 + end + end + + it 'reflects environment variables' do + OpenTelemetry::TestHelpers.with_env('OTEL_LOG_RECORD_ATTRIBUTE_COUNT_LIMIT' => '1', + 'OTEL_LOG_RECORD_ATTRIBUTE_VALUE_LENGTH_LIMIT' => '32') do + _(log_record_limits.attribute_count_limit).must_equal 1 + _(log_record_limits.attribute_length_limit).must_equal 32 + end + end + + it 'reflects explicit overrides' do + OpenTelemetry::TestHelpers.with_env('OTEL_LOG_RECORD_ATTRIBUTE_COUNT_LIMIT' => '1', + 'OTEL_LOG_RECORD_ATTRIBUTE_VALUE_LENGTH_LIMIT' => '4') do + log_record_limits = OpenTelemetry::SDK::Logs::LogRecordLimits.new(attribute_count_limit: 10, + attribute_length_limit: 32) + _(log_record_limits.attribute_count_limit).must_equal 10 + _(log_record_limits.attribute_length_limit).must_equal 32 + end + end + + it 'reflects generic attribute env vars' do + OpenTelemetry::TestHelpers.with_env('OTEL_ATTRIBUTE_COUNT_LIMIT' => '1', + 'OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT' => '32') do + _(log_record_limits.attribute_count_limit).must_equal 1 + _(log_record_limits.attribute_length_limit).must_equal 32 + end + end + + it 'prefers model-specific attribute env vars over generic attribute env vars' do + OpenTelemetry::TestHelpers.with_env('OTEL_LOG_RECORD_ATTRIBUTE_COUNT_LIMIT' => '1', + 'OTEL_ATTRIBUTE_COUNT_LIMIT' => '2', + 'OTEL_LOG_RECORD_ATTRIBUTE_VALUE_LENGTH_LIMIT' => '32', + 'OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT' => '33') do + _(log_record_limits.attribute_count_limit).must_equal 1 + _(log_record_limits.attribute_length_limit).must_equal 32 + end + end + + it 'raises if 
attribute_count_limit is not positive' do + assert_raises ArgumentError do + OpenTelemetry::SDK::Logs::LogRecordLimits.new(attribute_count_limit: -1) + end + end + + it 'raises if attribute_length_limit is less than 32' do + assert_raises ArgumentError do + OpenTelemetry::SDK::Logs::LogRecordLimits.new(attribute_length_limit: 31) + end + end + end +end diff --git a/logs_sdk/test/opentelemetry/sdk/logs/log_record_test.rb b/logs_sdk/test/opentelemetry/sdk/logs/log_record_test.rb index ca38350e1f..71a2557e9d 100644 --- a/logs_sdk/test/opentelemetry/sdk/logs/log_record_test.rb +++ b/logs_sdk/test/opentelemetry/sdk/logs/log_record_test.rb @@ -92,5 +92,85 @@ assert_equal(args[:instrumentation_scope], log_record_data.instrumentation_scope) end end + + describe 'attribute limits' do + it 'uses the limits set by the logger provider via the logger' do + # Spy on the console output + captured_stdout = StringIO.new + original_stdout = $stdout + $stdout = captured_stdout + + # Create the LoggerProvider with the console exporter and an attribute limit of 1 + limits = Logs::LogRecordLimits.new(attribute_count_limit: 1) + logger_provider = Logs::LoggerProvider.new(log_record_limits: limits) + console_exporter = Logs::Export::SimpleLogRecordProcessor.new(Logs::Export::ConsoleLogRecordExporter.new) + logger_provider.add_log_record_processor(console_exporter) + + # Create a logger that uses the given LoggerProvider + logger = Logs::Logger.new('', '', logger_provider) + + # Emit a log from that logger, with attribute count exceeding the limit + logger.on_emit(attributes: { 'a' => 'a', 'b' => 'b' }) + + # Look at the captured output to see if the attributes have been truncated + assert_match(/attributes={"b"=>"b"}/, captured_stdout.string) + refute_match(/"a"=>"a"/, captured_stdout.string) + + # Return STDOUT to its normal output + $stdout = original_stdout + end + + it 'emits an error message if attribute key is invalid' do + OpenTelemetry::TestHelpers.with_test_logger do |log_stream| + logger.on_emit(attributes: { a: 'a' }) + assert_match(/invalid log record attribute key type Symbol/, log_stream.string) + end + end + + it 'emits an error message if the attribute value is invalid' do + OpenTelemetry::TestHelpers.with_test_logger do |log_stream| + logger.on_emit(attributes: { 'a' => Class.new }) + assert_match(/invalid log record attribute value type Class/, log_stream.string) + end + end + + it 'uses the default limits if none provided' do + log_record = Logs::LogRecord.new + default = Logs::LogRecordLimits::DEFAULT + + assert_equal(default.attribute_count_limit, log_record.instance_variable_get(:@log_record_limits).attribute_count_limit) + # default length is nil + assert_nil(log_record.instance_variable_get(:@log_record_limits).attribute_length_limit) + end + + it 'trims the oldest attributes' do + limits = Logs::LogRecordLimits.new(attribute_count_limit: 1) + attributes = { 'old' => 'old', 'new' => 'new' } + log_record = Logs::LogRecord.new(log_record_limits: limits, attributes: attributes) + + assert_equal({ 'new' => 'new' }, log_record.attributes) + end + end + + describe 'attribute value limit' do + it 'truncates the values that are too long' do + length_limit = 32 + too_long = 'a' * (length_limit + 1) + just_right = 'a' * (length_limit - 3) # truncation removes 3 chars for the '...' + limits = Logs::LogRecordLimits.new(attribute_length_limit: length_limit) + log_record = Logs::LogRecord.new(log_record_limits: limits, attributes: { 'key' => too_long }) + + assert_equal({ 'key' => "#{just_right}..." 
}, log_record.attributes) + end + + it 'does not alter values within the range' do + length_limit = 32 + within_range = 'a' * length_limit + limits = Logs::LogRecordLimits.new(attribute_length_limit: length_limit) + log_record = Logs::LogRecord.new(log_record_limits: limits, attributes: { 'key' => within_range }) + + assert_equal({ 'key' => within_range }, log_record.attributes) + end + end end end diff --git a/logs_sdk/test/opentelemetry/sdk/logs/logger_provider_test.rb b/logs_sdk/test/opentelemetry/sdk/logs/logger_provider_test.rb index 023a9a67c9..a2ee0e59cc 100644 --- a/logs_sdk/test/opentelemetry/sdk/logs/logger_provider_test.rb +++ b/logs_sdk/test/opentelemetry/sdk/logs/logger_provider_test.rb @@ -24,6 +24,15 @@ end end + describe '#initialize' do + it 'activates a default LogRecordLimits' do + assert_equal( + OpenTelemetry::SDK::Logs::LogRecordLimits::DEFAULT, + logger_provider.instance_variable_get(:@log_record_limits) + ) + end + end + describe '#add_log_record_processor' do it "adds the processor to the logger provider's processors" do assert_equal(0, logger_provider.instance_variable_get(:@log_record_processors).length) @@ -73,15 +82,15 @@ # :version is nil by default, but explicitly setting it here # to make the test easier to read logger = logger_provider.logger(name: 'name', version: nil) - assert_equal(logger.instance_variable_get(:@instrumentation_scope).version, '') + assert_equal('', logger.instance_variable_get(:@instrumentation_scope).version) end it 'creates a new logger with the passed-in name and version' do name = 'name' version = 'version' logger = logger_provider.logger(name: name, version: version) - assert_equal(logger.instance_variable_get(:@instrumentation_scope).name, name) - assert_equal(logger.instance_variable_get(:@instrumentation_scope).version, version) + assert_equal(name, logger.instance_variable_get(:@instrumentation_scope).name) + assert_equal(version, logger.instance_variable_get(:@instrumentation_scope).version) end end diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics.rb index 42807c708b..35593185f9 100644 --- a/metrics_sdk/lib/opentelemetry/sdk/metrics.rb +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics.rb @@ -20,3 +20,4 @@ module Metrics require 'opentelemetry/sdk/metrics/meter' require 'opentelemetry/sdk/metrics/meter_provider' require 'opentelemetry/sdk/metrics/state' +require 'opentelemetry/sdk/metrics/view' diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation.rb index f36aef0152..62f820eb93 100644 --- a/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation.rb +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation.rb @@ -19,3 +19,5 @@ module Aggregation require 'opentelemetry/sdk/metrics/aggregation/histogram_data_point' require 'opentelemetry/sdk/metrics/aggregation/explicit_bucket_histogram' require 'opentelemetry/sdk/metrics/aggregation/sum' +require 'opentelemetry/sdk/metrics/aggregation/last_value' +require 'opentelemetry/sdk/metrics/aggregation/drop' diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/drop.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/drop.rb new file mode 100644 index 0000000000..f638c649a5 --- /dev/null +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/drop.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +# Copyright The OpenTelemetry Authors +# +# SPDX-License-Identifier: Apache-2.0 + +module OpenTelemetry + module SDK + module Metrics + module 
Aggregation + # Contains the implementation of the Drop aggregation + class Drop + attr_reader :aggregation_temporality + + def initialize(aggregation_temporality: :delta) + @aggregation_temporality = aggregation_temporality + end + + def collect(start_time, end_time, data_points) + data_points.values.map!(&:dup) + end + + def update(increment, attributes, data_points) + data_points[attributes] = NumberDataPoint.new( + {}, + 0, + 0, + 0, + 0 + ) + nil + end + end + end + end + end +end diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/explicit_bucket_histogram.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/explicit_bucket_histogram.rb index 50862c86fb..0d357e1d49 100644 --- a/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/explicit_bucket_histogram.rb +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/explicit_bucket_histogram.rb @@ -25,25 +25,25 @@ def initialize( boundaries: DEFAULT_BOUNDARIES, record_min_max: true ) - @data_points = {} + @aggregation_temporality = aggregation_temporality.to_sym @boundaries = boundaries && !boundaries.empty? ? boundaries.sort : nil @record_min_max = record_min_max end - def collect(start_time, end_time) + def collect(start_time, end_time, data_points) if @aggregation_temporality == :delta # Set timestamps and 'move' data point values to result. - hdps = @data_points.values.map! do |hdp| + hdps = data_points.values.map! do |hdp| hdp.start_time_unix_nano = start_time hdp.time_unix_nano = end_time hdp end - @data_points.clear + data_points.clear hdps else # Update timestamps and take a snapshot. - @data_points.values.map! do |hdp| + data_points.values.map! do |hdp| hdp.start_time_unix_nano ||= start_time # Start time of a data point is from the first observation. hdp.time_unix_nano = end_time hdp = hdp.dup @@ -53,14 +53,14 @@ def collect(start_time, end_time) end end - def update(amount, attributes) - hdp = @data_points.fetch(attributes) do + def update(amount, attributes, data_points) + hdp = data_points.fetch(attributes) do if @record_min_max min = Float::INFINITY max = -Float::INFINITY end - @data_points[attributes] = HistogramDataPoint.new( + data_points[attributes] = HistogramDataPoint.new( attributes, nil, # :start_time_unix_nano nil, # :time_unix_nano diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/last_value.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/last_value.rb new file mode 100644 index 0000000000..b2cffb74e2 --- /dev/null +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/last_value.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +# Copyright The OpenTelemetry Authors +# +# SPDX-License-Identifier: Apache-2.0 + +module OpenTelemetry + module SDK + module Metrics + module Aggregation + # Contains the implementation of the LastValue aggregation + class LastValue + attr_reader :aggregation_temporality + + def initialize(aggregation_temporality: :delta) + @aggregation_temporality = aggregation_temporality + end + + def collect(start_time, end_time, data_points) + if @aggregation_temporality == :delta + # Set timestamps and 'move' data point values to result. + ndps = data_points.values.map! do |ndp| + ndp.start_time_unix_nano = start_time + ndp.time_unix_nano = end_time + ndp + end + data_points.clear + ndps + else + # Update timestamps and take a snapshot. + data_points.values.map! do |ndp| + ndp.start_time_unix_nano ||= start_time # Start time of a data point is from the first observation. 
+ ndp.time_unix_nano = end_time + ndp.dup + end + end + end + + def update(increment, attributes, data_points) + data_points[attributes] = NumberDataPoint.new( + attributes, + nil, + nil, + increment, + nil + ) + nil + end + end + end + end + end +end diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/sum.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/sum.rb index 8973a90ccd..c2771b38e3 100644 --- a/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/sum.rb +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics/aggregation/sum.rb @@ -16,22 +16,21 @@ class Sum def initialize(aggregation_temporality: ENV.fetch('OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE', :delta)) # TODO: the default should be :cumulative, see issue #1555 @aggregation_temporality = aggregation_temporality.to_sym - @data_points = {} end - def collect(start_time, end_time) + def collect(start_time, end_time, data_points) if @aggregation_temporality == :delta # Set timestamps and 'move' data point values to result. - ndps = @data_points.values.map! do |ndp| + ndps = data_points.values.map! do |ndp| ndp.start_time_unix_nano = start_time ndp.time_unix_nano = end_time ndp end - @data_points.clear + data_points.clear ndps else # Update timestamps and take a snapshot. - @data_points.values.map! do |ndp| + data_points.values.map! do |ndp| ndp.start_time_unix_nano ||= start_time # Start time of a data point is from the first observation. ndp.time_unix_nano = end_time ndp.dup @@ -39,8 +38,8 @@ def collect(start_time, end_time) end end - def update(increment, attributes) - ndp = @data_points[attributes] || @data_points[attributes] = NumberDataPoint.new( + def update(increment, attributes, data_points) + ndp = data_points[attributes] || data_points[attributes] = NumberDataPoint.new( attributes, nil, nil, diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics/export/in_memory_metric_pull_exporter.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics/export/in_memory_metric_pull_exporter.rb index 8291ed9fe7..d0d8ccb902 100644 --- a/metrics_sdk/lib/opentelemetry/sdk/metrics/export/in_memory_metric_pull_exporter.rb +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics/export/in_memory_metric_pull_exporter.rb @@ -25,7 +25,7 @@ def pull def export(metrics, timeout: nil) @mutex.synchronize do - @metric_snapshots << metrics + @metric_snapshots.concat(Array(metrics)) end SUCCESS end diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics/meter_provider.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics/meter_provider.rb index 205ff5db0d..1f3f867052 100644 --- a/metrics_sdk/lib/opentelemetry/sdk/metrics/meter_provider.rb +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics/meter_provider.rb @@ -14,7 +14,7 @@ class MeterProvider < OpenTelemetry::Metrics::MeterProvider Key = Struct.new(:name, :version) private_constant(:Key) - attr_reader :resource, :metric_readers + attr_reader :resource, :metric_readers, :registered_views def initialize(resource: OpenTelemetry::SDK::Resources::Resource.create) @mutex = Mutex.new @@ -22,6 +22,7 @@ def initialize(resource: OpenTelemetry::SDK::Resources::Resource.create) @stopped = false @metric_readers = [] @resource = resource + @registered_views = [] end # Returns a {Meter} instance. @@ -125,13 +126,30 @@ def register_synchronous_instrument(instrument) end end - # The type of the Instrument(s) (optional). - # The name of the Instrument(s). OpenTelemetry SDK authors MAY choose to support wildcard characters, with the question mark (?) 
matching exactly one character and the asterisk character (*) matching zero or more characters. - # The name of the Meter (optional). - # The version of the Meter (optional). - # The schema_url of the Meter (optional). - def add_view - # TODO: For each meter add this view to all applicable instruments + # A View provides SDK users with the flexibility to customize the metrics that are output by the SDK. + # + # Example: + # + # OpenTelemetry.meter_provider.add_view('test', :aggregation => Aggregation::Drop.new, + # :type => :counter, :unit => 'smidgen', + # :meter_name => 'test', :meter_version => '1.0') + # + # + # @param [String] name Name of the view. + # @param [optional Hash] options For more precise matching, {View} and {MetricsStream} + # options may include: + # aggregation: An instance of an aggregation class, e.g. {ExplicitBucketHistogram}, {Sum}, {LastValue} + # type: A Symbol representing the instrument kind, e.g. :observable_gauge, :counter + # unit: A String matching an instrumentation unit, e.g. 'smidgen' + # meter_name: A String matching a meter name, e.g. meter_provider.meter('sample_meter_name', version: '1.2.0'), would be 'sample_meter_name' + # meter_version: A String matching a meter version, e.g. meter_provider.meter('sample_meter_name', version: '1.2.0'), would be '1.2.0' + # + # @return [nil] returns nil + # + def add_view(name, **options) + # TODO: add schema_url as part of options + @registered_views << View::RegisteredView.new(name, **options) + nil end end end diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics/state/metric_store.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics/state/metric_store.rb index de2ecb83b6..b89eb09161 100644 --- a/metrics_sdk/lib/opentelemetry/sdk/metrics/state/metric_store.rb +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics/state/metric_store.rb @@ -23,7 +23,8 @@ def initialize def collect @mutex.synchronize do @epoch_end_time = now_in_nano - snapshot = @metric_streams.map { |ms| ms.collect(@epoch_start_time, @epoch_end_time) } + # snapshot = @metric_streams.map { |ms| ms.collect(@epoch_start_time, @epoch_end_time) } + snapshot = @metric_streams.flat_map { |ms| ms.collect(@epoch_start_time, @epoch_end_time) } @epoch_start_time = @epoch_end_time snapshot end diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics/state/metric_stream.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics/state/metric_stream.rb index 7f98115dce..05033f522c 100644 --- a/metrics_sdk/lib/opentelemetry/sdk/metrics/state/metric_stream.rb +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics/state/metric_stream.rb @@ -30,30 +30,61 @@ def initialize( @instrument_kind = instrument_kind @meter_provider = meter_provider @instrumentation_scope = instrumentation_scope - @aggregation = aggregation + @default_aggregation = aggregation + @data_points = {} + @registered_views = [] + find_registered_view @mutex = Mutex.new end def collect(start_time, end_time) @mutex.synchronize do - MetricData.new( - @name, - @description, - @unit, - @instrument_kind, - @meter_provider.resource, - @instrumentation_scope, - @aggregation.collect(start_time, end_time), - @aggregation.aggregation_temporality, - start_time, - end_time - ) + metric_data = [] + if @registered_views.empty? 
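+          # With no matching views, emit a single MetricData built with the instrument's
+          # default aggregation; otherwise emit one MetricData per registered view,
+          # each collected with that view's aggregation over the shared data points.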
+ metric_data << aggregate_metric_data(start_time, end_time) + else + @registered_views.each { |view| metric_data << aggregate_metric_data(start_time, end_time, aggregation: view.aggregation) } + end + + metric_data end end def update(value, attributes) - @mutex.synchronize { @aggregation.update(value, attributes) } + if @registered_views.empty? + @mutex.synchronize { @default_aggregation.update(value, attributes, @data_points) } + else + @registered_views.each do |view| + @mutex.synchronize do + attributes ||= {} + attributes.merge!(view.attribute_keys) + view.aggregation.update(value, attributes, @data_points) if view.valid_aggregation? + end + end + end + end + + def aggregate_metric_data(start_time, end_time, aggregation: nil) + aggregator = aggregation || @default_aggregation + MetricData.new( + @name, + @description, + @unit, + @instrument_kind, + @meter_provider.resource, + @instrumentation_scope, + aggregator.collect(start_time, end_time, @data_points), + aggregator.aggregation_temporality, + start_time, + end_time + ) + end + + def find_registered_view + return if @meter_provider.nil? + + @meter_provider.registered_views.each { |view| @registered_views << view if view.match_instrument?(self) } end def to_s diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics/view.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics/view.rb new file mode 100644 index 0000000000..9606f383b1 --- /dev/null +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics/view.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +# Copyright The OpenTelemetry Authors +# +# SPDX-License-Identifier: Apache-2.0 + +module OpenTelemetry + module SDK + module Metrics + # A View provides SDK users with the flexibility to customize the metrics that are output by the SDK. + module View + end + end + end +end + +require 'opentelemetry/sdk/metrics/view/registered_view' diff --git a/metrics_sdk/lib/opentelemetry/sdk/metrics/view/registered_view.rb b/metrics_sdk/lib/opentelemetry/sdk/metrics/view/registered_view.rb new file mode 100644 index 0000000000..881f0f261e --- /dev/null +++ b/metrics_sdk/lib/opentelemetry/sdk/metrics/view/registered_view.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +# Copyright The OpenTelemetry Authors +# +# SPDX-License-Identifier: Apache-2.0 + +module OpenTelemetry + module SDK + module Metrics + module View + # RegisteredView is an internal class used to match Views with a given {MetricStream} + class RegisteredView + attr_reader :name, :aggregation, :attribute_keys, :regex + + def initialize(name, **options) + @name = name + @options = options + @aggregation = options[:aggregation] + @attribute_keys = options[:attribute_keys] || {} + + generate_regex_pattern(name) + end + + def match_instrument?(metric_stream) + return false if @name && !name_match(metric_stream.name) + return false if @options[:type] && @options[:type] != metric_stream.instrument_kind + return false if @options[:unit] && @options[:unit] != metric_stream.unit + return false if @options[:meter_name] && @options[:meter_name] != metric_stream.instrumentation_scope.name + return false if @options[:meter_version] && @options[:meter_version] != metric_stream.instrumentation_scope.version + + true + end + + def name_match(stream_name) + !!@regex&.match(stream_name) + end + + def valid_aggregation? 
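+          # True only when the configured aggregation is an instance of a class under
+          # the OpenTelemetry::SDK::Metrics::Aggregation namespace (e.g. Sum,
+          # LastValue, Drop, ExplicitBucketHistogram).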
+ @aggregation.class.name.rpartition('::')[0] == 'OpenTelemetry::SDK::Metrics::Aggregation' + end + + private + + def generate_regex_pattern(view_name) + regex_pattern = Regexp.escape(view_name) + + regex_pattern.gsub!('\*', '.*') + regex_pattern.gsub!('\?', '.') + + @regex = Regexp.new("^#{regex_pattern}$") + rescue StandardError + @regex = nil + end + end + end + end + end +end diff --git a/metrics_sdk/test/integration/in_memory_metric_pull_exporter_test.rb b/metrics_sdk/test/integration/in_memory_metric_pull_exporter_test.rb index bed4bdf935..3e060e79dc 100644 --- a/metrics_sdk/test/integration/in_memory_metric_pull_exporter_test.rb +++ b/metrics_sdk/test/integration/in_memory_metric_pull_exporter_test.rb @@ -26,7 +26,7 @@ counter.add(4, attributes: { 'd' => 'e' }) metric_exporter.pull - last_snapshot = metric_exporter.metric_snapshots.last + last_snapshot = metric_exporter.metric_snapshots _(last_snapshot).wont_be_empty _(last_snapshot[0].name).must_equal('counter') diff --git a/metrics_sdk/test/integration/periodic_metric_reader_test.rb b/metrics_sdk/test/integration/periodic_metric_reader_test.rb index 7bf00a08fc..5abdfbbf8d 100644 --- a/metrics_sdk/test/integration/periodic_metric_reader_test.rb +++ b/metrics_sdk/test/integration/periodic_metric_reader_test.rb @@ -34,7 +34,7 @@ _(snapshot.size).must_equal(2) - first_snapshot = snapshot[0] + first_snapshot = snapshot _(first_snapshot[0].name).must_equal('counter') _(first_snapshot[0].unit).must_equal('smidgen') _(first_snapshot[0].description).must_equal('a small amount of something') diff --git a/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/drop_test.rb b/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/drop_test.rb new file mode 100644 index 0000000000..8c873107d9 --- /dev/null +++ b/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/drop_test.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +# Copyright The OpenTelemetry Authors +# +# SPDX-License-Identifier: Apache-2.0 + +require 'test_helper' + +describe OpenTelemetry::SDK::Metrics::Aggregation::LastValue do + let(:data_points) { {} } + let(:drop_aggregation) { OpenTelemetry::SDK::Metrics::Aggregation::Drop.new(aggregation_temporality: aggregation_temporality) } + let(:aggregation_temporality) { :delta } + + # Time in nano + let(:start_time) { (Time.now.to_r * 1_000_000_000).to_i } + let(:end_time) { ((Time.now + 60).to_r * 1_000_000_000).to_i } + + it 'sets the timestamps' do + drop_aggregation.update(0, {}, data_points) + ndp = drop_aggregation.collect(start_time, end_time, data_points)[0] + _(ndp.start_time_unix_nano).must_equal(0) + _(ndp.time_unix_nano).must_equal(0) + end + + it 'aggregates and collects should collect no value for all collection' do + drop_aggregation.update(1, {}, data_points) + drop_aggregation.update(2, {}, data_points) + + drop_aggregation.update(2, { 'foo' => 'bar' }, data_points) + drop_aggregation.update(2, { 'foo' => 'bar' }, data_points) + + ndps = drop_aggregation.collect(start_time, end_time, data_points) + + _(ndps.size).must_equal(2) + _(ndps[0].value).must_equal(0) + _(ndps[0].attributes).must_equal({}) + + _(ndps[1].value).must_equal(0) + _(ndps[1].attributes).must_equal({}) + end +end diff --git a/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/explicit_bucket_histogram_test.rb b/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/explicit_bucket_histogram_test.rb index bace00d42e..4666ce37e6 100644 --- a/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/explicit_bucket_histogram_test.rb +++ 
b/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/explicit_bucket_histogram_test.rb @@ -7,6 +7,7 @@ require 'test_helper' describe OpenTelemetry::SDK::Metrics::Aggregation::ExplicitBucketHistogram do + let(:data_points) { {} } let(:ebh) do OpenTelemetry::SDK::Metrics::Aggregation::ExplicitBucketHistogram.new( aggregation_temporality: aggregation_temporality, @@ -44,19 +45,19 @@ describe '#collect' do it 'returns all the data points' do - ebh.update(0, {}) - ebh.update(1, {}) - ebh.update(5, {}) - ebh.update(6, {}) - ebh.update(10, {}) - - ebh.update(-10, 'foo' => 'bar') - ebh.update(1, 'foo' => 'bar') - ebh.update(22, 'foo' => 'bar') - ebh.update(55, 'foo' => 'bar') - ebh.update(80, 'foo' => 'bar') - - hdps = ebh.collect(start_time, end_time) + ebh.update(0, {}, data_points) + ebh.update(1, {}, data_points) + ebh.update(5, {}, data_points) + ebh.update(6, {}, data_points) + ebh.update(10, {}, data_points) + + ebh.update(-10, { 'foo' => 'bar' }, data_points) + ebh.update(1, { 'foo' => 'bar' }, data_points) + ebh.update(22, { 'foo' => 'bar' }, data_points) + ebh.update(55, { 'foo' => 'bar' }, data_points) + ebh.update(80, { 'foo' => 'bar' }, data_points) + + hdps = ebh.collect(start_time, end_time, data_points) _(hdps.size).must_equal(2) _(hdps[0].attributes).must_equal({}) _(hdps[0].count).must_equal(5) @@ -76,34 +77,34 @@ end it 'sets the timestamps' do - ebh.update(0, {}) - hdp = ebh.collect(start_time, end_time)[0] + ebh.update(0, {}, data_points) + hdp = ebh.collect(start_time, end_time, data_points)[0] _(hdp.start_time_unix_nano).must_equal(start_time) _(hdp.time_unix_nano).must_equal(end_time) end it 'calculates the count' do - ebh.update(0, {}) - ebh.update(0, {}) - ebh.update(0, {}) - ebh.update(0, {}) - hdp = ebh.collect(start_time, end_time)[0] + ebh.update(0, {}, data_points) + ebh.update(0, {}, data_points) + ebh.update(0, {}, data_points) + ebh.update(0, {}, data_points) + hdp = ebh.collect(start_time, end_time, data_points)[0] _(hdp.count).must_equal(4) end it 'does not aggregate between collects with default delta aggregation' do - ebh.update(0, {}) - ebh.update(1, {}) - ebh.update(5, {}) - ebh.update(6, {}) - ebh.update(10, {}) - hdps = ebh.collect(start_time, end_time) - - ebh.update(0, {}) - ebh.update(1, {}) - ebh.update(5, {}) - ebh.update(6, {}) - ebh.update(10, {}) + ebh.update(0, {}, data_points) + ebh.update(1, {}, data_points) + ebh.update(5, {}, data_points) + ebh.update(6, {}, data_points) + ebh.update(10, {}, data_points) + hdps = ebh.collect(start_time, end_time, data_points) + + ebh.update(0, {}, data_points) + ebh.update(1, {}, data_points) + ebh.update(5, {}, data_points) + ebh.update(6, {}, data_points) + ebh.update(10, {}, data_points) # Assert that the recent update does not # impact the already collected metrics _(hdps[0].count).must_equal(5) @@ -112,7 +113,7 @@ _(hdps[0].max).must_equal(10) _(hdps[0].bucket_counts).must_equal([1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0]) - hdps = ebh.collect(start_time, end_time) + hdps = ebh.collect(start_time, end_time, data_points) # Assert that we are not accumulating values # between calls to collect _(hdps[0].count).must_equal(5) @@ -126,18 +127,18 @@ let(:aggregation_temporality) { :not_delta } it 'allows metrics to accumulate' do - ebh.update(0, {}) - ebh.update(1, {}) - ebh.update(5, {}) - ebh.update(6, {}) - ebh.update(10, {}) - hdps = ebh.collect(start_time, end_time) - - ebh.update(0, {}) - ebh.update(1, {}) - ebh.update(5, {}) - ebh.update(6, {}) - ebh.update(10, {}) + ebh.update(0, {}, data_points) + 
ebh.update(1, {}, data_points) + ebh.update(5, {}, data_points) + ebh.update(6, {}, data_points) + ebh.update(10, {}, data_points) + hdps = ebh.collect(start_time, end_time, data_points) + + ebh.update(0, {}, data_points) + ebh.update(1, {}, data_points) + ebh.update(5, {}, data_points) + ebh.update(6, {}, data_points) + ebh.update(10, {}, data_points) # Assert that the recent update does not # impact the already collected metrics _(hdps[0].count).must_equal(5) @@ -146,7 +147,7 @@ _(hdps[0].max).must_equal(10) _(hdps[0].bucket_counts).must_equal([1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0]) - hdps1 = ebh.collect(start_time, end_time) + hdps1 = ebh.collect(start_time, end_time, data_points) # Assert that we are accumulating values # and not just capturing the delta since # the previous collect call @@ -169,38 +170,38 @@ describe '#update' do it 'accumulates across the default boundaries' do - ebh.update(0, {}) + ebh.update(0, {}, data_points) - ebh.update(1, {}) - ebh.update(5, {}) + ebh.update(1, {}, data_points) + ebh.update(5, {}, data_points) - ebh.update(6, {}) - ebh.update(10, {}) + ebh.update(6, {}, data_points) + ebh.update(10, {}, data_points) - ebh.update(11, {}) - ebh.update(25, {}) + ebh.update(11, {}, data_points) + ebh.update(25, {}, data_points) - ebh.update(26, {}) - ebh.update(50, {}) + ebh.update(26, {}, data_points) + ebh.update(50, {}, data_points) - ebh.update(51, {}) - ebh.update(75, {}) + ebh.update(51, {}, data_points) + ebh.update(75, {}, data_points) - ebh.update(76, {}) - ebh.update(100, {}) + ebh.update(76, {}, data_points) + ebh.update(100, {}, data_points) - ebh.update(101, {}) - ebh.update(250, {}) + ebh.update(101, {}, data_points) + ebh.update(250, {}, data_points) - ebh.update(251, {}) - ebh.update(500, {}) + ebh.update(251, {}, data_points) + ebh.update(500, {}, data_points) - ebh.update(501, {}) - ebh.update(1000, {}) + ebh.update(501, {}, data_points) + ebh.update(1000, {}, data_points) - ebh.update(1001, {}) + ebh.update(1001, {}, data_points) - hdp = ebh.collect(start_time, end_time)[0] + hdp = ebh.collect(start_time, end_time, data_points)[0] _(hdp.bucket_counts).must_equal([1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1]) _(hdp.sum).must_equal(4040) _(hdp.min).must_equal(0) @@ -211,8 +212,8 @@ let(:boundaries) { [4, 2, 1] } it 'sorts it' do - ebh.update(0, {}) - _(ebh.collect(start_time, end_time)[0].explicit_bounds).must_equal([1, 2, 4]) + ebh.update(0, {}, data_points) + _(ebh.collect(start_time, end_time, data_points)[0].explicit_bounds).must_equal([1, 2, 4]) end end @@ -220,8 +221,8 @@ let(:record_min_max) { false } it 'does not record min max values' do - ebh.update(-1, {}) - hdp = ebh.collect(start_time, end_time)[0] + ebh.update(-1, {}, data_points) + hdp = ebh.collect(start_time, end_time, data_points)[0] _(hdp.min).must_be_nil _(hdp.min).must_be_nil end @@ -231,14 +232,14 @@ let(:boundaries) { [0, 2, 4] } it 'aggregates' do - ebh.update(-1, {}) - ebh.update(0, {}) - ebh.update(1, {}) - ebh.update(2, {}) - ebh.update(3, {}) - ebh.update(4, {}) - ebh.update(5, {}) - hdp = ebh.collect(start_time, end_time)[0] + ebh.update(-1, {}, data_points) + ebh.update(0, {}, data_points) + ebh.update(1, {}, data_points) + ebh.update(2, {}, data_points) + ebh.update(3, {}, data_points) + ebh.update(4, {}, data_points) + ebh.update(5, {}, data_points) + hdp = ebh.collect(start_time, end_time, data_points)[0] _(hdp.bucket_counts).must_equal([2, 2, 2, 1]) end @@ -248,9 +249,9 @@ let(:boundaries) { [0] } it 'aggregates' do - ebh.update(-1, {}) - ebh.update(1, {}) - hdp = 
ebh.collect(start_time, end_time)[0] + ebh.update(-1, {}, data_points) + ebh.update(1, {}, data_points) + hdp = ebh.collect(start_time, end_time, data_points)[0] _(hdp.bucket_counts).must_equal([1, 1]) end @@ -260,9 +261,9 @@ let(:boundaries) { [] } it 'aggregates but does not record bucket counts' do - ebh.update(-1, {}) - ebh.update(3, {}) - hdp = ebh.collect(start_time, end_time)[0] + ebh.update(-1, {}, data_points) + ebh.update(3, {}, data_points) + hdp = ebh.collect(start_time, end_time, data_points)[0] _(hdp.bucket_counts).must_be_nil _(hdp.explicit_bounds).must_be_nil @@ -277,9 +278,9 @@ let(:boundaries) { nil } it 'aggregates but does not record bucket counts' do - ebh.update(-1, {}) - ebh.update(3, {}) - hdp = ebh.collect(start_time, end_time)[0] + ebh.update(-1, {}, data_points) + ebh.update(3, {}, data_points) + hdp = ebh.collect(start_time, end_time, data_points)[0] _(hdp.bucket_counts).must_be_nil _(hdp.explicit_bounds).must_be_nil diff --git a/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/last_value_test.rb b/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/last_value_test.rb new file mode 100644 index 0000000000..8714b698d5 --- /dev/null +++ b/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/last_value_test.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +# Copyright The OpenTelemetry Authors +# +# SPDX-License-Identifier: Apache-2.0 + +require 'test_helper' + +describe OpenTelemetry::SDK::Metrics::Aggregation::LastValue do + let(:data_points) { {} } + let(:last_value_aggregation) { OpenTelemetry::SDK::Metrics::Aggregation::LastValue.new(aggregation_temporality: aggregation_temporality) } + let(:aggregation_temporality) { :delta } + + # Time in nano + let(:start_time) { (Time.now.to_r * 1_000_000_000).to_i } + let(:end_time) { ((Time.now + 60).to_r * 1_000_000_000).to_i } + + it 'sets the timestamps' do + last_value_aggregation.update(0, {}, data_points) + ndp = last_value_aggregation.collect(start_time, end_time, data_points)[0] + _(ndp.start_time_unix_nano).must_equal(start_time) + _(ndp.time_unix_nano).must_equal(end_time) + end + + it 'aggregates and collects should collect the last value' do + last_value_aggregation.update(1, {}, data_points) + last_value_aggregation.update(2, {}, data_points) + + last_value_aggregation.update(2, { 'foo' => 'bar' }, data_points) + last_value_aggregation.update(2, { 'foo' => 'bar' }, data_points) + + ndps = last_value_aggregation.collect(start_time, end_time, data_points) + _(ndps[0].value).must_equal(2) + _(ndps[0].attributes).must_equal({}, data_points) + + _(ndps[1].value).must_equal(2) + _(ndps[1].attributes).must_equal('foo' => 'bar') + end +end diff --git a/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/sum_test.rb b/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/sum_test.rb index 86b7282507..66e7667ecc 100644 --- a/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/sum_test.rb +++ b/metrics_sdk/test/opentelemetry/sdk/metrics/aggregation/sum_test.rb @@ -7,6 +7,7 @@ require 'test_helper' describe OpenTelemetry::SDK::Metrics::Aggregation::Sum do + let(:data_points) { {} } let(:sum_aggregation) { OpenTelemetry::SDK::Metrics::Aggregation::Sum.new(aggregation_temporality: aggregation_temporality) } let(:aggregation_temporality) { :delta } @@ -36,38 +37,38 @@ end it 'sets the timestamps' do - sum_aggregation.update(0, {}) - ndp = sum_aggregation.collect(start_time, end_time)[0] + sum_aggregation.update(0, {}, data_points) + ndp = sum_aggregation.collect(start_time, end_time, data_points)[0] 
_(ndp.start_time_unix_nano).must_equal(start_time) _(ndp.time_unix_nano).must_equal(end_time) end it 'aggregates and collects' do - sum_aggregation.update(1, {}) - sum_aggregation.update(2, {}) + sum_aggregation.update(1, {}, data_points) + sum_aggregation.update(2, {}, data_points) - sum_aggregation.update(2, 'foo' => 'bar') - sum_aggregation.update(2, 'foo' => 'bar') + sum_aggregation.update(2, { 'foo' => 'bar' }, data_points) + sum_aggregation.update(2, { 'foo' => 'bar' }, data_points) - ndps = sum_aggregation.collect(start_time, end_time) + ndps = sum_aggregation.collect(start_time, end_time, data_points) _(ndps[0].value).must_equal(3) - _(ndps[0].attributes).must_equal({}) + _(ndps[0].attributes).must_equal({}, data_points) _(ndps[1].value).must_equal(4) _(ndps[1].attributes).must_equal('foo' => 'bar') end it 'does not aggregate between collects' do - sum_aggregation.update(1, {}) - sum_aggregation.update(2, {}) - ndps = sum_aggregation.collect(start_time, end_time) + sum_aggregation.update(1, {}, data_points) + sum_aggregation.update(2, {}, data_points) + ndps = sum_aggregation.collect(start_time, end_time, data_points) - sum_aggregation.update(1, {}) + sum_aggregation.update(1, {}, data_points) # Assert that the recent update does not # impact the already collected metrics _(ndps[0].value).must_equal(3) - ndps = sum_aggregation.collect(start_time, end_time) + ndps = sum_aggregation.collect(start_time, end_time, data_points) # Assert that we are not accumulating values # between calls to collect _(ndps[0].value).must_equal(1) @@ -77,16 +78,16 @@ let(:aggregation_temporality) { :not_delta } it 'allows metrics to accumulate' do - sum_aggregation.update(1, {}) - sum_aggregation.update(2, {}) - ndps = sum_aggregation.collect(start_time, end_time) + sum_aggregation.update(1, {}, data_points) + sum_aggregation.update(2, {}, data_points) + ndps = sum_aggregation.collect(start_time, end_time, data_points) - sum_aggregation.update(1, {}) + sum_aggregation.update(1, {}, data_points) # Assert that the recent update does not # impact the already collected metrics _(ndps[0].value).must_equal(3) - ndps = sum_aggregation.collect(start_time, end_time) + ndps = sum_aggregation.collect(start_time, end_time, data_points) # Assert that we are accumulating values # and not just capturing the delta since # the previous collect call diff --git a/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/counter_test.rb b/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/counter_test.rb index c00e2eba7b..ff5c3edfe2 100644 --- a/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/counter_test.rb +++ b/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/counter_test.rb @@ -20,7 +20,7 @@ it 'counts' do counter.add(1, attributes: { 'foo' => 'bar' }) metric_exporter.pull - last_snapshot = metric_exporter.metric_snapshots.last + last_snapshot = metric_exporter.metric_snapshots _(last_snapshot[0].name).must_equal('counter') _(last_snapshot[0].unit).must_equal('smidgen') diff --git a/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/histogram_test.rb b/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/histogram_test.rb index bddc7f5568..771ffaef83 100644 --- a/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/histogram_test.rb +++ b/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/histogram_test.rb @@ -21,7 +21,7 @@ histogram.record(5, attributes: { 'foo' => 'bar' }) histogram.record(6, attributes: { 'foo' => 'bar' }) metric_exporter.pull - last_snapshot = metric_exporter.metric_snapshots.last + 
last_snapshot = metric_exporter.metric_snapshots _(last_snapshot[0].name).must_equal('histogram') _(last_snapshot[0].unit).must_equal('smidgen') diff --git a/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/up_down_counter_test.rb b/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/up_down_counter_test.rb index 456c2c5052..687ad27a89 100644 --- a/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/up_down_counter_test.rb +++ b/metrics_sdk/test/opentelemetry/sdk/metrics/instrument/up_down_counter_test.rb @@ -21,7 +21,7 @@ up_down_counter.add(1, attributes: { 'foo' => 'bar' }) up_down_counter.add(-2, attributes: { 'foo' => 'bar' }) metric_exporter.pull - last_snapshot = metric_exporter.metric_snapshots.last + last_snapshot = metric_exporter.metric_snapshots _(last_snapshot[0].name).must_equal('up_down_counter') _(last_snapshot[0].unit).must_equal('smidgen') diff --git a/metrics_sdk/test/opentelemetry/sdk/metrics/meter_provider_test.rb b/metrics_sdk/test/opentelemetry/sdk/metrics/meter_provider_test.rb index f781d613b3..a82513a3c4 100644 --- a/metrics_sdk/test/opentelemetry/sdk/metrics/meter_provider_test.rb +++ b/metrics_sdk/test/opentelemetry/sdk/metrics/meter_provider_test.rb @@ -134,8 +134,26 @@ end end - # TODO: OpenTelemetry.meter_provider.add_view describe '#add_view' do + it 'adds a view with aggregation' do + OpenTelemetry.meter_provider.add_view('test', aggregation: ::OpenTelemetry::SDK::Metrics::Aggregation::Drop.new) + + registered_views = OpenTelemetry.meter_provider.instance_variable_get(:@registered_views) + + _(registered_views.size).must_equal 1 + _(registered_views[0].class).must_equal ::OpenTelemetry::SDK::Metrics::View::RegisteredView + _(registered_views[0].name).must_equal 'test' + _(registered_views[0].aggregation.class).must_equal ::OpenTelemetry::SDK::Metrics::Aggregation::Drop + end + + it 'add a view without aggregation but aggregation as nil' do + OpenTelemetry.meter_provider.add_view('test') + + registered_views = OpenTelemetry.meter_provider.instance_variable_get(:@registered_views) + + _(registered_views.size).must_equal 1 + _(registered_views[0].aggregation).must_be_nil + end end private diff --git a/metrics_sdk/test/opentelemetry/sdk/metrics/view/registered_view_test.rb b/metrics_sdk/test/opentelemetry/sdk/metrics/view/registered_view_test.rb new file mode 100644 index 0000000000..4638485777 --- /dev/null +++ b/metrics_sdk/test/opentelemetry/sdk/metrics/view/registered_view_test.rb @@ -0,0 +1,148 @@ +# frozen_string_literal: true + +# Copyright The OpenTelemetry Authors +# +# SPDX-License-Identifier: Apache-2.0 + +require 'test_helper' + +describe OpenTelemetry::SDK::Metrics::View::RegisteredView do + describe '#registered_view' do + before { reset_metrics_sdk } + + it 'emits metrics with no data_points if view is drop' do + OpenTelemetry::SDK.configure + + metric_exporter = OpenTelemetry::SDK::Metrics::Export::InMemoryMetricPullExporter.new + OpenTelemetry.meter_provider.add_metric_reader(metric_exporter) + + meter = OpenTelemetry.meter_provider.meter('test') + OpenTelemetry.meter_provider.add_view('counter', aggregation: ::OpenTelemetry::SDK::Metrics::Aggregation::Drop.new) + + counter = meter.create_counter('counter', unit: 'smidgen', description: 'a small amount of something') + + counter.add(1) + counter.add(2, attributes: { 'a' => 'b' }) + + metric_exporter.pull + last_snapshot = metric_exporter.metric_snapshots + + _(last_snapshot).wont_be_empty + _(last_snapshot[0].name).must_equal('counter') + _(last_snapshot[0].unit).must_equal('smidgen') 
+ _(last_snapshot[0].description).must_equal('a small amount of something')
+
+ _(last_snapshot[0].instrumentation_scope.name).must_equal('test')
+
+ _(last_snapshot[0].data_points[0].value).must_equal 0
+ _(last_snapshot[0].data_points[0].start_time_unix_nano).must_equal 0
+ _(last_snapshot[0].data_points[0].time_unix_nano).must_equal 0
+
+ _(last_snapshot[0].data_points[1].value).must_equal 0
+ _(last_snapshot[0].data_points[1].start_time_unix_nano).must_equal 0
+ _(last_snapshot[0].data_points[1].time_unix_nano).must_equal 0
+ end
+
+ it 'emits metrics with only the last value in data_points if the view is last_value' do
+ OpenTelemetry::SDK.configure
+
+ metric_exporter = OpenTelemetry::SDK::Metrics::Export::InMemoryMetricPullExporter.new
+ OpenTelemetry.meter_provider.add_metric_reader(metric_exporter)
+
+ meter = OpenTelemetry.meter_provider.meter('test')
+ OpenTelemetry.meter_provider.add_view('counter', aggregation: ::OpenTelemetry::SDK::Metrics::Aggregation::LastValue.new)
+
+ counter = meter.create_counter('counter', unit: 'smidgen', description: 'a small amount of something')
+
+ counter.add(1)
+ counter.add(2)
+ counter.add(3)
+ counter.add(4)
+
+ metric_exporter.pull
+ last_snapshot = metric_exporter.metric_snapshots
+
+ _(last_snapshot[0].data_points).wont_be_empty
+ _(last_snapshot[0].data_points[0].value).must_equal 4
+ end
+
+ it 'emits metrics with the sum of values in data_points if the view is last_value but does not match the instrument' do
+ OpenTelemetry::SDK.configure
+
+ metric_exporter = OpenTelemetry::SDK::Metrics::Export::InMemoryMetricPullExporter.new
+ OpenTelemetry.meter_provider.add_metric_reader(metric_exporter)
+
+ meter = OpenTelemetry.meter_provider.meter('test')
+ OpenTelemetry.meter_provider.add_view('retnuoc', aggregation: ::OpenTelemetry::SDK::Metrics::Aggregation::LastValue.new)
+
+ counter = meter.create_counter('counter', unit: 'smidgen', description: 'a small amount of something')
+
+ counter.add(1)
+ counter.add(2)
+ counter.add(3)
+ counter.add(4)
+
+ metric_exporter.pull
+ last_snapshot = metric_exporter.metric_snapshots
+
+ _(last_snapshot[0].data_points).wont_be_empty
+ _(last_snapshot[0].data_points[0].value).must_equal 10
+ end
+ end
+
+ describe '#registered_view select instrument' do
+ let(:registered_view) { OpenTelemetry::SDK::Metrics::View::RegisteredView.new(nil, aggregation: ::OpenTelemetry::SDK::Metrics::Aggregation::LastValue.new) }
+ let(:instrumentation_scope) do
+ OpenTelemetry::SDK::InstrumentationScope.new('test_scope', '1.0.1')
+ end
+
+ let(:metric_stream) do
+ OpenTelemetry::SDK::Metrics::State::MetricStream.new('test', 'description', 'smidgen', :counter, nil, instrumentation_scope, nil)
+ end
+
+ it 'registered view with matching name' do
+ registered_view.instance_variable_set(:@name, 'test')
+ registered_view.send(:generate_regex_pattern, 'test')
+ _(registered_view.match_instrument?(metric_stream)).must_equal true
+ end
+
+ it 'registered view with matching type' do
+ registered_view.instance_variable_set(:@options, { type: :counter })
+ _(registered_view.match_instrument?(metric_stream)).must_equal true
+ end
+
+ it 'registered view with matching version' do
+ registered_view.instance_variable_set(:@options, { meter_version: '1.0.1' })
+ _(registered_view.match_instrument?(metric_stream)).must_equal true
+ end
+
+ it 'registered view with matching meter_name' do
+ registered_view.instance_variable_set(:@options, { meter_name: 'test_scope' })
+ _(registered_view.match_instrument?(metric_stream)).must_equal true
+ end
+
+ it 'does not match the instrument when the name does not match even if the type matches' do
+ registered_view.instance_variable_set(:@options, { type: :counter })
+ registered_view.instance_variable_set(:@name, 'tset')
+ _(registered_view.match_instrument?(metric_stream)).must_equal false
+ end
+
+ describe '#name_match' do
+ it 'name_match_for_wild_card' do
+ registered_view.instance_variable_set(:@name, 'log*2024?.txt')
+ registered_view.send(:generate_regex_pattern, 'log*2024?.txt')
+ _(registered_view.name_match('logfile20242.txt')).must_equal true
+ _(registered_view.name_match('log2024a.txt')).must_equal true
+ _(registered_view.name_match('log_test_2024.txt')).must_equal false
+ end
+
+ it 'name_match_for_*' do
+ registered_view.instance_variable_set(:@name, '*')
+ registered_view.send(:generate_regex_pattern, '*')
+ _(registered_view.name_match('*')).must_equal true
+ _(registered_view.name_match('aaaaaaaaa')).must_equal true
+ _(registered_view.name_match('!@#$%^&')).must_equal true
+ end
+ end
+ end
+end
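The tests above all exercise the same API change: the caller now owns the data_points hash and passes it explicitly to both update and collect on an aggregation. A minimal sketch of that flow outside the minitest harness, assuming the metrics SDK can be loaded with require 'opentelemetry-metrics-sdk' and that Sum#update / Sum#collect keep the three-argument signatures shown in the diff:

require 'opentelemetry-metrics-sdk' # assumed require path for the opentelemetry-metrics-sdk gem

# The caller owns the storage; the aggregation only reads and mutates it.
data_points = {}
sum = OpenTelemetry::SDK::Metrics::Aggregation::Sum.new(aggregation_temporality: :delta)

# update(value, attributes, data_points) -- the argument order used throughout the tests.
sum.update(1, {}, data_points)
sum.update(2, {}, data_points)
sum.update(2, { 'foo' => 'bar' }, data_points)

# Timestamps in nanoseconds, as in the tests.
start_time = (Time.now.to_r * 1_000_000_000).to_i
end_time = ((Time.now + 60).to_r * 1_000_000_000).to_i

# collect(start, end, data_points) returns one data point per attribute set; with
# :delta temporality the accumulated values are not carried into the next collect.
sum.collect(start_time, end_time, data_points).each do |ndp|
  puts "#{ndp.attributes.inspect} => #{ndp.value}" # expected: {} => 3, then {"foo"=>"bar"} => 2
end

Presumably moving the hash out of the aggregation instance lets the metric stream own per-attribute storage, which is why every updated test now declares let(:data_points) { {} }.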