Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main' into develop-k8s-agent-operator
Browse files Browse the repository at this point in the history
  • Loading branch information
TimPansino committed Jun 11, 2024
2 parents 250ca8c + 45b575f commit 48250b2
Show file tree
Hide file tree
Showing 62 changed files with 5,577 additions and 533 deletions.
71 changes: 68 additions & 3 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,8 @@ jobs:
- mongodb
- mssql
- mysql
- postgres
- postgres16
- postgres9
- rabbitmq
- redis
- rediscluster
Expand Down Expand Up @@ -207,7 +208,7 @@ jobs:
path: ./**/.coverage.*
retention-days: 1

postgres:
postgres16:
env:
TOTAL_GROUPS: 2

Expand All @@ -223,7 +224,71 @@ jobs:
--add-host=host.docker.internal:host-gateway
timeout-minutes: 30
services:
postgres:
postgres16:
image: postgres:16
env:
POSTGRES_PASSWORD: postgres
ports:
- 8080:5432
- 8081:5432
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # 4.1.1

- name: Fetch git tags
run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
git fetch --tags origin
- name: Configure pip cache
run: |
mkdir -p /github/home/.cache/pip
chown -R $(whoami) /github/home/.cache/pip
- name: Get Environments
id: get-envs
run: |
echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> $GITHUB_OUTPUT
env:
GROUP_NUMBER: ${{ matrix.group-number }}

- name: Test
run: |
tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto
env:
TOX_PARALLEL_NO_SPINNER: 1
PY_COLORS: 0

- name: Upload Coverage Artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # 4.3.1
with:
name: coverage-${{ github.job }}-${{ strategy.job-index }}
path: ./**/.coverage.*
retention-days: 1

postgres9:
env:
TOTAL_GROUPS: 1

strategy:
fail-fast: false
matrix:
group-number: [1]

runs-on: ubuntu-20.04
container:
image: ghcr.io/newrelic/newrelic-python-agent-ci:latest
options: >-
--add-host=host.docker.internal:host-gateway
timeout-minutes: 30
services:
postgres9:
image: postgres:9
env:
POSTGRES_PASSWORD: postgres
Expand Down
11 changes: 5 additions & 6 deletions newrelic/api/web_transaction.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,7 @@
)
from newrelic.common.object_names import callable_name
from newrelic.common.object_wrapper import FunctionWrapper, wrap_object
from newrelic.core.attribute import create_attributes, process_user_attribute
from newrelic.core.attribute_filter import DST_BROWSER_MONITORING, DST_NONE
from newrelic.core.attribute_filter import DST_BROWSER_MONITORING
from newrelic.packages import six

_logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -457,15 +456,15 @@ def browser_timing_header(self, nonce=None):

# create the data structure that pull all our data in

broswer_agent_configuration = self.browser_monitoring_intrinsics(obfuscation_key)
browser_agent_configuration = self.browser_monitoring_intrinsics(obfuscation_key)

if attributes:
attributes = obfuscate(json_encode(attributes), obfuscation_key)
broswer_agent_configuration["atts"] = attributes
browser_agent_configuration["atts"] = attributes

header = _js_agent_header_fragment % (
_encode_nonce(nonce),
json_encode(broswer_agent_configuration),
json_encode(browser_agent_configuration),
self._settings.js_agent_loader,
)

Expand Down Expand Up @@ -568,7 +567,7 @@ def __iter__(self):
yield "content-length", self.environ["CONTENT_LENGTH"]
elif key == "CONTENT_TYPE":
yield "content-type", self.environ["CONTENT_TYPE"]
elif key == "HTTP_CONTENT_LENGTH" or key == "HTTP_CONTENT_TYPE":
elif key in ("HTTP_CONTENT_LENGTH", "HTTP_CONTENT_TYPE"):
# These keys are illegal and should be ignored
continue
elif key.startswith("HTTP_"):
Expand Down
38 changes: 38 additions & 0 deletions newrelic/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -2409,6 +2409,12 @@ def _process_module_builtin_defaults():
"instrument_langchain_vectorstore_similarity_search",
)

_process_module_definition(
"langchain_community.vectorstores.oraclevs",
"newrelic.hooks.mlmodel_langchain",
"instrument_langchain_vectorstore_similarity_search",
)

_process_module_definition(
"langchain_community.vectorstores.pathway",
"newrelic.hooks.mlmodel_langchain",
Expand Down Expand Up @@ -2445,6 +2451,12 @@ def _process_module_builtin_defaults():
"instrument_langchain_vectorstore_similarity_search",
)

_process_module_definition(
"langchain_community.vectorstores.relyt",
"newrelic.hooks.mlmodel_langchain",
"instrument_langchain_vectorstore_similarity_search",
)

_process_module_definition(
"langchain_community.vectorstores.rocksetdb",
"newrelic.hooks.mlmodel_langchain",
Expand Down Expand Up @@ -2547,6 +2559,12 @@ def _process_module_builtin_defaults():
"instrument_langchain_vectorstore_similarity_search",
)

_process_module_definition(
"langchain_community.vectorstores.upstash",
"newrelic.hooks.mlmodel_langchain",
"instrument_langchain_vectorstore_similarity_search",
)

_process_module_definition(
"langchain_community.vectorstores.usearch",
"newrelic.hooks.mlmodel_langchain",
Expand Down Expand Up @@ -2583,6 +2601,12 @@ def _process_module_builtin_defaults():
"instrument_langchain_vectorstore_similarity_search",
)

_process_module_definition(
"langchain_community.vectorstores.vlite",
"newrelic.hooks.mlmodel_langchain",
"instrument_langchain_vectorstore_similarity_search",
)

_process_module_definition(
"langchain_community.vectorstores.weaviate",
"newrelic.hooks.mlmodel_langchain",
Expand Down Expand Up @@ -3077,6 +3101,9 @@ def _process_module_builtin_defaults():

_process_module_definition("pymssql", "newrelic.hooks.database_pymssql", "instrument_pymssql")

_process_module_definition("psycopg", "newrelic.hooks.database_psycopg", "instrument_psycopg")
_process_module_definition("psycopg.sql", "newrelic.hooks.database_psycopg", "instrument_psycopg_sql")

_process_module_definition("psycopg2", "newrelic.hooks.database_psycopg2", "instrument_psycopg2")
_process_module_definition(
"psycopg2._psycopg2",
Expand Down Expand Up @@ -4351,6 +4378,11 @@ def _process_module_builtin_defaults():
"newrelic.hooks.application_celery",
"instrument_celery_app_task",
)
_process_module_definition(
"celery.app.trace",
"newrelic.hooks.application_celery",
"instrument_celery_app_trace",
)
_process_module_definition("celery.worker", "newrelic.hooks.application_celery", "instrument_celery_worker")
_process_module_definition(
"celery.concurrency.processes",
Expand Down Expand Up @@ -4480,6 +4512,12 @@ def _process_module_builtin_defaults():
"instrument_gearman_worker",
)

_process_module_definition(
"aiobotocore.endpoint",
"newrelic.hooks.external_aiobotocore",
"instrument_aiobotocore_endpoint",
)

_process_module_definition(
"botocore.endpoint",
"newrelic.hooks.external_botocore",
Expand Down
1 change: 1 addition & 0 deletions newrelic/core/agent_protocol.py
Original file line number Diff line number Diff line change
Expand Up @@ -490,6 +490,7 @@ def __init__(self, settings, host=None, client_cls=ServerlessModeClient):
"protocol_version": self.VERSION,
"execution_environment": os.environ.get("AWS_EXECUTION_ENV", None),
"agent_version": version,
"agent_language": "python",
}

def finalize(self):
Expand Down
2 changes: 1 addition & 1 deletion newrelic/core/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -751,7 +751,7 @@ def default_otlp_host(host):
_settings.gc_runtime_metrics.top_object_count_limit = 5

_settings.memory_runtime_pid_metrics.enabled = _environ_as_bool(
"NEW_RELIC_MEMORY_RUNTIME_METRICS_ENABLED", default=True
"NEW_RELIC_MEMORY_RUNTIME_PID_METRICS_ENABLED", default=True
)

_settings.transaction_events.enabled = True
Expand Down
10 changes: 6 additions & 4 deletions newrelic/core/environment.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,10 +234,12 @@ def environment_settings():
if name in stdlib_builtin_module_names:
continue

try:
version = get_package_version(name)
except Exception:
version = None
# Don't attempt to look up version information for our hooks
if not nr_hook:
try:
version = get_package_version(name)
except Exception:
version = None

# If it has no version it's likely not a real package so don't report it unless
# it's a new relic hook.
Expand Down
51 changes: 41 additions & 10 deletions newrelic/hooks/application_celery.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,16 +28,18 @@
from newrelic.api.message_trace import MessageTrace
from newrelic.api.pre_function import wrap_pre_function
from newrelic.api.transaction import current_transaction
from newrelic.common.object_wrapper import FunctionWrapper, wrap_function_wrapper
from newrelic.common.object_wrapper import FunctionWrapper, wrap_function_wrapper, _NRBoundFunctionWrapper
from newrelic.core.agent import shutdown_agent

UNKNOWN_TASK_NAME = "<Unknown Task>"
MAPPING_TASK_NAMES = {"celery.starmap", "celery.map"}


def task_name(*args, **kwargs):
def task_info(instance, *args, **kwargs):
# Grab the current task, which can be located in either place
if args:
if instance:
task = instance
elif args:
task = args[0]
elif "task" in kwargs:
task = kwargs["task"]
Expand All @@ -46,27 +48,27 @@ def task_name(*args, **kwargs):

# Task can be either a task instance or a signature, which subclasses dict, or an actual dict in some cases.
task_name = getattr(task, "name", None) or task.get("task", UNKNOWN_TASK_NAME)
task_source = task

# Under mapping tasks, the root task name isn't descriptive enough so we append the
# subtask name to differentiate between different mapping tasks
if task_name in MAPPING_TASK_NAMES:
try:
subtask = kwargs["task"]["task"]
task_name = "/".join((task_name, subtask))
task_source = task.app._tasks[subtask]
except Exception:
pass

return task_name
return task_name, task_source


def CeleryTaskWrapper(wrapped):
def wrapper(wrapped, instance, args, kwargs):
transaction = current_transaction(active_only=False)

if instance is not None:
_name = task_name(instance, *args, **kwargs)
else:
_name = task_name(*args, **kwargs)
# Grab task name and source
_name, _source = task_info(instance, *args, **kwargs)

# A Celery Task can be called either outside of a transaction, or
# within the context of an existing transaction. There are 3
Expand All @@ -93,11 +95,11 @@ def wrapper(wrapped, instance, args, kwargs):
return wrapped(*args, **kwargs)

elif transaction:
with FunctionTrace(_name, source=instance):
with FunctionTrace(_name, source=_source):
return wrapped(*args, **kwargs)

else:
with BackgroundTask(application_instance(), _name, "Celery", source=instance) as transaction:
with BackgroundTask(application_instance(), _name, "Celery", source=_source) as transaction:
# Attempt to grab distributed tracing headers
try:
# Headers on earlier versions of Celery may end up as attributes
Expand Down Expand Up @@ -200,6 +202,26 @@ def wrap_Celery_send_task(wrapped, instance, args, kwargs):
return wrapped(*args, **kwargs)


def wrap_worker_optimizations(wrapped, instance, args, kwargs):
    """Guard New Relic's BaseTask.__call__ wrapper across Celery's worker
    optimization toggles.

    Celery's setup/reset_worker_optimizations rewrite BaseTask.__call__, which
    would otherwise discard our instrumentation. Strip the wrapper first, let
    Celery's metaprogramming run, then re-apply the wrapper afterwards.
    """
    # Attempt to uninstrument BaseTask before stack protection is installed
    # or uninstalled. Any failure (import error, unexpected attribute shape)
    # leaves base_task_cls as None so the rewrap step below is skipped.
    base_task_cls = None
    try:
        from celery.app.task import BaseTask as base_task_cls

        if isinstance(base_task_cls.__call__, _NRBoundFunctionWrapper):
            base_task_cls.__call__ = base_task_cls.__call__.__wrapped__
    except Exception:
        base_task_cls = None

    # Allow metaprogramming to run.
    result = wrapped(*args, **kwargs)

    # Rewrap the finalized BaseTask, provided the import above succeeded.
    if base_task_cls:
        base_task_cls.__call__ = CeleryTaskWrapper(base_task_cls.__call__)

    return result


def instrument_celery_app_base(module):
if hasattr(module, "Celery") and hasattr(module.Celery, "send_task"):
wrap_function_wrapper(module, "Celery.send_task", wrap_Celery_send_task)
Expand Down Expand Up @@ -239,3 +261,12 @@ def force_agent_shutdown(*args, **kwargs):

if hasattr(module, "Worker"):
wrap_pre_function(module, "Worker._do_exit", force_agent_shutdown)


def instrument_celery_app_trace(module):
    """Instrument celery.app.trace's worker optimization entry points.

    The same wrapper handles both setup and reset so that repeated patching
    and unpatching cannot strip New Relic's task wrappers.
    """
    for attribute in ("setup_worker_optimizations", "reset_worker_optimizations"):
        if hasattr(module, attribute):
            wrap_function_wrapper(module, attribute, wrap_worker_optimizations)
Loading

0 comments on commit 48250b2

Please sign in to comment.