diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 131e7c5c32..d20a7c02a7 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -34,8 +34,8 @@ jobs: runs-on: ubuntu-20.04 needs: - python - - elasticsearchserver01 - elasticsearchserver07 + - elasticsearchserver08 - gearman - grpc - kafka @@ -656,7 +656,7 @@ jobs: path: ./**/.coverage.* retention-days: 1 - elasticsearchserver01: + elasticsearchserver07: env: TOTAL_GROUPS: 1 @@ -669,8 +669,8 @@ jobs: timeout-minutes: 30 services: - es01: - image: elasticsearch:1.4.4 + es07: + image: elasticsearch:7.17.8 env: "discovery.type": "single-node" ports: @@ -708,7 +708,7 @@ jobs: path: ./**/.coverage.* retention-days: 1 - elasticsearchserver07: + elasticsearchserver08: env: TOTAL_GROUPS: 1 @@ -721,9 +721,10 @@ jobs: timeout-minutes: 30 services: - es01: - image: elasticsearch:7.13.2 + es08: + image: elasticsearch:8.6.0 env: + "xpack.security.enabled": "false" "discovery.type": "single-node" ports: - 8080:9200 diff --git a/.gitignore b/.gitignore index 8226b0e97f..d4550713fe 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +.DS_Store +.DS_Store/ + # Linter megalinter-reports/ diff --git a/newrelic/config.py b/newrelic/config.py index f193182479..203318287e 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2665,64 +2665,147 @@ def _process_module_builtin_defaults(): "aioredis.connection", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_connection" ) - _process_module_definition("redis.asyncio.client", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client") + _process_module_definition( + "redis.asyncio.client", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client" + ) - _process_module_definition("redis.asyncio.commands", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client") + _process_module_definition( + "redis.asyncio.commands", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client" + ) 
_process_module_definition( "redis.asyncio.connection", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_connection" ) + # v7 and below _process_module_definition( "elasticsearch.client", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.cat", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_cat", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.cat", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_cat_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.cluster", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_cluster", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.cluster", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_cluster_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.indices", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_indices", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.indices", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_indices_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.nodes", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_nodes", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.nodes", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_nodes_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.snapshot", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_snapshot", ) + # v8 and above 
+ _process_module_definition( + "elasticsearch._sync.client.snapshot", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_snapshot_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.tasks", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_tasks", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.tasks", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_tasks_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.ingest", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_ingest", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.ingest", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_ingest_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.connection.base", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_connection_base", ) + # v8 and above + _process_module_definition( + "elastic_transport._node._base", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elastic_transport__node__base", + ) + + # v7 and below _process_module_definition( "elasticsearch.transport", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_transport", ) + # v8 and above + _process_module_definition( + "elastic_transport._transport", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elastic_transport__transport", + ) _process_module_definition("pika.adapters", "newrelic.hooks.messagebroker_pika", "instrument_pika_adapters") _process_module_definition("pika.channel", "newrelic.hooks.messagebroker_pika", "instrument_pika_channel") diff --git a/newrelic/hooks/datastore_elasticsearch.py b/newrelic/hooks/datastore_elasticsearch.py index b4c6f3bb6f..2417aabfe5 100644 --- a/newrelic/hooks/datastore_elasticsearch.py +++ b/newrelic/hooks/datastore_elasticsearch.py @@ -14,7 +14,8 @@ from 
newrelic.api.datastore_trace import DatastoreTrace from newrelic.api.transaction import current_transaction -from newrelic.common.object_wrapper import wrap_function_wrapper +from newrelic.common.object_wrapper import function_wrapper, wrap_function_wrapper +from newrelic.common.package_version_utils import get_package_version_tuple from newrelic.packages import six # An index name can be a string, None or a sequence. In the case of None @@ -23,6 +24,8 @@ # obviously can also be more than one index name. Where we are certain # there is only a single index name we use it, otherwise we use 'other'. +ES_VERSION = get_package_version_tuple("elasticsearch") + def _index_name(index): if not index or index == "*": @@ -32,11 +35,25 @@ def _index_name(index): return index -def _extract_kwargs_index(*args, **kwargs): - return _index_name(kwargs.get("index")) +def _extract_args_index(index=None, *args, **kwargs): + return _index_name(index) -def _extract_args_index(index=None, *args, **kwargs): +def _extract_args_allocation_explain_index( + current_node=None, + error_trace=None, + filter_path=None, + human=None, + include_disk_info=None, + include_yes_decisions=None, + index=None, + *args, + **kwargs +): + return _index_name(index) + + +def _extract_args_name_index(name=None, index=None, *args, **kwargs): return _index_name(index) @@ -44,6 +61,22 @@ def _extract_args_body_index(body=None, index=None, *args, **kwargs): return _index_name(index) +def _extract_args_requests_index(requests=None, index=None, *args, **kwargs): + return _index_name(index) + + +def _extract_args_searches_index(searches=None, index=None, *args, **kwargs): + return _index_name(index) + + +def _extract_args_search_templates_index(search_templates=None, index=None, *args, **kwargs): + return _index_name(index) + + +def _extract_args_operations_index(operations=None, index=None, *args, **kwargs): + return _index_name(index) + + def _extract_args_doctype_body_index(doc_type=None, body=None, index=None, 
*args, **kwargs): return _index_name(index) @@ -52,11 +85,11 @@ def _extract_args_field_index(field=None, index=None, *args, **kwargs): return _index_name(index) -def _extract_args_name_body_index(name=None, body=None, index=None, *args, **kwargs): +def _extract_args_fields_index(fields=None, index=None, *args, **kwargs): return _index_name(index) -def _extract_args_name_index(name=None, index=None, *args, **kwargs): +def _extract_args_name_body_index(name=None, body=None, index=None, *args, **kwargs): return _index_name(index) @@ -64,19 +97,27 @@ def _extract_args_metric_index(metric=None, index=None, *args, **kwargs): return _index_name(index) +def _extract_args_settings_index(settings=None, index=None, *args, **kwargs): + return _index_name(index) + + +def instrument_es_methods(module, _class, client_methods, prefix=None): + for method_name, arg_extractor in client_methods: + if hasattr(getattr(module, _class), method_name): + wrap_elasticsearch_client_method(module, _class, method_name, arg_extractor, prefix) + + def wrap_elasticsearch_client_method(module, class_name, method_name, arg_extractor, prefix=None): def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): transaction = current_transaction() if transaction is None: return wrapped(*args, **kwargs) - - # When arg_extractor is None, it means there is no target field + # When index is None, it means there is no target field # associated with this method. Hence this method will only # create an operation metric and no statement metric. This is # handled by setting the target to None when calling the # DatastoreTraceWrapper. 
- if arg_extractor is None: index = None else: @@ -105,7 +146,7 @@ def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): wrap_function_wrapper(module, "%s.%s" % (class_name, method_name), _nr_wrapper_Elasticsearch_method_) -_elasticsearch_client_methods = ( +_elasticsearch_client_methods_below_v8 = ( ("abort_benchmark", None), ("benchmark", _extract_args_index), ("bulk", None), @@ -147,13 +188,68 @@ def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): ) +_elasticsearch_client_methods_v8 = ( + ("bulk", _extract_args_operations_index), + ("clear_scroll", None), + ("close", None), + ("close_point_in_time", None), + ("count", _extract_args_index), + ("create", _extract_args_index), + ("delete", _extract_args_index), + ("delete_by_query", _extract_args_index), + ("delete_by_query_rethrottle", None), + ("delete_script", None), + ("exists", _extract_args_index), + ("exists_source", _extract_args_index), + ("explain", _extract_args_index), + ("field_caps", _extract_args_index), + ("get", _extract_args_index), + ("get_script", None), + ("get_script_context", None), + ("get_script_languages", None), + ("get_source", _extract_args_index), + ("index", _extract_args_index), + ("info", None), + ("knn_search", _extract_args_index), + ("mget", _extract_args_index), + ("msearch", _extract_args_searches_index), + ("msearch_template", _extract_args_search_templates_index), + ("mtermvectors", _extract_args_index), + ("open_point_in_time", _extract_args_index), + ("options", None), + ("ping", None), + ("put_script", None), + ("rank_eval", _extract_args_requests_index), + ("reindex", None), + ("reindex_rethrottle", None), + ("render_search_template", None), + ("scripts_painless_execute", None), + ("scroll", None), + ("search", _extract_args_index), + ("search_mvt", _extract_args_index), + ("search_shards", _extract_args_index), + ("terms_enum", _extract_args_index), + ("termvector", _extract_args_index), + ("termvectors", _extract_args_index), + 
("update", _extract_args_index), + ("update_by_query", _extract_args_index), + ("update_by_query_rethrottle", None), +) + + def instrument_elasticsearch_client(module): - for method_name, arg_extractor in _elasticsearch_client_methods: - if hasattr(getattr(module, "Elasticsearch"), method_name): - wrap_elasticsearch_client_method(module, "Elasticsearch", method_name, arg_extractor) + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. + if ES_VERSION < (8,): + instrument_es_methods(module, "Elasticsearch", _elasticsearch_client_methods_below_v8) + + +def instrument_elasticsearch_client_v8(module): + instrument_es_methods(module, "Elasticsearch", _elasticsearch_client_methods_v8) -_elasticsearch_client_indices_methods = ( +_elasticsearch_client_indices_methods_below_v8 = ( ("analyze", _extract_args_index), ("clear_cache", _extract_args_index), ("close", _extract_args_index), @@ -196,13 +292,77 @@ def instrument_elasticsearch_client(module): ) +_elasticsearch_client_indices_methods_v8 = ( + ("add_block", _extract_args_index), + ("analyze", _extract_args_index), + ("clear_cache", _extract_args_index), + ("clone", _extract_args_index), + ("close", _extract_args_index), + ("create", _extract_args_index), + ("create_data_stream", None), + ("data_streams_stats", None), + ("delete", _extract_args_index), + ("delete_alias", _extract_args_index), + ("delete_data_stream", None), + ("delete_index_template", None), + ("delete_template", None), + ("disk_usage", _extract_args_index), + ("downsample", _extract_args_index), + ("exists", _extract_args_index), + ("exists_alias", _extract_args_name_index), + ("exists_index_template", None), + ("exists_template", None), + ("field_usage_stats", _extract_args_index), + ("flush", _extract_args_index), + ("forcemerge", _extract_args_index), + ("get", _extract_args_index), + ("get_alias", _extract_args_index), + ("get_data_stream", None), + 
("get_field_mapping", _extract_args_fields_index), + ("get_index_template", None), + ("get_mapping", _extract_args_index), + ("get_settings", _extract_args_index), + ("get_template", None), + ("migrate_to_data_stream", None), + ("modify_data_stream", None), + ("open", _extract_args_index), + ("promote_data_stream", None), + ("put_alias", _extract_args_index), + ("put_index_template", None), + ("put_mapping", _extract_args_index), + ("put_settings", _extract_args_settings_index), + ("put_template", None), + ("recovery", _extract_args_index), + ("refresh", _extract_args_index), + ("reload_search_analyzers", _extract_args_index), + ("resolve_index", None), + ("rollover", None), + ("segments", _extract_args_index), + ("shard_stores", _extract_args_index), + ("shrink", _extract_args_index), + ("simulate_index_template", None), + ("simulate_template", None), + ("split", _extract_args_index), + ("stats", _extract_args_index), + ("unfreeze", _extract_args_index), + ("update_aliases", None), + ("validate_query", _extract_args_index), +) + + def instrument_elasticsearch_client_indices(module): - for method_name, arg_extractor in _elasticsearch_client_indices_methods: - if hasattr(getattr(module, "IndicesClient"), method_name): - wrap_elasticsearch_client_method(module, "IndicesClient", method_name, arg_extractor, "indices") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "IndicesClient", _elasticsearch_client_indices_methods_below_v8, "indices") + +def instrument_elasticsearch_client_indices_v8(module): + instrument_es_methods(module, "IndicesClient", _elasticsearch_client_indices_methods_v8, "indices") -_elasticsearch_client_cat_methods = ( + +_elasticsearch_client_cat_methods_below_v8 = ( ("aliases", None), ("allocation", None), ("count", _extract_args_index), @@ -220,18 +380,72 @@ def instrument_elasticsearch_client_indices(module): ("thread_pool", None), ) +_elasticsearch_client_cat_methods_v8 = ( + ("aliases", None), + ("allocation", None), + ("component_templates", None), + ("count", _extract_args_index), + ("fielddata", None), + ("health", None), + ("help", None), + ("indices", _extract_args_index), + ("master", None), + ("ml_data_frame_analytics", None), + ("ml_datafeeds", None), + ("ml_jobs", None), + ("ml_trained_models", None), + ("nodeattrs", None), + ("nodes", None), + ("pending_tasks", None), + ("plugins", None), + ("recovery", _extract_args_index), + ("repositories", None), + ("segments", _extract_args_index), + ("shards", _extract_args_index), + ("snapshots", None), + ("tasks", None), + ("templates", None), + ("thread_pool", None), + ("transforms", None), +) + def instrument_elasticsearch_client_cat(module): - for method_name, arg_extractor in _elasticsearch_client_cat_methods: - if hasattr(getattr(module, "CatClient"), method_name): - wrap_elasticsearch_client_method(module, "CatClient", method_name, arg_extractor, "cat") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "CatClient", _elasticsearch_client_cat_methods_below_v8, "cat") + + +def instrument_elasticsearch_client_cat_v8(module): + instrument_es_methods(module, "CatClient", _elasticsearch_client_cat_methods_v8, "cat") + + +_elasticsearch_client_cluster_methods_below_v8 = ( + ("get_settings", None), + ("health", _extract_args_index), + ("pending_tasks", None), + ("put_settings", None), + ("reroute", None), + ("state", _extract_args_metric_index), + ("stats", None), +) -_elasticsearch_client_cluster_methods = ( +_elasticsearch_client_cluster_methods_v8 = ( + ("allocation_explain", _extract_args_allocation_explain_index), + ("delete_component_template", None), + ("delete_voting_config_exclusions", None), + ("exists_component_template", None), + ("get_component_template", None), ("get_settings", None), ("health", _extract_args_index), ("pending_tasks", None), + ("post_voting_config_exclusions", None), + ("put_component_template", None), ("put_settings", None), + ("remote_info", None), ("reroute", None), ("state", _extract_args_metric_index), ("stats", None), @@ -239,26 +453,60 @@ def instrument_elasticsearch_client_cat(module): def instrument_elasticsearch_client_cluster(module): - for method_name, arg_extractor in _elasticsearch_client_cluster_methods: - if hasattr(getattr(module, "ClusterClient"), method_name): - wrap_elasticsearch_client_method(module, "ClusterClient", method_name, arg_extractor, "cluster") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "ClusterClient", _elasticsearch_client_cluster_methods_below_v8, "cluster") + + +def instrument_elasticsearch_client_cluster_v8(module): + instrument_es_methods(module, "ClusterClient", _elasticsearch_client_cluster_methods_v8, "cluster") -_elasticsearch_client_nodes_methods = ( +_elasticsearch_client_nodes_methods_below_v8 = ( ("hot_threads", None), ("info", None), ("shutdown", None), ("stats", None), ) +_elasticsearch_client_nodes_methods_v8 = ( + ("clear_repositories_metering_archive", None), + ("get_repositories_metering_info", None), + ("hot_threads", None), + ("info", None), + ("reload_secure_settings", None), + ("stats", None), + ("usage", None), +) def instrument_elasticsearch_client_nodes(module): - for method_name, arg_extractor in _elasticsearch_client_nodes_methods: - if hasattr(getattr(module, "NodesClient"), method_name): - wrap_elasticsearch_client_method(module, "NodesClient", method_name, arg_extractor, "nodes") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "NodesClient", _elasticsearch_client_nodes_methods_below_v8, "nodes") + +def instrument_elasticsearch_client_nodes_v8(module): + instrument_es_methods(module, "NodesClient", _elasticsearch_client_nodes_methods_v8, "nodes") -_elasticsearch_client_snapshot_methods = ( + +_elasticsearch_client_snapshot_methods_below_v8 = ( + ("create", None), + ("create_repository", None), + ("delete", None), + ("delete_repository", None), + ("get", None), + ("get_repository", None), + ("restore", None), + ("status", None), + ("verify_repository", None), +) +_elasticsearch_client_snapshot_methods_v8 = ( + ("cleanup_repository", None), + ("clone", None), ("create", None), ("create_repository", None), ("delete", None), @@ -272,9 +520,15 @@ def instrument_elasticsearch_client_nodes(module): def instrument_elasticsearch_client_snapshot(module): - for method_name, arg_extractor in _elasticsearch_client_snapshot_methods: - if hasattr(getattr(module, "SnapshotClient"), method_name): - wrap_elasticsearch_client_method(module, "SnapshotClient", method_name, arg_extractor, "snapshot") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "SnapshotClient", _elasticsearch_client_snapshot_methods_below_v8, "snapshot") + + +def instrument_elasticsearch_client_snapshot_v8(module): + instrument_es_methods(module, "SnapshotClient", _elasticsearch_client_snapshot_methods_v8, "snapshot") _elasticsearch_client_tasks_methods = ( @@ -285,23 +539,44 @@ def instrument_elasticsearch_client_snapshot(module): def instrument_elasticsearch_client_tasks(module): - for method_name, arg_extractor in _elasticsearch_client_tasks_methods: - if hasattr(getattr(module, "TasksClient"), method_name): - wrap_elasticsearch_client_method(module, "TasksClient", method_name, arg_extractor, "tasks") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. + if ES_VERSION < (8,): + instrument_es_methods(module, "TasksClient", _elasticsearch_client_tasks_methods, "tasks") + + +def instrument_elasticsearch_client_tasks_v8(module): + instrument_es_methods(module, "TasksClient", _elasticsearch_client_tasks_methods, "tasks") -_elasticsearch_client_ingest_methods = ( +_elasticsearch_client_ingest_methods_below_v8 = ( ("get_pipeline", None), ("put_pipeline", None), ("delete_pipeline", None), ("simulate", None), ) +_elasticsearch_client_ingest_methods_v8 = ( + ("delete_pipeline", None), + ("geo_ip_stats", None), + ("get_pipeline", None), + ("processor_grok", None), + ("put_pipeline", None), + ("simulate", None), +) + def instrument_elasticsearch_client_ingest(module): - for method_name, arg_extractor in _elasticsearch_client_ingest_methods: - if hasattr(getattr(module, "IngestClient"), method_name): - wrap_elasticsearch_client_method(module, "IngestClient", method_name, arg_extractor, "ingest") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "IngestClient", _elasticsearch_client_ingest_methods_below_v8, "ingest") + + +def instrument_elasticsearch_client_ingest_v8(module): + instrument_es_methods(module, "IngestClient", _elasticsearch_client_ingest_methods_v8, "ingest") # @@ -326,6 +601,17 @@ def instrument_elasticsearch_connection_base(module): wrap_function_wrapper(module, "Connection.__init__", _nr_Connection__init__wrapper) +def BaseNode__init__wrapper(wrapped, instance, args, kwargs): + result = wrapped(*args, **kwargs) + instance._nr_host_port = (instance.host, str(instance.port)) + return result + + +def instrument_elastic_transport__node__base(module): + if hasattr(module, "BaseNode"): + wrap_function_wrapper(module, "BaseNode.__init__", BaseNode__init__wrapper) + + def _nr_get_connection_wrapper(wrapped, instance, args, kwargs): """Read instance info from Connection and stash on Transaction.""" @@ -343,7 +629,7 @@ def _nr_get_connection_wrapper(wrapped, instance, args, kwargs): if tracer_settings.instance_reporting.enabled: host, port_path_or_id = conn._nr_host_port instance_info = (host, port_path_or_id, None) - except: + except Exception: instance_info = ("unknown", "unknown", None) transaction._nr_datastore_instance_info = instance_info @@ -351,5 +637,26 @@ def _nr_get_connection_wrapper(wrapped, instance, args, kwargs): return conn +def _nr_perform_request_wrapper(wrapped, instance, args, kwargs): + """Read instance info from Connection and stash on Transaction.""" + + transaction = current_transaction() + + if transaction is None: + return wrapped(*args, **kwargs) + + if not hasattr(instance.node_pool.get, "_nr_wrapped"): + instance.node_pool.get = function_wrapper(_nr_get_connection_wrapper)(instance.node_pool.get) + instance.node_pool.get._nr_wrapped = True + + return wrapped(*args, **kwargs) + + def instrument_elasticsearch_transport(module): - wrap_function_wrapper(module, "Transport.get_connection", _nr_get_connection_wrapper) + 
if hasattr(module, "Transport") and hasattr(module.Transport, "get_connection"): + wrap_function_wrapper(module, "Transport.get_connection", _nr_get_connection_wrapper) + + +def instrument_elastic_transport__transport(module): + if hasattr(module, "Transport") and hasattr(module.Transport, "perform_request"): + wrap_function_wrapper(module, "Transport.perform_request", _nr_perform_request_wrapper) diff --git a/tests/datastore_elasticsearch/conftest.py b/tests/datastore_elasticsearch/conftest.py index d665bce870..5cb0b0824e 100644 --- a/tests/datastore_elasticsearch/conftest.py +++ b/tests/datastore_elasticsearch/conftest.py @@ -13,25 +13,43 @@ # limitations under the License. import pytest +from testing_support.db_settings import elasticsearch_settings +from testing_support.fixtures import ( # noqa + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from newrelic.common.package_version_utils import get_package_version _coverage_source = [ - 'newrelic.hooks.datastore_elasticsearch', + "newrelic.hooks.datastore_elasticsearch", ] code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (datastore_elasticsearch)', - default_settings=_default_settings, - linked_applications=['Python Agent Test 
(datastore)']) + app_name="Python Agent Test (datastore_elasticsearch)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (datastore)"], +) + +ES_VERSION = tuple([int(n) for n in get_package_version("elasticsearch").split(".")]) +ES_SETTINGS = elasticsearch_settings()[0] +ES_MULTIPLE_SETTINGS = elasticsearch_settings() +ES_URL = "http://%s:%s" % (ES_SETTINGS["host"], ES_SETTINGS["port"]) + + +@pytest.fixture(scope="session") +def client(): + from elasticsearch import Elasticsearch + + return Elasticsearch(ES_URL) diff --git a/tests/datastore_elasticsearch/test_connection.py b/tests/datastore_elasticsearch/test_connection.py index 37df49b804..2e888af9b5 100644 --- a/tests/datastore_elasticsearch/test_connection.py +++ b/tests/datastore_elasticsearch/test_connection.py @@ -12,21 +12,53 @@ # See the License for the specific language governing permissions and # limitations under the License. -from elasticsearch.connection.base import Connection +import pytest + +try: + from elasticsearch.connection.base import Connection +except ImportError: + from elastic_transport._models import NodeConfig + from elastic_transport._node._base import BaseNode as Connection + +from conftest import ES_VERSION, ES_SETTINGS + + +HOST = {"scheme": "http", "host": ES_SETTINGS["host"], "port": int(ES_SETTINGS["port"])} + +IS_V8 = ES_VERSION >= (8,) +SKIP_IF_V7 = pytest.mark.skipif(not IS_V8, reason="Skipping v8 tests.") +SKIP_IF_V8 = pytest.mark.skipif(IS_V8, reason="Skipping v7 tests.") def test_connection_default(): - conn = Connection() - assert conn._nr_host_port == ('localhost', '9200') + if IS_V8: + conn = Connection(NodeConfig(**HOST)) + else: + conn = Connection(**HOST) + + assert conn._nr_host_port == ("localhost", ES_SETTINGS["port"]) + +@SKIP_IF_V7 +def test_connection_config(): + conn = Connection(NodeConfig(scheme="http", host="foo", port=8888)) + assert conn._nr_host_port == ("foo", "8888") + + +@SKIP_IF_V8 def test_connection_host_arg(): - conn = 
Connection('the_host') - assert conn._nr_host_port == ('the_host', '9200') + conn = Connection("the_host") + assert conn._nr_host_port == ("the_host", "9200") + +@SKIP_IF_V8 def test_connection_args(): - conn = Connection('the_host', 9999) - assert conn._nr_host_port == ('the_host', '9999') + conn = Connection("the_host", 9999) + assert conn._nr_host_port == ("the_host", "9999") + +@SKIP_IF_V8 def test_connection_kwargs(): - conn = Connection(host='foo', port=8888) - assert conn._nr_host_port == ('foo', '8888') + conn = Connection(host="foo", port=8888) + assert conn._nr_host_port == ("foo", "8888") + diff --git a/tests/datastore_elasticsearch/test_database_duration.py b/tests/datastore_elasticsearch/test_database_duration.py index a76f700b1b..e2599c67b6 100644 --- a/tests/datastore_elasticsearch/test_database_duration.py +++ b/tests/datastore_elasticsearch/test_database_duration.py @@ -14,38 +14,48 @@ import sqlite3 -from elasticsearch import Elasticsearch +from testing_support.validators.validate_database_duration import ( + validate_database_duration, +) from newrelic.api.background_task import background_task -from testing_support.db_settings import elasticsearch_settings -from testing_support.validators.validate_database_duration import validate_database_duration +from conftest import ES_VERSION -ES_SETTINGS = elasticsearch_settings()[0] -ES_URL = 'http://%s:%s' % (ES_SETTINGS['host'], ES_SETTINGS['port']) -def _exercise_es(es): - es.index(index="contacts", doc_type="person", - body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) - es.index(index="contacts", doc_type="person", - body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2) - es.index(index="contacts", doc_type="person", - body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) - es.indices.refresh('contacts') + + +def _exercise_es_v7(es): + es.index(index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, 
id=1) + es.index( + index="contacts", doc_type="person", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2 + ) + es.index(index="contacts", doc_type="person", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) + es.indices.refresh("contacts") + + +def _exercise_es_v8(es): + es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + es.index(index="contacts", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2) + es.index(index="contacts", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) + es.indices.refresh(index="contacts") + + +_exercise_es = _exercise_es_v7 if ES_VERSION < (8, 0, 0) else _exercise_es_v8 + @validate_database_duration() @background_task() -def test_elasticsearch_database_duration(): - client = Elasticsearch(ES_URL) +def test_elasticsearch_database_duration(client): _exercise_es(client) + @validate_database_duration() @background_task() -def test_elasticsearch_and_sqlite_database_duration(): +def test_elasticsearch_and_sqlite_database_duration(client): # Make Elasticsearch queries - client = Elasticsearch(ES_URL) _exercise_es(client) # Make sqlite queries diff --git a/tests/datastore_elasticsearch/test_elasticsearch.py b/tests/datastore_elasticsearch/test_elasticsearch.py index 65a0374df0..d2c892ea92 100644 --- a/tests/datastore_elasticsearch/test_elasticsearch.py +++ b/tests/datastore_elasticsearch/test_elasticsearch.py @@ -12,122 +12,120 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from elasticsearch import Elasticsearch import elasticsearch.client +from testing_support.fixtures import override_application_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task -from testing_support.fixtures import override_application_settings -from testing_support.validators.validate_transaction_errors import validate_transaction_errors -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.db_settings import elasticsearch_settings -from testing_support.util import instance_hostname +from conftest import ES_VERSION, ES_SETTINGS -ES_SETTINGS = elasticsearch_settings()[0] -ES_URL = 'http://%s:%s' % (ES_SETTINGS['host'], ES_SETTINGS['port']) # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, } # Metrics _base_scoped_metrics = [ - ('Datastore/statement/Elasticsearch/_all/cluster.health', 1), - ('Datastore/statement/Elasticsearch/_all/search', 2), - ('Datastore/statement/Elasticsearch/address/index', 2), - ('Datastore/statement/Elasticsearch/address/search', 1), - ('Datastore/statement/Elasticsearch/contacts/index', 3), - ('Datastore/statement/Elasticsearch/contacts/indices.refresh', 1), - ('Datastore/statement/Elasticsearch/contacts/search', 2), - ('Datastore/statement/Elasticsearch/other/search', 2), + ("Datastore/statement/Elasticsearch/_all/cluster.health", 1), + ("Datastore/statement/Elasticsearch/_all/search", 2), + ("Datastore/statement/Elasticsearch/address/index", 2), + 
("Datastore/statement/Elasticsearch/address/search", 1), + ("Datastore/statement/Elasticsearch/contacts/index", 3), + ("Datastore/statement/Elasticsearch/contacts/indices.refresh", 1), + ("Datastore/statement/Elasticsearch/contacts/search", 2), + ("Datastore/statement/Elasticsearch/other/search", 2), ] _base_rollup_metrics = [ - ('Datastore/operation/Elasticsearch/cluster.health', 1), - ('Datastore/operation/Elasticsearch/index', 5), - ('Datastore/operation/Elasticsearch/indices.refresh', 1), - ('Datastore/operation/Elasticsearch/search', 7), - ('Datastore/statement/Elasticsearch/_all/cluster.health', 1), - ('Datastore/statement/Elasticsearch/_all/search', 2), - ('Datastore/statement/Elasticsearch/address/index', 2), - ('Datastore/statement/Elasticsearch/address/search', 1), - ('Datastore/statement/Elasticsearch/contacts/index', 3), - ('Datastore/statement/Elasticsearch/contacts/indices.refresh', 1), - ('Datastore/statement/Elasticsearch/contacts/search', 2), - ('Datastore/statement/Elasticsearch/other/search', 2), + ("Datastore/operation/Elasticsearch/cluster.health", 1), + ("Datastore/operation/Elasticsearch/index", 5), + ("Datastore/operation/Elasticsearch/indices.refresh", 1), + ("Datastore/operation/Elasticsearch/search", 7), + ("Datastore/statement/Elasticsearch/_all/cluster.health", 1), + ("Datastore/statement/Elasticsearch/_all/search", 2), + ("Datastore/statement/Elasticsearch/address/index", 2), + ("Datastore/statement/Elasticsearch/address/search", 1), + ("Datastore/statement/Elasticsearch/contacts/index", 3), + ("Datastore/statement/Elasticsearch/contacts/indices.refresh", 1), + ("Datastore/statement/Elasticsearch/contacts/search", 2), + ("Datastore/statement/Elasticsearch/other/search", 2), ] # Version support +def is_importable(module_path): + try: + __import__(module_path) + return True + except ImportError: + return False + + _all_count = 14 -try: - import elasticsearch.client.cat - _base_scoped_metrics.append( - 
('Datastore/operation/Elasticsearch/cat.health', 1)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/cat.health', 1)) +if is_importable("elasticsearch.client.cat") or is_importable("elasticsearch._sync.client.cat"): + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/cat.health", 1)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/cat.health", 1)) _all_count += 1 -except ImportError: - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/cat.health', None)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/cat.health', None)) - -try: - import elasticsearch.client.nodes - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/nodes.info', 1)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/nodes.info', 1)) +else: + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/cat.health", None)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/cat.health", None)) + +if is_importable("elasticsearch.client.nodes") or is_importable("elasticsearch._sync.client.nodes"): + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", 1)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", 1)) _all_count += 1 -except ImportError: - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/nodes.info', None)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/nodes.info', None)) - -if (hasattr(elasticsearch.client, 'SnapshotClient') and - hasattr(elasticsearch.client.SnapshotClient, 'status')): - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/snapshot.status', 1)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/snapshot.status', 1)) +else: + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", None)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", None)) + +if 
hasattr(elasticsearch.client, "SnapshotClient") and hasattr(elasticsearch.client.SnapshotClient, "status"): + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", 1)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", 1)) _all_count += 1 else: - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/snapshot.status', None)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/snapshot.status', None)) - -if hasattr(elasticsearch.client.IndicesClient, 'status'): - _base_scoped_metrics.append( - ('Datastore/statement/Elasticsearch/_all/indices.status', 1)) - _base_rollup_metrics.extend([ - ('Datastore/operation/Elasticsearch/indices.status', 1), - ('Datastore/statement/Elasticsearch/_all/indices.status', 1), - ]) + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", None)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", None)) + +if hasattr(elasticsearch.client.IndicesClient, "status"): + _base_scoped_metrics.append(("Datastore/statement/Elasticsearch/_all/indices.status", 1)) + _base_rollup_metrics.extend( + [ + ("Datastore/operation/Elasticsearch/indices.status", 1), + ("Datastore/statement/Elasticsearch/_all/indices.status", 1), + ] + ) _all_count += 1 else: - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/indices.status', None)) - _base_rollup_metrics.extend([ - ('Datastore/operation/Elasticsearch/indices.status', None), - ('Datastore/statement/Elasticsearch/_all/indices.status', None), - ]) - -_base_rollup_metrics.extend([ - ('Datastore/all', _all_count), - ('Datastore/allOther', _all_count), - ('Datastore/Elasticsearch/all', _all_count), - ('Datastore/Elasticsearch/allOther', _all_count), -]) + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/indices.status", None)) + _base_rollup_metrics.extend( + [ + ("Datastore/operation/Elasticsearch/indices.status", None), + 
("Datastore/statement/Elasticsearch/_all/indices.status", None), + ] + ) + +_base_rollup_metrics.extend( + [ + ("Datastore/all", _all_count), + ("Datastore/allOther", _all_count), + ("Datastore/Elasticsearch/all", _all_count), + ("Datastore/Elasticsearch/allOther", _all_count), + ] +) # Instance info @@ -137,74 +135,105 @@ _enable_scoped_metrics = list(_base_scoped_metrics) _enable_rollup_metrics = list(_base_rollup_metrics) -_host = instance_hostname(ES_SETTINGS['host']) -_port = ES_SETTINGS['port'] +_host = instance_hostname(ES_SETTINGS["host"]) +_port = ES_SETTINGS["port"] -_instance_metric_name = 'Datastore/instance/Elasticsearch/%s/%s' % ( - _host, _port) +_instance_metric_name = "Datastore/instance/Elasticsearch/%s/%s" % (_host, _port) -_enable_rollup_metrics.append( - (_instance_metric_name, _all_count) -) +_enable_rollup_metrics.append((_instance_metric_name, _all_count)) -_disable_rollup_metrics.append( - (_instance_metric_name, None) -) +_disable_rollup_metrics.append((_instance_metric_name, None)) # Query -def _exercise_es(es): - es.index(index="contacts", doc_type="person", - body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) - es.index(index="contacts", doc_type="person", - body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2) - es.index(index="contacts", doc_type="person", - body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) - es.indices.refresh('contacts') - es.index(index="address", doc_type="employee", body={"name": "Sherlock", - "address": "221B Baker Street, London"}, id=1) - es.index(index="address", doc_type="employee", body={"name": "Bilbo", - "address": "Bag End, Bagshot row, Hobbiton, Shire"}, id=2) - es.search(index='contacts', q='name:Joe') - es.search(index='contacts', q='name:jessica') - es.search(index='address', q='name:Sherlock') - es.search(index=['contacts', 'address'], q='name:Bilbo') - es.search(index='contacts,address', q='name:Bilbo') - es.search(index='*', q='name:Bilbo') 
- es.search(q='name:Bilbo') + +def _exercise_es_v7(es): + es.index(index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + es.index( + index="contacts", doc_type="person", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2 + ) + es.index(index="contacts", doc_type="person", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) + es.indices.refresh("contacts") + es.index( + index="address", doc_type="employee", body={"name": "Sherlock", "address": "221B Baker Street, London"}, id=1 + ) + es.index( + index="address", + doc_type="employee", + body={"name": "Bilbo", "address": "Bag End, Bagshot row, Hobbiton, Shire"}, + id=2, + ) + es.search(index="contacts", q="name:Joe") + es.search(index="contacts", q="name:jessica") + es.search(index="address", q="name:Sherlock") + es.search(index=["contacts", "address"], q="name:Bilbo") + es.search(index="contacts,address", q="name:Bilbo") + es.search(index="*", q="name:Bilbo") + es.search(q="name:Bilbo") es.cluster.health() - if hasattr(es, 'cat'): + if hasattr(es, "cat"): es.cat.health() - if hasattr(es, 'nodes'): + if hasattr(es, "nodes"): es.nodes.info() - if hasattr(es, 'snapshot') and hasattr(es.snapshot, 'status'): + if hasattr(es, "snapshot") and hasattr(es.snapshot, "status"): es.snapshot.status() - if hasattr(es.indices, 'status'): + if hasattr(es.indices, "status"): es.indices.status() + +def _exercise_es_v8(es): + es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + es.index(index="contacts", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2) + es.index(index="contacts", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) + es.indices.refresh(index="contacts") + es.index(index="address", body={"name": "Sherlock", "address": "221B Baker Street, London"}, id=1) + es.index(index="address", body={"name": "Bilbo", "address": "Bag End, Bagshot row, 
Hobbiton, Shire"}, id=2) + es.search(index="contacts", q="name:Joe") + es.search(index="contacts", q="name:jessica") + es.search(index="address", q="name:Sherlock") + es.search(index=["contacts", "address"], q="name:Bilbo") + es.search(index="contacts,address", q="name:Bilbo") + es.search(index="*", q="name:Bilbo") + es.search(q="name:Bilbo") + es.cluster.health() + + if hasattr(es, "cat"): + es.cat.health() + if hasattr(es, "nodes"): + es.nodes.info() + if hasattr(es, "snapshot") and hasattr(es.snapshot, "status"): + es.snapshot.status() + if hasattr(es.indices, "status"): + es.indices.status() + + +_exercise_es = _exercise_es_v7 if ES_VERSION < (8, 0, 0) else _exercise_es_v8 + + # Test @validate_transaction_errors(errors=[]) @validate_transaction_metrics( - 'test_elasticsearch:test_elasticsearch_operation_disabled', - scoped_metrics=_disable_scoped_metrics, - rollup_metrics=_disable_rollup_metrics, - background_task=True) + "test_elasticsearch:test_elasticsearch_operation_disabled", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) @override_application_settings(_disable_instance_settings) @background_task() -def test_elasticsearch_operation_disabled(): - client = Elasticsearch(ES_URL) +def test_elasticsearch_operation_disabled(client): _exercise_es(client) + @validate_transaction_errors(errors=[]) @validate_transaction_metrics( - 'test_elasticsearch:test_elasticsearch_operation_enabled', - scoped_metrics=_enable_scoped_metrics, - rollup_metrics=_enable_rollup_metrics, - background_task=True) + "test_elasticsearch:test_elasticsearch_operation_enabled", + scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) @override_application_settings(_enable_instance_settings) @background_task() -def test_elasticsearch_operation_enabled(): - client = Elasticsearch(ES_URL) +def test_elasticsearch_operation_enabled(client): _exercise_es(client) diff --git 
a/tests/datastore_elasticsearch/test_instrumented_methods.py b/tests/datastore_elasticsearch/test_instrumented_methods.py index 28ca8f975d..4ad88c2a58 100644 --- a/tests/datastore_elasticsearch/test_instrumented_methods.py +++ b/tests/datastore_elasticsearch/test_instrumented_methods.py @@ -11,61 +11,131 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - import elasticsearch import elasticsearch.client +import pytest +from conftest import ES_VERSION +from testing_support.validators.validate_datastore_trace_inputs import ( + validate_datastore_trace_inputs, +) + +from newrelic.api.background_task import background_task -from newrelic.hooks.datastore_elasticsearch import ( - _elasticsearch_client_methods, - _elasticsearch_client_indices_methods, - _elasticsearch_client_cat_methods, - _elasticsearch_client_cluster_methods, - _elasticsearch_client_nodes_methods, - _elasticsearch_client_snapshot_methods, - _elasticsearch_client_tasks_methods, - _elasticsearch_client_ingest_methods, +RUN_IF_V8 = pytest.mark.skipif( + ES_VERSION < (8,), reason="Only run for v8+. We don't support all methods in previous versions." 
) -def _test_methods_wrapped(object, method_name_tuples): - for method_name, _ in method_name_tuples: - method = getattr(object, method_name, None) - if method is not None: - err = '%s.%s isnt being wrapped' % (object, method) - assert hasattr(method, '__wrapped__'), err +@pytest.fixture +def client(client): + if ES_VERSION < (8, 0): + client.index( + index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1 + ) + else: + client.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + return client + + +@pytest.mark.parametrize( + "sub_module,method,args,kwargs,expected_index", + [ + (None, "exists", (), {"index": "contacts", "id": 1}, "contacts"), + (None, "info", (), {}, None), + pytest.param( + None, + "msearch", + (), + {"searches": [{}, {"query": {"match": {"message": "this is a test"}}}], "index": "contacts"}, + "contacts", + marks=RUN_IF_V8, + ), + ("indices", "exists", (), {"index": "contacts"}, "contacts"), + ("indices", "exists_template", (), {"name": "no-exist"}, None), + ("cat", "count", (), {"index": "contacts"}, "contacts"), + ("cat", "health", (), {}, None), + pytest.param( + "cluster", + "allocation_explain", + (), + {"index": "contacts", "shard": 0, "primary": True}, + "contacts", + marks=RUN_IF_V8, + ), + ("cluster", "get_settings", (), {}, None), + ("cluster", "health", (), {"index": "contacts"}, "contacts"), + ("nodes", "info", (), {}, None), + ("snapshot", "status", (), {}, None), + ("tasks", "list", (), {}, None), + ("ingest", "geo_ip_stats", (), {}, None), + ], +) +def test_method_on_client_datastore_trace_inputs(client, sub_module, method, args, kwargs, expected_index): + expected_operation = "%s.%s" % (sub_module, method) if sub_module else method + + @validate_datastore_trace_inputs(target=expected_index, operation=expected_operation) + @background_task() + def _test(): + if not sub_module: + getattr(client, method)(*args, **kwargs) + else: + 
getattr(getattr(client, sub_module), method)(*args, **kwargs) + + _test() + + +def _test_methods_wrapped(_object, ignored_methods=None): + if not ignored_methods: + ignored_methods = {"perform_request", "transport"} + + def is_wrapped(m): + return hasattr(getattr(_object, m), "__wrapped__") + + methods = {m for m in dir(_object) if not m[0] == "_"} + uninstrumented = {m for m in (methods - ignored_methods) if not is_wrapped(m)} + assert not uninstrumented, "There are uninstrumented methods: %s" % uninstrumented + + +@RUN_IF_V8 def test_instrumented_methods_client(): - _test_methods_wrapped(elasticsearch.Elasticsearch, - _elasticsearch_client_methods) + _test_methods_wrapped(elasticsearch.Elasticsearch) + +@RUN_IF_V8 def test_instrumented_methods_client_indices(): - _test_methods_wrapped(elasticsearch.client.IndicesClient, - _elasticsearch_client_indices_methods) + _test_methods_wrapped(elasticsearch.client.IndicesClient) + +@RUN_IF_V8 def test_instrumented_methods_client_cluster(): - _test_methods_wrapped(elasticsearch.client.ClusterClient, - _elasticsearch_client_cluster_methods) + _test_methods_wrapped(elasticsearch.client.ClusterClient) + +@RUN_IF_V8 def test_instrumented_methods_client_cat(): - if hasattr(elasticsearch.client, 'CatClient'): - _test_methods_wrapped(elasticsearch.client.CatClient, - _elasticsearch_client_cat_methods) + if hasattr(elasticsearch.client, "CatClient"): + _test_methods_wrapped(elasticsearch.client.CatClient) + +@RUN_IF_V8 def test_instrumented_methods_client_nodes(): - if hasattr(elasticsearch.client, 'NodesClient'): - _test_methods_wrapped(elasticsearch.client.NodesClient, - _elasticsearch_client_nodes_methods) + if hasattr(elasticsearch.client, "NodesClient"): + _test_methods_wrapped(elasticsearch.client.NodesClient) + +@RUN_IF_V8 def test_instrumented_methods_client_snapshot(): - if hasattr(elasticsearch.client, 'SnapshotClient'): - _test_methods_wrapped(elasticsearch.client.SnapshotClient, - _elasticsearch_client_snapshot_methods) 
+ if hasattr(elasticsearch.client, "SnapshotClient"): + _test_methods_wrapped(elasticsearch.client.SnapshotClient) + +@RUN_IF_V8 def test_instrumented_methods_client_tasks(): - if hasattr(elasticsearch.client, 'TasksClient'): - _test_methods_wrapped(elasticsearch.client.TasksClient, - _elasticsearch_client_tasks_methods) + if hasattr(elasticsearch.client, "TasksClient"): + _test_methods_wrapped(elasticsearch.client.TasksClient) + +@RUN_IF_V8 def test_instrumented_methods_client_ingest(): - if hasattr(elasticsearch.client, 'IngestClient'): - _test_methods_wrapped(elasticsearch.client.IngestClient, - _elasticsearch_client_ingest_methods) + if hasattr(elasticsearch.client, "IngestClient"): + _test_methods_wrapped(elasticsearch.client.IngestClient) diff --git a/tests/datastore_elasticsearch/test_mget.py b/tests/datastore_elasticsearch/test_mget.py index 9f0b442e84..f3f7c09790 100644 --- a/tests/datastore_elasticsearch/test_mget.py +++ b/tests/datastore_elasticsearch/test_mget.py @@ -13,42 +13,43 @@ # limitations under the License. 
import pytest - from elasticsearch import Elasticsearch -from elasticsearch.connection_pool import RoundRobinSelector +try: + from elastic_transport import RoundRobinSelector +except ImportError: + from elasticsearch.connection_pool import RoundRobinSelector + +from conftest import ES_MULTIPLE_SETTINGS, ES_VERSION from testing_support.fixtures import override_application_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.db_settings import elasticsearch_settings from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task -ES_MULTIPLE_SETTINGS = elasticsearch_settings() - # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, } # Metrics -_base_scoped_metrics = ( - ('Datastore/statement/Elasticsearch/contacts/index', 2), -) +_base_scoped_metrics = (("Datastore/statement/Elasticsearch/contacts/index", 2),) _base_rollup_metrics = ( - ('Datastore/all', 3), - ('Datastore/allOther', 3), - ('Datastore/Elasticsearch/all', 3), - ('Datastore/Elasticsearch/allOther', 3), - ('Datastore/operation/Elasticsearch/index', 2), - ('Datastore/operation/Elasticsearch/mget', 1), - ('Datastore/statement/Elasticsearch/contacts/index', 2), + ("Datastore/all", 3), + ("Datastore/allOther", 3), + ("Datastore/Elasticsearch/all", 3), + ("Datastore/Elasticsearch/allOther", 3), + ("Datastore/operation/Elasticsearch/index", 2), + ("Datastore/operation/Elasticsearch/mget", 1), + ("Datastore/statement/Elasticsearch/contacts/index", 2), ) _disable_scoped_metrics = list(_base_scoped_metrics) @@ -61,89 +62,101 @@ es_1 = 
ES_MULTIPLE_SETTINGS[0] es_2 = ES_MULTIPLE_SETTINGS[1] - host_1 = instance_hostname(es_1['host']) - port_1 = es_1['port'] + host_1 = instance_hostname(es_1["host"]) + port_1 = es_1["port"] - host_2 = instance_hostname(es_2['host']) - port_2 = es_2['port'] + host_2 = instance_hostname(es_2["host"]) + port_2 = es_2["port"] - instance_metric_name_1 = 'Datastore/instance/Elasticsearch/%s/%s' % ( - host_1, port_1) - instance_metric_name_2 = 'Datastore/instance/Elasticsearch/%s/%s' % ( - host_2, port_2) + instance_metric_name_1 = "Datastore/instance/Elasticsearch/%s/%s" % (host_1, port_1) + instance_metric_name_2 = "Datastore/instance/Elasticsearch/%s/%s" % (host_2, port_2) - _enable_rollup_metrics.extend([ + _enable_rollup_metrics.extend( + [ (instance_metric_name_1, 2), (instance_metric_name_2, 1), - ]) + ] + ) - _disable_rollup_metrics.extend([ + _disable_rollup_metrics.extend( + [ (instance_metric_name_1, None), (instance_metric_name_2, None), - ]) + ] + ) + + +@pytest.fixture(scope="module") +def client(): + urls = ["http://%s:%s" % (db["host"], db["port"]) for db in ES_MULTIPLE_SETTINGS] + # When selecting a connection from the pool, use the round robin method. + # This is actually the default already. Using round robin will ensure that + # doing two db calls will mean elastic search is talking to two different + # dbs. 
+ if ES_VERSION >= (8,): + client = Elasticsearch(urls, node_selector_class=RoundRobinSelector, randomize_hosts=False) + else: + client = Elasticsearch(urls, selector_class=RoundRobinSelector, randomize_hosts=False) + return client + # Query + def _exercise_es_multi(es): # set on db 1 - es.index(index='contacts', doc_type='person', - body={'name': 'Joe Tester', 'age': 25, 'title': 'QA Engineer'}, - id=1) - - # set on db 2 - es.index(index='contacts', doc_type='person', - body={'name': 'Jane Tester', 'age': 22, 'title': 'Senior QA Engineer'}, - id=2) + if ES_VERSION >= (8,): + es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + # set on db 2 + es.index(index="contacts", body={"name": "Jane Tester", "age": 22, "title": "Senior QA Engineer"}, id=2) + else: + es.index( + index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1 + ) + # set on db 2 + es.index( + index="contacts", + doc_type="person", + body={"name": "Jane Tester", "age": 22, "title": "Senior QA Engineer"}, + id=2, + ) # ask db 1, will return info from db 1 and 2 mget_body = { - 'docs': [ - {'_id': 1, '_index': 'contacts'}, - {'_id': 2, '_index': 'contacts'}, + "docs": [ + {"_id": 1, "_index": "contacts"}, + {"_id": 2, "_index": "contacts"}, ] } - results = es.mget(mget_body) - assert len(results['docs']) == 2 + results = es.mget(body=mget_body) + assert len(results["docs"]) == 2 + # Test -@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, - reason='Test environment not configured with multiple databases.') + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( - 'test_mget:test_multi_get_enabled', - scoped_metrics=_enable_scoped_metrics, - rollup_metrics=_enable_rollup_metrics, - background_task=True) + "test_mget:test_multi_get_enabled", + 
scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) @background_task() -def test_multi_get_enabled(): - urls = ['http://%s:%s' % (db['host'], db['port']) for db in - ES_MULTIPLE_SETTINGS] - # When selecting a connection from the pool, use the round robin method. - # This is actually the default already. Using round robin will ensure that - # doing two db calls will mean elastic search is talking to two different - # dbs. - client = Elasticsearch(urls, selector_class=RoundRobinSelector, - randomize_hosts=False) +def test_multi_get_enabled(client): _exercise_es_multi(client) -@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, - reason='Test environment not configured with multiple databases.') + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( - 'test_mget:test_multi_get_disabled', - scoped_metrics=_disable_scoped_metrics, - rollup_metrics=_disable_rollup_metrics, - background_task=True) + "test_mget:test_multi_get_disabled", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) @background_task() -def test_multi_get_disabled(): - urls = ['http://%s:%s' % (db['host'], db['port']) for db in - ES_MULTIPLE_SETTINGS] - # When selecting a connection from the pool, use the round robin method. - # This is actually the default already. Using round robin will ensure that - # doing two db calls will mean elastic search is talking to two different - # dbs. 
- client = Elasticsearch(urls, selector_class=RoundRobinSelector, - randomize_hosts=False) +def test_multi_get_disabled(client): _exercise_es_multi(client) diff --git a/tests/datastore_elasticsearch/test_multiple_dbs.py b/tests/datastore_elasticsearch/test_multiple_dbs.py index 70a7be4f12..71c47b1685 100644 --- a/tests/datastore_elasticsearch/test_multiple_dbs.py +++ b/tests/datastore_elasticsearch/test_multiple_dbs.py @@ -13,40 +13,36 @@ # limitations under the License. import pytest - +from conftest import ES_MULTIPLE_SETTINGS, ES_VERSION from elasticsearch import Elasticsearch - from testing_support.fixtures import override_application_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.db_settings import elasticsearch_settings from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task -ES_MULTIPLE_SETTINGS = elasticsearch_settings() - # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, } # Metrics -_base_scoped_metrics = ( - ('Datastore/statement/Elasticsearch/contacts/index', 2), -) +_base_scoped_metrics = (("Datastore/statement/Elasticsearch/contacts/index", 2),) _base_rollup_metrics = ( - ('Datastore/all', 2), - ('Datastore/allOther', 2), - ('Datastore/Elasticsearch/all', 2), - ('Datastore/Elasticsearch/allOther', 2), - ('Datastore/operation/Elasticsearch/index', 2), - ('Datastore/statement/Elasticsearch/contacts/index', 2), + ("Datastore/all", 2), + ("Datastore/allOther", 2), + ("Datastore/Elasticsearch/all", 2), + ("Datastore/Elasticsearch/allOther", 2), + 
("Datastore/operation/Elasticsearch/index", 2), + ("Datastore/statement/Elasticsearch/contacts/index", 2), ) _disable_scoped_metrics = list(_base_scoped_metrics) @@ -59,61 +55,71 @@ es_1 = ES_MULTIPLE_SETTINGS[0] es_2 = ES_MULTIPLE_SETTINGS[1] - host_1 = instance_hostname(es_1['host']) - port_1 = es_1['port'] + host_1 = instance_hostname(es_1["host"]) + port_1 = es_1["port"] - host_2 = instance_hostname(es_2['host']) - port_2 = es_2['port'] + host_2 = instance_hostname(es_2["host"]) + port_2 = es_2["port"] - instance_metric_name_1 = 'Datastore/instance/Elasticsearch/%s/%s' % ( - host_1, port_1) - instance_metric_name_2 = 'Datastore/instance/Elasticsearch/%s/%s' % ( - host_2, port_2) + instance_metric_name_1 = "Datastore/instance/Elasticsearch/%s/%s" % (host_1, port_1) + instance_metric_name_2 = "Datastore/instance/Elasticsearch/%s/%s" % (host_2, port_2) - _enable_rollup_metrics.extend([ + _enable_rollup_metrics.extend( + [ (instance_metric_name_1, 1), (instance_metric_name_2, 1), - ]) + ] + ) - _disable_rollup_metrics.extend([ + _disable_rollup_metrics.extend( + [ (instance_metric_name_1, None), (instance_metric_name_2, None), - ]) + ] + ) # Query + def _exercise_es(es): - es.index(index='contacts', doc_type='person', - body={'name': 'Joe Tester', 'age': 25, 'title': 'QA Engineer'}, id=1) + if ES_VERSION >= (8,): + es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + else: + es.index( + index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1 + ) + # Test -@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, - reason='Test environment not configured with multiple databases.') + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( - 'test_multiple_dbs:test_multiple_dbs_enabled', - scoped_metrics=_enable_scoped_metrics, - 
rollup_metrics=_enable_rollup_metrics, - background_task=True) + "test_multiple_dbs:test_multiple_dbs_enabled", + scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) @background_task() def test_multiple_dbs_enabled(): for db in ES_MULTIPLE_SETTINGS: - es_url = 'http://%s:%s' % (db['host'], db['port']) + es_url = "http://%s:%s" % (db["host"], db["port"]) client = Elasticsearch(es_url) _exercise_es(client) -@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, - reason='Test environment not configured with multiple databases.') + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( - 'test_multiple_dbs:test_multiple_dbs_disabled', - scoped_metrics=_disable_scoped_metrics, - rollup_metrics=_disable_rollup_metrics, - background_task=True) + "test_multiple_dbs:test_multiple_dbs_disabled", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) @background_task() def test_multiple_dbs_disabled(): for db in ES_MULTIPLE_SETTINGS: - es_url = 'http://%s:%s' % (db['host'], db['port']) + es_url = "http://%s:%s" % (db["host"], db["port"]) client = Elasticsearch(es_url) _exercise_es(client) diff --git a/tests/datastore_elasticsearch/test_trace_node.py b/tests/datastore_elasticsearch/test_trace_node.py index 445b4a4eb2..af96b80b45 100644 --- a/tests/datastore_elasticsearch/test_trace_node.py +++ b/tests/datastore_elasticsearch/test_trace_node.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from elasticsearch import Elasticsearch -from testing_support.db_settings import elasticsearch_settings from testing_support.fixtures import ( override_application_settings, validate_tt_parenting, @@ -25,8 +23,7 @@ from newrelic.api.background_task import background_task -ES_SETTINGS = elasticsearch_settings()[0] -ES_URL = "http://%s:%s" % (ES_SETTINGS["host"], ES_SETTINGS["port"]) +from conftest import ES_SETTINGS, ES_VERSION # Settings @@ -79,10 +76,16 @@ # Query -def _exercise_es(es): +def _exercise_es_v7(es): es.index(index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Master"}, id=1) +def _exercise_es_v8(es): + es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Master"}, id=1) + + +_exercise_es = _exercise_es_v7 if ES_VERSION < (8, 0, 0) else _exercise_es_v8 + # Tests @@ -90,8 +93,7 @@ def _exercise_es(es): @validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() -def test_trace_node_datastore_params_enable_instance(): - client = Elasticsearch(ES_URL) +def test_trace_node_datastore_params_enable_instance(client): _exercise_es(client) @@ -99,8 +101,7 @@ def test_trace_node_datastore_params_enable_instance(): @validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() -def test_trace_node_datastore_params_disable_instance(): - client = Elasticsearch(ES_URL) +def test_trace_node_datastore_params_disable_instance(client): _exercise_es(client) @@ -108,6 +109,5 @@ def test_trace_node_datastore_params_disable_instance(): @validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone) @validate_tt_parenting(_tt_parenting) @background_task() -def test_trace_node_datastore_params_instance_only(): - client = Elasticsearch(ES_URL) +def 
test_trace_node_datastore_params_instance_only(client):
     _exercise_es(client)
diff --git a/tests/datastore_elasticsearch/test_transport.py b/tests/datastore_elasticsearch/test_transport.py
index 49896ba079..a091a9a926 100644
--- a/tests/datastore_elasticsearch/test_transport.py
+++ b/tests/datastore_elasticsearch/test_transport.py
@@ -1,6 +1,6 @@
 # Copyright 2010 New Relic, Inc.
 #
-# Licensed under the Apache License, Version 2.0 (the "License");
+# Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
@@ -12,63 +12,99 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from elasticsearch import VERSION
-from elasticsearch.client.utils import _make_path
-from elasticsearch.transport import Transport
-from elasticsearch.connection.http_requests import RequestsHttpConnection
-from elasticsearch.connection.http_urllib3 import Urllib3HttpConnection
+import pytest
+from conftest import ES_SETTINGS, ES_VERSION
 from elasticsearch.serializer import JSONSerializer
-from newrelic.api.application import application_instance as application
-from newrelic.api.background_task import BackgroundTask
+from newrelic.api.background_task import background_task
+from newrelic.api.transaction import current_transaction
-from testing_support.db_settings import elasticsearch_settings
+try:
+    from elasticsearch.connection.http_requests import RequestsHttpConnection
+    from elasticsearch.connection.http_urllib3 import Urllib3HttpConnection
+    from elasticsearch.transport import Transport
-ES_SETTINGS = elasticsearch_settings()[0]
-HOST = {
-    'host':ES_SETTINGS['host'],
-    'port': int(ES_SETTINGS['port'])
-}
-INDEX = 'contacts'
-DOC_TYPE = 'person'
-ID = 1
-METHOD = _make_path(INDEX, DOC_TYPE, ID)
-PARAMS = {}
-HEADERS = {"Content-Type": "application/json"}
-DATA = {"name": "Joe Tester"}
-BODY = 
JSONSerializer().dumps(DATA).encode('utf-8') + NodeConfig = dict +except ImportError: + from elastic_transport._models import NodeConfig + from elastic_transport._node._http_requests import ( + RequestsHttpNode as RequestsHttpConnection, + ) + from elastic_transport._node._http_urllib3 import ( + Urllib3HttpNode as Urllib3HttpConnection, + ) + from elastic_transport._transport import Transport -def test_transport_get_connection(): - app = application() - with BackgroundTask(app, 'transport_perform_request') as transaction: - transport = Transport([HOST]) - transport.get_connection() +IS_V8 = ES_VERSION >= (8,) +IS_V7 = ES_VERSION >= (7,) and ES_VERSION < (8, 0) +IS_BELOW_V7 = ES_VERSION < (7,) - expected = (ES_SETTINGS['host'], ES_SETTINGS['port'], None) - assert transaction._nr_datastore_instance_info == expected +RUN_IF_V8 = pytest.mark.skipif(IS_V7 or IS_BELOW_V7, reason="Only run for v8+") +RUN_IF_V7 = pytest.mark.skipif(IS_V8 or IS_BELOW_V7, reason="Only run for v7") +RUN_IF_BELOW_V7 = pytest.mark.skipif(not IS_BELOW_V7, reason="Only run for versions below v7") -def test_transport_perform_request_urllib3(): - app = application() - with BackgroundTask(app, 'perform_request_urllib3') as transaction: - transport = Transport([HOST], connection_class=Urllib3HttpConnection) - if VERSION >= (7, 16, 0): - transport.perform_request('POST', METHOD, headers=HEADERS, params=PARAMS, body=DATA) - else: - transport.perform_request('POST', METHOD, params=PARAMS, body=DATA) - expected = (ES_SETTINGS['host'], ES_SETTINGS['port'], None) - assert transaction._nr_datastore_instance_info == expected +HOST = NodeConfig(scheme="http", host=ES_SETTINGS["host"], port=int(ES_SETTINGS["port"])) + +METHOD = "/contacts/person/1" +HEADERS = {"Content-Type": "application/json"} +DATA = {"name": "Joe Tester"} + +BODY = JSONSerializer().dumps(DATA) +if hasattr(BODY, "encode"): + BODY = BODY.encode("utf-8") + +@pytest.mark.parametrize( + "transport_kwargs, perform_request_kwargs", + [ + 
pytest.param({}, {"body": DATA}, id="DefaultTransport_below_v7", marks=RUN_IF_BELOW_V7),
+        pytest.param(
+            {},
+            {"headers": HEADERS, "body": DATA},
+            id="DefaultTransport_v7+",
+            marks=pytest.mark.skipif(IS_BELOW_V7, reason="Only run for v7+"),
+        ),
+        pytest.param(
+            {"connection_class": Urllib3HttpConnection},
+            {"body": DATA},
+            id="Urllib3HttpConnection_below_v7",
+            marks=RUN_IF_BELOW_V7,
+        ),
+        pytest.param(
+            {"connection_class": RequestsHttpConnection},
+            {"body": DATA},
+            id="RequestsHttpConnection_below_v7",
+            marks=RUN_IF_BELOW_V7,
+        ),
+        pytest.param(
+            {"connection_class": Urllib3HttpConnection},
+            {"headers": HEADERS, "body": DATA},
+            id="Urllib3HttpConnectionv7",
+            marks=RUN_IF_V7,
+        ),
+        pytest.param(
+            {"connection_class": RequestsHttpConnection},
+            {"headers": HEADERS, "body": DATA},
+            id="RequestsHttpConnectionv7",
+            marks=RUN_IF_V7,
+        ),
+        pytest.param(
+            {"node_class": Urllib3HttpConnection},
+            {"headers": HEADERS, "body": DATA},
+            id="Urllib3HttpNodev8",
+            marks=RUN_IF_V8,
+        ),
+        pytest.param(
+            {"node_class": RequestsHttpConnection},
+            {"headers": HEADERS, "body": DATA},
+            id="RequestsHttpNodev8",
+            marks=RUN_IF_V8,
+        ),
+    ],
+)
+@background_task()
+def test_transport_connection_classes(transport_kwargs, perform_request_kwargs):
+    transaction = current_transaction()
-def test_transport_perform_request_requests():
-    app = application()
-    with BackgroundTask(app, 'perform_request_requests') as transaction:
-        transport = Transport([HOST], connection_class=RequestsHttpConnection)
-        if VERSION >= (7, 16, 0):
-            transport.perform_request('POST', METHOD, headers=HEADERS, params=PARAMS, body=DATA)
-        else:
-            transport.perform_request('POST', METHOD, params=PARAMS, body=DATA)
+    transport = Transport([HOST], **transport_kwargs)
+    transport.perform_request("POST", METHOD, **perform_request_kwargs)
-        expected = (ES_SETTINGS['host'], ES_SETTINGS['port'], None)
+    expected = (ES_SETTINGS["host"], ES_SETTINGS["port"], None)
     assert transaction._nr_datastore_instance_info == expected
diff --git 
a/tests/testing_support/validators/validate_datastore_trace_inputs.py b/tests/testing_support/validators/validate_datastore_trace_inputs.py new file mode 100644 index 0000000000..ade4ebea6f --- /dev/null +++ b/tests/testing_support/validators/validate_datastore_trace_inputs.py @@ -0,0 +1,50 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from testing_support.fixtures import catch_background_exceptions + +from newrelic.common.object_wrapper import transient_function_wrapper + +""" +operation: method name +target: search argument +""" + + +def validate_datastore_trace_inputs(operation=None, target=None): + @transient_function_wrapper("newrelic.api.datastore_trace", "DatastoreTrace.__init__") + @catch_background_exceptions + def _validate_datastore_trace_inputs(wrapped, instance, args, kwargs): + def _bind_params(product, target, operation, host=None, port_path_or_id=None, database_name=None, **kwargs): + return (product, target, operation, host, port_path_or_id, database_name, kwargs) + + ( + captured_product, + captured_target, + captured_operation, + captured_host, + captured_port_path_or_id, + captured_database_name, + captured_kwargs, + ) = _bind_params(*args, **kwargs) + + if target is not None: + assert captured_target == target, "%s didn't match expected %s" % (captured_target, target) + if operation is not None: + assert captured_operation == operation, "%s didn't match expected %s" % (captured_operation, operation) + + 
return wrapped(*args, **kwargs) + + return _validate_datastore_trace_inputs diff --git a/tox.ini b/tox.ini index 3e5a58cd87..07c040d29a 100644 --- a/tox.ini +++ b/tox.ini @@ -72,9 +72,8 @@ envlist = python-cross_agent-pypy-without_extensions, postgres-datastore_asyncpg-{py37,py38,py39,py310,py311}, memcached-datastore_bmemcached-{pypy,py27,py37,py38,py39,py310,py311}-memcached030, - elasticsearchserver01-datastore_pyelasticsearch-{py27,pypy}, - elasticsearchserver01-datastore_elasticsearch-py27-elasticsearch{00,01,02,05}, - elasticsearchserver07-datastore_elasticsearch-{py27,py37,py38,py39,py310,py311,pypy,pypy37}-elasticsearch{07}, + elasticsearchserver07-datastore_elasticsearch-{py27,py37,py38,py39,py310,py311,pypy,pypy37}-elasticsearch07, + elasticsearchserver08-datastore_elasticsearch-{py37,py38,py39,py310,py311,pypy37}-elasticsearch08, memcached-datastore_memcache-{py27,py37,py38,py39,py310,py311,pypy,pypy37}-memcached01, mysql-datastore_mysql-mysql080023-py27, mysql-datastore_mysql-mysqllatest-{py37,py38,py39,py310,py311}, @@ -236,11 +235,8 @@ deps = datastore_bmemcached-memcached030: python-binary-memcached<0.31 datastore_bmemcached-memcached030: uhashring<2.0 datastore_elasticsearch: requests - datastore_elasticsearch-elasticsearch00: elasticsearch<1.0 - datastore_elasticsearch-elasticsearch01: elasticsearch<2.0 - datastore_elasticsearch-elasticsearch02: elasticsearch<3.0 - datastore_elasticsearch-elasticsearch05: elasticsearch<6.0 datastore_elasticsearch-elasticsearch07: elasticsearch<8.0 + datastore_elasticsearch-elasticsearch08: elasticsearch<9.0 datastore_memcache-memcached01: python-memcached<2 datastore_mysql-mysqllatest: mysql-connector-python datastore_mysql-mysql080023: mysql-connector-python<8.0.24