diff --git a/airflow/tests/test_unit.py b/airflow/tests/test_unit.py index 033334be280d7..2516311205727 100644 --- a/airflow/tests/test_unit.py +++ b/airflow/tests/test_unit.py @@ -31,11 +31,11 @@ def test_service_checks_healthy_exp(aggregator, json_resp, expected_healthy_stat check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value=None): - - with mock.patch('datadog_checks.base.utils.http.requests') as req: + mock_session = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=mock_session): mock_resp = mock.MagicMock(status_code=200) mock_resp.json.side_effect = [json_resp] - req.get.return_value = mock_resp + mock_session.get.return_value = mock_resp check.check(None) @@ -60,14 +60,14 @@ def test_service_checks_healthy_stable( check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - - with mock.patch('datadog_checks.base.utils.http.requests') as req: + mock_session = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=mock_session): mock_resp = mock.MagicMock(status_code=200) mock_resp.json.side_effect = [ {'metadatabase': {'status': metadb_status}, 'scheduler': {'status': scheduler_status}}, {'status': 'OK'}, ] - req.get.return_value = mock_resp + mock_session.get.return_value = mock_resp check.check(None) @@ -82,7 +82,8 @@ def test_dag_total_tasks(aggregator, task_instance): check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): mock_resp = mock.MagicMock(status_code=200) mock_resp.json.side_effect = [ {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, @@ -101,7 +102,8 @@ def test_dag_task_ongoing_duration(aggregator, task_instance): with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): mock_resp = mock.MagicMock(status_code=200) mock_resp.json.side_effect = [ {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, @@ -145,7 +147,8 @@ def test_config_collect_ongoing_duration(collect_ongoing_duration, should_call_m check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): mock_resp = mock.MagicMock(status_code=200) mock_resp.json.side_effect = [ {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, diff --git a/amazon_msk/tests/conftest.py b/amazon_msk/tests/conftest.py index 1298134d93e65..ace67250b8fa2 100644 --- a/amazon_msk/tests/conftest.py +++ b/amazon_msk/tests/conftest.py @@ -32,6 +32,7 @@ def mock_requests_get(url, *args, **kwargs): @pytest.fixture def mock_data(): + # Mock requests.get because it is used internally within boto3 
with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): yield diff --git a/apache/tests/conftest.py b/apache/tests/conftest.py index 6292ce634df45..63c78ecf14d26 100644 --- a/apache/tests/conftest.py +++ b/apache/tests/conftest.py @@ -49,7 +49,8 @@ def check_status_page_ready(): @pytest.fixture def mock_hide_server_version(): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): def mock_requests_get_headers(*args, **kwargs): r = requests.get(*args, **kwargs) diff --git a/arangodb/tests/test_arangodb.py b/arangodb/tests/test_arangodb.py index 9786150bf5eb0..14ec1ce25b8c1 100644 --- a/arangodb/tests/test_arangodb.py +++ b/arangodb/tests/test_arangodb.py @@ -52,11 +52,11 @@ def test_invalid_endpoint(aggregator, instance_invalid_endpoint, dd_run_check): def test_check(instance, dd_run_check, aggregator, tag_condition, base_tags): check = ArangodbCheck('arangodb', {}, [instance]) - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): fixture = url.rsplit('/', 1)[-1] return MockResponse(file_path=os.path.join(os.path.dirname(__file__), 'fixtures', tag_condition, fixture)) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(check) aggregator.assert_service_check( diff --git a/avi_vantage/tests/conftest.py b/avi_vantage/tests/conftest.py index 68cbb5fb70668..cb6d79793b997 100644 --- a/avi_vantage/tests/conftest.py +++ b/avi_vantage/tests/conftest.py @@ -58,7 +58,8 @@ def _get_metrics(metrics_folder=NO_TENANT_METRICS_FOLDER, endpoint=None): @pytest.fixture def mock_client(): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): def get(url: AnyStr, *_: Any, **__: Any): parsed = urlparse(url) diff --git a/cert_manager/tests/test_cert_manager.py b/cert_manager/tests/test_cert_manager.py index b67f638659c27..ee1097015556e 100644 --- a/cert_manager/tests/test_cert_manager.py +++ b/cert_manager/tests/test_cert_manager.py @@ -15,7 +15,7 @@ @pytest.fixture() def error_metrics(): with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock(status_code=502, headers={'Content-Type': "text/plain"}), ): yield @@ -34,7 +34,7 @@ def test_check(aggregator, dd_run_check): def mock_requests_get(url, *args, **kwargs): return MockResponse(file_path=os.path.join(os.path.dirname(__file__), 'fixtures', 'cert_manager.txt')) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(check) expected_metrics = dict(CERT_METRICS) diff --git a/cilium/tests/conftest.py b/cilium/tests/conftest.py index 9df1401331796..2897cc741e53f 100644 --- a/cilium/tests/conftest.py +++ b/cilium/tests/conftest.py @@ -203,7 +203,7 @@ def mock_agent_data(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -217,7 +217,7 @@ def mock_operator_data(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 
'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/citrix_hypervisor/tests/conftest.py b/citrix_hypervisor/tests/conftest.py index 01062c83e2f83..9670622329d2e 100644 --- a/citrix_hypervisor/tests/conftest.py +++ b/citrix_hypervisor/tests/conftest.py @@ -50,5 +50,5 @@ def mock_requests_get(url, *args, **kwargs): @pytest.fixture def mock_responses(): - with mock.patch('requests.get', side_effect=mock_requests_get): + with mock.patch('requests.Session.get', side_effect=mock_requests_get): yield diff --git a/consul/tests/test_unit.py b/consul/tests/test_unit.py index 6e93105a5734c..a0588e2a86e77 100644 --- a/consul/tests/test_unit.py +++ b/consul/tests/test_unit.py @@ -158,7 +158,7 @@ def test_get_nodes_with_service_critical(aggregator): def test_consul_request(aggregator, instance, mocker): consul_check = ConsulCheck(common.CHECK_NAME, {}, [consul_mocks.MOCK_CONFIG]) mocker.patch("datadog_checks.base.utils.serialization.json.loads") - with mock.patch("datadog_checks.consul.consul.requests.get") as mock_requests_get: + with mock.patch("datadog_checks.consul.consul.requests.Session.get") as mock_requests_get: consul_check.consul_request("foo") url = "{}/{}".format(instance["url"], "foo") aggregator.assert_service_check("consul.can_connect", ConsulCheck.OK, tags=["url:{}".format(url)], count=1) @@ -549,8 +549,10 @@ def test_config(test_case, extra_config, expected_http_kwargs, mocker): check = ConsulCheck(common.CHECK_NAME, {}, instances=[instance]) mocker.patch("datadog_checks.base.utils.serialization.json.loads") - with mock.patch('datadog_checks.base.utils.http.requests') as r: - r.get.return_value = mock.MagicMock(status_code=200) + with mock.patch('datadog_checks.base.utils.http.requests.Session') as session: + mock_session = mock.MagicMock() + session.return_value = mock_session + mock_session.get.return_value = mock.MagicMock(status_code=200) check.check(None) @@ -564,4 +566,4 @@ def test_config(test_case, extra_config, expected_http_kwargs, mocker): 'allow_redirects': mock.ANY, } http_wargs.update(expected_http_kwargs) - r.get.assert_called_with('/v1/status/leader', **http_wargs) + mock_session.get.assert_called_with('/v1/status/leader', **http_wargs) diff --git a/couch/tests/test_unit.py b/couch/tests/test_unit.py index bfa3ba06dadae..7f2ad8678ae2d 100644 --- a/couch/tests/test_unit.py +++ b/couch/tests/test_unit.py @@ -30,7 +30,8 @@ def test_config(test_case, extra_config, expected_http_kwargs): instance.update(extra_config) check = CouchDb(common.CHECK_NAME, {}, instances=[instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200, content='{}') check.check(instance) diff --git a/couchbase/tests/test_unit.py b/couchbase/tests/test_unit.py index ecfb4dec8eb50..a7f0292d2c88a 100644 --- a/couchbase/tests/test_unit.py +++ b/couchbase/tests/test_unit.py @@ -72,26 +72,25 @@ def test__get_query_monitoring_data(instance_query): ("legacy config", {'user': 'new_foo', 'ssl_verify': False}, {'auth': ('new_foo', 'password'), 'verify': False}), ], ) -def test_config(test_case, dd_run_check, extra_config, expected_http_kwargs, instance): +def test_config(test_case, extra_config, expected_http_kwargs, instance): + """ + Test that the legacy and new auth configurations are both supported. 
+ """ instance.update(extra_config) - check = Couchbase('couchbase', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: - r.get.return_value = mock.MagicMock(status_code=200) - - dd_run_check(check) + check = Couchbase('couchbase', {}, [instance]) - http_wargs = { - 'auth': mock.ANY, - 'cert': mock.ANY, - 'headers': mock.ANY, - 'proxies': mock.ANY, - 'timeout': mock.ANY, - 'verify': mock.ANY, - 'allow_redirects': mock.ANY, - } - http_wargs.update(expected_http_kwargs) - r.get.assert_called_with('http://localhost:8091/pools/default/tasks', **http_wargs) + http_wargs = { + 'auth': mock.ANY, + 'cert': mock.ANY, + 'headers': mock.ANY, + 'proxies': mock.ANY, + 'timeout': mock.ANY, + 'verify': mock.ANY, + 'allow_redirects': mock.ANY, + } + http_wargs.update(expected_http_kwargs) + assert check.http.options == http_wargs @pytest.mark.parametrize( @@ -126,7 +125,7 @@ def test_extract_index_tags(instance, test_input, expected_tags): def test_unit(dd_run_check, check, instance, mocker, aggregator): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check(check(instance)) @@ -142,7 +141,7 @@ def test_unit(dd_run_check, check, instance, mocker, aggregator): def test_unit_query_metrics(dd_run_check, check, instance_query, mocker, aggregator): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check(check(instance_query)) diff --git a/crio/tests/test_crio.py b/crio/tests/test_crio.py index f6b77953ac2ca..adfd92a32ce34 100644 --- a/crio/tests/test_crio.py +++ b/crio/tests/test_crio.py @@ -19,7 +19,7 @@ def mock_data(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/datadog_checks_base/datadog_checks/base/checks/base.py b/datadog_checks_base/datadog_checks/base/checks/base.py index 6994a39d24524..ab7b45bc378cc 100644 --- a/datadog_checks_base/datadog_checks/base/checks/base.py +++ b/datadog_checks_base/datadog_checks/base/checks/base.py @@ -441,7 +441,7 @@ def diagnosis(self) -> Diagnosis: def get_tls_context(self, refresh=False, overrides=None): # type: (bool, Dict[AnyStr, Any]) -> ssl.SSLContext """ - Creates and cache an SSLContext instance based on user configuration. + Creates and caches a TlsContext instance based on user configuration. Note that user configuration can be overridden by using `overrides`. This should only be applied to older integration that manually set config values. diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index a21d286f37265..2045c11ade516 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -27,7 +27,7 @@ from ..errors import ConfigurationError from .common import ensure_bytes, ensure_unicode from .headers import get_default_headers, update_headers -from .network import CertAdapter, create_socket_connection +from .network import create_socket_connection from .time import get_timestamp # See Performance Optimizations in this package's README.md.
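With these changes, `RequestsWrapper` issues every request through a mounted `requests.Session` (built by `create_session` with an `SSLContextAdapter`) instead of the module-level `requests.<method>` helpers, which is why the test updates throughout this patch move their mocks from `requests.get` to `requests.Session.get`. The following minimal sketch of the resulting mocking pattern mirrors the updated `test_get` in `datadog_checks_base/tests/base/utils/http/test_api.py`; the test name itself is illustrative only:

import mock

from datadog_checks.base.utils.http import RequestsWrapper


def test_get_goes_through_session():
    http = RequestsWrapper({}, {})

    # Patch the method on the Session class: this intercepts calls made by the
    # wrapper's persistent session as well as any per-request session it creates.
    with mock.patch('requests.Session.get') as get:
        http.get('https://www.google.com')

    get.assert_called_once_with('https://www.google.com', **http.options)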
@@ -85,12 +85,14 @@ 'skip_proxy': False, 'tls_ca_cert': None, 'tls_cert': None, + 'tls_ciphers': 'ALL', 'tls_use_host_header': False, 'tls_ignore_warning': False, 'tls_private_key': None, + 'tls_private_key_password': None, 'tls_protocols_allowed': DEFAULT_PROTOCOL_VERSIONS, + 'tls_validate_hostname': True, 'tls_verify': True, - 'tls_ciphers': 'ALL', 'timeout': DEFAULT_TIMEOUT, 'use_legacy_auth_encoding': True, 'username': None, @@ -115,6 +117,100 @@ UDS_SCHEME = 'unix' +def create_ssl_context(config, overrides=None): + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext + # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT + context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) + + if overrides is not None: + config = config.copy() + config.update(overrides) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname + context.check_hostname = config['tls_verify'] and config.get('tls_validate_hostname', True) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.verify_mode + context.verify_mode = ssl.CERT_REQUIRED if config['tls_verify'] else ssl.CERT_NONE + + ciphers = config.get('tls_ciphers') + if ciphers: + if 'ALL' in ciphers: + updated_ciphers = "ALL" + else: + updated_ciphers = ":".join(ciphers) + + context.set_ciphers(updated_ciphers) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs + ca_cert = config['tls_ca_cert'] + try: + if ca_cert: + ca_cert = os.path.expanduser(ca_cert) + if os.path.isdir(ca_cert): + context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None) + else: + context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None) + else: + context.load_default_certs(ssl.Purpose.SERVER_AUTH) + except FileNotFoundError: + LOGGER.warning( + 'TLS CA certificate file not found: %s. Please check the `tls_ca_cert` configuration option.', + ca_cert, + ) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain + client_cert, client_key = config['tls_cert'], config['tls_private_key'] + client_key_pass = config['tls_private_key_password'] + try: + if client_key: + client_key = os.path.expanduser(client_key) + if client_cert: + client_cert = os.path.expanduser(client_cert) + context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) + except FileNotFoundError: + LOGGER.warning( + 'TLS client certificate file not found: %s. ' 'Please check the `tls_cert` configuration option.', + client_cert, + ) + + return context + + +class SSLContextAdapter(requests.adapters.HTTPAdapter): + """ + HTTPS adapter that uses SSL contexts. + This ensures consistent TLS configuration across all HTTPS requests. + """ + + def __init__(self, ssl_context, has_custom_context=False, **kwargs): + self.ssl_context = ssl_context + # This is used to determine if the adapter was created with a custom context + # for the purpose of reverting to the default context when needed. + self.has_custom_context = has_custom_context + super(SSLContextAdapter, self).__init__() + + def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): + pool_kwargs['ssl_context'] = self.ssl_context + return super(SSLContextAdapter, self).init_poolmanager(connections, maxsize, block=block, **pool_kwargs) + + def cert_verify(self, conn, url, verify, cert): + """ + This method is overridden to ensure that the SSL context + is configured on the integration side. 
+ """ + pass + + def build_connection_pool_key_attributes(self, request, verify, cert=None): + """ + This method is overridden according to the requests library's + expectations to ensure that the custom SSL context is passed to urllib3. + """ + # See: https://github.com/psf/requests/blob/7341690e842a23cf18ded0abd9229765fa88c4e2/src/requests/adapters.py#L419-L423 + host_params, _ = super(SSLContextAdapter, self).build_connection_pool_key_attributes(request, verify, cert) + return host_params, {"ssl_context": self.ssl_context} + + class ResponseWrapper(ObjectProxy): def __init__(self, response, default_chunk_size): super(ResponseWrapper, self).__init__(response) @@ -152,7 +248,8 @@ class RequestsWrapper(object): 'auth_token_handler', 'request_size', 'tls_protocols_allowed', - 'tls_ciphers_allowed', + 'tls_config', + 'ssl_context', ) def __init__(self, instance, init_config, remapper=None, logger=None, session=None): @@ -254,7 +351,8 @@ def __init__(self, instance, init_config, remapper=None, logger=None, session=No allow_redirects = is_affirmative(config['allow_redirects']) - # https://requests.readthedocs.io/en/latest/user/advanced/#ssl-cert-verification + # For TLS verification, we now rely on the TLS context wrapper + # but still need to set verify for requests compatibility verify = True if isinstance(config['tls_ca_cert'], str): verify = config['tls_ca_cert'] @@ -347,13 +445,9 @@ def __init__(self, instance, init_config, remapper=None, logger=None, session=No if config['kerberos_cache']: self.request_hooks.append(lambda: handle_kerberos_cache(config['kerberos_cache'])) - ciphers = config.get('tls_ciphers') - if ciphers: - if 'ALL' in ciphers: - updated_ciphers = "ALL" - else: - updated_ciphers = ":".join(ciphers) - self.tls_ciphers_allowed = updated_ciphers + # Create TLS context for consistent TLS configuration + self.ssl_context = create_ssl_context(config) + self.tls_config = {key: value for key, value in config.items() if key.startswith('tls_')} def get(self, url, **options): return self._request('get', url, options) @@ -406,10 +500,23 @@ def _request(self, method, url, options): with ExitStack() as stack: for hook in self.request_hooks: stack.enter_context(hook()) + + new_context = self._get_new_context(url, new_options) if persist: - request_method = getattr(self.session, method) + session = self.session + if new_context is not None: + # If the context has changed, we need to create a new adapter + certadapter = SSLContextAdapter(new_context, has_custom_context=True) + session.mount(url, certadapter) + else: + current_adapter = session.get_adapter(url) + if isinstance(current_adapter, SSLContextAdapter) and current_adapter.has_custom_context: + # If we are using a custom context, we need to revert to the original context + certadapter = SSLContextAdapter(self.ssl_context) + session.mount(url, certadapter) else: - request_method = getattr(requests, method) + session = self.create_session(ssl_context=new_context) + request_method = getattr(session, method) if self.auth_token_handler: try: @@ -424,6 +531,35 @@ def _request(self, method, url, options): return ResponseWrapper(response, self.request_size) + def _get_new_context(self, url, new_options): + """ + Creates a new TLS context if the new request options differ from the default ones. + """ + # If `verify` or `cert` options differ from the defaults, a new adapter is created. 
+ if new_options.get('verify') == self.options['verify'] and new_options.get('cert') == self.options['cert']: + return + # If the URL is not HTTPS, no need to update the TLS context. + if not url.startswith('https'): + return + + request_overrides = {} + if new_options.get('verify') != self.options['verify']: + if new_options.get('verify') is True: + request_overrides["tls_verify"] = True + elif new_options.get('verify') is False: + request_overrides["tls_verify"] = False + elif isinstance(new_options.get('verify'), str): + request_overrides["tls_verify"] = True + request_overrides["tls_ca_cert"] = new_options.get('verify') + if new_options.get('cert') != self.options['cert']: + if isinstance(new_options.get('cert'), str): + request_overrides["tls_cert"] = new_options.get('cert') + elif isinstance(new_options.get('cert'), tuple) and len(new_options.get('cert')) == 2: + request_overrides["tls_cert"] = new_options.get('cert')[0] + request_overrides["tls_private_key"] = new_options.get('cert')[1] + + return create_ssl_context(self.tls_config, overrides=request_overrides) + def make_request_aia_chasing(self, request_method, method, url, new_options, persist): try: response = request_method(url, **new_options) @@ -435,16 +571,15 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per certs = self.fetch_intermediate_certs(hostname, port) if not certs: raise e + new_context = create_ssl_context(self.tls_config, overrides={'tls_ca_cert': certs}) # retry the connection via session object - certadapter = CertAdapter(certs=certs) if not persist: - session = requests.Session() - for option, value in self.options.items(): - setattr(session, option, value) + session = self.create_session(new_context) else: session = self.session + certadapter = SSLContextAdapter(new_context) + session.mount(url, certadapter) request_method = getattr(session, method) - session.mount(url, certadapter) response = request_method(url, **new_options) return response @@ -471,9 +606,7 @@ def fetch_intermediate_certs(self, hostname, port=443): with sock: try: - context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) - context.verify_mode = ssl.CERT_NONE - context.set_ciphers(self.tls_ciphers_allowed) + context = create_ssl_context(self.tls_config, overrides={'tls_verify': False}) with context.wrap_socket(sock, server_hostname=hostname) as secure_sock: der_cert = secure_sock.getpeercert(binary_form=True) @@ -521,7 +654,7 @@ def load_intermediate_certs(self, der_cert, certs): # Assume HTTP for now try: - response = requests.get(uri) # SKIP_HTTP_VALIDATION + response = self.get(uri, verify=False) # SKIP_HTTP_VALIDATION except Exception as e: self.logger.error('Error fetching intermediate certificate from `%s`: %s', uri, e) continue @@ -532,23 +665,47 @@ def load_intermediate_certs(self, der_cert, certs): self.load_intermediate_certs(intermediate_cert, certs) return certs - @property - def session(self): - if self._session is None: - self._session = requests.Session() + def create_session(self, ssl_context=None): + session = requests.Session() + context = self.ssl_context if ssl_context is None else ssl_context - # Enables HostHeaderSSLAdapter - # https://toolbelt.readthedocs.io/en/latest/adapters.html#hostheaderssladapter - if self.tls_use_host_header: - self._session.mount('https://', _http_utils.HostHeaderSSLAdapter()) - # Enable Unix Domain Socket (UDS) support. 
- # See: https://github.com/msabramo/requests-unixsocket - self._session.mount('{}://'.format(UDS_SCHEME), requests_unixsocket.UnixAdapter()) + # Enables HostHeaderSSLAdapter if needed + # https://toolbelt.readthedocs.io/en/latest/adapters.html#hostheaderssladapter + if self.tls_use_host_header: + # Create a combined adapter that supports both TLS context and host headers + class SSLContextHostHeaderAdapter(SSLContextAdapter, _http_utils.HostHeaderSSLAdapter): + def __init__(self, ssl_context, **kwargs): + SSLContextAdapter.__init__(self, ssl_context, **kwargs) + _http_utils.HostHeaderSSLAdapter.__init__(self, **kwargs) + + def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): + # Use TLS context from wrapper + pool_kwargs['ssl_context'] = self.ssl_context + return _http_utils.HostHeaderSSLAdapter.init_poolmanager( + self, connections, maxsize, block=block, **pool_kwargs + ) + + https_adapter = SSLContextHostHeaderAdapter(context) + else: + # Use SSLContextHTTPSAdapter for consistent TLS configuration + https_adapter = SSLContextAdapter(context) + + session.mount('https://', https_adapter) - # Attributes can't be passed to the constructor - for option, value in self.options.items(): - setattr(self._session, option, value) + # Enable Unix Domain Socket (UDS) support. + # See: https://github.com/msabramo/requests-unixsocket + session.mount('{}://'.format(UDS_SCHEME), requests_unixsocket.UnixAdapter()) + # Attributes can't be passed to the constructor + for option, value in self.options.items(): + setattr(session, option, value) + return session + + @property + def session(self): + if self._session is None: + # Create a new session if it doesn't exist + self._session = self.create_session() return self._session def handle_auth_token(self, **request): diff --git a/datadog_checks_base/datadog_checks/base/utils/network.py b/datadog_checks_base/datadog_checks/base/utils/network.py index 4c51899f00f3e..c8f135c65a6b3 100644 --- a/datadog_checks_base/datadog_checks/base/utils/network.py +++ b/datadog_checks_base/datadog_checks/base/utils/network.py @@ -1,7 +1,4 @@ import socket -import ssl - -from requests.adapters import HTTPAdapter, PoolManager def create_socket_connection(hostname, port=443, sock_type=socket.SOCK_STREAM, timeout=10): @@ -34,16 +31,3 @@ def create_socket_connection(hostname, port=443, sock_type=socket.SOCK_STREAM, t raise socket.error('Unable to resolve host, check your DNS: {}'.format(message)) # noqa: G raise - - -class CertAdapter(HTTPAdapter): - def __init__(self, **kwargs): - self.certs = kwargs['certs'] - super(CertAdapter, self).__init__() - - def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): - context = ssl.create_default_context() - for cert in self.certs: - context.load_verify_locations(cadata=cert) - pool_kwargs['ssl_context'] = context - self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, block=block, strict=True, **pool_kwargs) diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index 0fb43c028a112..e6fc7643a18dd 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -2,12 +2,11 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import logging -import os -import ssl from copy import deepcopy from typing import TYPE_CHECKING, Any, AnyStr, Dict # noqa: F401 from ..config import is_affirmative +from .http import 
create_ssl_context if TYPE_CHECKING: from ..types import InstanceType # noqa: F401 @@ -36,7 +35,6 @@ class TlsContextWrapper(object): __slots__ = ('logger', 'config', 'tls_context') def __init__(self, instance, remapper=None, overrides=None): - # type: (InstanceType, Dict[AnyStr, Dict[AnyStr, Any]], Dict[AnyStr, Any]) -> None default_fields = dict(STANDARD_FIELDS) # Override existing config options if there exists any overrides @@ -98,56 +96,8 @@ def __init__(self, instance, remapper=None, overrides=None): del config[unique_name] self.config = config - self.tls_context = self._create_tls_context() - - def _create_tls_context(self): - # type: () -> ssl.SSLContext - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext - # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS - context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.verify_mode - context.verify_mode = ssl.CERT_REQUIRED if self.config['tls_verify'] else ssl.CERT_NONE - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname - if context.verify_mode == ssl.CERT_REQUIRED: - context.check_hostname = self.config.get('tls_validate_hostname', True) - else: - context.check_hostname = False - - ciphers = self.config.get('tls_ciphers') - if ciphers: - if 'ALL' in ciphers: - updated_ciphers = "ALL" - else: - updated_ciphers = ":".join(ciphers) - - context.set_ciphers(updated_ciphers) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs - ca_cert = self.config['tls_ca_cert'] - if ca_cert: - ca_cert = os.path.expanduser(ca_cert) - if os.path.isdir(ca_cert): - context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None) - else: - context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None) - else: - context.load_default_certs(ssl.Purpose.SERVER_AUTH) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain - client_cert, client_key = self.config['tls_cert'], self.config['tls_private_key'] - client_key_pass = self.config['tls_private_key_password'] - if client_key: - client_key = os.path.expanduser(client_key) - if client_cert: - client_cert = os.path.expanduser(client_cert) - context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) - - return context + self.tls_context = create_ssl_context(self.config) def refresh_tls_context(self): # type: () -> None - self.tls_context = self._create_tls_context() + self.tls_context = create_ssl_context(self.config) diff --git a/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py b/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py index 17341a0fce77b..21b267c970a42 100644 --- a/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py +++ b/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py @@ -157,7 +157,7 @@ def test_poll_text_plain(mocked_prometheus_check, mocked_prometheus_scraper_conf mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): response = check.poll(mocked_prometheus_scraper_config) messages = list(check.parse_metric_family(response, 
mocked_prometheus_scraper_config)) messages.sort(key=lambda x: x.name) @@ -174,7 +174,7 @@ def test_poll_octet_stream(mocked_prometheus_check, mocked_prometheus_scraper_co mock_response.status_code = 200 mock_response.headers = {'Content-Type': 'application/octet-stream'} - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): response = check.poll(mocked_prometheus_scraper_config) messages = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) assert len(messages) == 40 @@ -1695,7 +1695,7 @@ def test_ignore_metrics_multiple_wildcards( mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process(config) # Make sure metrics are ignored @@ -1803,7 +1803,7 @@ def test_metrics_with_ignore_label_values( mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process(config) # Make sure metrics are ignored @@ -1854,7 +1854,7 @@ def test_match_metrics_multiple_wildcards( mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process(config) aggregator.assert_metric('prometheus.go_memstats_mcache_inuse_bytes', count=1) @@ -2304,7 +2304,7 @@ def test_label_joins_gc(aggregator, mocked_prometheus_check, mocked_prometheus_s mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process(mocked_prometheus_scraper_config) assert 'dd-agent-1337' in mocked_prometheus_scraper_config['_label_mapping']['pod'] assert 'dd-agent-62bgh' not in mocked_prometheus_scraper_config['_label_mapping']['pod'] @@ -2464,7 +2464,7 @@ def test_label_join_state_change(aggregator, mocked_prometheus_check, mocked_pro mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process(mocked_prometheus_scraper_config) assert 15 == len(mocked_prometheus_scraper_config['_label_mapping']['pod']) assert mocked_prometheus_scraper_config['_label_mapping']['pod']['dd-agent-62bgh']['phase'] == 'Test' @@ -2582,7 +2582,7 @@ def mock_filter_get(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), @@ -2672,7 +2672,7 @@ def test_ssl_verify_not_raise_warning(caplog, 
mocked_openmetrics_check_factory, check = mocked_openmetrics_check_factory(instance) scraper_config = check.get_scraper_config(instance) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get', return_value=MockResponse('httpbin.org')): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): resp = check.send_request('https://httpbin.org/get', scraper_config) assert "httpbin.org" in resp.content.decode('utf-8') @@ -2696,7 +2696,7 @@ def test_send_request_with_dynamic_prometheus_url(caplog, mocked_openmetrics_che # `prometheus_url` changed just before calling `send_request` scraper_config['prometheus_url'] = 'https://www.example.com/foo/bar' - with caplog.at_level(logging.DEBUG), mock.patch('requests.get', return_value=MockResponse('httpbin.org')): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): resp = check.send_request('https://httpbin.org/get', scraper_config) assert "httpbin.org" in resp.content.decode('utf-8') diff --git a/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py b/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py index a3a997aaf1b75..0625a62989f38 100644 --- a/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py +++ b/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py @@ -5,6 +5,7 @@ # Licensed under Simplified BSD License (see LICENSE) import logging import os +import ssl from collections import OrderedDict import mock @@ -334,7 +335,7 @@ def test_poll_protobuf(mocked_prometheus_check, bin_data): """Tests poll using the protobuf format""" check = mocked_prometheus_check mock_response = mock.MagicMock(status_code=200, content=bin_data, headers={'Content-Type': protobuf_content_type}) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): response = check.poll("http://fake.endpoint:10055/metrics") messages = list(check.parse_metric_family(response)) assert len(messages) == 61 @@ -347,7 +348,7 @@ def test_poll_text_plain(mocked_prometheus_check, text_data): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): response = check.poll("http://fake.endpoint:10055/metrics") messages = list(check.parse_metric_family(response)) messages.sort(key=lambda x: x.name) @@ -1284,7 +1285,7 @@ def test_label_joins(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' check.label_joins = { @@ -1634,7 +1635,7 @@ def test_label_joins_gc(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' 
check.label_joins = {'kube_pod_info': {'label_to_match': 'pod', 'labels_to_get': ['node', 'pod_ip']}} @@ -1684,7 +1685,7 @@ def test_label_joins_gc(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process("http://fake.endpoint:10055/metrics") assert 'dd-agent-1337' in check._label_mapping['pod'] assert 'dd-agent-62bgh' not in check._label_mapping['pod'] @@ -1700,7 +1701,7 @@ def test_label_joins_missconfigured(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' check.label_joins = {'kube_pod_info': {'label_to_match': 'pod', 'labels_to_get': ['node', 'not_existing']}} @@ -1753,7 +1754,7 @@ def test_label_join_not_existing(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' check.label_joins = {'kube_pod_info': {'label_to_match': 'not_existing', 'labels_to_get': ['node', 'pod_ip']}} @@ -1792,7 +1793,7 @@ def test_label_join_metric_not_existing(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' check.label_joins = {'not_existing': {'label_to_match': 'pod', 'labels_to_get': ['node', 'pod_ip']}} @@ -1831,7 +1832,7 @@ def test_label_join_with_hostname(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' check.label_joins = {'kube_pod_info': {'label_to_match': 'pod', 'labels_to_get': ['node']}} @@ -1883,7 +1884,7 @@ def mock_get(): with open(f_name, 'r') as f: text_data = f.read() mock_get = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -1966,7 +1967,7 @@ def test_ssl_verify_not_raise_warning(caplog, mocked_prometheus_check, text_data check = mocked_prometheus_check - with caplog.at_level(logging.DEBUG), mock.patch('requests.get', return_value=MockResponse('httpbin.org')): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): resp = check.poll('https://httpbin.org/get') assert 'httpbin.org' in resp.content.decode('utf-8') @@ -1982,7 
+1983,7 @@ def test_ssl_verify_not_raise_warning_cert_false(caplog, mocked_prometheus_check check = mocked_prometheus_check check.ssl_ca_cert = False - with caplog.at_level(logging.DEBUG), mock.patch('requests.get', return_value=MockResponse('httpbin.org')): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): resp = check.poll('https://httpbin.org/get') assert 'httpbin.org' in resp.content.decode('utf-8') @@ -2018,29 +2019,32 @@ def test_requests_wrapper_config(): ] ) - with mock.patch("requests.get") as get: - check.poll(instance_http['prometheus_endpoint'], instance=instance_http) - get.assert_called_with( - instance_http['prometheus_endpoint'], - stream=False, - headers=expected_headers, - auth=requests.auth.HTTPDigestAuth('data', 'dog'), - cert='/path/to/cert', - timeout=(42.0, 42.0), - proxies=None, - verify=True, - allow_redirects=True, - ) - - check.poll(instance_http['prometheus_endpoint']) - get.assert_called_with( - instance_http['prometheus_endpoint'], - stream=False, - headers=expected_headers, - auth=requests.auth.HTTPDigestAuth('data', 'dog'), - cert='/path/to/cert', - timeout=(42.0, 42.0), - proxies=None, - verify=True, - allow_redirects=True, - ) + with mock.patch("requests.Session.get") as get: + with mock.patch.object(ssl.SSLContext, 'load_cert_chain') as mock_load_cert_chain: + mock_load_cert_chain.return_value = None + + check.poll(instance_http['prometheus_endpoint'], instance=instance_http) + get.assert_called_with( + instance_http['prometheus_endpoint'], + stream=False, + headers=expected_headers, + auth=requests.auth.HTTPDigestAuth('data', 'dog'), + cert='/path/to/cert', + timeout=(42.0, 42.0), + proxies=None, + verify=True, + allow_redirects=True, + ) + + check.poll(instance_http['prometheus_endpoint']) + get.assert_called_with( + instance_http['prometheus_endpoint'], + stream=False, + headers=expected_headers, + auth=requests.auth.HTTPDigestAuth('data', 'dog'), + cert='/path/to/cert', + timeout=(42.0, 42.0), + proxies=None, + verify=True, + allow_redirects=True, + ) diff --git a/datadog_checks_base/tests/base/utils/http/test_api.py b/datadog_checks_base/tests/base/utils/http/test_api.py index 3440c300147d2..6986ae297d477 100644 --- a/datadog_checks_base/tests/base/utils/http/test_api.py +++ b/datadog_checks_base/tests/base/utils/http/test_api.py @@ -13,9 +13,9 @@ def test_get(): http = RequestsWrapper({}, {}) - with mock.patch('requests.get'): + with mock.patch('requests.Session.get'): http.get('https://www.google.com') - requests.get.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.get.assert_called_once_with('https://www.google.com', **http.options) def test_get_session(): @@ -31,9 +31,9 @@ def test_get_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.get'): + with mock.patch('requests.Session.get'): http.get('https://www.google.com', auth=options['auth']) - requests.get.assert_called_once_with('https://www.google.com', **options) + requests.Session.get.assert_called_once_with('https://www.google.com', **options) def test_get_session_option_override(): @@ -49,9 +49,9 @@ def test_get_session_option_override(): def test_post(): http = RequestsWrapper({}, {}) - with mock.patch('requests.post'): + with mock.patch('requests.Session.post'): http.post('https://www.google.com') - requests.post.assert_called_once_with('https://www.google.com', **http.options) + 
requests.Session.post.assert_called_once_with('https://www.google.com', **http.options) def test_post_session(): @@ -67,9 +67,9 @@ def test_post_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.post'): + with mock.patch('requests.Session.post'): http.post('https://www.google.com', auth=options['auth']) - requests.post.assert_called_once_with('https://www.google.com', **options) + requests.Session.post.assert_called_once_with('https://www.google.com', **options) def test_post_session_option_override(): @@ -85,9 +85,9 @@ def test_post_session_option_override(): def test_head(): http = RequestsWrapper({}, {}) - with mock.patch('requests.head'): + with mock.patch('requests.Session.head'): http.head('https://www.google.com') - requests.head.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.head.assert_called_once_with('https://www.google.com', **http.options) def test_head_session(): @@ -103,9 +103,9 @@ def test_head_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.head'): + with mock.patch('requests.Session.head'): http.head('https://www.google.com', auth=options['auth']) - requests.head.assert_called_once_with('https://www.google.com', **options) + requests.Session.head.assert_called_once_with('https://www.google.com', **options) def test_head_session_option_override(): @@ -121,9 +121,9 @@ def test_head_session_option_override(): def test_put(): http = RequestsWrapper({}, {}) - with mock.patch('requests.put'): + with mock.patch('requests.Session.put'): http.put('https://www.google.com') - requests.put.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.put.assert_called_once_with('https://www.google.com', **http.options) def test_put_session(): @@ -139,9 +139,9 @@ def test_put_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.put'): + with mock.patch('requests.Session.put'): http.put('https://www.google.com', auth=options['auth']) - requests.put.assert_called_once_with('https://www.google.com', **options) + requests.Session.put.assert_called_once_with('https://www.google.com', **options) def test_put_session_option_override(): @@ -157,9 +157,9 @@ def test_put_session_option_override(): def test_patch(): http = RequestsWrapper({}, {}) - with mock.patch('requests.patch'): + with mock.patch('requests.Session.patch'): http.patch('https://www.google.com') - requests.patch.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.patch.assert_called_once_with('https://www.google.com', **http.options) def test_patch_session(): @@ -175,9 +175,9 @@ def test_patch_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.patch'): + with mock.patch('requests.Session.patch'): http.patch('https://www.google.com', auth=options['auth']) - requests.patch.assert_called_once_with('https://www.google.com', **options) + requests.Session.patch.assert_called_once_with('https://www.google.com', **options) def test_patch_session_option_override(): @@ -193,9 +193,9 @@ def test_patch_session_option_override(): def test_delete(): http = RequestsWrapper({}, {}) - with mock.patch('requests.delete'): + with mock.patch('requests.Session.delete'): http.delete('https://www.google.com') - requests.delete.assert_called_once_with('https://www.google.com', **http.options) + 
requests.Session.delete.assert_called_once_with('https://www.google.com', **http.options) def test_delete_session(): @@ -211,9 +211,9 @@ def test_delete_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.delete'): + with mock.patch('requests.Session.delete'): http.delete('https://www.google.com', auth=options['auth']) - requests.delete.assert_called_once_with('https://www.google.com', **options) + requests.Session.delete.assert_called_once_with('https://www.google.com', **options) def test_delete_session_option_override(): @@ -229,9 +229,9 @@ def test_delete_session_option_override(): def test_options(): http = RequestsWrapper({}, {}) - with mock.patch('requests.options'): + with mock.patch('requests.Session.options'): http.options_method('https://www.google.com') - requests.options.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.options.assert_called_once_with('https://www.google.com', **http.options) def test_options_session(): @@ -247,9 +247,9 @@ def test_options_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.options'): + with mock.patch('requests.Session.options'): http.options_method('https://www.google.com', auth=options['auth']) - requests.options.assert_called_once_with('https://www.google.com', **options) + requests.Session.options.assert_called_once_with('https://www.google.com', **options) def test_options_session_option_override(): diff --git a/datadog_checks_base/tests/base/utils/http/test_authtoken.py b/datadog_checks_base/tests/base/utils/http/test_authtoken.py index 6b6913c80093a..c4edab004339b 100644 --- a/datadog_checks_base/tests/base/utils/http/test_authtoken.py +++ b/datadog_checks_base/tests/base/utils/http/test_authtoken.py @@ -399,7 +399,7 @@ def test_pattern_no_match(self): init_config = {} http = RequestsWrapper(instance, init_config) - with mock.patch('requests.get'): + with mock.patch('requests.Session.get'): write_file(token_file, '\nsecret\nsecret\n') with pytest.raises( @@ -424,7 +424,7 @@ def test_pattern_match(self): expected_headers = {'Authorization': 'Bearer bar'} expected_headers.update(DEFAULT_OPTIONS['headers']) - with mock.patch('requests.get') as get: + with mock.patch('requests.Session.get') as get: write_file(token_file, '\nfoobar\nfoobaz\n') http.get('https://www.google.com') @@ -463,7 +463,7 @@ def __init__(self, *args, **kwargs): def fetch_token(self, *args, **kwargs): return {'error': 'unauthorized_client'} - with mock.patch('requests.get'), mock.patch('oauthlib.oauth2.BackendApplicationClient'), mock.patch( + with mock.patch('requests.Session.get'), mock.patch('oauthlib.oauth2.BackendApplicationClient'), mock.patch( 'requests_oauthlib.OAuth2Session', side_effect=MockOAuth2Session ): with pytest.raises(Exception, match='OAuth2 client credentials grant error: unauthorized_client'): @@ -503,9 +503,11 @@ def __init__(self, *args, **kwargs): def fetch_token(self, *args, **kwargs): return token_response - with mock.patch('requests.get') as get, mock.patch('oauthlib.oauth2.BackendApplicationClient'), mock.patch( - 'requests_oauthlib.OAuth2Session', side_effect=MockOAuth2Session - ), mock.patch('datadog_checks.base.utils.http.get_timestamp', return_value=0): + with mock.patch('requests.Session.get') as get, mock.patch( + 'oauthlib.oauth2.BackendApplicationClient' + ), mock.patch('requests_oauthlib.OAuth2Session', side_effect=MockOAuth2Session), mock.patch( + 
'datadog_checks.base.utils.http.get_timestamp', return_value=0 + ): http.get('https://www.google.com') get.assert_called_with( @@ -550,9 +552,9 @@ def fetch_token(self, *args, **kwargs): assert kwargs['audience'] == 'http://example.com' return {'access_token': 'foo', 'expires_in': 9000} - with mock.patch('requests.get') as get, mock.patch('oauthlib.oauth2.BackendApplicationClient'), mock.patch( - 'requests_oauthlib.OAuth2Session', side_effect=MockOAuth2Session - ): + with mock.patch('requests.Session.get') as get, mock.patch( + 'oauthlib.oauth2.BackendApplicationClient' + ), mock.patch('requests_oauthlib.OAuth2Session', side_effect=MockOAuth2Session): http.get('https://www.google.com') get.assert_called_with( @@ -610,7 +612,9 @@ def auth(*args, **kwargs): return MockResponse(status_code=404) http = RequestsWrapper(instance, init_config) - with mock.patch('requests.post', side_effect=login), mock.patch('requests.get', side_effect=auth): + with mock.patch('requests.Session.post', side_effect=login), mock.patch( + 'requests.Session.get', side_effect=auth + ): http.get('https://leader.mesos/service/some-service') @@ -629,7 +633,7 @@ def test_default_placeholder_same_as_value(self): expected_headers = {'X-Vault-Token': 'foobar'} expected_headers.update(DEFAULT_OPTIONS['headers']) - with mock.patch('requests.get') as get: + with mock.patch('requests.Session.get') as get: write_file(token_file, '\nfoobar\n') http.get('https://www.google.com') @@ -662,7 +666,7 @@ def test_read_before_first_request(self): expected_headers = {'Authorization': 'Bearer secret1'} expected_headers.update(DEFAULT_OPTIONS['headers']) - with mock.patch('requests.get') as get: + with mock.patch('requests.Session.get') as get: write_file(token_file, '\nsecret1\n') http.get('https://www.google.com') @@ -708,7 +712,7 @@ def test_refresh_after_connection_error(self): init_config = {} http = RequestsWrapper(instance, init_config) - with mock.patch('requests.get'): + with mock.patch('requests.Session.get'): write_file(token_file, '\nsecret1\n') http.get('https://www.google.com') @@ -723,7 +727,7 @@ def raise_error_once(*args, **kwargs): expected_headers = {'Authorization': 'Bearer secret2'} expected_headers.update(DEFAULT_OPTIONS['headers']) - with mock.patch('requests.get', side_effect=raise_error_once) as get: + with mock.patch('requests.Session.get', side_effect=raise_error_once) as get: write_file(token_file, '\nsecret2\n') http.get('https://www.google.com') @@ -753,7 +757,7 @@ def test_refresh_after_bad_status_code(self): init_config = {} http = RequestsWrapper(instance, init_config) - with mock.patch('requests.get'): + with mock.patch('requests.Session.get'): write_file(token_file, '\nsecret1\n') http.get('https://www.google.com') @@ -762,7 +766,7 @@ def error(): expected_headers = {'Authorization': 'Bearer secret2'} expected_headers.update(DEFAULT_OPTIONS['headers']) - with mock.patch('requests.get', return_value=mock.MagicMock(raise_for_status=error)) as get: + with mock.patch('requests.Session.get', return_value=mock.MagicMock(raise_for_status=error)) as get: write_file(token_file, '\nsecret2\n') http.get('https://www.google.com') diff --git a/datadog_checks_base/tests/base/utils/http/test_config.py b/datadog_checks_base/tests/base/utils/http/test_config.py index 5080f231d4fe1..8263c48b1a90e 100644 --- a/datadog_checks_base/tests/base/utils/http/test_config.py +++ b/datadog_checks_base/tests/base/utils/http/test_config.py @@ -1,6 +1,10 @@ # (C) Datadog, Inc. 
2022-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +import ssl + +import mock + from datadog_checks.base.utils.http import STANDARD_FIELDS, RequestsWrapper @@ -72,16 +76,23 @@ def test_config_verify(self): def test_config_ca_cert(self): instance = {'tls_ca_cert': 'ca_cert'} init_config = {} - http = RequestsWrapper(instance, init_config) - assert http.options['verify'] == 'ca_cert' + with mock.patch.object(ssl.SSLContext, 'load_verify_locations') as mock_load_verify_locations: + RequestsWrapper(instance, init_config) + + assert mock_load_verify_locations.call_count == 1 + assert mock_load_verify_locations.call_args[1]['cafile'] == 'ca_cert' def test_config_verify_and_ca_cert(self): instance = {'tls_verify': True, 'tls_ca_cert': 'ca_cert'} init_config = {} - http = RequestsWrapper(instance, init_config) - assert http.options['verify'] == 'ca_cert' + with mock.patch.object(ssl.SSLContext, 'load_verify_locations') as mock_load_verify_locations: + http = RequestsWrapper(instance, init_config) + + assert http.options['verify'] == 'ca_cert' + assert mock_load_verify_locations.call_count == 1 + assert mock_load_verify_locations.call_args[1]['cafile'] == 'ca_cert' class TestRemapper: diff --git a/datadog_checks_base/tests/base/utils/http/test_headers.py b/datadog_checks_base/tests/base/utils/http/test_headers.py index 030561e9993ec..ab42172973153 100644 --- a/datadog_checks_base/tests/base/utils/http/test_headers.py +++ b/datadog_checks_base/tests/base/utils/http/test_headers.py @@ -77,7 +77,7 @@ def test_extra_headers_on_http_method_call(): complete_headers.update({'answer': '42'}) extra_headers = {"foo": "bar"} - with mock.patch("requests.get") as get: + with mock.patch("requests.Session.get") as get: http.get("http://example.com/hello", extra_headers=extra_headers) expected_options = dict(complete_headers) diff --git a/datadog_checks_base/tests/base/utils/http/test_http.py b/datadog_checks_base/tests/base/utils/http/test_http.py index 23e426d5cb26d..8b7270eb96c52 100644 --- a/datadog_checks_base/tests/base/utils/http/test_http.py +++ b/datadog_checks_base/tests/base/utils/http/test_http.py @@ -50,13 +50,12 @@ class TestTLSCiphers: ) def test_cipher_construction(self, instance, expected_ciphers): init_config = {} - http = RequestsWrapper(instance, init_config) - mock_socket = mock.MagicMock() - with mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers, mock.patch( - 'datadog_checks.base.utils.http.create_socket_connection', return_value=mock_socket - ): - http.fetch_intermediate_certs('https://www.google.com') + # Mock SSL context creation before RequestsWrapper initialization + with mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers: + RequestsWrapper(instance, init_config) + + # Verify that set_ciphers was called with the expected ciphers during TLS context creation mock_set_ciphers.assert_called_once_with(expected_ciphers) @@ -176,7 +175,7 @@ class TestLogger: def test_default(self, caplog): check = AgentCheck('test', {}, [{}]) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): check.http.get('https://www.google.com') expected_message = 'Sending GET request to https://www.google.com' @@ -190,7 +189,7 @@ def test_instance(self, caplog): assert check.http.logger is check.log - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): 
check.http.get('https://www.google.com') expected_message = 'Sending GET request to https://www.google.com' @@ -207,7 +206,7 @@ def test_init_config(self, caplog): assert check.http.logger is check.log - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): check.http.get('https://www.google.com') expected_message = 'Sending GET request to https://www.google.com' @@ -222,7 +221,7 @@ def test_instance_override(self, caplog): init_config = {'log_requests': True} check = AgentCheck('test', init_config, [instance]) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): check.http.get('https://www.google.com') expected_message = 'Sending GET request to https://www.google.com' diff --git a/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py b/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py index 2e6ba622bd541..fd4a51a07b0e6 100644 --- a/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py +++ b/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py @@ -114,7 +114,8 @@ def test_config_kerberos_keytab_file(): assert os.environ.get('KRB5_CLIENT_KTNAME') is None with mock.patch( - 'requests.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')) + 'requests.Session.get', + side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')), ): response = http.get('https://www.google.com') assert response.text == '/test/file' @@ -130,7 +131,9 @@ def test_config_kerberos_cache(): assert os.environ.get('KRB5CCNAME') is None - with mock.patch('requests.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5CCNAME', ''))): + with mock.patch( + 'requests.Session.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5CCNAME', '')) + ): response = http.get('https://www.google.com') assert response.text == '/test/file' @@ -145,7 +148,7 @@ def test_config_kerberos_cache_restores_rollback(): with EnvVars({'KRB5CCNAME': 'old'}): with mock.patch( - 'requests.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5CCNAME', '')) + 'requests.Session.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5CCNAME', '')) ): response = http.get('https://www.google.com') assert response.text == '/test/file' @@ -163,7 +166,7 @@ def test_config_kerberos_keytab_file_rollback(): assert os.environ.get('KRB5_CLIENT_KTNAME') == 'old' with mock.patch( - 'requests.get', + 'requests.Session.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')), ): response = http.get('https://www.google.com') diff --git a/datadog_checks_base/tests/base/utils/http/test_proxy.py b/datadog_checks_base/tests/base/utils/http/test_proxy.py index ff8251b82b11a..c9b6898ce7113 100644 --- a/datadog_checks_base/tests/base/utils/http/test_proxy.py +++ b/datadog_checks_base/tests/base/utils/http/test_proxy.py @@ -198,23 +198,24 @@ def test_no_proxy_uris_coverage(): ), ], ) -@mock.patch('datadog_checks.base.utils.http.requests') -def test_proxy_passes_right_params_to_requests(requests, proxy, expected_proxy, url): +def test_proxy_passes_right_params_to_requests(proxy, expected_proxy, url): instance = {'proxy': proxy} init_config = {} http = RequestsWrapper(instance, init_config) - http.get(url) - call_args = { - 'auth': None, - 'cert': None, - 'headers': 
OrderedDict( - [('User-Agent', 'Datadog Agent/0.0.0'), ('Accept', '*/*'), ('Accept-Encoding', 'gzip, deflate')] - ), - 'proxies': expected_proxy, - 'timeout': (10.0, 10.0), - 'verify': True, - 'allow_redirects': True, - } - requests.get.assert_called_with(url, **call_args) + with mock.patch('requests.Session.get') as mock_get: + http.get(url) + + call_args = { + 'auth': None, + 'cert': None, + 'headers': OrderedDict( + [('User-Agent', 'Datadog Agent/0.0.0'), ('Accept', '*/*'), ('Accept-Encoding', 'gzip, deflate')] + ), + 'proxies': expected_proxy, + 'timeout': (10.0, 10.0), + 'verify': True, + 'allow_redirects': True, + } + mock_get.assert_called_with(url, **call_args) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 3301a6a8f39c8..481ea658e0159 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -2,15 +2,21 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import logging +import ssl import mock import pytest from requests.exceptions import SSLError -from datadog_checks.base.utils.http import RequestsWrapper +from datadog_checks.base.utils.http import RequestsWrapper, SSLContextAdapter pytestmark = [pytest.mark.unit] +IANA_TO_OPENSSL_NAME = { + 'TLS_RSA_WITH_AES_256_GCM_SHA384': 'AES256-GCM-SHA384', + 'TLS_RSA_WITH_AES_128_GCM_SHA256': 'AES128-GCM-SHA256', +} + class TestCert: def test_config_default(self): @@ -23,16 +29,23 @@ def test_config_default(self): def test_config_cert(self): instance = {'tls_cert': 'cert'} init_config = {} - http = RequestsWrapper(instance, init_config) - assert http.options['cert'] == 'cert' + with mock.patch.object(ssl.SSLContext, 'load_cert_chain') as mock_load_cert_chain: + RequestsWrapper(instance, init_config) + + assert mock_load_cert_chain.call_count == 1 + assert mock_load_cert_chain.call_args[0][0] == 'cert' def test_config_cert_and_private_key(self): instance = {'tls_cert': 'cert', 'tls_private_key': 'key'} init_config = {} - http = RequestsWrapper(instance, init_config) - assert http.options['cert'] == ('cert', 'key') + with mock.patch.object(ssl.SSLContext, 'load_cert_chain') as mock_load_cert_chain: + RequestsWrapper(instance, init_config) + + assert mock_load_cert_chain.call_count == 1 + assert mock_load_cert_chain.call_args[0][0] == 'cert' + assert mock_load_cert_chain.call_args[1]["keyfile"] == 'key' class TestIgnoreTLSWarning: @@ -71,7 +84,7 @@ def test_default_no_ignore(self, caplog): init_config = {} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -86,7 +99,7 @@ def test_default_no_ignore_http(self, caplog): init_config = {} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('http://www.google.com', verify=False) assert sum(1 for _, level, _ in caplog.record_tuples if level == logging.WARNING) == 0 @@ -96,7 +109,7 @@ def test_ignore(self, caplog): init_config = {} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with 
caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -108,7 +121,7 @@ def test_default_no_ignore_session(self, caplog): init_config = {} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -123,7 +136,7 @@ def test_ignore_session(self, caplog): init_config = {} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -135,7 +148,7 @@ def test_init_ignore(self, caplog): init_config = {'tls_ignore_warning': True} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -147,7 +160,7 @@ def test_default_init_no_ignore(self, caplog): init_config = {'tls_ignore_warning': False} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -162,7 +175,7 @@ def test_instance_ignore(self, caplog): init_config = {'tls_ignore_warning': False} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -174,7 +187,7 @@ def test_instance_no_ignore(self, caplog): init_config = {'tls_ignore_warning': True} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -228,7 +241,173 @@ def test_fetch_intermediate_certs(self, port): with mock.patch('datadog_checks.base.utils.http.create_socket_connection') as mock_create_socket_connection: with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.handle_auth_token'): with pytest.raises(SSLError): - with mock.patch('requests.get', side_effect=SSLError): + with mock.patch('requests.Session.get', side_effect=SSLError): http.get('https://localhost:{}'.format(port)) mock_create_socket_connection.assert_called_with('localhost', port) + + def test_fetch_intermediate_certs_tls_ciphers(self): + """Test that fetch_intermediate_certs uses the correct ciphers.""" + instance = {'tls_verify': True, 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384']} + init_config = {} + http = RequestsWrapper(instance, init_config) + 
+ with mock.patch('datadog_checks.base.utils.http.create_socket_connection') as mock_create_socket_connection: + mock_socket = mock.MagicMock() + mock_create_socket_connection.return_value = mock_socket + + with mock.patch('datadog_checks.base.utils.http.ssl.SSLContext') as mock_ssl_context_class: + mock_context = mock.MagicMock() + mock_ssl_context_class.return_value = mock_context + + # Mock the wrapped socket to avoid actual SSL operations + mock_wrapped_socket = mock.MagicMock() + mock_context.wrap_socket.return_value.__enter__.return_value = mock_wrapped_socket + mock_wrapped_socket.getpeercert.return_value = b'fake_cert_data' + mock_wrapped_socket.version.return_value = 'TLSv1.3' + + # Mock the certificate loading to avoid cryptography operations + with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.load_intermediate_certs'): + http.fetch_intermediate_certs('example.com', 443) + + # Verify set_ciphers was called with the correct cipher list + mock_context.set_ciphers.assert_called_once_with('TLS_RSA_WITH_AES_256_GCM_SHA384') + + +class TestSSLContext: + """Test the core SSL context functionality.""" + + def test_requests_wrapper_creates_ssl_context(self): + """Test that RequestsWrapper creates an SSLContext instance.""" + instance = {'tls_verify': True} + init_config = {} + http = RequestsWrapper(instance, init_config) + + assert hasattr(http, 'ssl_context') + assert isinstance(http.ssl_context, ssl.SSLContext) + + def test_session_uses_ssl_context_adapter(self): + """Test that the session uses SSLContextAdapter for consistent TLS configuration.""" + instance = {'tls_verify': True} + init_config = {} + http = RequestsWrapper(instance, init_config) + + session = http.session + https_adapter = session.get_adapter('https://example.com') + + assert isinstance(https_adapter, SSLContextAdapter) + assert https_adapter.ssl_context is http.ssl_context + + def test_ssl_context_ciphers_can_be_modified(self): + """Test that the chosen ciphers can be set on CI runners.""" + context = ssl.create_default_context() + context.set_ciphers('TLS_RSA_WITH_AES_256_GCM_SHA384') + + def test_tls_ciphers_applied_consistently(self): + """Test that tls_ciphers are applied consistently.""" + instance = {'tls_verify': True, 'tls_ciphers': list(IANA_TO_OPENSSL_NAME.keys())} + init_config = {} + http = RequestsWrapper(instance, init_config) + + # Verify the TLS context wrapper has the cipher configuration + assert http.tls_config['tls_ciphers'] == instance['tls_ciphers'] + + # Verify the session adapter uses the same TLS context + session = http.session + https_adapter = session.get_adapter('https://example.com') + + assert isinstance(https_adapter, SSLContextAdapter) + assert https_adapter.ssl_context is http.ssl_context + # Verify that the ciphers are set correctly in the TLS context + for cipher in instance['tls_ciphers']: + # At least one entry's name field should match the OpenSSL name + assert any( + IANA_TO_OPENSSL_NAME.get(cipher) in c.get('name') for c in https_adapter.ssl_context.get_ciphers() + ) + + def test_default_tls_ciphers(self): + """Test that default TLS ciphers are applied when none are specified.""" + instance = {'tls_verify': True} + init_config = {} + + # Mock the SSLContext creation + with mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers: + RequestsWrapper(instance, init_config) + + # Verify that the default ciphers are set + assert mock_set_ciphers.call_count == 1 + assert mock_set_ciphers.call_args[0][0] == 'ALL' + + def 
test_host_header_compatibility(self): + """Test that host header functionality works with TLS context unification.""" + instance = { + 'tls_use_host_header': True, + 'headers': {'Host': 'custom-host.example.com'}, + 'tls_verify': True, + 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384'], + } + init_config = {} + http = RequestsWrapper(instance, init_config) + + session = http.session + https_adapter = session.get_adapter('https://example.com') + + # Should be the combined adapter that supports both TLS context and host headers + assert hasattr(https_adapter, 'ssl_context') + assert https_adapter.ssl_context is http.ssl_context + assert 'SSLContextHostHeaderAdapter' in str(type(https_adapter)) + + def test_remapper_functionality_preserved(self): + """Test that config remapping functionality is preserved with TLS context unification.""" + instance = { + 'disable_ssl_validation': True, + 'custom_private_key': '/path/to/key.pem', + } + remapper = { + 'disable_ssl_validation': {'name': 'tls_verify', 'invert': True}, + 'custom_private_key': {'name': 'tls_private_key'}, + } + init_config = {} + + # Mock the TLS context creation to avoid file operations + with mock.patch('datadog_checks.base.utils.http.create_ssl_context') as mock_create_context: + mock_context = mock.MagicMock() + mock_create_context.return_value = mock_context + + http = RequestsWrapper(instance, init_config, remapper=remapper) + + # Verify remapping worked - disable_ssl_validation: True should become tls_verify: False + assert http.tls_config['tls_verify'] is False + assert http.tls_config['tls_private_key'] == '/path/to/key.pem' + + def test_backward_compatibility_maintained(self): + """Test that all existing TLS configuration options still work.""" + instance = { + 'tls_verify': True, + 'tls_ca_cert': '/path/to/ca.pem', + 'tls_cert': '/path/to/cert.pem', + 'tls_private_key': '/path/to/key.pem', + 'tls_validate_hostname': True, + 'tls_ignore_warning': False, + 'tls_protocols_allowed': ['TLSv1.2', 'TLSv1.3'], + 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384'], + } + init_config = {} + + # Mock the TLS context creation to avoid file operations + with mock.patch('datadog_checks.base.utils.http.create_ssl_context') as mock_create_context: + mock_context = mock.MagicMock() + mock_create_context.return_value = mock_context + + # Should not raise any exceptions + http = RequestsWrapper(instance, init_config) + + # Verify all options are preserved in RequestsWrapper + assert http.options['verify'] == '/path/to/ca.pem' + assert http.options['cert'] == ('/path/to/cert.pem', '/path/to/key.pem') + assert http.ignore_tls_warning is False + assert http.tls_protocols_allowed == ['TLSv1.2', 'TLSv1.3'] + + # Verify TLS context wrapper has the right config + assert http.tls_config['tls_verify'] is True + assert http.tls_config['tls_ciphers'] == ['TLS_RSA_WITH_AES_256_GCM_SHA384'] diff --git a/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py b/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py index f9eaaf19b38a0..745c0eb69c5e9 100644 --- a/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py +++ b/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py @@ -292,7 +292,7 @@ def mock_http_response(mocker): from ..http import MockResponse yield lambda *args, **kwargs: mocker.patch( - kwargs.pop('method', 'requests.get'), return_value=MockResponse(*args, **kwargs) + kwargs.pop('method', 'requests.Session.get'), return_value=MockResponse(*args, **kwargs) ) diff --git a/datadog_checks_dev/tests/tooling/test_github.py 
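# Illustrative sketch (not from the PR): the TestSSLContext assertions above only check that the
# HTTPS adapter exposes the wrapper's ssl_context. The class below is an assumption about how
# such an adapter is commonly written, not the actual SSLContextAdapter from datadog_checks.base.
import ssl
import requests
from requests.adapters import HTTPAdapter

class IllustrativeSSLContextAdapter(HTTPAdapter):
    def __init__(self, ssl_context=None, **kwargs):
        # Keep one shared, pre-configured context (ciphers, CA bundle, client cert).
        self.ssl_context = ssl_context or ssl.create_default_context()
        super().__init__(**kwargs)

    def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
        # urllib3 accepts an ssl_context keyword, so every pooled HTTPS connection reuses it.
        pool_kwargs['ssl_context'] = self.ssl_context
        return super().init_poolmanager(connections, maxsize, block=block, **pool_kwargs)

# Example mounting, mirroring what the assertions above rely on:
_session = requests.Session()
_session.mount('https://', IllustrativeSSLContextAdapter(ssl.create_default_context()))
# End of sketch; the diff continues below.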
b/datadog_checks_dev/tests/tooling/test_github.py index ab96b415fcd3e..a5671bcc2c4c8 100644 --- a/datadog_checks_dev/tests/tooling/test_github.py +++ b/datadog_checks_dev/tests/tooling/test_github.py @@ -27,7 +27,7 @@ def test_get_pr_approvers(github_response, expected_approvers): mock_response.json.return_value = github_response with mock.patch( - "requests.get", + "requests.Session.get", return_value=mock_response, ): assert get_pr_approvers("integrations-core", "42", {}) == expected_approvers diff --git a/datadog_cluster_agent/tests/conftest.py b/datadog_cluster_agent/tests/conftest.py index 084b6164a174a..4ccf2c30a4710 100644 --- a/datadog_cluster_agent/tests/conftest.py +++ b/datadog_cluster_agent/tests/conftest.py @@ -26,7 +26,7 @@ def mock_metrics_endpoint(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/dcgm/tests/conftest.py b/dcgm/tests/conftest.py index fe3a75dd256b7..ab5c6d305432e 100644 --- a/dcgm/tests/conftest.py +++ b/dcgm/tests/conftest.py @@ -45,7 +45,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -59,7 +59,7 @@ def mock_label_remap(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/druid/tests/test_unit.py b/druid/tests/test_unit.py index ef7ee18b45228..3a21281fbc8bc 100644 --- a/druid/tests/test_unit.py +++ b/druid/tests/test_unit.py @@ -22,7 +22,8 @@ def test_missing_url_config(aggregator): def test_service_check_can_connect_success(aggregator, instance): check = DruidCheck('druid', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): mock_resp = mock.MagicMock(status_code=200) mock_resp.json.return_value = {'abc': '123'} req.get.return_value = mock_resp @@ -41,7 +42,8 @@ def test_service_check_can_connect_success(aggregator, instance): def test_service_check_can_connect_failure(aggregator, instance, exception_class): check = DruidCheck('druid', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): attrs = {'raise_for_status.side_effect': exception_class} req.get.side_effect = [mock.MagicMock(status_code=500, **attrs)] diff --git a/ecs_fargate/tests/test_unit.py b/ecs_fargate/tests/test_unit.py index ed46749111f04..c4f967166bcec 100644 --- a/ecs_fargate/tests/test_unit.py +++ b/ecs_fargate/tests/test_unit.py @@ -42,7 +42,7 @@ def test_failing_check(check, aggregator, dd_run_check): Testing fargate metadata endpoint error. 
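# Illustrative sketch (not from the PR): the ecs_fargate tests just below patch the target through
# the check module's own import path ('datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get').
# That is interchangeable with patching 'requests.Session.get' directly, because the attribute chain
# resolves to the very same class object. The import assumes the ecs_fargate package is installed.
import requests
import datadog_checks.ecs_fargate.ecs_fargate as fargate_module

assert fargate_module.requests is requests
assert fargate_module.requests.Session.get is requests.Session.get
# Hence both patch paths replace the same attribute on the same Session class.
# End of sketch; the diff continues below.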
""" with mock.patch( - 'datadog_checks.ecs_fargate.ecs_fargate.requests.get', return_value=MockResponse('{}', status_code=500) + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', return_value=MockResponse('{}', status_code=500) ): dd_run_check(check) @@ -55,7 +55,7 @@ def test_invalid_response_check(check, aggregator, dd_run_check): Testing invalid fargate metadata payload. """ with mock.patch( - 'datadog_checks.ecs_fargate.ecs_fargate.requests.get', return_value=MockResponse('{}', status_code=200) + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', return_value=MockResponse('{}', status_code=200) ): dd_run_check(check) @@ -67,7 +67,9 @@ def test_successful_check_linux(check, aggregator, dd_run_check): """ Testing successful fargate check on Linux. """ - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.get', side_effect=mocked_requests_get_linux): + with mock.patch( + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_linux + ): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) @@ -146,7 +148,9 @@ def test_successful_check_windows(check, aggregator, dd_run_check): """ Testing successful fargate check on Windows. """ - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.get', side_effect=mocked_requests_get_windows): + with mock.patch( + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_windows + ): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) @@ -204,7 +208,9 @@ def test_successful_check_wrong_sys_delta(check, aggregator, dd_run_check): """ Testing successful fargate check. """ - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.get', side_effect=mocked_requests_get_sys_delta): + with mock.patch( + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_sys_delta + ): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) @@ -287,7 +293,8 @@ def test_config(test_case, extra_config, expected_http_kwargs, dd_run_check): instance = extra_config check = FargateCheck('ecs_fargate', {}, instances=[instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) dd_run_check(check) diff --git a/ecs_fargate/tests/test_unit_v4.py b/ecs_fargate/tests/test_unit_v4.py index d3d3c78703f9c..e7bf70546bfb9 100644 --- a/ecs_fargate/tests/test_unit_v4.py +++ b/ecs_fargate/tests/test_unit_v4.py @@ -42,7 +42,9 @@ def test_successful_check_linux_v4(check, aggregator, dd_run_check): Testing successful fargate check on Linux on ECS ENDPOINT API v4. 
""" - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.get', side_effect=mocked_requests_get_linux_v4): + with mock.patch( + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_linux_v4 + ): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags_v4): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) diff --git a/elastic/tests/test_unit.py b/elastic/tests/test_unit.py index ce0ca93e21346..551133e47a4fd 100644 --- a/elastic/tests/test_unit.py +++ b/elastic/tests/test_unit.py @@ -134,7 +134,7 @@ def test_get_template_metrics(aggregator, instance, mock_http_response): def test_get_template_metrics_raise_exception(aggregator, instance): with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=MockResponse(status_code=403), ): check = ESCheck('elastic', {}, instances=[instance]) @@ -151,7 +151,7 @@ def test_get_value_from_path(): def test__get_data_throws_authentication_error(instance): with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=MockResponse(status_code=400), ): check = ESCheck('elastic', {}, instances=[instance]) @@ -162,7 +162,7 @@ def test__get_data_throws_authentication_error(instance): def test__get_data_creates_critical_service_alert(aggregator, instance): with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=MockResponse(status_code=500), ): check = ESCheck('elastic', {}, instances=[instance]) @@ -193,7 +193,7 @@ def test__get_data_creates_critical_service_alert(aggregator, instance): ) def test_disable_legacy_sc_tags(aggregator, es_instance): with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=MockResponse(status_code=500), ): check = ESCheck('elastic', {}, instances=[es_instance]) diff --git a/envoy/tests/legacy/test_unit.py b/envoy/tests/legacy/test_unit.py index 0f76041d1a618..f0c6a19ad38f8 100644 --- a/envoy/tests/legacy/test_unit.py +++ b/envoy/tests/legacy/test_unit.py @@ -139,7 +139,8 @@ def test_config(extra_config, expected_http_kwargs, check, dd_run_check): instance.update(extra_config) check = check(instance) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) dd_run_check(check) @@ -189,7 +190,7 @@ def test_metadata_with_exception( check.check_id = 'test:123' check.log = mock.MagicMock() - with mock.patch('requests.get', side_effect=exception): + with mock.patch('requests.Session.get', side_effect=exception): check._collect_metadata() datadog_agent.assert_metadata_count(0) check.log.warning.assert_called_with(*log_call_parameters) diff --git a/etcd/tests/test_integration.py b/etcd/tests/test_integration.py index 734bd86b67da7..da94be9a527c3 100644 --- a/etcd/tests/test_integration.py +++ b/etcd/tests/test_integration.py @@ -78,7 +78,8 @@ def test_config(instance, test_case, extra_config, expected_http_kwargs, dd_run_ instance.update(extra_config) check = Etcd(CHECK_NAME, {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) dd_run_check(check) diff --git a/external_dns/tests/conftest.py b/external_dns/tests/conftest.py index 48e4c325e9ec8..659cb475e7f29 
100644 --- a/external_dns/tests/conftest.py +++ b/external_dns/tests/conftest.py @@ -19,7 +19,7 @@ def mock_external_dns(): text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split('\n'), headers={'Content-Type': 'text/plain'} ), diff --git a/fluxcd/tests/conftest.py b/fluxcd/tests/conftest.py index 2b43fc5bca29e..55a944c2d90bd 100644 --- a/fluxcd/tests/conftest.py +++ b/fluxcd/tests/conftest.py @@ -76,7 +76,7 @@ def mock_metrics_v1(): content = f.read() with mock.patch( - "requests.get", + "requests.Session.get", return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: content.split("\n"), @@ -94,7 +94,7 @@ def mock_metrics_v2(): content = f.read() with mock.patch( - "requests.get", + "requests.Session.get", return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: content.split("\n"), diff --git a/fly_io/tests/conftest.py b/fly_io/tests/conftest.py index beeb4fefe4628..7c49e6c751932 100644 --- a/fly_io/tests/conftest.py +++ b/fly_io/tests/conftest.py @@ -128,5 +128,5 @@ def get(url, *args, **kwargs): return mock.MagicMock(json=mock_json, status_code=mock_status_code) mock_get = mock.MagicMock(side_effect=get) - monkeypatch.setattr('requests.get', mock_get) + monkeypatch.setattr('requests.Session.get', mock_get) return mock_get diff --git a/gitlab/tests/conftest.py b/gitlab/tests/conftest.py index 9b8ce8222338c..b0d47e010b803 100644 --- a/gitlab/tests/conftest.py +++ b/gitlab/tests/conftest.py @@ -104,7 +104,7 @@ def dd_environment(): @pytest.fixture() def mock_data(): with mock.patch( - 'requests.get', + 'requests.Session.get', side_effect=mocked_requests_get, ): yield diff --git a/gitlab_runner/tests/test_unit.py b/gitlab_runner/tests/test_unit.py index 4d44c97dd137e..9d3fb039050aa 100644 --- a/gitlab_runner/tests/test_unit.py +++ b/gitlab_runner/tests/test_unit.py @@ -28,7 +28,8 @@ def test_timeout(test_case, timeout_config, expected_timeout): gitlab_runner = GitlabRunnerCheck('gitlab_runner', common.CONFIG['init_config'], instances=config['instances']) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) gitlab_runner.check(config['instances'][0]) diff --git a/haproxy/tests/conftest.py b/haproxy/tests/conftest.py index b72ae7fc1806f..c4b3be16cc61e 100644 --- a/haproxy/tests/conftest.py +++ b/haproxy/tests/conftest.py @@ -210,7 +210,7 @@ def haproxy_mock(): filepath = os.path.join(HERE, 'fixtures', 'mock_data') with open(filepath, 'rb') as f: data = f.read() - p = mock.patch('requests.get', return_value=mock.Mock(content=data)) + p = mock.patch('requests.Session.get', return_value=mock.Mock(content=data)) yield p.start() p.stop() @@ -228,7 +228,7 @@ def haproxy_mock_evil(): filepath = os.path.join(HERE, 'fixtures', 'mock_data_evil') with open(filepath, 'rb') as f: data = f.read() - p = mock.patch('requests.get', return_value=mock.Mock(content=data)) + p = mock.patch('requests.Session.get', return_value=mock.Mock(content=data)) yield p.start() p.stop() @@ -238,7 +238,7 @@ def haproxy_mock_enterprise_version_info(): filepath = os.path.join(HERE, 'fixtures', 'enterprise_version_info.html') with open(filepath, 'rb') as f: data = f.read() - with mock.patch('requests.get', return_value=mock.Mock(content=data)) as p: + with mock.patch('requests.Session.get', 
return_value=mock.Mock(content=data)) as p: yield p diff --git a/haproxy/tests/legacy/test_unit.py b/haproxy/tests/legacy/test_unit.py index b7edf8efc7f48..f93fca547f5ce 100644 --- a/haproxy/tests/legacy/test_unit.py +++ b/haproxy/tests/legacy/test_unit.py @@ -433,7 +433,7 @@ def test_version_failure(aggregator, check, datadog_agent): filepath = os.path.join(os.path.dirname(common.HERE), 'fixtures', 'mock_data') with open(filepath, 'rb') as f: data = f.read() - with mock.patch('requests.get') as m: + with mock.patch('requests.Session.get') as m: m.side_effect = [RuntimeError("Ooops"), mock.Mock(content=data)] haproxy_check.check(config) diff --git a/harbor/tests/conftest.py b/harbor/tests/conftest.py index ea2a80f91a529..65fffa87bd2ca 100644 --- a/harbor/tests/conftest.py +++ b/harbor/tests/conftest.py @@ -107,7 +107,7 @@ def harbor_api(harbor_check, admin_instance, patch_requests): @pytest.fixture def patch_requests(): - with patch("requests.api.request", side_effect=mocked_requests): + with patch("requests.Session.request", side_effect=mocked_requests): yield @@ -117,32 +117,32 @@ def get_docker_compose_file(): return os.path.join(HERE, 'compose', harbor_folder, 'docker-compose.yml') -def mocked_requests(_, *args, **kwargs): +def mocked_requests(_, url, **kwargs): def match(url, *candidates_url): for c in candidates_url: if url == c.format(base_url=URL): return True return False - if match(args[0], LOGIN_URL): + if match(url, LOGIN_URL): return MockResponse() - elif match(args[0], HEALTH_URL): + elif match(url, HEALTH_URL): return MockResponse(json_data=HEALTH_FIXTURE) - elif match(args[0], PING_URL): + elif match(url, PING_URL): return MockResponse('Pong') - elif match(args[0], CHARTREPO_HEALTH_URL): + elif match(url, CHARTREPO_HEALTH_URL): return MockResponse(json_data=CHARTREPO_HEALTH_FIXTURE) - elif match(args[0], PROJECTS_URL): + elif match(url, PROJECTS_URL): return MockResponse(json_data=PROJECTS_FIXTURE) - elif match(args[0], REGISTRIES_URL): + elif match(url, REGISTRIES_URL): return MockResponse(json_data=REGISTRIES_FIXTURE) - elif match(args[0], REGISTRIES_PING_URL): + elif match(url, REGISTRIES_PING_URL): return MockResponse() - elif match(args[0], VOLUME_INFO_URL): + elif match(url, VOLUME_INFO_URL): if HARBOR_VERSION < VERSION_2_2: return MockResponse(json_data=VOLUME_INFO_PRE_2_2_FIXTURE) return MockResponse(json_data=VOLUME_INFO_FIXTURE) - elif match(args[0], SYSTEM_INFO_URL): + elif match(url, SYSTEM_INFO_URL): return MockResponse(json_data=SYSTEM_INFO_FIXTURE) return MockResponse(status_code=404) diff --git a/hdfs_datanode/tests/conftest.py b/hdfs_datanode/tests/conftest.py index 48b97b46c3a53..d8248b94a20a1 100644 --- a/hdfs_datanode/tests/conftest.py +++ b/hdfs_datanode/tests/conftest.py @@ -37,19 +37,19 @@ def instance(): @pytest.fixture def mocked_request(): - with patch('requests.get', new=requests_get_mock): + with patch('requests.Session.get', new=requests_get_mock): yield @pytest.fixture def mocked_metadata_request(): - with patch('requests.get', new=requests_metadata_mock): + with patch('requests.Session.get', new=requests_metadata_mock): yield @pytest.fixture def mocked_auth_request(): - with patch('requests.get', new=requests_auth_mock): + with patch('requests.Session.get', new=requests_auth_mock): yield diff --git a/hdfs_namenode/tests/conftest.py b/hdfs_namenode/tests/conftest.py index 47d43f92010d0..f6a0da366dfab 100644 --- a/hdfs_namenode/tests/conftest.py +++ b/hdfs_namenode/tests/conftest.py @@ -44,17 +44,17 @@ def check(): @pytest.fixture def 
mocked_request(): - with patch("requests.get", new=requests_get_mock): + with patch("requests.Session.get", new=requests_get_mock): yield @pytest.fixture def mocked_auth_request(): - with patch("requests.get", new=requests_auth_mock): + with patch("requests.Session.get", new=requests_auth_mock): yield -def requests_get_mock(url, *args, **kwargs): +def requests_get_mock(session, url, *args, **kwargs): if url == NAME_SYSTEM_STATE_URL: return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_namesystem_state.json')) elif url == NAME_SYSTEM_URL: diff --git a/http_check/tests/test_http_integration.py b/http_check/tests/test_http_integration.py index 4ccbc5c601e7b..079bed7b8fd5a 100644 --- a/http_check/tests/test_http_integration.py +++ b/http_check/tests/test_http_integration.py @@ -432,7 +432,7 @@ def test_data_methods(aggregator, http_check): aggregator.reset() -def test_unexisting_ca_cert_should_throw_error(aggregator, dd_run_check): +def test_unexisting_ca_cert_should_log_warning(aggregator, dd_run_check): instance = { 'name': 'Test Web VM HTTPS SSL', 'url': 'https://foo.bar.net/', @@ -445,10 +445,11 @@ def test_unexisting_ca_cert_should_throw_error(aggregator, dd_run_check): } check = HTTPCheck('http_check', {'ca_certs': 'foo'}, [instance]) + with mock.patch('datadog_checks.base.utils.http.logging.Logger.warning') as mock_warning: - dd_run_check(check) - aggregator.assert_service_check(HTTPCheck.SC_STATUS, status=AgentCheck.CRITICAL) - assert 'invalid path: /tmp/unexisting.crt' in aggregator._service_checks[HTTPCheck.SC_STATUS][0].message + dd_run_check(check) + mock_warning.assert_called_once() + assert any(instance['tls_ca_cert'] in arg for arg in mock_warning.call_args) def test_instance_auth_token(dd_run_check): diff --git a/impala/tests/conftest.py b/impala/tests/conftest.py index 5aa771d5e50de..c20b489212e17 100644 --- a/impala/tests/conftest.py +++ b/impala/tests/conftest.py @@ -101,7 +101,7 @@ def mock_metrics(request): content = fixture_file.read() with mock.patch( - "requests.get", + "requests.Session.get", return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: content.split("\n"), diff --git a/kube_apiserver_metrics/tests/test_kube_apiserver_metrics_1_25.py b/kube_apiserver_metrics/tests/test_kube_apiserver_metrics_1_25.py index afec1846bb1d5..b1ff0388c4741 100644 --- a/kube_apiserver_metrics/tests/test_kube_apiserver_metrics_1_25.py +++ b/kube_apiserver_metrics/tests/test_kube_apiserver_metrics_1_25.py @@ -25,7 +25,7 @@ def mock_get(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), diff --git a/kube_apiserver_metrics/tests/test_sli_metrics.py b/kube_apiserver_metrics/tests/test_sli_metrics.py index d3a726bb592c4..b550165a81955 100644 --- a/kube_apiserver_metrics/tests/test_sli_metrics.py +++ b/kube_apiserver_metrics/tests/test_sli_metrics.py @@ -22,7 +22,7 @@ def mock_metrics(): with open(f_name, "r") as f: text_data = f.read() with mock.patch( - "requests.get", + "requests.Session.get", return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), diff --git a/kube_controller_manager/tests/test_kube_controller_manager.py b/kube_controller_manager/tests/test_kube_controller_manager.py index 7a16d6665e847..4c4ac8ceaf4e3 100644 --- a/kube_controller_manager/tests/test_kube_controller_manager.py +++ b/kube_controller_manager/tests/test_kube_controller_manager.py @@ 
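# Illustrative sketch (not from the PR): the hdfs/mapreduce mocks gain a leading `session`
# parameter because they are installed with `patch(..., new=<plain function>)`. A real function
# is a descriptor, so it gets bound to the Session instance and receives `self`; the default
# MagicMock patch is not, so no instance is recorded. The URL below is a placeholder.
import mock
import requests

def fake_get(session, url, *args, **kwargs):
    # `session` is the requests.Session instance whose .get() was called.
    return (type(session).__name__, url)

with mock.patch('requests.Session.get', new=fake_get):
    assert requests.Session().get('https://unused.invalid') == ('Session', 'https://unused.invalid')
# With the default MagicMock patch (no `new=`), the recorded call has no Session instance in it,
# which is why only the `new=`-style mocks need the extra `session` argument.
# End of sketch; the diff continues below.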
-37,7 +37,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -146,13 +146,13 @@ def test_service_check_ok(monkeypatch): ] # successful health check - with mock.patch("requests.get", return_value=mock.MagicMock(status_code=200)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(status_code=200)): check._perform_service_check(instance) # failed health check raise_error = mock.Mock() raise_error.side_effect = requests.HTTPError('health check failed') - with mock.patch("requests.get", return_value=mock.MagicMock(raise_for_status=raise_error)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(raise_for_status=raise_error)): check._perform_service_check(instance) check.service_check.assert_has_calls(calls) diff --git a/kube_controller_manager/tests/test_sli_metrics.py b/kube_controller_manager/tests/test_sli_metrics.py index c3ff350c550ef..420e7255ce8e2 100644 --- a/kube_controller_manager/tests/test_sli_metrics.py +++ b/kube_controller_manager/tests/test_sli_metrics.py @@ -22,7 +22,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -205,7 +205,7 @@ def mock_request(): def test_detect_sli_endpoint(mock_metrics, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: mock_request.return_value.status_code = 200 c = KubeControllerManagerCheck(CHECK_NAME, {}, [instance]) c.check(instance) @@ -213,7 +213,7 @@ def test_detect_sli_endpoint(mock_metrics, instance): def test_detect_sli_endpoint_404(mock_metrics, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: mock_request.return_value.status_code = 404 c = KubeControllerManagerCheck(CHECK_NAME, {}, [instance]) c.check(instance) @@ -221,7 +221,7 @@ def test_detect_sli_endpoint_404(mock_metrics, instance): def test_detect_sli_endpoint_403(mock_metrics, mock_request, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: mock_request.return_value.status_code = 403 c = KubeControllerManagerCheck(CHECK_NAME, {}, [instance]) c.check(instance) diff --git a/kube_dns/tests/test_kube_dns.py b/kube_dns/tests/test_kube_dns.py index 6b92c0089388c..dcb44c2df6efd 100644 --- a/kube_dns/tests/test_kube_dns.py +++ b/kube_dns/tests/test_kube_dns.py @@ -24,7 +24,7 @@ def mock_get(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -96,13 +96,13 @@ def test_service_check_ok(self, monkeypatch): ] # successful health check - with mock.patch("requests.get", return_value=mock.MagicMock(status_code=200)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(status_code=200)): check._perform_service_check(instance) # failed health check raise_error = mock.Mock() raise_error.side_effect = requests.HTTPError('health check failed') - with mock.patch("requests.get", 
return_value=mock.MagicMock(raise_for_status=raise_error)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(raise_for_status=raise_error)): check._perform_service_check(instance) check.service_check.assert_has_calls(calls) diff --git a/kube_metrics_server/tests/test_kube_metrics_server.py b/kube_metrics_server/tests/test_kube_metrics_server.py index 96fea70a8ea66..4453dabbba072 100644 --- a/kube_metrics_server/tests/test_kube_metrics_server.py +++ b/kube_metrics_server/tests/test_kube_metrics_server.py @@ -28,7 +28,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -76,13 +76,13 @@ def test_service_check_ok(monkeypatch): ] # successful health check - with mock.patch("requests.get", return_value=mock.MagicMock(status_code=200)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(status_code=200)): check._perform_service_check(instance) # failed health check raise_error = mock.Mock() raise_error.side_effect = requests.HTTPError('health check failed') - with mock.patch("requests.get", return_value=mock.MagicMock(raise_for_status=raise_error)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(raise_for_status=raise_error)): check._perform_service_check(instance) check.service_check.assert_has_calls(calls) diff --git a/kube_proxy/tests/test_kube_proxy.py b/kube_proxy/tests/test_kube_proxy.py index 941d6dbe4da83..c31b02b722f32 100644 --- a/kube_proxy/tests/test_kube_proxy.py +++ b/kube_proxy/tests/test_kube_proxy.py @@ -25,7 +25,7 @@ def mock_iptables(): with open(f_name, 'r') as f: text_data = f.read() mock_iptables = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -40,7 +40,7 @@ def mock_userspace(): with open(f_name, 'r') as f: text_data = f.read() mock_userspace = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -119,13 +119,13 @@ def test_service_check_ok(monkeypatch): ] # successful health check - with mock.patch("requests.get", return_value=mock.MagicMock(status_code=200)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(status_code=200)): check._perform_service_check(instance) # failed health check raise_error = mock.Mock() raise_error.side_effect = requests.HTTPError('health check failed') - with mock.patch("requests.get", return_value=mock.MagicMock(raise_for_status=raise_error)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(raise_for_status=raise_error)): check._perform_service_check(instance) check.service_check.assert_has_calls(calls) diff --git a/kube_proxy/tests/test_kube_proxy_1_21_1.py b/kube_proxy/tests/test_kube_proxy_1_21_1.py index 925bf54ee78c1..51eaddfbac5b6 100644 --- a/kube_proxy/tests/test_kube_proxy_1_21_1.py +++ b/kube_proxy/tests/test_kube_proxy_1_21_1.py @@ -22,7 +22,7 @@ def mock_iptables(): with open(f_name, 'r') as f: text_data = f.read() mock_iptables = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': 
"text/plain"} ), @@ -37,7 +37,7 @@ def mock_userspace(): with open(f_name, 'r') as f: text_data = f.read() mock_userspace = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/kube_scheduler/tests/test_kube_scheduler_1_13.py b/kube_scheduler/tests/test_kube_scheduler_1_13.py index ac60b73837462..e7ae1df38bf1a 100644 --- a/kube_scheduler/tests/test_kube_scheduler_1_13.py +++ b/kube_scheduler/tests/test_kube_scheduler_1_13.py @@ -23,7 +23,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/kube_scheduler/tests/test_kube_scheduler_1_14.py b/kube_scheduler/tests/test_kube_scheduler_1_14.py index 4f6e4bfb75eaf..03b6ae525e03b 100644 --- a/kube_scheduler/tests/test_kube_scheduler_1_14.py +++ b/kube_scheduler/tests/test_kube_scheduler_1_14.py @@ -25,7 +25,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -118,13 +118,13 @@ def test_service_check_ok(monkeypatch): ] # successful health check - with mock.patch("requests.get", return_value=mock.MagicMock(status_code=200)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(status_code=200)): check._perform_service_check(instance) # failed health check raise_error = mock.Mock() raise_error.side_effect = requests.HTTPError('health check failed') - with mock.patch("requests.get", return_value=mock.MagicMock(raise_for_status=raise_error)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(raise_for_status=raise_error)): check._perform_service_check(instance) check.service_check.assert_has_calls(calls) diff --git a/kube_scheduler/tests/test_kube_scheduler_1_23.py b/kube_scheduler/tests/test_kube_scheduler_1_23.py index 6ea03cbe2e17d..4e97823166a79 100644 --- a/kube_scheduler/tests/test_kube_scheduler_1_23.py +++ b/kube_scheduler/tests/test_kube_scheduler_1_23.py @@ -23,7 +23,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/kube_scheduler/tests/test_kube_scheduler_1_26.py b/kube_scheduler/tests/test_kube_scheduler_1_26.py index 853f7a7072bf7..0655cc65558cd 100644 --- a/kube_scheduler/tests/test_kube_scheduler_1_26.py +++ b/kube_scheduler/tests/test_kube_scheduler_1_26.py @@ -22,7 +22,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/kube_scheduler/tests/test_kube_scheduler_1_29.py b/kube_scheduler/tests/test_kube_scheduler_1_29.py index 01ec0d749e391..16fe535ce6816 100644 --- a/kube_scheduler/tests/test_kube_scheduler_1_29.py +++ b/kube_scheduler/tests/test_kube_scheduler_1_29.py @@ -22,7 +22,7 @@ def mock_metrics(): with open(f_name, 'r') as 
f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/kube_scheduler/tests/test_sli_metrics.py b/kube_scheduler/tests/test_sli_metrics.py index 706050f730f43..dd98007f4e9d8 100644 --- a/kube_scheduler/tests/test_sli_metrics.py +++ b/kube_scheduler/tests/test_sli_metrics.py @@ -22,7 +22,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -97,7 +97,7 @@ def mock_request(): def test_detect_sli_endpoint(mock_metrics, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: mock_request.return_value.status_code = 200 c = KubeSchedulerCheck(CHECK_NAME, {}, [instance]) c.check(instance) @@ -105,7 +105,7 @@ def test_detect_sli_endpoint(mock_metrics, instance): def test_detect_sli_endpoint_404(mock_metrics, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: mock_request.return_value.status_code = 404 c = KubeSchedulerCheck(CHECK_NAME, {}, [instance]) c.check(instance) @@ -113,7 +113,7 @@ def test_detect_sli_endpoint_404(mock_metrics, instance): def test_detect_sli_endpoint_403(mock_metrics, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: mock_request.return_value.status_code = 403 c = KubeSchedulerCheck(CHECK_NAME, {}, [instance]) c.check(instance) diff --git a/kubelet/tests/test_kubelet.py b/kubelet/tests/test_kubelet.py index 4328df2671443..ea344afbcdc46 100644 --- a/kubelet/tests/test_kubelet.py +++ b/kubelet/tests/test_kubelet.py @@ -563,7 +563,7 @@ def test_kubelet_credentials_update(monkeypatch, aggregator): get = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: mock_from_file('kubelet_metrics_1_14.txt').splitlines() ) - with mock.patch('requests.get', return_value=get): + with mock.patch('requests.Session.get', return_value=get): check.check(instance) assert check._http_handlers[instance['kubelet_metrics_endpoint']].options['verify'] is True @@ -573,7 +573,7 @@ def test_kubelet_credentials_update(monkeypatch, aggregator): status_code=200, iter_lines=lambda **kwargs: mock_from_file('kubelet_metrics_1_14.txt').splitlines() ) kubelet_conn_info = {'url': 'http://127.0.0.1:10255', 'ca_cert': False} - with mock.patch('requests.get', return_value=get), mock.patch( + with mock.patch('requests.Session.get', return_value=get), mock.patch( 'datadog_checks.kubelet.kubelet.get_connection_info', return_value=kubelet_conn_info ): check.check(instance) @@ -1049,7 +1049,7 @@ def test_perform_kubelet_check(monkeypatch): instance_tags = ["one:1"] get = MockedResponse() - with mock.patch("requests.get", side_effect=get): + with mock.patch('requests.Session.get', side_effect=get): check._perform_kubelet_check(instance_tags) get.assert_has_calls( @@ -1093,7 +1093,7 @@ def test_report_node_metrics_kubernetes1_18(monkeypatch, aggregator): get = mock.MagicMock(status_code=404, iter_lines=lambda **kwargs: "Error Code") get.raise_for_status.side_effect = requests.HTTPError('error') - with mock.patch('requests.get', return_value=get): + with mock.patch('requests.Session.get', return_value=get): 
check._report_node_metrics(['foo:bar']) aggregator.assert_all_metrics_covered() diff --git a/kubevirt_api/tests/test_unit.py b/kubevirt_api/tests/test_unit.py index 2324214c30057..1a4b040e86d05 100644 --- a/kubevirt_api/tests/test_unit.py +++ b/kubevirt_api/tests/test_unit.py @@ -18,7 +18,7 @@ def test_check_collects_all_metrics(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) @@ -95,7 +95,7 @@ def test_check_collects_all_metrics(dd_run_check, aggregator, instance, mocker): def test_check_sends_zero_count_for_vms(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) @@ -116,7 +116,7 @@ def test_check_sends_zero_count_for_vms(dd_run_check, aggregator, instance, mock def test_check_sends_zero_count_for_vmis(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) @@ -154,7 +154,7 @@ def test_emits_zero_can_connect_when_service_is_down(dd_run_check, aggregator, i def test_emits_one_can_connect_when_service_is_up(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) check._setup_kube_client = lambda: None @@ -171,7 +171,7 @@ def test_emits_one_can_connect_when_service_is_up(dd_run_check, aggregator, inst def test_raise_exception_when_metrics_endpoint_is_bad(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [BAD_METRICS_HOSTNAME_INSTANCE]) check._setup_kube_client = lambda: None @@ -190,7 +190,7 @@ def test_raise_exception_when_metrics_endpoint_is_bad(dd_run_check, aggregator, def test_raise_exception_cannot_connect_to_kubernetes_api(dd_run_check, aggregator, instance, mocker, caplog): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) with pytest.raises( @@ -202,7 +202,7 @@ def test_raise_exception_cannot_connect_to_kubernetes_api(dd_run_check, aggregat def test_log_warning_healthz_endpoint_not_provided(dd_run_check, aggregator, instance, mocker, caplog): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) new_instance = deepcopy(instance) new_instance.pop("kubevirt_api_healthz_endpoint") diff --git a/kubevirt_controller/tests/test_unit.py b/kubevirt_controller/tests/test_unit.py index 50cd1fd3fe596..0a6bcb988a40c 100644 --- a/kubevirt_controller/tests/test_unit.py +++ b/kubevirt_controller/tests/test_unit.py @@ -16,7 +16,7 @@ def test_emits_can_connect_one_when_service_is_up(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtControllerCheck("kubevirt_controller", {}, [instance]) dd_run_check(check) aggregator.assert_metric( 
@@ -36,7 +36,7 @@ def test_emits_can_connect_zero_when_service_is_down(dd_run_check, aggregator, i def test_check_collects_all_metrics(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtControllerCheck("kubevirt_controller", {}, [instance]) diff --git a/kubevirt_handler/tests/test_unit.py b/kubevirt_handler/tests/test_unit.py index feb3121468339..01e8e42c4a9ba 100644 --- a/kubevirt_handler/tests/test_unit.py +++ b/kubevirt_handler/tests/test_unit.py @@ -17,7 +17,7 @@ def test_check_collects_metrics(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) dd_run_check(check) @@ -122,7 +122,7 @@ def test_check_collects_metrics(dd_run_check, aggregator, instance, mocker): def test_logs_warning_when_healthz_endpoint_is_missing(dd_run_check, aggregator, instance, mocker, caplog): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) del instance["kubevirt_handler_healthz_endpoint"] check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) dd_run_check(check) @@ -134,7 +134,7 @@ def test_logs_warning_when_healthz_endpoint_is_missing(dd_run_check, aggregator, def test_emits_can_connect_one_when_service_is_up(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) dd_run_check(check) aggregator.assert_metric( @@ -157,7 +157,7 @@ def test_emits_can_connect_zero_when_service_is_down(dd_run_check, aggregator, i def test_version_metadata(instance, dd_run_check, datadog_agent, aggregator, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) check.check_id = "test:123" dd_run_check(check) diff --git a/mapreduce/tests/conftest.py b/mapreduce/tests/conftest.py index 087f9adc5b68b..dea2ab20b4951 100644 --- a/mapreduce/tests/conftest.py +++ b/mapreduce/tests/conftest.py @@ -52,13 +52,13 @@ def instance(): @pytest.fixture def mocked_request(): - with patch("requests.get", new=requests_get_mock): + with patch("requests.Session.get", new=requests_get_mock): yield @pytest.fixture def mocked_auth_request(): - with patch("requests.get", new=requests_auth_mock): + with patch("requests.Session.get", new=requests_auth_mock): yield @@ -66,7 +66,7 @@ def get_custom_hosts(): return [(host, '127.0.0.1') for host in MOCKED_E2E_HOSTS] -def requests_get_mock(*args, **kwargs): +def requests_get_mock(session, *args, **kwargs): url = args[0] # The parameter that creates the query params (kwargs) is an unordered dict, # so the query params can be in any order @@ -94,7 +94,7 @@ def requests_get_mock(*args, **kwargs): raise Exception("There is no mock request for {}".format(url)) -def requests_auth_mock(*args, **kwargs): +def requests_auth_mock(session, *args, **kwargs): # Make sure we're passing in authentication assert 'auth' in kwargs, "Error, missing authentication" @@ -102,4 +102,4 @@ def requests_auth_mock(*args, **kwargs): assert kwargs['auth'] == (TEST_USERNAME, TEST_PASSWORD), "Incorrect username or password" # Return 
mocked request.get(...) - return requests_get_mock(*args, **kwargs) + return requests_get_mock(session, *args, **kwargs) diff --git a/marathon/tests/test_unit.py b/marathon/tests/test_unit.py index 4c39a7afe3be3..ef104e6264e90 100644 --- a/marathon/tests/test_unit.py +++ b/marathon/tests/test_unit.py @@ -107,7 +107,8 @@ def test_config(test_case, init_config, extra_config, expected_http_kwargs): instance.update(extra_config) check = Marathon('marathon', init_config, instances=[instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) check.check(instance) diff --git a/mesos_master/tests/test_check.py b/mesos_master/tests/test_check.py index 721acce7a3613..4c026dcabc9c2 100644 --- a/mesos_master/tests/test_check.py +++ b/mesos_master/tests/test_check.py @@ -152,7 +152,8 @@ def test_can_connect_service_check( ): check = MesosMaster('mesos_master', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = request_mock_side_effects try: diff --git a/mesos_slave/tests/test_unit.py b/mesos_slave/tests/test_unit.py index 0cc7b699b6d96..098e3683313cf 100644 --- a/mesos_slave/tests/test_unit.py +++ b/mesos_slave/tests/test_unit.py @@ -180,7 +180,8 @@ def test_can_connect_service_check_state( instance, aggregator, test_case_name, request_mock_effects, expected_tags, expect_exception, expected_status ): check = MesosSlave('mesos_slave', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = request_mock_effects try: check._process_state_info('http://hello.com', instance['tasks'], 5050, instance['tags']) @@ -198,7 +199,8 @@ def test_can_connect_service_with_instance_cluster_name(instance, aggregator): expected_tags = ['url:http://hello.com/state'] + cluster_name_tag + additional_tags expected_status = AgentCheck.OK check = MesosSlave('mesos_slave', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = [mock.MagicMock(status_code=200, content='{}')] try: check._process_state_info('http://hello.com', instance['tasks'], 5050, instance['tags']) @@ -216,7 +218,8 @@ def test_can_connect_service_check_stats( instance, aggregator, test_case_name, request_mock_effects, expected_tags, expect_exception, expected_status ): check = MesosSlave('mesos_slave', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = request_mock_effects try: check._process_stats_info('http://hello.com', instance['tags']) diff --git a/nginx/tests/test_unit.py b/nginx/tests/test_unit.py index 27e29dfcad00d..6209431250387 100644 --- a/nginx/tests/test_unit.py +++ b/nginx/tests/test_unit.py @@ -71,7 +71,8 @@ def test_config(check, instance, test_case, extra_config, expected_http_kwargs): c = check(instance) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with 
mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200, content=b'{}') c.check(instance) @@ -93,7 +94,8 @@ def test_config(check, instance, test_case, extra_config, expected_http_kwargs): def test_no_version(check, instance, caplog): c = check(instance) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200, content=b'{}', headers={'server': 'nginx'}) c.check(instance) diff --git a/nginx/tests/test_vts.py b/nginx/tests/test_vts.py index 696f8498c397a..8a06abe7b5b5e 100644 --- a/nginx/tests/test_vts.py +++ b/nginx/tests/test_vts.py @@ -49,7 +49,7 @@ def test_vts(check, instance_vts, aggregator): @pytest.mark.unit def test_vts_unit(dd_run_check, aggregator, mocked_instance_vts, check, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) c = check(mocked_instance_vts) dd_run_check(c) diff --git a/nginx_ingress_controller/tests/test_nginx_ingress_controller.py b/nginx_ingress_controller/tests/test_nginx_ingress_controller.py index 8066617d61bc5..5ac427b270c62 100644 --- a/nginx_ingress_controller/tests/test_nginx_ingress_controller.py +++ b/nginx_ingress_controller/tests/test_nginx_ingress_controller.py @@ -21,7 +21,7 @@ def mock_data(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/nvidia_nim/tests/test_unit.py b/nvidia_nim/tests/test_unit.py index 980580e11fab3..deaeffedf3ae8 100644 --- a/nvidia_nim/tests/test_unit.py +++ b/nvidia_nim/tests/test_unit.py @@ -18,7 +18,7 @@ def test_check_nvidia_nim(dd_run_check, aggregator, datadog_agent, instance): check = NvidiaNIMCheck("nvidia_nim", {}, [instance]) check.check_id = "test:123" with mock.patch( - 'requests.get', + 'requests.Session.get', side_effect=[ MockResponse(file_path=get_fixture_path("nim_metrics.txt")), MockResponse(file_path=get_fixture_path("nim_version.json")), diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index fee5a733376d9..7ec3897c84a13 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -214,5 +214,5 @@ def get(url, *args, **kwargs): return CopyingMock(elapsed=mock_elapsed, json=mock_json, status_code=mock_status_code) mock_get = CopyingMock(side_effect=get) - monkeypatch.setattr('requests.get', mock_get) + monkeypatch.setattr('requests.Session.get', mock_get) return mock_get diff --git a/openstack_controller/tests/conftest.py b/openstack_controller/tests/conftest.py index 2583431328406..e6b933bb66871 100644 --- a/openstack_controller/tests/conftest.py +++ b/openstack_controller/tests/conftest.py @@ -1009,7 +1009,7 @@ def get(url, *args, **kwargs): return mock.MagicMock(elapsed=mock_elapsed, json=mock_json, status_code=mock_status_code) mock_get = mock.MagicMock(side_effect=get) - monkeypatch.setattr('requests.get', mock_get) + monkeypatch.setattr('requests.Session.get', mock_get) return mock_get @@ -1041,5 +1041,5 @@ def post(url, *args, **kwargs): return MockResponse(json_data=json_data, status_code=200, headers=headers) mock_post = mock.MagicMock(side_effect=post) - monkeypatch.setattr('requests.post', mock_post) + 
monkeypatch.setattr('requests.Session.post', mock_post) return mock_post diff --git a/openstack_controller/tests/legacy/test_openstack.py b/openstack_controller/tests/legacy/test_openstack.py index 50461fe84ef42..fe1559b5c8b0f 100644 --- a/openstack_controller/tests/legacy/test_openstack.py +++ b/openstack_controller/tests/legacy/test_openstack.py @@ -42,7 +42,7 @@ def test_api_error_log_no_password(check, instance, caplog): with caplog.at_level(logging.DEBUG): with pytest.raises(KeystoneUnreachable): - with mock.patch('datadog_checks.base.utils.http.requests.post') as req: + with mock.patch('datadog_checks.base.utils.http.requests.Session.post') as req: req.side_effect = HTTPError(mock.Mock(status=404), 'not found') check._api = SimpleApi(check.log, instance.get("keystone_server_url"), check.http) identity = Authenticator._get_user_identity(instance.get("user")) diff --git a/openstack_controller/tests/legacy/test_simple_api.py b/openstack_controller/tests/legacy/test_simple_api.py index e8346cc4f44de..3e5c30486982b 100644 --- a/openstack_controller/tests/legacy/test_simple_api.py +++ b/openstack_controller/tests/legacy/test_simple_api.py @@ -775,7 +775,7 @@ def test__make_request_failure(requests_wrapper): api = ApiFactory.create(log, instance, requests_wrapper) response_mock = mock.MagicMock() - with mock.patch("datadog_checks.openstack_controller.legacy.api.requests.get", return_value=response_mock): + with mock.patch("datadog_checks.openstack_controller.legacy.api.requests.Session.get", return_value=response_mock): response_mock.raise_for_status.side_effect = requests.exceptions.HTTPError response_mock.status_code = 401 with pytest.raises(AuthenticationNeeded): diff --git a/php_fpm/tests/test_unit.py b/php_fpm/tests/test_unit.py index ec4212b8e585b..b0f46d613c077 100644 --- a/php_fpm/tests/test_unit.py +++ b/php_fpm/tests/test_unit.py @@ -38,7 +38,8 @@ def test_should_not_retry(check, instance): backoff only works when response code is 503, otherwise the error should bubble up """ - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = FooException("Generic http error here") with pytest.raises(FooException): check._process_status(instance['status_url'], [], None, False) @@ -48,7 +49,8 @@ def test_should_bail_out(check, instance): """ backoff should give up after 3 attempts """ - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): attrs = {'raise_for_status.side_effect': FooException()} r.get.side_effect = [ mock.MagicMock(status_code=503, **attrs), @@ -65,7 +67,8 @@ def test_backoff_success(check, instance, aggregator, payload): Success after 2 failed attempts """ instance['ping_url'] = None - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): attrs = {'json.return_value': payload} r.get.side_effect = [ mock.MagicMock(status_code=503), @@ -103,7 +106,8 @@ def test_config(test_case, extra_config, expected_http_kwargs, dd_run_check): instance.update(extra_config) check = PHPFPMCheck('php_fpm', {}, instances=[instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value 
= mock.MagicMock(status_code=200) dd_run_check(check) diff --git a/powerdns_recursor/tests/test_metadata.py b/powerdns_recursor/tests/test_metadata.py index a7f8511818c68..36ff99eebd981 100644 --- a/powerdns_recursor/tests/test_metadata.py +++ b/powerdns_recursor/tests/test_metadata.py @@ -24,19 +24,19 @@ def test_metadata_unit(datadog_agent): config_obj, tags = check._get_config(instance) - with mock.patch('requests.get', side_effect=requests.exceptions.Timeout()): + with mock.patch('requests.Session.get', side_effect=requests.exceptions.Timeout()): check._collect_metadata(config_obj) datadog_agent.assert_metadata_count(0) check.log.debug.assert_called_with('Error collecting PowerDNS Recursor version: %s', '') datadog_agent.reset() - with mock.patch('requests.get', return_value=MockResponse()): + with mock.patch('requests.Session.get', return_value=MockResponse()): check._collect_metadata(config_obj) datadog_agent.assert_metadata_count(0) check.log.debug.assert_called_with("Couldn't find the PowerDNS Recursor Server version header") datadog_agent.reset() - with mock.patch('requests.get', return_value=MockResponse(headers={'Server': 'wrong_stuff'})): + with mock.patch('requests.Session.get', return_value=MockResponse(headers={'Server': 'wrong_stuff'})): check._collect_metadata(config_obj) datadog_agent.assert_metadata_count(0) check.log.debug.assert_called_with( diff --git a/prometheus/tests/conftest.py b/prometheus/tests/conftest.py index dfffd2ff931a3..853e8f5e99efd 100644 --- a/prometheus/tests/conftest.py +++ b/prometheus/tests/conftest.py @@ -63,7 +63,7 @@ def poll_mock(): g3.labels(matched_label="foobar", node="host2", timestamp="456").set(float('inf')) poll_mock_patch = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: ensure_unicode(generate_latest(registry)).split("\n"), diff --git a/proxysql/tests/test_unit.py b/proxysql/tests/test_unit.py index fc292e22b0908..0c842bdb1f1a3 100644 --- a/proxysql/tests/test_unit.py +++ b/proxysql/tests/test_unit.py @@ -101,7 +101,7 @@ def test_config_ok(dd_run_check, instance_basic): @pytest.mark.unit def test_tls_config_ok(dd_run_check, instance_basic_tls): - with mock.patch('datadog_checks.base.utils.tls.ssl') as ssl: + with mock.patch('datadog_checks.base.utils.http.ssl') as ssl: with mock.patch('datadog_checks.proxysql.proxysql.pymysql') as pymysql: check = get_check(instance_basic_tls) diff --git a/rabbitmq/tests/test_openmetrics.py b/rabbitmq/tests/test_openmetrics.py index c414af567610c..6fcfea374c5c4 100644 --- a/rabbitmq/tests/test_openmetrics.py +++ b/rabbitmq/tests/test_openmetrics.py @@ -245,7 +245,7 @@ def test_aggregated_and_unaggregated_endpoints(endpoint, metrics, aggregator, dd 'include_aggregated_endpoint': True, } ) - mocker.patch('requests.get', wraps=mock_http_responses) + mocker.patch('requests.Session.get', wraps=mock_http_responses) dd_run_check(check) meta_metrics = {'rabbitmq.build_info', 'rabbitmq.identity_info'} @@ -289,7 +289,7 @@ def test_detailed_only_metrics(aggregator, dd_run_check, mocker): 'include_aggregated_endpoint': True, } ) - mocker.patch('requests.get', wraps=mock_http_responses) + mocker.patch('requests.Session.get', wraps=mock_http_responses) dd_run_check(check) detailed_only_metrics = ( diff --git a/rabbitmq/tests/test_unit.py b/rabbitmq/tests/test_unit.py index f26ae5b38ca95..9075a7526dbec 100644 --- a/rabbitmq/tests/test_unit.py +++ b/rabbitmq/tests/test_unit.py @@ -25,7 +25,8 @@ def test__get_data(check): - with 
mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = [requests.exceptions.HTTPError, ValueError] with pytest.raises(RabbitMQException) as e: check._get_data('') @@ -138,7 +139,8 @@ def test_config(check, test_case, extra_config, expected_http_kwargs): config.update(extra_config) check = RabbitMQ('rabbitmq', {}, instances=[config]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) check.check(config) diff --git a/ray/tests/test_unit.py b/ray/tests/test_unit.py index c11b8ad455759..f2e7725b5287f 100644 --- a/ray/tests/test_unit.py +++ b/ray/tests/test_unit.py @@ -17,7 +17,7 @@ ], ) def test_check(dd_run_check, aggregator, mocker, check, instance, metrics): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check(check(instance)) for expected_metric in metrics: @@ -33,7 +33,7 @@ def test_check(dd_run_check, aggregator, mocker, check, instance, metrics): def test_invalid_url(dd_run_check, aggregator, check, mocked_head_instance, mocker): mocked_head_instance["openmetrics_endpoint"] = "http://unknowwn" - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) with pytest.raises(Exception): dd_run_check(check(mocked_head_instance)) diff --git a/scylla/tests/conftest.py b/scylla/tests/conftest.py index b4b653494b3ae..5b28e13609d02 100644 --- a/scylla/tests/conftest.py +++ b/scylla/tests/conftest.py @@ -45,7 +45,7 @@ def mock_db_data(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), diff --git a/sonatype_nexus/tests/test_check.py b/sonatype_nexus/tests/test_check.py index 7f6a8064ac9dd..eda941ff13609 100644 --- a/sonatype_nexus/tests/test_check.py +++ b/sonatype_nexus/tests/test_check.py @@ -15,7 +15,7 @@ @pytest.fixture def mock_http_response(mocker): yield lambda *args, **kwargs: mocker.patch( - kwargs.pop("method", "requests.get"), return_value=MockResponse(*args, **kwargs) + kwargs.pop("method", "requests.Session.get"), return_value=MockResponse(*args, **kwargs) ) diff --git a/spark/datadog_checks/spark/spark.py b/spark/datadog_checks/spark/spark.py index d23d5815863a7..c0c74979f8f0d 100644 --- a/spark/datadog_checks/spark/spark.py +++ b/spark/datadog_checks/spark/spark.py @@ -51,6 +51,7 @@ class SparkCheck(AgentCheck): HTTP_CONFIG_REMAPPER = { 'ssl_verify': {'name': 'tls_verify'}, 'ssl_cert': {'name': 'tls_cert'}, + 'ssl_ca_cert': {'name': 'tls_ca_cert'}, 'ssl_key': {'name': 'tls_private_key'}, } diff --git a/spark/tests/test_spark.py b/spark/tests/test_spark.py index 364cd15a3949b..4cdab4c4201f0 100644 --- a/spark/tests/test_spark.py +++ b/spark/tests/test_spark.py @@ -178,7 +178,7 @@ def get_default_mock(url): raise KeyError(f"{url} does not match any response fixtures.") -def yarn_requests_get_mock(url, *args, **kwargs): +def yarn_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == YARN_APP_URL: @@ -188,7 +188,7 @@ def yarn_requests_get_mock(url, *args, **kwargs): return get_default_mock(url) -def yarn_requests_auth_mock(*args, **kwargs): +def 
yarn_requests_auth_mock(session, url, *args, **kwargs): # Make sure we're passing in authentication assert 'auth' in kwargs, "Error, missing authentication" @@ -196,10 +196,10 @@ def yarn_requests_auth_mock(*args, **kwargs): assert kwargs['auth'] == (TEST_USERNAME, TEST_PASSWORD), "Incorrect username or password" # Return mocked request.get(...) - return yarn_requests_get_mock(*args, **kwargs) + return yarn_requests_get_mock(session, url, *args, **kwargs) -def mesos_requests_get_mock(url, *args, **kwargs): +def mesos_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == MESOS_APP_URL: @@ -220,7 +220,7 @@ def mesos_requests_get_mock(url, *args, **kwargs): return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) -def driver_requests_get_mock(url, *args, **kwargs): +def driver_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == DRIVER_APP_URL: @@ -241,7 +241,7 @@ def driver_requests_get_mock(url, *args, **kwargs): return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) -def standalone_requests_get_mock(url, *args, **kwargs): +def standalone_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == STANDALONE_APP_URL: @@ -264,7 +264,7 @@ def standalone_requests_get_mock(url, *args, **kwargs): return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) -def standalone_requests_pre20_get_mock(url, *args, **kwargs): +def standalone_requests_pre20_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == STANDALONE_APP_URL: @@ -299,7 +299,7 @@ def standalone_requests_pre20_get_mock(url, *args, **kwargs): return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) -def proxy_with_warning_page_mock(url, *args, **kwargs): +def proxy_with_warning_page_mock(session, url, *args, **kwargs): cookies = kwargs.get('cookies') or {} proxy_cookie = cookies.get('proxy_cookie') url_parts = list(urlparse(url)) @@ -307,7 +307,7 @@ def proxy_with_warning_page_mock(url, *args, **kwargs): if proxy_cookie and query.get('proxyapproved') == 'true': del query['proxyapproved'] url_parts[4] = urlencode(query) - return standalone_requests_get_mock(urlunparse(url_parts), *args[1:], **kwargs) + return standalone_requests_get_mock(session, urlunparse(url_parts), *args[1:], **kwargs) else: # Display the html warning page with the redirect link query['proxyapproved'] = 'true' @@ -404,7 +404,8 @@ def proxy_with_warning_page_mock(url, *args, **kwargs): 'spark_url': SSL_SERVER_URL, 'cluster_name': CLUSTER_NAME, 'spark_cluster_mode': 'spark_standalone_mode', - 'ssl_verify': os.path.join(CERTIFICATE_DIR, 'cert.cert'), + 'ssl_verify': True, + 'ssl_ca_cert': os.path.join(CERTIFICATE_DIR, 'cert.cert'), 'executor_level_metrics': True, } @@ -688,7 +689,7 @@ def _assert(aggregator, values_and_tags): @pytest.mark.unit def test_yarn(aggregator, dd_run_check): - with mock.patch('requests.get', yarn_requests_get_mock): + with mock.patch('requests.Session.get', yarn_requests_get_mock): c = SparkCheck('spark', {}, [YARN_CONFIG]) dd_run_check(c) @@ -743,7 +744,7 @@ def test_yarn(aggregator, dd_run_check): @pytest.mark.unit def test_auth_yarn(aggregator, dd_run_check): - with mock.patch('requests.get', yarn_requests_auth_mock): + with mock.patch('requests.Session.get', yarn_requests_auth_mock): c = SparkCheck('spark', {}, [YARN_AUTH_CONFIG]) dd_run_check(c) @@ -763,7 +764,7 @@ def test_auth_yarn(aggregator, dd_run_check): @pytest.mark.unit def test_mesos(aggregator, dd_run_check): - with 
mock.patch('requests.get', mesos_requests_get_mock): + with mock.patch('requests.Session.get', mesos_requests_get_mock): c = SparkCheck('spark', {}, [MESOS_CONFIG]) dd_run_check(c) _assert( @@ -822,7 +823,7 @@ def test_mesos(aggregator, dd_run_check): @pytest.mark.unit def test_mesos_filter(aggregator, dd_run_check): - with mock.patch('requests.get', mesos_requests_get_mock): + with mock.patch('requests.Session.get', mesos_requests_get_mock): c = SparkCheck('spark', {}, [MESOS_FILTERED_CONFIG]) dd_run_check(c) @@ -835,7 +836,7 @@ def test_mesos_filter(aggregator, dd_run_check): @pytest.mark.unit def test_driver_unit(aggregator, dd_run_check): - with mock.patch('requests.get', driver_requests_get_mock): + with mock.patch('requests.Session.get', driver_requests_get_mock): c = SparkCheck('spark', {}, [DRIVER_CONFIG]) dd_run_check(c) @@ -895,7 +896,7 @@ def test_driver_unit(aggregator, dd_run_check): @pytest.mark.unit def test_standalone_unit(aggregator, dd_run_check): - with mock.patch('requests.get', standalone_requests_get_mock): + with mock.patch('requests.Session.get', standalone_requests_get_mock): c = SparkCheck('spark', {}, [STANDALONE_CONFIG]) dd_run_check(c) @@ -947,7 +948,7 @@ def test_standalone_unit(aggregator, dd_run_check): @pytest.mark.unit def test_standalone_stage_disabled_unit(aggregator, dd_run_check): - with mock.patch('requests.get', standalone_requests_get_mock): + with mock.patch('requests.Session.get', standalone_requests_get_mock): c = SparkCheck('spark', {}, [STANDALONE_CONFIG_STAGE_DISABLED]) dd_run_check(c) @@ -996,7 +997,7 @@ def test_standalone_stage_disabled_unit(aggregator, dd_run_check): @pytest.mark.unit def test_standalone_unit_with_proxy_warning_page(aggregator, dd_run_check): c = SparkCheck('spark', {}, [STANDALONE_CONFIG]) - with mock.patch('requests.get', proxy_with_warning_page_mock): + with mock.patch('requests.Session.get', proxy_with_warning_page_mock): dd_run_check(c) _assert( @@ -1048,7 +1049,7 @@ def test_standalone_unit_with_proxy_warning_page(aggregator, dd_run_check): @pytest.mark.unit def test_standalone_pre20(aggregator, dd_run_check): - with mock.patch('requests.get', standalone_requests_pre20_get_mock): + with mock.patch('requests.Session.get', standalone_requests_pre20_get_mock): c = SparkCheck('spark', {}, [STANDALONE_CONFIG_PRE_20]) dd_run_check(c) @@ -1101,7 +1102,7 @@ def test_standalone_pre20(aggregator, dd_run_check): @pytest.mark.unit def test_metadata(aggregator, datadog_agent, dd_run_check): - with mock.patch('requests.get', standalone_requests_pre20_get_mock): + with mock.patch('requests.Session.get', standalone_requests_pre20_get_mock): c = SparkCheck(CHECK_NAME, {}, [STANDALONE_CONFIG_PRE_20]) c.check_id = "test:123" dd_run_check(c) @@ -1127,7 +1128,7 @@ def test_disable_legacy_cluster_tags(aggregator, dd_run_check): instance = MESOS_FILTERED_CONFIG instance['disable_legacy_cluster_tag'] = True - with mock.patch('requests.get', mesos_requests_get_mock): + with mock.patch('requests.Session.get', mesos_requests_get_mock): c = SparkCheck('spark', {}, [instance]) dd_run_check(c) @@ -1156,7 +1157,7 @@ def test_enable_query_name_tag_for_structured_streaming( ): instance['enable_query_name_tag'] = True - with mock.patch('requests.get', requests_get_mock): + with mock.patch('requests.Session.get', requests_get_mock): c = SparkCheck('spark', {}, [instance]) dd_run_check(c) @@ -1240,7 +1241,7 @@ def test_do_not_crash_on_single_app_failure(): ids=["driver", "yarn", "mesos", "standalone", "standalone_pre_20"], ) def 
test_no_running_apps(aggregator, dd_run_check, instance, service_check, caplog): - with mock.patch('requests.get', return_value=MockResponse("{}")): + with mock.patch('requests.Session.get', return_value=MockResponse("{}")): with caplog.at_level(logging.WARNING): dd_run_check(SparkCheck('spark', {}, [instance])) @@ -1288,7 +1289,7 @@ def test_yarn_no_json_for_app_properties( In these cases we skip only the specific missing apps and metrics while collecting all others. """ - def get_without_json(url, *args, **kwargs): + def get_without_json(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == property_url: return mock_response @@ -1322,9 +1323,9 @@ def get_without_json(url, *args, **kwargs): ] ) else: - return yarn_requests_get_mock(url, *args, **kwargs) + return yarn_requests_get_mock(session, url, *args, **kwargs) - mocker.patch('requests.get', get_without_json) + mocker.patch('requests.Session.get', get_without_json) dd_run_check(SparkCheck('spark', {}, [YARN_CONFIG])) for m in missing_metrics: aggregator.assert_metric_has_tag(m, 'app_name:PySparkShell', count=0) diff --git a/squid/tests/test_squid.py b/squid/tests/test_squid.py index 9732198af7bf9..55a40cbe7c835 100644 --- a/squid/tests/test_squid.py +++ b/squid/tests/test_squid.py @@ -79,7 +79,7 @@ def test_check_ok(aggregator, check, instance): ) @pytest.mark.usefixtures("dd_environment") def test_version_metadata(check, instance, datadog_agent, raw_version, version_metadata, count): - with mock.patch('datadog_checks.base.utils.http.requests.get') as g: + with mock.patch('datadog_checks.base.utils.http.requests.Session.get') as g: g.return_value.headers = {'Server': raw_version} check.check_id = 'test:123' diff --git a/squid/tests/test_unit.py b/squid/tests/test_unit.py index b6bffb0936202..332e8c9123e2d 100644 --- a/squid/tests/test_unit.py +++ b/squid/tests/test_unit.py @@ -62,7 +62,7 @@ def test_get_counters(check): due to a missing = character. 
See https://github.com/DataDog/integrations-core/pull/1643 """ - with mock.patch('datadog_checks.squid.squid.requests.get') as g: + with mock.patch('datadog_checks.squid.squid.requests.Session.get') as g: with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): g.return_value = mock.MagicMock(text="client_http.requests=42\n\n") check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) @@ -73,7 +73,7 @@ def test_get_counters(check): def test_host_without_protocol(check, instance): - with mock.patch('datadog_checks.squid.squid.requests.get') as g: + with mock.patch('datadog_checks.squid.squid.requests.Session.get') as g: with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): g.return_value = mock.MagicMock(text="client_http.requests=42\n\n") check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) @@ -83,7 +83,7 @@ def test_host_without_protocol(check, instance): def test_host_https(check, instance): instance['host'] = 'https://localhost' - with mock.patch('datadog_checks.squid.squid.requests.get') as g: + with mock.patch('datadog_checks.squid.squid.requests.Session.get') as g: with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): g.return_value = mock.MagicMock(text="client_http.requests=42\n\n") check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) @@ -103,7 +103,7 @@ def test_legacy_username_password(instance, auth_config): instance.update(auth_config) check = SquidCheck(common.CHECK_NAME, {}, {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests.get') as g: + with mock.patch('datadog_checks.base.utils.http.requests.Session.get') as g: with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): check.get_counters('host', 'port', []) diff --git a/strimzi/tests/test_unit.py b/strimzi/tests/test_unit.py index 71da37cde60ab..0c9475cefab1a 100644 --- a/strimzi/tests/test_unit.py +++ b/strimzi/tests/test_unit.py @@ -56,7 +56,7 @@ def test_check_unique_operator( tag, mocker, ): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check(check(instance)) for expected_metric in metrics: @@ -76,7 +76,7 @@ def test_check_unique_operator( def test_check_all_operators(dd_run_check, aggregator, check, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check( check( { diff --git a/teamcity/tests/test_teamcity.py b/teamcity/tests/test_teamcity.py index caa815916ae1c..65c23a4a12dab 100644 --- a/teamcity/tests/test_teamcity.py +++ b/teamcity/tests/test_teamcity.py @@ -66,7 +66,7 @@ def test_config(dd_run_check, extra_config, expected_http_kwargs): instance.update(extra_config) check = TeamCityRest('teamcity', {}, [instance]) - with patch('datadog_checks.base.utils.http.requests.get') as r: + with patch('datadog_checks.base.utils.http.requests.Session.get') as r: dd_run_check(check) http_wargs = { diff --git a/tekton/tests/test_unit.py b/tekton/tests/test_unit.py index 83bd63353b527..0010a2eae466d 100644 --- a/tekton/tests/test_unit.py +++ b/tekton/tests/test_unit.py @@ -17,7 +17,7 @@ ], ) def test_check(dd_run_check, aggregator, mocker, instance, metrics, request, namespace): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check(check(request.getfixturevalue(instance))) for expected_metric in metrics: @@ -33,7 +33,7 @@ def test_check(dd_run_check, aggregator, mocker, 
instance, metrics, request, nam def test_invalid_url(dd_run_check, aggregator, pipelines_instance, mocker): pipelines_instance["pipelines_controller_endpoint"] = "http://unknowwn" - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) with pytest.raises(Exception): dd_run_check(check(pipelines_instance)) diff --git a/temporal/tests/conftest.py b/temporal/tests/conftest.py index c31e432914f2e..2ed04281aa8c5 100644 --- a/temporal/tests/conftest.py +++ b/temporal/tests/conftest.py @@ -87,7 +87,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/tls/datadog_checks/tls/tls_remote.py b/tls/datadog_checks/tls/tls_remote.py index 516b3a98b4cf1..b15db76eca847 100644 --- a/tls/datadog_checks/tls/tls_remote.py +++ b/tls/datadog_checks/tls/tls_remote.py @@ -179,8 +179,16 @@ def fetch_intermediate_certs(self): with sock: try: - context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) + context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_CLIENT) context.verify_mode = ssl.CERT_NONE + ciphers = self.agent_check.tls_context_wrapper.config.get('tls_ciphers') + if ciphers: + if 'ALL' in ciphers: + updated_ciphers = "ALL" + else: + updated_ciphers = ":".join(ciphers) + + context.set_ciphers(updated_ciphers) with context.wrap_socket(sock, server_hostname=self.agent_check._server_hostname) as secure_sock: der_cert = secure_sock.getpeercert(binary_form=True) diff --git a/torchserve/tests/inference/test_unit.py b/torchserve/tests/inference/test_unit.py index 9cae5f90b3c8e..a965bd8b23fce 100644 --- a/torchserve/tests/inference/test_unit.py +++ b/torchserve/tests/inference/test_unit.py @@ -13,7 +13,7 @@ def test_check(dd_run_check, aggregator, check, mocked_inference_instance, mocker): - mocker.patch('requests.get', wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) dd_run_check(check(mocked_inference_instance)) aggregator.assert_service_check( diff --git a/torchserve/tests/management/test_model_discovery.py b/torchserve/tests/management/test_model_discovery.py index de5f97c04fc23..ea17df181c441 100644 --- a/torchserve/tests/management/test_model_discovery.py +++ b/torchserve/tests/management/test_model_discovery.py @@ -109,7 +109,8 @@ def test_get_models(check, mocked_management_instance, expected_models, fixture_ mock_resp.raise_for_status.side_effect = HTTPError() if status_code != 200 else None responses.append(mock_resp) - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): discovery = ModelDiscovery(check(mocked_management_instance), include=[".*"]) req.get.side_effect = responses assert [('.*', model['modelName'], model, None) for model in expected_models] == list(discovery.get_items()) diff --git a/torchserve/tests/management/test_unit.py b/torchserve/tests/management/test_unit.py index 7a2f7705ba133..15cd3b694faff 100644 --- a/torchserve/tests/management/test_unit.py +++ b/torchserve/tests/management/test_unit.py @@ -14,7 +14,7 @@ def test_check(dd_run_check, aggregator, check, mocked_management_instance, mocker): - mocker.patch('requests.get', wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) 
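For the `tls_remote.py` hunk above, a hedged standalone sketch (not the integration's actual code; `tls_ciphers` stands in for the value the check reads from its configuration) of building a client-side context that skips verification while honouring a cipher override. One caveat worth noting when reading the new `PROTOCOL_TLS_CLIENT` line: that protocol enables hostname checking by default, and CPython refuses to set `verify_mode` to `CERT_NONE` while `check_hostname` is still enabled, so the sketch clears it first.

```python
# Illustrative sketch; `tls_ciphers` stands in for the check's configured cipher list.
import ssl


def build_unverified_client_context(tls_ciphers=None):
    # PROTOCOL_TLS_CLIENT defaults to check_hostname=True and verify_mode=CERT_REQUIRED.
    context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_CLIENT)
    # check_hostname must be cleared before CERT_NONE, otherwise CPython raises
    # "Cannot set verify_mode to CERT_NONE when check_hostname is enabled".
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE

    if tls_ciphers:
        # Same normalization as the hunk above: collapse to OpenSSL's catch-all
        # keyword when 'ALL' is requested, otherwise join into a cipher string.
        cipher_string = 'ALL' if 'ALL' in tls_ciphers else ':'.join(tls_ciphers)
        context.set_ciphers(cipher_string)

    return context


context = build_unverified_client_context(['ALL'])
```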
dd_run_check(check(mocked_management_instance)) for metric in METRICS: @@ -43,7 +43,7 @@ def custom_mock_http_responses(url, **_params): return mock_http_responses()(url) - mocker.patch('requests.get', wraps=custom_mock_http_responses) + mocker.patch('requests.Session.get', wraps=custom_mock_http_responses) dd_run_check(check(mocked_management_instance)) # We should not get anything for these models @@ -157,7 +157,7 @@ def custom_mock_http_responses(url, **_params): def test_check_with_discovery( dd_run_check, aggregator, check, mocked_management_instance, mocker, include, exclude, limit, expected_models ): - mocker.patch('requests.get', wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) mocked_management_instance["limit"] = limit mocked_management_instance["exclude"] = exclude mocked_management_instance["include"] = include @@ -308,13 +308,13 @@ def test_check_with_events( datadog_agent.reset() check_instance = check(mocked_management_instance) - mocker.patch('requests.get', wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) dd_run_check(check_instance) dd_run_check(check_instance) assert len(aggregator.events) == 0 - mocker.patch('requests.get', wraps=mock_http_responses(new_response)) + mocker.patch('requests.Session.get', wraps=mock_http_responses(new_response)) dd_run_check(check_instance) for expected_event, actual_event in zip(expected_events, aggregator.events, strict=True): @@ -338,13 +338,13 @@ def test_check_disable_events(dd_run_check, datadog_agent, aggregator, check, mo mocked_management_instance["submit_events"] = False check_instance = check(mocked_management_instance) - mocker.patch('requests.get', wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) dd_run_check(check_instance) dd_run_check(check_instance) assert len(aggregator.events) == 0 - mocker.patch('requests.get', wraps=mock_http_responses("management/events/models_all_dropped.json")) + mocker.patch('requests.Session.get', wraps=mock_http_responses("management/events/models_all_dropped.json")) dd_run_check(check_instance) assert len(aggregator.events) == 0 diff --git a/torchserve/tests/openmetrics/test_unit.py b/torchserve/tests/openmetrics/test_unit.py index 5964f02bcae23..78693ed4ab288 100644 --- a/torchserve/tests/openmetrics/test_unit.py +++ b/torchserve/tests/openmetrics/test_unit.py @@ -13,7 +13,7 @@ def test_check(dd_run_check, aggregator, check, mocked_openmetrics_instance, mocker): - mocker.patch('requests.get', wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) dd_run_check(check(mocked_openmetrics_instance)) for name, expected_values in METRICS.items(): diff --git a/twistlock/tests/test_twistlock.py b/twistlock/tests/test_twistlock.py index b832e4f3f3fdb..aaba9d57bf62d 100644 --- a/twistlock/tests/test_twistlock.py +++ b/twistlock/tests/test_twistlock.py @@ -35,7 +35,7 @@ def mock_get_factory(fixture_group): - def mock_get(url, *args, **kwargs): + def mock_get(session, url, *args, **kwargs): split_url = url.split('/') path = split_url[-1] return MockResponse(file_path=os.path.join(HERE, 'fixtures', fixture_group, '{}.json'.format(path))) @@ -48,7 +48,7 @@ def test_check(aggregator, instance, fixture_group): check = TwistlockCheck('twistlock', {}, [instance]) - with mock.patch('requests.get', side_effect=mock_get_factory(fixture_group), autospec=True): + with mock.patch('requests.Session.get', 
side_effect=mock_get_factory(fixture_group), autospec=True): check.check(instance) check.check(instance) @@ -70,11 +70,12 @@ def test_config_project(aggregator, instance, fixture_group): instance['project'] = project check = TwistlockCheck('twistlock', {}, [instance]) - with mock.patch('requests.get', side_effect=mock_get_factory(fixture_group), autospec=True) as r: + with mock.patch('requests.Session.get', side_effect=mock_get_factory(fixture_group), autospec=True) as r: check.check(instance) r.assert_called_with( - mock.ANY, + mock.ANY, # The session object is passed as the first argument + mock.ANY, # The URL is passed as the second argument params=qparams, auth=mock.ANY, cert=mock.ANY, @@ -101,7 +102,7 @@ def mock_get(url, *args, **kwargs): return MockResponse(file_path=os.path.join(HERE, 'fixtures', fixture_group, '{}.json'.format(path))) return MockResponse(file_path=os.path.join(HERE, 'fixtures', 'empty_images.json')) - with mock.patch('requests.get', side_effect=mock_get): + with mock.patch('requests.Session.get', side_effect=mock_get): check.check(instance) check.check(instance) @@ -111,7 +112,9 @@ def test_err_response(aggregator, instance): check = TwistlockCheck('twistlock', {}, [instance]) with pytest.raises(Exception, match='^Error in response'): - with mock.patch('requests.get', return_value=MockResponse('{"err": "invalid credentials"}'), autospec=True): + with mock.patch( + 'requests.Session.get', return_value=MockResponse('{"err": "invalid credentials"}'), autospec=True + ): check.check(instance) diff --git a/vault/tests/test_vault.py b/vault/tests/test_vault.py index 81c37dceff840..12691eedfc1d7 100644 --- a/vault/tests/test_vault.py +++ b/vault/tests/test_vault.py @@ -76,7 +76,7 @@ def test_service_check_connect_ok_all_tags(self, aggregator, dd_run_check, globa # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} @@ -97,7 +97,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_service_check(Vault.SERVICE_CHECK_CONNECT, status=Vault.OK, tags=global_tags, count=1) @@ -135,7 +135,7 @@ def test_service_check_500_fail(self, aggregator, dd_run_check, global_tags): instance.update(INSTANCES['main']) c = Vault(Vault.CHECK_NAME, {}, [instance]) - with mock.patch('requests.get', return_value=MockResponse(status_code=500)): + with mock.patch('requests.Session.get', return_value=MockResponse(status_code=500)): with pytest.raises( Exception, match=r'^The Vault endpoint `{}.+?` returned 500$'.format(re.escape(instance['api_url'])) ): @@ -169,7 +169,7 @@ def test_service_check_unsealed_ok_all_tags(self, aggregator, dd_run_check, glob # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} @@ -190,7 +190,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, 
**kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) expected_tags = [ @@ -212,7 +212,7 @@ def test_service_check_unsealed_fail(self, aggregator, dd_run_check, use_openmet # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': return MockResponse( json_data={ @@ -230,7 +230,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_service_check(Vault.SERVICE_CHECK_UNSEALED, status=Vault.CRITICAL, count=1) @@ -253,7 +253,7 @@ def test_service_check_initialized_ok_all_tags(self, aggregator, dd_run_check, g # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} @@ -274,7 +274,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) expected_tags = [ @@ -296,7 +296,7 @@ def test_service_check_initialized_fail(self, aggregator, dd_run_check, use_open # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': return MockResponse( json_data={ @@ -314,7 +314,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_service_check(Vault.SERVICE_CHECK_INITIALIZED, status=Vault.CRITICAL, count=1) @@ -327,7 +327,7 @@ def test_disable_legacy_cluster_tag(self, aggregator, dd_run_check, global_tags) # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} @@ -348,7 +348,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) expected_tags = [ @@ -368,7 +368,7 @@ def test_replication_dr_mode(self, aggregator, dd_run_check, use_openmetrics): # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': return MockResponse( json_data={ @@ 
-387,7 +387,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) c.log.debug.assert_called_with( "Detected vault in replication DR secondary mode, skipping Prometheus metric collection." @@ -405,7 +405,7 @@ def test_replication_dr_mode_collect_secondary(self, aggregator, dd_run_check, u # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': return MockResponse( json_data={ @@ -426,7 +426,7 @@ def mock_requests_get(url, *args, **kwargs): metric_collection = 'OpenMetrics' if use_openmetrics else 'Prometheus' - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) c.log.debug.assert_called_with( "Detected vault in replication DR secondary mode but also detected that " @@ -445,7 +445,7 @@ def test_replication_dr_mode_changed(self, aggregator, dd_run_check, use_openmet # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': if getattr(mock_requests_get, 'first_health_call', True): mock_requests_get.first_health_call = False @@ -470,7 +470,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) assert not c._skip_dr_metric_collection aggregator.assert_service_check(Vault.SERVICE_CHECK_CONNECT, status=Vault.OK, count=1) @@ -504,7 +504,7 @@ def test_event_leader_change(self, aggregator, dd_run_check, cluster, use_openme # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={ @@ -516,7 +516,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) assert len(aggregator.events) > 0 @@ -545,7 +545,7 @@ def test_leader_change_not_self(self, aggregator, dd_run_check, use_openmetrics) # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={ @@ -557,7 +557,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) assert len(aggregator.events) == 0 @@ -571,7 +571,7 @@ def test_is_leader_metric_true(self, aggregator, dd_run_check, use_openmetrics): # Keep a reference for use during mock requests_get = 
requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={ @@ -583,7 +583,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_metric('vault.is_leader', 1) @@ -597,7 +597,7 @@ def test_is_leader_metric_false(self, aggregator, dd_run_check, use_openmetrics) # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={ @@ -609,7 +609,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_metric('vault.is_leader', 0) @@ -624,7 +624,7 @@ def test_sys_health_non_standard_status_codes(self, aggregator, dd_run_check, st # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': return MockResponse( json_data={ @@ -643,7 +643,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_metric('vault.is_leader', 1) @@ -658,12 +658,12 @@ def test_sys_leader_non_standard_status_codes(self, aggregator, dd_run_check, us # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse(json_data={'errors': ["Vault is sealed"]}, status_code=503) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_metric('vault.is_leader', count=0) @@ -807,9 +807,9 @@ def test_x_vault_request_header_is_set(monkeypatch, instance, dd_run_check, use_ c = Vault(Vault.CHECK_NAME, {}, [instance]) - requests_get = requests.get + requests_get = requests.Session.get mock_get = mock.Mock(side_effect=requests_get) - monkeypatch.setattr(requests, 'get', mock_get) + monkeypatch.setattr(requests.Session, 'get', mock_get) dd_run_check(c) diff --git a/vertica/tests/test_unit.py b/vertica/tests/test_unit.py index 09ecd3baeff81..34068a2f6cd0f 100644 --- a/vertica/tests/test_unit.py +++ b/vertica/tests/test_unit.py @@ -19,7 +19,7 @@ def test_ssl_config_ok(aggregator, tls_instance): with mock.patch('datadog_checks.vertica.vertica.vertica') as vertica: - with mock.patch('datadog_checks.base.utils.tls.ssl') as ssl: + with mock.patch('datadog_checks.base.utils.http.ssl') as ssl: vertica.connect.return_value = mock.MagicMock() tls_context = mock.MagicMock() ssl.SSLContext.return_value = tls_context @@ 
-37,7 +37,7 @@ def test_ssl_config_ok(aggregator, tls_instance): def test_ssl_legacy_config_ok(aggregator, tls_instance_legacy): with mock.patch('datadog_checks.vertica.vertica.vertica') as vertica: - with mock.patch('datadog_checks.base.utils.tls.ssl') as ssl: + with mock.patch('datadog_checks.base.utils.http.ssl') as ssl: vertica.connect.return_value = mock.MagicMock() tls_context = mock.MagicMock() ssl.SSLContext.return_value = tls_context diff --git a/vllm/tests/test_unit.py b/vllm/tests/test_unit.py index a7b4cc136cb6b..f6aa9f5cca85f 100644 --- a/vllm/tests/test_unit.py +++ b/vllm/tests/test_unit.py @@ -23,7 +23,7 @@ def test_check_vllm(dd_run_check, aggregator, datadog_agent, instance): MockResponse(file_path=get_fixture_path("vllm_version.json")), ] - with mock.patch('requests.get', side_effect=mock_responses): + with mock.patch('requests.Session.get', side_effect=mock_responses): dd_run_check(check) for metric in METRICS_MOCK: @@ -47,7 +47,7 @@ def test_check_vllm_w_ray_prefix(dd_run_check, aggregator, datadog_agent, ray_in MockResponse(file_path=get_fixture_path("vllm_version.json")), ] - with mock.patch('requests.get', side_effect=mock_responses): + with mock.patch('requests.Session.get', side_effect=mock_responses): dd_run_check(check) for metric in METRICS_MOCK: @@ -81,7 +81,7 @@ def test_emits_critical_openemtrics_service_check_when_service_is_down( """ mock_http_response(status_code=404) check = vLLMCheck("vllm", {}, [instance]) - with pytest.raises(Exception, match="requests.exceptions.HTTPError"): + with pytest.raises(Exception, match='requests.exceptions.HTTPError'): dd_run_check(check) aggregator.assert_all_metrics_covered() diff --git a/voltdb/tests/test_integration.py b/voltdb/tests/test_integration.py index 3aff8ab036f10..985094e4e4a59 100644 --- a/voltdb/tests/test_integration.py +++ b/voltdb/tests/test_integration.py @@ -74,7 +74,7 @@ def test_http_error(self, aggregator, instance): # type: (AggregatorStub, Instance) -> None check = VoltDBCheck('voltdb', {}, [instance]) - with mock.patch('requests.get', side_effect=requests.RequestException('Something failed')): + with mock.patch('requests.Session.get', side_effect=requests.RequestException('Something failed')): error = check.run() assert 'Something failed' in error @@ -88,7 +88,7 @@ def test_http_response_error(self, aggregator, instance): resp = requests.Response() resp.status_code = 503 - with mock.patch('requests.get', return_value=resp): + with mock.patch('requests.Session.get', return_value=resp): error = check.run() assert '503 Server Error' in error diff --git a/vsphere/tests/conftest.py b/vsphere/tests/conftest.py index 91966233e1218..cf7338574e981 100644 --- a/vsphere/tests/conftest.py +++ b/vsphere/tests/conftest.py @@ -247,8 +247,8 @@ def mock_http_api(monkeypatch): http = MockHttpV7() else: http = MockHttpV6() - monkeypatch.setattr('requests.get', MagicMock(side_effect=http.get)) - monkeypatch.setattr('requests.post', MagicMock(side_effect=http.post)) + monkeypatch.setattr('requests.Session.get', MagicMock(side_effect=http.get)) + monkeypatch.setattr('requests.Session.post', MagicMock(side_effect=http.post)) yield http diff --git a/vsphere/tests/test_check.py b/vsphere/tests/test_check.py index 090e266e0ed3f..093703ca9bc57 100644 --- a/vsphere/tests/test_check.py +++ b/vsphere/tests/test_check.py @@ -283,7 +283,7 @@ def test_continue_if_tag_collection_fail(aggregator, dd_run_check, realtime_inst check = VSphereCheck('vsphere', {}, [realtime_instance]) check.log = MagicMock() - with 
mock.patch('requests.post', side_effect=Exception, autospec=True): + with mock.patch('requests.Session.post', side_effect=Exception, autospec=True): dd_run_check(check) aggregator.assert_metric('vsphere.cpu.usage.avg', tags=['vcenter_server:FAKE'], hostname='10.0.0.104') diff --git a/yarn/tests/conftest.py b/yarn/tests/conftest.py index ac6efe6588ee8..16e5a8c5938c0 100644 --- a/yarn/tests/conftest.py +++ b/yarn/tests/conftest.py @@ -58,47 +58,50 @@ def instance(): @pytest.fixture def mocked_request(): - with patch("requests.get", new=requests_get_mock): + with patch("requests.Session.get", new=requests_get_mock): yield @pytest.fixture def mocked_auth_request(): - def requests_auth_get(*args, **kwargs): + def requests_auth_get(session, *args, **kwargs): # Make sure we're passing in authentication - assert 'auth' in kwargs, 'Missing "auth" argument in requests.get(...) call' + assert 'auth' in kwargs, 'Missing "auth" argument in requests.Session.get(...) call' # Make sure we've got the correct username and password - assert kwargs['auth'] == (TEST_USERNAME, TEST_PASSWORD), "Incorrect username or password in requests.get" + assert kwargs['auth'] == ( + TEST_USERNAME, + TEST_PASSWORD, + ), "Incorrect username or password in requests.Session.get" # Return mocked request.get(...) - return requests_get_mock(*args, **kwargs) + return requests_get_mock(session, *args, **kwargs) - with patch("requests.get", new=requests_auth_get): + with patch("requests.Session.get", new=requests_auth_get): yield @pytest.fixture def mocked_bad_cert_request(): """ - Mock request.get to an endpoint with a badly configured ssl cert + Mock request.Session.get to an endpoint with a badly configured ssl cert """ - def requests_bad_cert_get(*args, **kwargs): + def requests_bad_cert_get(session, *args, **kwargs): # Make sure we're passing in the 'verify' argument - assert 'verify' in kwargs, 'Missing "verify" argument in requests.get(...) call' + assert 'verify' in kwargs, 'Missing "verify" argument in requests.Session.get(...) call' if kwargs['verify']: raise SSLError("certificate verification failed for {}".format(args[0])) # Return the actual response - return requests_get_mock(*args, **kwargs) + return requests_get_mock(session, *args, **kwargs) - with patch("requests.get", new=requests_bad_cert_get): + with patch("requests.Session.get", new=requests_bad_cert_get): yield -def requests_get_mock(*args, **kwargs): +def requests_get_mock(session, *args, **kwargs): if args[0] == YARN_CLUSTER_METRICS_URL: return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'cluster_metrics')) elif args[0] == YARN_APPS_URL: