diff --git a/sqlserver/changelog.d/17775.fixed b/sqlserver/changelog.d/17775.fixed
new file mode 100644
index 0000000000000..27b4acdb78f6b
--- /dev/null
+++ b/sqlserver/changelog.d/17775.fixed
@@ -0,0 +1 @@
+Support database names containing hyphens by bracket-quoting database identifiers in SQL statements
diff --git a/sqlserver/datadog_checks/sqlserver/const.py b/sqlserver/datadog_checks/sqlserver/const.py
index 1d939b20598d6..8b4a70ff1e6d0 100644
--- a/sqlserver/datadog_checks/sqlserver/const.py
+++ b/sqlserver/datadog_checks/sqlserver/const.py
@@ -56,7 +56,7 @@
 ]
 
 DATABASE_SERVICE_CHECK_QUERY = """SELECT 1;"""
-SWITCH_DB_STATEMENT = """USE {};"""
+SWITCH_DB_STATEMENT = """USE [{}];"""
 
 VALID_METRIC_TYPES = ('gauge', 'rate', 'histogram')
diff --git a/sqlserver/tests/common.py b/sqlserver/tests/common.py
index c109388ab0a86..9d3df10b5d1e7 100644
--- a/sqlserver/tests/common.py
+++ b/sqlserver/tests/common.py
@@ -283,7 +283,7 @@ def assert_metrics(
         tags = check_tags + ['database:{}'.format(dbname)]
         for mname in DB_PERF_COUNT_METRICS_NAMES_SINGLE:
             aggregator.assert_metric(mname, hostname=hostname, tags=tags)
-        if dbname == 'datadog_test' and is_always_on():
+        if dbname == 'datadog_test-1' and is_always_on():
             for mname in DB_PERF_COUNT_METRICS_NAMES_AO:
                 aggregator.assert_metric(mname, hostname=hostname, tags=tags)
     else:
diff --git a/sqlserver/tests/compose-ha/sql/aoag_primary.sql b/sqlserver/tests/compose-ha/sql/aoag_primary.sql
index 1bec999b936e9..9ed17b021f6b6 100644
--- a/sqlserver/tests/compose-ha/sql/aoag_primary.sql
+++ b/sqlserver/tests/compose-ha/sql/aoag_primary.sql
@@ -20,7 +20,7 @@ CREATE USER fred FOR LOGIN fred;
 GRANT CONNECT ANY DATABASE to fred;
 GO
 
-CREATE DATABASE datadog_test;
+CREATE DATABASE [datadog_test-1];
 GO
 
 -- create an offline database to have an unavailable database to test with
@@ -38,10 +38,10 @@ GO
 
 -- Create test database for integration tests
 -- only bob and fred have read/write access to this database
-USE datadog_test;
-CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
-INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
-CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
+USE [datadog_test-1];
+CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
+INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
+CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
 CREATE USER bob FOR LOGIN bob;
 CREATE USER fred FOR LOGIN fred;
 GO
@@ -184,10 +184,10 @@ USE [master]
 GO
 
 --change recovery model and take full backup for db to meet requirements of AOAG
-ALTER DATABASE datadog_test SET RECOVERY FULL ;
+ALTER DATABASE [datadog_test-1] SET RECOVERY FULL ;
 GO
 
-BACKUP DATABASE datadog_test TO DISK = N'/var/opt/mssql/backup/datadog_test.bak' WITH NOFORMAT, NOINIT, NAME = N'datadog_test-Full Database Backup', SKIP, NOREWIND, NOUNLOAD, STATS = 10
+BACKUP DATABASE [datadog_test-1] TO DISK = N'/var/opt/mssql/backup/datadog_test-1.bak' WITH NOFORMAT, NOINIT, NAME = N'datadog_test-1-Full Database Backup', SKIP, NOREWIND, NOUNLOAD, STATS = 10
 GO
 
@@ -271,5 +271,5 @@ USE [master]
 GO
 
 WAITFOR DELAY '00:00:10'
-ALTER AVAILABILITY GROUP [AG1] ADD DATABASE [datadog_test]
+ALTER AVAILABILITY GROUP [AG1] ADD DATABASE [datadog_test-1]
 GO
diff --git a/sqlserver/tests/compose-high-cardinality-windows/setup.sql b/sqlserver/tests/compose-high-cardinality-windows/setup.sql
index de9057514602b..fd4c0efa3d4cf 100644
--- a/sqlserver/tests/compose-high-cardinality-windows/setup.sql
+++ b/sqlserver/tests/compose-high-cardinality-windows/setup.sql
@@ -33,16 +33,16 @@ GO
 
 -- Create test database for integration tests
 -- only bob and fred have read/write access to this database
 -- the datadog user has only connect access but can't read any objects
-CREATE DATABASE datadog_test;
+CREATE DATABASE [datadog_test-1];
 GO
-USE datadog_test;
+USE [datadog_test-1];
 GO
 
 -- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
 -- correctly support unicode throughout the integration.
-CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
-INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
-CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
+CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
+INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
+CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
 CREATE USER bob FOR LOGIN bob;
 CREATE USER fred FOR LOGIN fred;
 -- we don't need to recreate the datadog user in this new DB because it already exists in the model
@@ -196,7 +196,7 @@ GO
 ------------------------------ HIGH CARDINALITY ENV SETUP ------------------------------
 
 -- Table variables
-DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE datadog_test.dbo.'
+DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE [datadog_test-1].dbo.'
 DECLARE @table_columns VARCHAR(500) = ' (id INT NOT NULL IDENTITY, col1_txt TEXT, col2_txt TEXT, col3_txt TEXT, col4_txt TEXT, col5_txt TEXT, col6_txt TEXT, col7_txt TEXT, col8_txt TEXT, col9_txt TEXT, col10_txt TEXT, col11_float FLOAT, col12_float FLOAT, col13_float FLOAT, col14_int INT, col15_int INT, col16_int INT, col17_date DATE, PRIMARY KEY(id));';
 
 -- Create a main table which contains high cardinality data for testing.
@@ -227,7 +227,7 @@ BEGIN
     DECLARE @col16_int INT = FLOOR(RAND() * 2500);
     DECLARE @col17_date DATE = CAST(CAST(RAND()*100000 AS INT) AS DATETIME);
 
-    INSERT INTO datadog_test.dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);
+    INSERT INTO [datadog_test-1].dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);
 
     SET @row_count = @row_count + 1
 END;
diff --git a/sqlserver/tests/compose-high-cardinality/setup.sql b/sqlserver/tests/compose-high-cardinality/setup.sql
index 1e5c4c8dc687f..f8c2cc506500b 100644
--- a/sqlserver/tests/compose-high-cardinality/setup.sql
+++ b/sqlserver/tests/compose-high-cardinality/setup.sql
@@ -125,9 +125,9 @@ GO
 
 -- Create test database for integration tests.
 -- Only bob and fred have read/write access to this database.
-CREATE DATABASE datadog_test;
+CREATE DATABASE [datadog_test-1];
 GO
-USE datadog_test;
+USE [datadog_test-1];
 GO
 
 CREATE USER bob FOR LOGIN bob;
@@ -174,12 +174,12 @@ GO
 
 -- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
 -- correctly support unicode throughout the integration.
-CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
-INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
-CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
+CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
+INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
+CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
 
 -- Table variables
-DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE datadog_test.dbo.'
+DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE [datadog_test-1].dbo.'
 DECLARE @table_columns VARCHAR(500) = ' (id INT NOT NULL IDENTITY, col1_txt TEXT, col2_txt TEXT, col3_txt TEXT, col4_txt TEXT, col5_txt TEXT, col6_txt TEXT, col7_txt TEXT, col8_txt TEXT, col9_txt TEXT, col10_txt TEXT, col11_float FLOAT, col12_float FLOAT, col13_float FLOAT, col14_int INT, col15_int INT, col16_int INT, col17_date DATE, PRIMARY KEY(id));';
 
 -- Create a main table which contains high cardinality data for testing.
@@ -210,7 +210,7 @@ BEGIN
     DECLARE @col16_int INT = FLOOR(RAND() * 2500);
     DECLARE @col17_date DATE = CAST(CAST(RAND()*100000 AS INT) AS DATETIME);
 
-    INSERT INTO datadog_test.dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);
+    INSERT INTO [datadog_test-1].dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);
 
     SET @row_count = @row_count + 1
 END;
diff --git a/sqlserver/tests/compose-windows/setup.sql b/sqlserver/tests/compose-windows/setup.sql
index 3510590e29a3a..3df6386c8b4f2 100644
--- a/sqlserver/tests/compose-windows/setup.sql
+++ b/sqlserver/tests/compose-windows/setup.sql
@@ -33,16 +33,16 @@ GO
 
 -- Create test database for integration tests
 -- only bob and fred have read/write access to this database
 -- the datadog user has only connect access but can't read any objects
-CREATE DATABASE datadog_test;
+CREATE DATABASE [datadog_test-1];
 GO
-USE datadog_test;
+USE [datadog_test-1];
 GO
 
 -- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
 -- correctly support unicode throughout the integration.
-CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
-INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
-CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
+CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
+INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
+CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
 CREATE USER bob FOR LOGIN bob;
 CREATE USER fred FOR LOGIN fred;
 -- we don't need to recreate the datadog user in this new DB because it already exists in the model
diff --git a/sqlserver/tests/compose/setup.sql b/sqlserver/tests/compose/setup.sql
index 9643baee4b7d4..86b2934a43c79 100644
--- a/sqlserver/tests/compose/setup.sql
+++ b/sqlserver/tests/compose/setup.sql
@@ -17,16 +17,16 @@ GO
 
 -- Create test database for integration tests
 -- only bob and fred have read/write access to this database
-CREATE DATABASE datadog_test;
+CREATE DATABASE [datadog_test-1];
 GO
-USE datadog_test;
+USE [datadog_test-1];
 
 -- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
 -- correctly support unicode throughout the integration.
-CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
-INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
+CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
+INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
 CREATE USER bob FOR LOGIN bob;
 CREATE USER fred FOR LOGIN fred;
-CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
+CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
 GO
 
 EXEC sp_addrolemember 'db_datareader', 'bob'
diff --git a/sqlserver/tests/conftest.py b/sqlserver/tests/conftest.py
index 6dbb01aa097b8..dc28a32ecb29b 100644
--- a/sqlserver/tests/conftest.py
+++ b/sqlserver/tests/conftest.py
@@ -13,6 +13,7 @@
 from datadog_checks.dev import WaitFor, docker_run
 from datadog_checks.dev.conditions import CheckDockerLogs
 from datadog_checks.dev.docker import using_windows_containers
+from datadog_checks.sqlserver.const import SWITCH_DB_STATEMENT
 
 from .common import (
     DOCKER_SERVER,
@@ -198,7 +199,7 @@ def execute_with_retries(self, query, params=(), database=None, retries=3, sleep
         logging.info("executing query with retries. query='%s' params=%s attempt=%s", query, params, attempt)
         with self.conn.cursor() as cursor:
             if database:
-                cursor.execute("USE {}".format(database))
+                cursor.execute(SWITCH_DB_STATEMENT.format(database))
             cursor.execute(query, params)
             if return_result:
                 return cursor.fetchall()
diff --git a/sqlserver/tests/test_activity.py b/sqlserver/tests/test_activity.py
index 8d4bb498e8eb2..00ef5688383f3 100644
--- a/sqlserver/tests/test_activity.py
+++ b/sqlserver/tests/test_activity.py
@@ -64,14 +64,14 @@ def dbm_instance(instance_docker):
     "database,query,match_pattern,is_proc,expected_comments",
     [
         [
-            "datadog_test",
+            "datadog_test-1",
             "/*test=foo*/ SELECT * FROM ϑings",
             r"SELECT \* FROM ϑings",
             False,
             ["/*test=foo*/"],
         ],
        [
-            "datadog_test",
+            "datadog_test-1",
             "EXEC bobProc",
             r"SELECT \* FROM ϑings",
             True,
@@ -98,7 +98,7 @@ def test_collect_load_activity(
     def run_test_query(c, q):
         cur = c.cursor()
-        cur.execute("USE {}".format(database))
+        cur.execute("USE [{}]".format(database))
         # 0xFF can't be decoded to Unicode, which makes it good test data,
         # since Unicode is a default format
         cur.execute("SET CONTEXT_INFO 0xff")
@@ -166,7 +166,7 @@ def run_test_query(c, q):
     assert blocked_row['procedure_signature'], "missing procedure signature"
     assert blocked_row['procedure_name'], "missing procedure name"
     assert re.match(match_pattern, blocked_row['text'], re.IGNORECASE), "incorrect blocked query"
-    assert blocked_row['database_name'] == "datadog_test", "incorrect database_name"
+    assert blocked_row['database_name'] == "datadog_test-1", "incorrect database_name"
     assert blocked_row['context_info'] == "ff", "incorrect context_info"
     assert blocked_row['id'], "missing session id"
     assert blocked_row['now'], "missing current timestamp"
@@ -254,7 +254,7 @@ def test_activity_nested_blocking_transactions(
     def run_queries(conn, queries):
         cur = conn.cursor()
-        cur.execute("USE {}".format("datadog_test"))
+        cur.execute("USE [{}]".format("datadog_test-1"))
         cur.execute("BEGIN TRANSACTION")
         for q in queries:
             try:
@@ -306,7 +306,7 @@ def run_queries(conn, queries):
     # associated sys.dm_exec_requests.
assert root_blocker["user_name"] == "fred" assert root_blocker["session_status"] == "sleeping" - assert root_blocker["database_name"] == "datadog_test" + assert root_blocker["database_name"] == "datadog_test-1" assert root_blocker["last_request_start_time"] assert root_blocker["client_port"] assert root_blocker["client_address"] @@ -328,7 +328,7 @@ def run_queries(conn, queries): assert tx3["session_status"] == "running" # verify other essential fields are present assert tx2["user_name"] == "bob" - assert tx2["database_name"] == "datadog_test" + assert tx2["database_name"] == "datadog_test-1" assert tx2["last_request_start_time"] assert tx2["client_port"] assert tx2["client_address"] @@ -340,7 +340,7 @@ def run_queries(conn, queries): assert tx2["query_plan_hash"] assert tx3["user_name"] == "fred" - assert tx3["database_name"] == "datadog_test" + assert tx3["database_name"] == "datadog_test-1" assert tx3["last_request_start_time"] assert tx3["client_port"] assert tx3["client_address"] @@ -391,7 +391,7 @@ def test_activity_metadata( def _run_test_query(conn, q): cur = conn.cursor() - cur.execute("USE {}".format("datadog_test")) + cur.execute("USE [{}]".format("datadog_test-1")) cur.execute(q) def _obfuscate_sql(sql_query, options=None): @@ -646,7 +646,7 @@ def _obfuscate_sql(sql_query, options=None): def run_test_query(c, q): cur = c.cursor() - cur.execute("USE datadog_test") + cur.execute("USE [datadog_test-1]") cur.execute(q) run_test_query(fred_conn, "EXEC procedureWithLargeCommment") diff --git a/sqlserver/tests/test_database_metrics.py b/sqlserver/tests/test_database_metrics.py index d8d0e7d296e03..7ab389e50c594 100644 --- a/sqlserver/tests/test_database_metrics.py +++ b/sqlserver/tests/test_database_metrics.py @@ -24,7 +24,7 @@ SQLSERVER_MAJOR_VERSION, ) -AUTODISCOVERY_DBS = ['master', 'msdb', 'datadog_test'] +AUTODISCOVERY_DBS = ['master', 'msdb', 'datadog_test-1'] STATIC_SERVER_INFO = { STATIC_INFO_MAJOR_VERSION: SQLSERVER_MAJOR_VERSION, @@ -61,8 +61,8 @@ def test_sqlserver_index_usage_metrics( ('msdb', 'PK__backupse__21F79AAB9439648C', 'backupset', 0, 1, 0, 0), ], [ - ('datadog_test', 'idx_something', 'some_table', 10, 60, 12, 18), - ('datadog_test', 'idx_something_else', 'some_table', 20, 30, 40, 50), + ('datadog_test-1', 'idx_something', 'some_table', 10, 60, 12, 18), + ('datadog_test-1', 'idx_something_else', 'some_table', 20, 30, 40, 50), ], ] mocked_results_tempdb = [ @@ -153,7 +153,7 @@ def test_sqlserver_db_fragmentation_metrics( ('msdb', 'syscachedcredentials', 1, 'PK__syscache__F6D56B562DA81DC6', 0, 0.0, 0, 0.0), ('msdb', 'syscollector_blobs_internal', 1, 'PK_syscollector_blobs_internal_paremeter_name', 0, 0.0, 0, 0.0), ], - [('datadog_test', 'ϑings', 1, 'thingsindex', 1, 1.0, 1, 0.0)], + [('datadog_test-1', 'ϑings', 1, 'thingsindex', 1, 1.0, 1, 0.0)], ] mocked_results_tempdb = [ [('tempdb', '#TempExample__000000000008', 1, 'PK__#TempExa__3214EC278A26D67E', 1, 1.0, 1, 0.0)], @@ -250,7 +250,7 @@ def test_sqlserver_database_backup_metrics( ('model', 'model', 2), ('msdb', 'msdb', 0), ('tempdb', 'tempdb', 0), - ('datadog_test', 'datadog_test', 10), + ('datadog_test-1', 'datadog_test-1', 10), ] sqlserver_check = SQLServer(CHECK_NAME, init_config, [instance_docker_metrics]) diff --git a/sqlserver/tests/test_integration.py b/sqlserver/tests/test_integration.py index 3837707ea2a9f..adff12d1f82e6 100644 --- a/sqlserver/tests/test_integration.py +++ b/sqlserver/tests/test_integration.py @@ -86,7 +86,7 @@ def test_check_docker(aggregator, dd_run_check, init_config, instance_docker, 
da instance_docker['procedure_metrics'] = {'enabled': False} instance_docker['query_activity'] = {'enabled': False} instance_docker['collect_settings'] = {'enabled': False} - autodiscovery_dbs = ['master', 'msdb', 'datadog_test'] + autodiscovery_dbs = ['master', 'msdb', 'datadog_test-1'] if database_autodiscovery: instance_docker['autodiscovery_include'] = autodiscovery_dbs sqlserver_check = SQLServer(CHECK_NAME, init_config, [instance_docker]) @@ -336,13 +336,13 @@ def test_autodiscovery_perf_counters(aggregator, dd_run_check, instance_autodisc @pytest.mark.usefixtures('dd_environment') @always_on def test_autodiscovery_perf_counters_ao(aggregator, dd_run_check, instance_autodiscovery): - instance_autodiscovery['autodiscovery_include'] = ['datadog_test'] + instance_autodiscovery['autodiscovery_include'] = ['datadog_test-1'] check = SQLServer(CHECK_NAME, {}, [instance_autodiscovery]) dd_run_check(check) instance_tags = instance_autodiscovery.get('tags', []) expected_metrics = [m[0] for m in INSTANCE_METRICS_DATABASE] - tags = ['database:datadog_test'] + instance_tags + tags = ['database:datadog_test-1'] + instance_tags for metric in expected_metrics: print(aggregator.metrics(metric)) aggregator.assert_metric(metric, tags=tags, hostname=check.resolved_hostname) @@ -483,7 +483,7 @@ def test_index_fragmentation_metrics(aggregator, dd_run_check, instance_docker, assert 'master' in seen_databases if database_autodiscovery: - assert 'datadog_test' in seen_databases + assert 'datadog_test-1' in seen_databases @pytest.mark.integration @@ -595,7 +595,7 @@ def test_file_space_usage_metrics(aggregator, dd_run_check, instance_docker, dat ), ( True, - 'datadog_test', + 'datadog_test-1', 'forced_hostname', ENGINE_EDITION_SQL_DATABASE, 'forced_hostname', @@ -612,10 +612,10 @@ def test_file_space_usage_metrics(aggregator, dd_run_check, instance_docker, dat ), ( True, - 'datadog_test', + 'datadog_test-1', None, ENGINE_EDITION_SQL_DATABASE, - 'localhost/datadog_test', + 'localhost/datadog_test-1', { 'azure': { 'deployment_type': 'sql_database', @@ -623,7 +623,7 @@ def test_file_space_usage_metrics(aggregator, dd_run_check, instance_docker, dat }, }, [ - "dd.internal.resource:azure_sql_server_database:localhost/datadog_test", + "dd.internal.resource:azure_sql_server_database:localhost/datadog_test-1", "dd.internal.resource:azure_sql_server:my-instance", ], ), @@ -779,11 +779,11 @@ def test_database_instance_metadata(aggregator, dd_run_check, instance_docker, d def test_index_usage_statistics(aggregator, dd_run_check, instance_docker, database_autodiscovery): instance_docker['database_autodiscovery'] = database_autodiscovery if not database_autodiscovery: - instance_docker['database'] = "datadog_test" + instance_docker['database'] = "datadog_test-1" # currently the `thingsindex` index on the `name` column in the ϑings table # in order to generate user seeks, scans, updates and lookups we can run a variety # of queries against this table - conn_str = 'DRIVER={};Server={};Database=datadog_test;UID={};PWD={};TrustServerCertificate=yes;'.format( + conn_str = 'DRIVER={};Server={};Database=datadog_test-1;UID={};PWD={};TrustServerCertificate=yes;'.format( instance_docker['driver'], instance_docker['host'], "bob", "Password12!" 
     )
     conn = pyodbc.connect(conn_str, timeout=DEFAULT_TIMEOUT, autocommit=True)
@@ -804,7 +804,7 @@ def execute_query(query, params):
     check = SQLServer(CHECK_NAME, {}, [instance_docker])
     dd_run_check(check)
     expected_tags = instance_docker.get('tags', []) + [
-        'db:datadog_test',
+        'db:datadog_test-1',
         'table:ϑings',
         'index_name:thingsindex',
     ]
diff --git a/sqlserver/tests/test_metrics.py b/sqlserver/tests/test_metrics.py
index fb343be375ef9..9cd60b1aa92bf 100644
--- a/sqlserver/tests/test_metrics.py
+++ b/sqlserver/tests/test_metrics.py
@@ -43,7 +43,7 @@
 from .utils import always_on, is_always_on, not_windows_ci
 
 INCR_FRACTION_METRICS = {'sqlserver.latches.latch_wait_time'}
-AUTODISCOVERY_DBS = ['master', 'msdb', 'datadog_test']
+AUTODISCOVERY_DBS = ['master', 'msdb', 'datadog_test-1']
 
 
 @pytest.mark.integration
@@ -140,7 +140,7 @@ def test_check_instance_metrics_autodiscovery(
                 hostname=sqlserver_check.resolved_hostname,
                 count=1,
             )
-        if db == 'datadog_test' and is_always_on():
+        if db == 'datadog_test-1' and is_always_on():
             for metric_name, _, _, _ in INSTANCE_METRICS_DATABASE_AO:
                 aggregator.assert_metric(
                     metric_name,
@@ -210,34 +210,34 @@ def test_check_index_usage_metrics(
     instance_docker_metrics,
     bob_conn,
 ):
-    instance_docker_metrics['database'] = 'datadog_test'
+    instance_docker_metrics['database'] = 'datadog_test-1'
     instance_docker_metrics['include_index_usage_metrics'] = True
     instance_docker_metrics['ignore_missing_database'] = True
 
     # Cause an index seek
     bob_conn.execute_with_retries(
-        query="SELECT * FROM datadog_test.dbo.ϑings WHERE name = 'foo'",
+        query="SELECT * FROM [datadog_test-1].dbo.ϑings WHERE name = 'foo'",
         database=instance_docker_metrics['database'],
         retries=1,
         return_result=False,
     )
     # Cause an index scan
     bob_conn.execute_with_retries(
-        query="SELECT * FROM datadog_test.dbo.ϑings WHERE name LIKE '%foo%'",
+        query="SELECT * FROM [datadog_test-1].dbo.ϑings WHERE name LIKE '%foo%'",
         database=instance_docker_metrics['database'],
         retries=1,
         return_result=False,
     )
     # Cause an index lookup
     bob_conn.execute_with_retries(
-        query="SELECT id FROM datadog_test.dbo.ϑings WHERE name = 'foo'",
+        query="SELECT id FROM [datadog_test-1].dbo.ϑings WHERE name = 'foo'",
         database=instance_docker_metrics['database'],
         retries=1,
         return_result=False,
     )
     # Cause an index update
     bob_conn.execute_with_retries(
-        query="UPDATE datadog_test.dbo.ϑings SET id = 1 WHERE name = 'foo'",
+        query="UPDATE [datadog_test-1].dbo.ϑings SET id = 1 WHERE name = 'foo'",
         database=instance_docker_metrics['database'],
         retries=1,
         return_result=False,
@@ -388,7 +388,7 @@ def test_check_incr_fraction_metrics(
     instance_docker_metrics,
     bob_conn_raw,
 ):
-    instance_docker_metrics['database'] = 'datadog_test'
+    instance_docker_metrics['database'] = 'datadog_test-1'
     instance_docker_metrics['ignore_missing_database'] = True
 
     sqlserver_check = SQLServer(CHECK_NAME, init_config, [instance_docker_metrics])
diff --git a/sqlserver/tests/test_statements.py b/sqlserver/tests/test_statements.py
index 94ce0841cc372..9c821a246dd77 100644
--- a/sqlserver/tests/test_statements.py
+++ b/sqlserver/tests/test_statements.py
@@ -159,7 +159,7 @@ def test_get_statement_metrics_query_cached(aggregator, dbm_instance, caplog):
             True,
         ],
         [
-            "datadog_test",
+            "datadog_test-1",
             "SELECT * FROM ϑings",
             [r"SELECT \* FROM ϑings"],
             ((),),
@@ -169,7 +169,7 @@ def test_get_statement_metrics_query_cached(aggregator, dbm_instance, caplog):
             False,
         ],
         [
-            "datadog_test",
+            "datadog_test-1",
             "SELECT * FROM ϑings where id = ?",
             [r"SELECT \* FROM ϑings where id = @P1"],
             (
@@ -183,7 +183,7 @@ def test_get_statement_metrics_query_cached(aggregator, dbm_instance, caplog):
             False,
         ],
         [
-            "datadog_test",
+            "datadog_test-1",
             "EXEC bobProc",
             [r"SELECT \* FROM ϑings"],
             ((),),
@@ -193,7 +193,7 @@ def test_get_statement_metrics_query_cached(aggregator, dbm_instance, caplog):
             True,
         ],
         [
-            "datadog_test",
+            "datadog_test-1",
             "EXEC bobProc",
             [r"SELECT \* FROM ϑings"],
             ((),),
@@ -204,8 +204,8 @@ def test_get_statement_metrics_query_cached(aggregator, dbm_instance, caplog):
         ],
         [
             "master",
-            "SELECT * FROM datadog_test.dbo.ϑings where id = ?",
-            [r"SELECT \* FROM datadog_test.dbo.ϑings where id = @P1"],
+            "SELECT * FROM [datadog_test-1].dbo.ϑings where id = ?",
+            [r"SELECT \* FROM \[datadog_test-1\].dbo.ϑings where id = @P1"],
             (
                 (1,),
                 (2,),
@@ -217,7 +217,7 @@ def test_get_statement_metrics_query_cached(aggregator, dbm_instance, caplog):
             False,
         ],
         [
-            "datadog_test",
+            "datadog_test-1",
             "SELECT * FROM ϑings where id = ? and name = ?",
             [r"SELECT \* FROM ϑings where id = @P1 and name = @P2"],
             (
@@ -231,7 +231,7 @@ def test_get_statement_metrics_query_cached(aggregator, dbm_instance, caplog):
             False,
         ],
         [
-            "datadog_test",
+            "datadog_test-1",
             "SELECT * FROM ϑings where id = ?",
             [r"SELECT \* FROM ϑings where id = @P1"],
             (
@@ -245,7 +245,7 @@ def test_get_statement_metrics_query_cached(aggregator, dbm_instance, caplog):
             True,
         ],
         [
-            "datadog_test",
+            "datadog_test-1",
             "EXEC bobProcParams @P1 = ?, @P2 = ?",
             [
                 r"SELECT \* FROM ϑings WHERE id = @P1",
@@ -261,7 +261,7 @@ def test_get_statement_metrics_query_cached(aggregator, dbm_instance, caplog):
             True,
         ],
         [
-            "datadog_test",
+            "datadog_test-1",
             "EXEC bobProcParams @P1 = ?, @P2 = ?",
             [
                 r"SELECT \* FROM ϑings WHERE id = @P1",
@@ -635,7 +635,7 @@ def test_statement_cloud_metadata(
     query = 'SELECT * FROM ϑings'
 
     def _run_query():
-        bob_conn.execute_with_retries(query, (), database="datadog_test")
+        bob_conn.execute_with_retries(query, (), database="datadog_test-1")
 
     # the check must be run three times:
     # 1) set _last_stats_query_time (this needs to happen before the 1st test queries to ensure the query time
@@ -938,10 +938,10 @@ def _obfuscate_sql(sql_query, options=None):
     with mock.patch.object(datadog_agent, 'obfuscate_sql', passthrough=True) as mock_agent:
         mock_agent.side_effect = _obfuscate_sql
         dd_run_check(check)
-        bob_conn.execute_with_retries(query, (), database="datadog_test")
+        bob_conn.execute_with_retries(query, (), database="datadog_test-1")
         dd_run_check(check)
         aggregator.reset()
-        bob_conn.execute_with_retries(query, (), database="datadog_test")
+        bob_conn.execute_with_retries(query, (), database="datadog_test-1")
         dd_run_check(check)
 
     # dbm-metrics
@@ -971,10 +971,10 @@ def _obfuscate_sql(sql_query, options=None):
     with mock.patch.object(datadog_agent, 'obfuscate_sql', passthrough=True) as mock_agent:
         mock_agent.side_effect = _obfuscate_sql
         dd_run_check(check)
-        bob_conn.execute_with_retries(query, (), database="datadog_test")
+        bob_conn.execute_with_retries(query, (), database="datadog_test-1")
         dd_run_check(check)
         aggregator.reset()
-        bob_conn.execute_with_retries(query, (), database="datadog_test")
+        bob_conn.execute_with_retries(query, (), database="datadog_test-1")
         dd_run_check(check)
 
     # dbm-metrics
@@ -1025,14 +1025,14 @@ def test_metrics_lookback_multiplier(instance_docker):
     [
         pytest.param(None, None, False, id="no_database_configured_not_azure_sql"),  # should default to master
         pytest.param(
-            "datadog_test", None, False, id="configured_database_datadog_test_not_azure_sql"
-        ),  # should not filter to configured database datadog_test
+            "datadog_test-1", None, False, id="configured_database_datadog_test-1_not_azure_sql"
+        ),  # should not filter to configured database datadog_test-1
         pytest.param(
             "master", None, False, id="configured_database_master_not_azure_sql"
         ),  # should use configured database master
         pytest.param(
-            "datadog_test", ENGINE_EDITION_SQL_DATABASE, True, id="configured_database_datadog_test_azure_sql"
-        ),  # should filter to configured database datadog_test
+            "datadog_test-1", ENGINE_EDITION_SQL_DATABASE, True, id="configured_database_datadog_test-1_azure_sql"
+        ),  # should filter to configured database datadog_test-1
     ],
 )
 def test_statement_with_metrics_azure_sql_filtered_to_configured_database(
@@ -1052,7 +1052,7 @@ def test_statement_with_metrics_azure_sql_filtered_to_configured_database(
     check.static_info_cache[STATIC_INFO_ENGINE_EDITION] = engine_edition
 
     def _execute_queries():
-        bob_conn.execute_with_retries("SELECT * FROM ϑings", (), database="datadog_test")
+        bob_conn.execute_with_retries("SELECT * FROM ϑings", (), database="datadog_test-1")
         bob_conn.execute_with_retries("SELECT count(*) from sys.databases", (), database="master")
 
     dd_run_check(check)
@@ -1075,5 +1075,5 @@ def _execute_queries():
         ), "should have only collected metrics for configured database"
     else:
         database_names = {row['database_name'] for row in sqlserver_rows}
-        assert 'datadog_test' in database_names, "should have collected metrics for datadog_test databases"
+        assert 'datadog_test-1' in database_names, "should have collected metrics for datadog_test-1 databases"
         assert 'master' in database_names, "should have collected metrics for master databases"
diff --git a/sqlserver/tests/test_stored_procedures.py b/sqlserver/tests/test_stored_procedures.py
index fa9ae00108366..aedb32494f026 100644
--- a/sqlserver/tests/test_stored_procedures.py
+++ b/sqlserver/tests/test_stored_procedures.py
@@ -156,7 +156,7 @@ def test_get_procedure_metrics_query_cached(aggregator, dbm_instance, caplog):
             ],
         ],
         [
-            "datadog_test",  # database
+            "datadog_test-1",  # database
             "EXEC bobProc",  # query
             ((),),
             1,
             [
                 {
                     'schema_name': 'dbo',
                     'procedure_name': 'bobProc',
-                    'database_name': 'datadog_test',
+                    'database_name': 'datadog_test-1',
                     'execution_count': 1,
                 }
             ],
         ],
         [
-            "datadog_test",  # database
+            "datadog_test-1",  # database
             "EXEC bobProc",  # query
             ((),),
             10,
             [
                 {
                     'schema_name': 'dbo',
                     'procedure_name': 'bobProc',
-                    'database_name': 'datadog_test',
+                    'database_name': 'datadog_test-1',
                     'execution_count': 10,
                 }
             ],
         ],
         [
-            "datadog_test",  # database
+            "datadog_test-1",  # database
             "EXEC bobProcParams @P1 = ?, @P2 = ?",  # query
             (
                 (1, "foo"),
             2,
             [
                 {
                     'schema_name': 'dbo',
                     'procedure_name': 'bobProcParams',
-                    'database_name': 'datadog_test',
+                    'database_name': 'datadog_test-1',
                     'execution_count': 2,
                 }
             ],
@@ -299,12 +299,12 @@ def test_procedure_metrics_limit(aggregator, dd_run_check, dbm_instance, bob_con
     dd_run_check(check)
     aggregator.reset()
     bob_conn.execute_with_retries('EXEC multiQueryProc', (), database='master')
     bob_conn.execute_with_retries('EXEC encryptedProc', (), database='master')
-    bob_conn.execute_with_retries('EXEC bobProc', (), database='datadog_test')
+    bob_conn.execute_with_retries('EXEC bobProc', (), database='datadog_test-1')
     dd_run_check(check)
 
     # dbm-metrics
diff --git a/sqlserver/tests/utils.py b/sqlserver/tests/utils.py
index c0a6ac2c78205..1d009b47ed6f5 100644
--- a/sqlserver/tests/utils.py
+++ b/sqlserver/tests/utils.py
@@ -73,14 +73,14 @@ def is_ready(self):
         """
         cursor = self.get_conn().cursor()
         cursor.execute(
-            'SELECT COUNT(*) FROM datadog_test.sys.database_principals WHERE name LIKE \'high_cardinality_user_%\''
+            'SELECT COUNT(*) FROM [datadog_test-1].sys.database_principals WHERE name LIKE \'high_cardinality_user_%\''
         )
         user_count = cursor.fetchone()[0]
-        cursor.execute('SELECT COUNT(*) FROM datadog_test.sys.schemas WHERE name LIKE \'high_cardinality_schema%\'')
+        cursor.execute('SELECT COUNT(*) FROM [datadog_test-1].sys.schemas WHERE name LIKE \'high_cardinality_schema%\'')
         schema_count = cursor.fetchone()[0]
-        cursor.execute('SELECT COUNT(*) FROM datadog_test.sys.tables')
+        cursor.execute('SELECT COUNT(*) FROM [datadog_test-1].sys.tables')
         table_count = cursor.fetchone()[0]
-        cursor.execute('SELECT COUNT(*) FROM datadog_test.dbo.high_cardinality')
+        cursor.execute('SELECT COUNT(*) FROM [datadog_test-1].dbo.high_cardinality')
         row_count = cursor.fetchone()[0]
         return (
             user_count >= HighCardinalityQueries.EXPECTED_OBJ_COUNT
@@ -91,7 +91,7 @@ def is_ready(self):
 
     def start_background(self, config=None):
         """
-        Run a set of queries against the table `datadog_test.dbo.high_cardinality` in the background
+        Run a set of queries against the table `[datadog_test-1].dbo.high_cardinality` in the background
 
         Args:
             config (dict, optional): Configure how many threads will spin off for each kind of query.
@@ -149,7 +149,7 @@ def create_high_cardinality_query(self):
         """Creates a high cardinality query by shuffling the columns."""
         columns = copy(self.columns)
         shuffle(columns)
-        return 'SELECT {col} FROM datadog_test.dbo.high_cardinality WHERE id = {id}'.format(
+        return 'SELECT {col} FROM [datadog_test-1].dbo.high_cardinality WHERE id = {id}'.format(
             col=','.join(columns), id=randint(1, HighCardinalityQueries.EXPECTED_ROW_COUNT)
         )
 
@@ -157,8 +157,10 @@ def create_slow_query(self):
         """Creates a slow running query by trying to match a pattern that may or may not exist."""
         columns = copy(self.columns)
         shuffle(columns)
-        return 'SELECT TOP 10 {col} FROM datadog_test.dbo.high_cardinality WHERE col2_txt LIKE \'%{pattern}%\''.format(
-            col={columns[0]}, pattern=self._create_rand_string()
+        return (
+            'SELECT TOP 10 {col} FROM [datadog_test-1].dbo.high_cardinality WHERE col2_txt LIKE \'%{pattern}%\''.format(
+                col=columns[0], pattern=self._create_rand_string()
+            )
         )
 
     def create_complex_query(self):
@@ -169,23 +171,23 @@
         SELECT
             {col}
         FROM
-            datadog_test.dbo.high_cardinality AS hc1
+            [datadog_test-1].dbo.high_cardinality AS hc1
         JOIN (
             SELECT
                 id,
                 COUNT(*) col12_float
             FROM
-                datadog_test.dbo.high_cardinality AS hc2
+                [datadog_test-1].dbo.high_cardinality AS hc2
             WHERE
                 hc2.col1_txt LIKE '%-%'
                 AND hc2.col14_int > (
                     SELECT
                         AVG(hc3.col15_int)
                     FROM
-                        datadog_test.dbo.high_cardinality AS hc3)
+                        [datadog_test-1].dbo.high_cardinality AS hc3)
             GROUP BY
                 hc2.id) AS hc4 ON hc4.id = hc1.id
-        JOIN datadog_test.dbo.high_cardinality AS hc5 ON hc5.id = hc1.id
+        JOIN [datadog_test-1].dbo.high_cardinality AS hc5 ON hc5.id = hc1.id
         WHERE
             CAST(hc5.col17_date AS VARCHAR) IN('2003-04-23', '2043-09-10', '1996-08-08')
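
A minimal sketch of why the bracket quoting above is required (not part of the patch; the driver string and the bob/Password12! credentials mirror the test fixtures above but are assumptions for illustration):

    import pyodbc

    from datadog_checks.sqlserver.const import SWITCH_DB_STATEMENT

    # Illustrative connection only; driver name and credentials are assumed.
    conn = pyodbc.connect(
        'DRIVER={ODBC Driver 18 for SQL Server};Server=localhost;UID=bob;PWD=Password12!;TrustServerCertificate=yes;',
        autocommit=True,
    )
    cursor = conn.cursor()

    # Unquoted, T-SQL parses "USE datadog_test-1;" as the identifier datadog_test
    # followed by "-1" and rejects it with an incorrect-syntax error. Bracket-quoted,
    # the whole name is one identifier: SWITCH_DB_STATEMENT renders "USE [datadog_test-1];".
    cursor.execute(SWITCH_DB_STATEMENT.format("datadog_test-1"))
    cursor.execute("SELECT DB_NAME()")
    print(cursor.fetchone()[0])  # datadog_test-1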