Skip to content

Hyphen in database name support (SDBM-1013) #17775

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into from
Jun 11, 2024
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions sqlserver/changelog.d/17775.fixed
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Hyphen in database name support
2 changes: 1 addition & 1 deletion sqlserver/datadog_checks/sqlserver/const.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@
]

DATABASE_SERVICE_CHECK_QUERY = """SELECT 1;"""
SWITCH_DB_STATEMENT = """USE {};"""
SWITCH_DB_STATEMENT = """USE [{}];"""

VALID_METRIC_TYPES = ('gauge', 'rate', 'histogram')

Expand Down
2 changes: 1 addition & 1 deletion sqlserver/tests/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -283,7 +283,7 @@ def assert_metrics(
tags = check_tags + ['database:{}'.format(dbname)]
for mname in DB_PERF_COUNT_METRICS_NAMES_SINGLE:
aggregator.assert_metric(mname, hostname=hostname, tags=tags)
if dbname == 'datadog_test' and is_always_on():
if dbname == 'datadog_test-1' and is_always_on():
for mname in DB_PERF_COUNT_METRICS_NAMES_AO:
aggregator.assert_metric(mname, hostname=hostname, tags=tags)
else:
Expand Down
16 changes: 8 additions & 8 deletions sqlserver/tests/compose-ha/sql/aoag_primary.sql
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ CREATE USER fred FOR LOGIN fred;
GRANT CONNECT ANY DATABASE to fred;
GO

CREATE DATABASE datadog_test;
CREATE DATABASE [datadog_test-1];
GO

-- create an offline database to have an unavailable database to test with
Expand All @@ -38,10 +38,10 @@ GO

-- Create test database for integration tests
-- only bob and fred have read/write access to this database
USE datadog_test;
CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
USE [datadog_test-1];
CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
CREATE USER bob FOR LOGIN bob;
CREATE USER fred FOR LOGIN fred;
GO
Expand Down Expand Up @@ -184,10 +184,10 @@ USE [master]
GO

--change recovery model and take full backup for db to meet requirements of AOAG
ALTER DATABASE datadog_test SET RECOVERY FULL ;
ALTER DATABASE [datadog_test-1] SET RECOVERY FULL ;
GO

BACKUP DATABASE datadog_test TO DISK = N'/var/opt/mssql/backup/datadog_test.bak' WITH NOFORMAT, NOINIT, NAME = N'datadog_test-Full Database Backup', SKIP, NOREWIND, NOUNLOAD, STATS = 10
BACKUP DATABASE [datadog_test-1] TO DISK = N'/var/opt/mssql/backup/datadog_test-1.bak' WITH NOFORMAT, NOINIT, NAME = N'datadog_test-1-Full Database Backup', SKIP, NOREWIND, NOUNLOAD, STATS = 10
GO


Expand Down Expand Up @@ -271,5 +271,5 @@ USE [master]
GO

WAITFOR DELAY '00:00:10'
ALTER AVAILABILITY GROUP [AG1] ADD DATABASE [datadog_test]
ALTER AVAILABILITY GROUP [AG1] ADD DATABASE [datadog_test-1]
GO
14 changes: 7 additions & 7 deletions sqlserver/tests/compose-high-cardinality-windows/setup.sql
Original file line number Diff line number Diff line change
Expand Up @@ -33,16 +33,16 @@ GO
-- Create test database for integration tests
-- only bob and fred have read/write access to this database
-- the datadog user has only connect access but can't read any objects
CREATE DATABASE datadog_test;
CREATE DATABASE [datadog_test-1];
GO
USE datadog_test;
USE [datadog_test-1];
GO

-- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
-- correctly support unicode throughout the integration.
CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
CREATE USER bob FOR LOGIN bob;
CREATE USER fred FOR LOGIN fred;
-- we don't need to recreate the datadog user in this new DB because it already exists in the model
Expand Down Expand Up @@ -196,7 +196,7 @@ GO
------------------------------ HIGH CARDINALITY ENV SETUP ------------------------------

-- Table variables
DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE datadog_test.dbo.'
DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE [datadog_test-1].dbo.'
DECLARE @table_columns VARCHAR(500) = ' (id INT NOT NULL IDENTITY, col1_txt TEXT, col2_txt TEXT, col3_txt TEXT, col4_txt TEXT, col5_txt TEXT, col6_txt TEXT, col7_txt TEXT, col8_txt TEXT, col9_txt TEXT, col10_txt TEXT, col11_float FLOAT, col12_float FLOAT, col13_float FLOAT, col14_int INT, col15_int INT, col16_int INT, col17_date DATE, PRIMARY KEY(id));';

-- Create a main table which contains high cardinality data for testing.
Expand Down Expand Up @@ -227,7 +227,7 @@ BEGIN
DECLARE @col16_int INT = FLOOR(RAND() * 2500);
DECLARE @col17_date DATE = CAST(CAST(RAND()*100000 AS INT) AS DATETIME);

INSERT INTO datadog_test.dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);
INSERT INTO [datadog_test-1].dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);

SET @row_count = @row_count + 1
END;
Expand Down
14 changes: 7 additions & 7 deletions sqlserver/tests/compose-high-cardinality/setup.sql
Original file line number Diff line number Diff line change
Expand Up @@ -125,9 +125,9 @@ GO

-- Create test database for integration tests.
-- Only bob and fred have read/write access to this database.
CREATE DATABASE datadog_test;
CREATE DATABASE [datadog_test-1];
GO
USE datadog_test;
USE [datadog_test-1];
GO

CREATE USER bob FOR LOGIN bob;
Expand Down Expand Up @@ -174,12 +174,12 @@ GO

-- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
-- correctly support unicode throughout the integration.
CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);

-- Table variables
DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE datadog_test.dbo.'
DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE [datadog_test-1].dbo.'
DECLARE @table_columns VARCHAR(500) = ' (id INT NOT NULL IDENTITY, col1_txt TEXT, col2_txt TEXT, col3_txt TEXT, col4_txt TEXT, col5_txt TEXT, col6_txt TEXT, col7_txt TEXT, col8_txt TEXT, col9_txt TEXT, col10_txt TEXT, col11_float FLOAT, col12_float FLOAT, col13_float FLOAT, col14_int INT, col15_int INT, col16_int INT, col17_date DATE, PRIMARY KEY(id));';

-- Create a main table which contains high cardinality data for testing.
Expand Down Expand Up @@ -210,7 +210,7 @@ BEGIN
DECLARE @col16_int INT = FLOOR(RAND() * 2500);
DECLARE @col17_date DATE = CAST(CAST(RAND()*100000 AS INT) AS DATETIME);

INSERT INTO datadog_test.dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);
INSERT INTO [datadog_test-1].dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);

SET @row_count = @row_count + 1
END;
Expand Down
10 changes: 5 additions & 5 deletions sqlserver/tests/compose-windows/setup.sql
Original file line number Diff line number Diff line change
Expand Up @@ -33,16 +33,16 @@ GO
-- Create test database for integration tests
-- only bob and fred have read/write access to this database
-- the datadog user has only connect access but can't read any objects
CREATE DATABASE datadog_test;
CREATE DATABASE [datadog_test-1];
GO
USE datadog_test;
USE [datadog_test-1];
GO

-- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
-- correctly support unicode throughout the integration.
CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
CREATE USER bob FOR LOGIN bob;
CREATE USER fred FOR LOGIN fred;
-- we don't need to recreate the datadog user in this new DB because it already exists in the model
Expand Down
10 changes: 5 additions & 5 deletions sqlserver/tests/compose/setup.sql
Original file line number Diff line number Diff line change
Expand Up @@ -17,16 +17,16 @@ GO

-- Create test database for integration tests
-- only bob and fred have read/write access to this database
CREATE DATABASE datadog_test;
CREATE DATABASE [datadog_test-1];
GO
USE datadog_test;
USE [datadog_test-1];
-- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
-- correctly support unicode throughout the integration.
CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE USER bob FOR LOGIN bob;
CREATE USER fred FOR LOGIN fred;
CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
GO

EXEC sp_addrolemember 'db_datareader', 'bob'
Expand Down
3 changes: 2 additions & 1 deletion sqlserver/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from datadog_checks.dev import WaitFor, docker_run
from datadog_checks.dev.conditions import CheckDockerLogs
from datadog_checks.dev.docker import using_windows_containers
from datadog_checks.sqlserver.const import SWITCH_DB_STATEMENT

from .common import (
DOCKER_SERVER,
Expand Down Expand Up @@ -198,7 +199,7 @@ def execute_with_retries(self, query, params=(), database=None, retries=3, sleep
logging.info("executing query with retries. query='%s' params=%s attempt=%s", query, params, attempt)
with self.conn.cursor() as cursor:
if database:
cursor.execute("USE {}".format(database))
cursor.execute(SWITCH_DB_STATEMENT.format(database))
cursor.execute(query, params)
if return_result:
return cursor.fetchall()
Expand Down
20 changes: 10 additions & 10 deletions sqlserver/tests/test_activity.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,14 +64,14 @@ def dbm_instance(instance_docker):
"database,query,match_pattern,is_proc,expected_comments",
[
[
"datadog_test",
"datadog_test-1",
"/*test=foo*/ SELECT * FROM ϑings",
r"SELECT \* FROM ϑings",
False,
["/*test=foo*/"],
],
[
"datadog_test",
"datadog_test-1",
"EXEC bobProc",
r"SELECT \* FROM ϑings",
True,
Expand All @@ -98,7 +98,7 @@ def test_collect_load_activity(

def run_test_query(c, q):
cur = c.cursor()
cur.execute("USE {}".format(database))
cur.execute("USE [{}]".format(database))
# 0xFF can't be decoded to Unicode, which makes it good test data,
# since Unicode is a default format
cur.execute("SET CONTEXT_INFO 0xff")
Expand Down Expand Up @@ -166,7 +166,7 @@ def run_test_query(c, q):
assert blocked_row['procedure_signature'], "missing procedure signature"
assert blocked_row['procedure_name'], "missing procedure name"
assert re.match(match_pattern, blocked_row['text'], re.IGNORECASE), "incorrect blocked query"
assert blocked_row['database_name'] == "datadog_test", "incorrect database_name"
assert blocked_row['database_name'] == "datadog_test-1", "incorrect database_name"
assert blocked_row['context_info'] == "ff", "incorrect context_info"
assert blocked_row['id'], "missing session id"
assert blocked_row['now'], "missing current timestamp"
Expand Down Expand Up @@ -254,7 +254,7 @@ def test_activity_nested_blocking_transactions(

def run_queries(conn, queries):
cur = conn.cursor()
cur.execute("USE {}".format("datadog_test"))
cur.execute("USE [{}]".format("datadog_test-1"))
cur.execute("BEGIN TRANSACTION")
for q in queries:
try:
Expand Down Expand Up @@ -306,7 +306,7 @@ def run_queries(conn, queries):
# associated sys.dm_exec_requests.
assert root_blocker["user_name"] == "fred"
assert root_blocker["session_status"] == "sleeping"
assert root_blocker["database_name"] == "datadog_test"
assert root_blocker["database_name"] == "datadog_test-1"
assert root_blocker["last_request_start_time"]
assert root_blocker["client_port"]
assert root_blocker["client_address"]
Expand All @@ -328,7 +328,7 @@ def run_queries(conn, queries):
assert tx3["session_status"] == "running"
# verify other essential fields are present
assert tx2["user_name"] == "bob"
assert tx2["database_name"] == "datadog_test"
assert tx2["database_name"] == "datadog_test-1"
assert tx2["last_request_start_time"]
assert tx2["client_port"]
assert tx2["client_address"]
Expand All @@ -340,7 +340,7 @@ def run_queries(conn, queries):
assert tx2["query_plan_hash"]

assert tx3["user_name"] == "fred"
assert tx3["database_name"] == "datadog_test"
assert tx3["database_name"] == "datadog_test-1"
assert tx3["last_request_start_time"]
assert tx3["client_port"]
assert tx3["client_address"]
Expand Down Expand Up @@ -391,7 +391,7 @@ def test_activity_metadata(

def _run_test_query(conn, q):
cur = conn.cursor()
cur.execute("USE {}".format("datadog_test"))
cur.execute("USE [{}]".format("datadog_test-1"))
cur.execute(q)

def _obfuscate_sql(sql_query, options=None):
Expand Down Expand Up @@ -646,7 +646,7 @@ def _obfuscate_sql(sql_query, options=None):

def run_test_query(c, q):
cur = c.cursor()
cur.execute("USE datadog_test")
cur.execute("USE [datadog_test-1]")
cur.execute(q)

run_test_query(fred_conn, "EXEC procedureWithLargeCommment")
Expand Down
10 changes: 5 additions & 5 deletions sqlserver/tests/test_database_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
SQLSERVER_MAJOR_VERSION,
)

AUTODISCOVERY_DBS = ['master', 'msdb', 'datadog_test']
AUTODISCOVERY_DBS = ['master', 'msdb', 'datadog_test-1']

STATIC_SERVER_INFO = {
STATIC_INFO_MAJOR_VERSION: SQLSERVER_MAJOR_VERSION,
Expand Down Expand Up @@ -61,8 +61,8 @@ def test_sqlserver_index_usage_metrics(
('msdb', 'PK__backupse__21F79AAB9439648C', 'backupset', 0, 1, 0, 0),
],
[
('datadog_test', 'idx_something', 'some_table', 10, 60, 12, 18),
('datadog_test', 'idx_something_else', 'some_table', 20, 30, 40, 50),
('datadog_test-1', 'idx_something', 'some_table', 10, 60, 12, 18),
('datadog_test-1', 'idx_something_else', 'some_table', 20, 30, 40, 50),
],
]
mocked_results_tempdb = [
Expand Down Expand Up @@ -153,7 +153,7 @@ def test_sqlserver_db_fragmentation_metrics(
('msdb', 'syscachedcredentials', 1, 'PK__syscache__F6D56B562DA81DC6', 0, 0.0, 0, 0.0),
('msdb', 'syscollector_blobs_internal', 1, 'PK_syscollector_blobs_internal_paremeter_name', 0, 0.0, 0, 0.0),
],
[('datadog_test', 'ϑings', 1, 'thingsindex', 1, 1.0, 1, 0.0)],
[('datadog_test-1', 'ϑings', 1, 'thingsindex', 1, 1.0, 1, 0.0)],
]
mocked_results_tempdb = [
[('tempdb', '#TempExample__000000000008', 1, 'PK__#TempExa__3214EC278A26D67E', 1, 1.0, 1, 0.0)],
Expand Down Expand Up @@ -250,7 +250,7 @@ def test_sqlserver_database_backup_metrics(
('model', 'model', 2),
('msdb', 'msdb', 0),
('tempdb', 'tempdb', 0),
('datadog_test', 'datadog_test', 10),
('datadog_test-1', 'datadog_test-1', 10),
]

sqlserver_check = SQLServer(CHECK_NAME, init_config, [instance_docker_metrics])
Expand Down
Loading
Loading