Skip to content

Commit 464b8fd

Browse files
committed
improve variable names
1 parent 13c564f commit 464b8fd

File tree

3 files changed

+38
-40
lines changed

3 files changed

+38
-40
lines changed

sqlserver/assets/configuration/spec.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -715,8 +715,8 @@ files:
715715
display_default: false
716716
- name: schemas_collection
717717
description: |
718-
Configure collection of schemas. "\If database_autodiscovery is not enabled, data is collected
719-
only for the database configured with database."\
718+
Configure collection of schemas. If `database_autodiscovery` is not enabled, data is collected
719+
only for the database configured with `database` parameter.
720720
options:
721721
- name: enabled
722722
description: |

sqlserver/datadog_checks/sqlserver/schemas.py

Lines changed: 33 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
SCHEMA_QUERY,
2525
TABLES_IN_SCHEMA_QUERY,
2626
)
27-
from datadog_checks.sqlserver.utils import execute_query_output_result_as_dicts, get_list_chunks, is_azure_sql_database
27+
from datadog_checks.sqlserver.utils import execute_query, get_list_chunks, is_azure_sql_database
2828

2929

3030
class SubmitData:
@@ -73,7 +73,6 @@ def truncate(self, json_event):
7373
else:
7474
return json_event
7575

76-
# NOTE: DB with no schemas is never submitted
7776
def submit(self):
7877
if not self.db_to_schemas:
7978
return
@@ -82,7 +81,7 @@ def submit(self):
8281
for db, schemas_by_id in self.db_to_schemas.items():
8382
db_info = {}
8483
if db not in self.db_info:
85-
self._log.error("Couldn't find database info for %s", db)
84+
self._log.error("Couldn't find database info for {}".format(db))
8685
db_info["name"] = db
8786
else:
8887
db_info = self.db_info[db]
@@ -101,7 +100,7 @@ class Schemas(DBMAsyncJob):
101100

102101
TABLES_CHUNK_SIZE = 500
103102
# Note: in async mode execution time also cannot exceed 2 checks.
104-
MAX_EXECUTION_TIME = 10
103+
DEFAULT_MAX_EXECUTION_TIME = 10
105104
MAX_COLUMNS_PER_EVENT = 100_000
106105

107106
def __init__(self, check, config):
@@ -111,7 +110,7 @@ def __init__(self, check, config):
111110
self._last_schemas_collect_time = None
112111
collection_interval = config.schema_config.get('collection_interval', DEFAULT_SCHEMAS_COLLECTION_INTERVAL)
113112
self._max_execution_time = min(
114-
config.schema_config.get('max_execution_time', self.MAX_EXECUTION_TIME), collection_interval
113+
config.schema_config.get('max_execution_time', self.DEFAULT_MAX_EXECUTION_TIME), collection_interval
115114
)
116115
super(Schemas, self).__init__(
117116
check,
@@ -155,8 +154,8 @@ def _fetch_schema_data(self, cursor, db_name):
155154
if schema_collection_elapsed_time > self._max_execution_time:
156155
# TODO Report truncation to the backend
157156
self._log.warning(
158-
"""Truncated data due to the effective execution time reaching {},
159-
stopped on db - {} on schema {}""".format(
157+
"""Truncated data due to the execution time reaching {}s,
158+
stopped on db {} on schema {}""".format(
160159
self._max_execution_time, db_name, schema["name"]
161160
)
162161
)
@@ -184,16 +183,12 @@ def _fetch_for_databases(self):
184183
self._fetch_schema_data(cursor, db_name)
185184
except StopIteration as e:
186185
self._log.error(
187-
"While executing fetch schemas for databse - %s, the following exception occured - %s",
188-
db_name,
189-
e,
186+
"While executing fetch schemas for database {}, the following exception occurred {}".format(db_name, e)
190187
)
191188
return
192189
except Exception as e:
193190
self._log.error(
194-
"While executing fetch schemas for databse - %s, the following exception occured - %s",
195-
db_name,
196-
e,
191+
"While executing fetch schemas for database {}, the following exception occurred {}".format(db_name, e)
197192
)
198193
# Switch DB back to MASTER
199194
if not is_azure_sql_database(engine_edition):
@@ -254,17 +249,17 @@ def _collect_schemas_data(self):
254249
)
255250

256251
databases = self._check.get_databases()
257-
db_infos = self._query_db_informations(databases)
252+
db_infos = self._query_db_information(databases)
258253
self._data_submitter.store_db_infos(db_infos)
259254
self._fetch_for_databases()
260-
self._log.debug("Finished collect_schemas_data")
261255
self._data_submitter.submit()
256+
self._log.debug("Finished collect_schemas_data")
262257

263-
def _query_db_informations(self, db_names):
258+
def _query_db_information(self, db_names):
264259
with self._check.connection.open_managed_default_connection():
265260
with self._check.connection.get_managed_cursor() as cursor:
266261
db_names_formatted = ",".join(["'{}'".format(t) for t in db_names])
267-
return execute_query_output_result_as_dicts(
262+
return execute_query(
268263
DB_QUERY.format(db_names_formatted), cursor, convert_results_to_str=True
269264
)
270265

@@ -276,7 +271,7 @@ def _get_tables(self, schema, cursor):
276271
"name": str
277272
"columns": []
278273
"""
279-
tables_info = execute_query_output_result_as_dicts(
274+
tables_info = execute_query(
280275
TABLES_IN_SCHEMA_QUERY, cursor, convert_results_to_str=True, parameter=schema["id"]
281276
)
282277
for t in tables_info:
@@ -292,7 +287,7 @@ def _query_schema_information(self, cursor):
292287
"id": str
293288
"owner_name": str
294289
"""
295-
return execute_query_output_result_as_dicts(SCHEMA_QUERY, cursor, convert_results_to_str=True)
290+
return execute_query(SCHEMA_QUERY, cursor, convert_results_to_str=True)
296291

297292
@tracked_method(agent_check_getter=agent_check_getter, track_result_length=True)
298293
def _get_tables_data(self, table_list, schema, cursor):
@@ -359,7 +354,8 @@ def _populate_with_columns_data(self, table_ids, name_to_id, id_to_table_data, s
359354
]
360355
rows = [dict(zip(columns, [str(item) for item in row])) for row in data]
361356
for row in rows:
362-
table_id = name_to_id.get(str(row.get("table_name")))
357+
table_name = str(row.get("table_name"))
358+
table_id = name_to_id.get(table_name)
363359
if table_id is not None:
364360
row.pop("table_name", None)
365361
if "nullable" in row:
@@ -372,28 +368,28 @@ def _populate_with_columns_data(self, table_ids, name_to_id, id_to_table_data, s
372368
row
373369
]
374370
else:
375-
self._log.error("Columns found for an unkown table with the object_id: %s", table_id)
371+
self._log.debug("Columns found for an unknown table with the object_id: {}".format(table_id))
376372
else:
377-
self._log.error("Couldn't find id of a table: %s", table_id)
373+
self._log.debug("Couldn't find id of a table: {}".format(table_name))
378374
return len(data)
379375

380376
@tracked_method(agent_check_getter=agent_check_getter)
381377
def _populate_with_partitions_data(self, table_ids, id_to_table_data, cursor):
382-
rows = execute_query_output_result_as_dicts(PARTITIONS_QUERY.format(table_ids), cursor)
378+
rows = execute_query(PARTITIONS_QUERY.format(table_ids), cursor)
383379
for row in rows:
384380
id = row.pop("id", None)
385381
if id is not None:
386382
id_str = str(id)
387383
if id_str in id_to_table_data:
388384
id_to_table_data[id_str]["partitions"] = row
389385
else:
390-
self._log.error("Partition found for an unkown table with the object_id: %s", id_str)
386+
self._log.debug("Partition found for an unknown table with the object_id: {}".format(id_str))
391387
else:
392-
self._log.error("Return rows of [%s] query should have id column", PARTITIONS_QUERY)
388+
self._log.debug("Return rows of [{}] query should have id column".format(PARTITIONS_QUERY))
393389

394390
@tracked_method(agent_check_getter=agent_check_getter)
395391
def _populate_with_index_data(self, table_ids, id_to_table_data, cursor):
396-
rows = execute_query_output_result_as_dicts(INDEX_QUERY.format(table_ids), cursor)
392+
rows = execute_query(INDEX_QUERY.format(table_ids), cursor)
397393
for row in rows:
398394
id = row.pop("id", None)
399395
if id is not None:
@@ -402,21 +398,21 @@ def _populate_with_index_data(self, table_ids, id_to_table_data, cursor):
402398
id_to_table_data[id_str].setdefault("indexes", [])
403399
id_to_table_data[id_str]["indexes"].append(row)
404400
else:
405-
self._log.error("Index found for an unkown table with the object_id: %s", id_str)
401+
self._log.debug("Index found for an unknown table with the object_id: {}".format(id_str))
406402
else:
407-
self._log.error("Return rows of [%s] query should have id column", INDEX_QUERY)
403+
self._log.debug("Return rows of [{}] query should have id column".format(INDEX_QUERY))
408404

409405
@tracked_method(agent_check_getter=agent_check_getter, track_result_length=True)
410-
def _populate_with_foreign_keys_data(self, table_ids, id_to_table_data, cursor):
411-
rows = execute_query_output_result_as_dicts(FOREIGN_KEY_QUERY.format(table_ids), cursor)
406+
def _populate_with_foreign_keys_data(self, table_ids, table_id_to_table_data, cursor):
407+
rows = execute_query(FOREIGN_KEY_QUERY.format(table_ids), cursor)
412408
for row in rows:
413-
id = row.pop("id", None)
409+
table_id = row.pop("id", None)
414410
if table_id is not None:
415-
id_str = str(id)
416-
if id_str in id_to_table_data:
417-
id_to_table_data.get(str(id)).setdefault("foreign_keys", [])
418-
id_to_table_data.get(str(id))["foreign_keys"].append(row)
411+
table_id_str = str(table_id)
412+
if table_id_str in table_id_to_table_data:
413+
table_id_to_table_data.get(table_id_str).setdefault("foreign_keys", [])
414+
table_id_to_table_data.get(table_id_str)["foreign_keys"].append(row)
419415
else:
420-
self._log.error("Foreign key found for an unkown table with the object_id: %s", id_str)
416+
self._log.debug("Foreign key found for an unknown table with the object_id: {}".format(table_id_str))
421417
else:
422-
self._log.error("Return rows of [%s] query should have id column", FOREIGN_KEY_QUERY)
418+
self._log.debug("Return rows of [{}] query should have id column".format(FOREIGN_KEY_QUERY))

sqlserver/datadog_checks/sqlserver/utils.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@
77
from datadog_checks.base.utils.platform import Platform
88
from datadog_checks.sqlserver.const import ENGINE_EDITION_AZURE_MANAGED_INSTANCE, ENGINE_EDITION_SQL_DATABASE
99

10+
from typing import Dict
11+
1012
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
1113
DRIVER_CONFIG_DIR = os.path.join(CURRENT_DIR, 'data', 'driver_config')
1214

@@ -139,7 +141,7 @@ def is_azure_sql_database(engine_edition):
139141
return engine_edition == ENGINE_EDITION_SQL_DATABASE
140142

141143

142-
def execute_query_output_result_as_dicts(query, cursor, convert_results_to_str=False, parameter=None):
144+
def execute_query(query, cursor, convert_results_to_str=False, parameter=None) -> Dict[str, str]:
143145
if parameter is not None:
144146
cursor.execute(query, (parameter,))
145147
else:

0 commit comments

Comments
 (0)