Commit 31a8ef2

fix
1 parent 60625af commit 31a8ef2

File tree

14 files changed · +21 −21 lines changed


datadog_checks_dev/datadog_checks/dev/plugin/pytest.py

Lines changed: 1 addition & 1 deletion
@@ -320,7 +320,7 @@ def enum_object_items(data_source, machine_name, object_name, detail_level):
     counters = {}
     for object_name, data in perf_objects.items():
         instances, counter_values = data
-        instance_counts = {instance_name: 0 for instance_name in instances}
+        instance_counts = dict.fromkeys(instances, 0)
         instance_indices = []
         for instance_name in instances:
             instance_indices.append(instance_counts[instance_name])
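
For reference, the rewritten line builds the same mapping as the comprehension it replaces; a quick check with made-up instance names (not taken from the check):

>>> instances = ['svchost', 'lsass']
>>> {instance_name: 0 for instance_name in instances} == dict.fromkeys(instances, 0)
True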

dns_check/tests/mocks.py

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ def __init__(self, address):
         else:
             items = [MockDNSAnswer.MockItem(address)]
 
-        self.items = {item: None for item in items}
+        self.items = dict.fromkeys(items)
 
     class MockItem:
         def __init__(self, address):
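
The one-argument form of dict.fromkeys used here defaults every value to None, which matches the replaced comprehension; a small illustration with placeholder items:

>>> dict.fromkeys(['a', 'b'])
{'a': None, 'b': None}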

haproxy/datadog_checks/haproxy/legacy/haproxy.py

Lines changed: 2 additions & 2 deletions
@@ -453,7 +453,7 @@ def _normalize_status(status):
         return formatted_status
 
     def _process_backend_hosts_metric(self, active_tag=None):
-        agg_statuses = defaultdict(lambda: {status: 0 for status in Services.COLLATED_STATUSES})
+        agg_statuses = defaultdict(lambda: dict.fromkeys(Services.COLLATED_STATUSES, 0))
         active_tag = [] if active_tag is None else active_tag
 
         for host_status, count in self.hosts_statuses.items():
@@ -493,7 +493,7 @@ def _process_status_metric(
         self,
         active_tag=None,
     ):
-        agg_statuses_counter = defaultdict(lambda: {status: 0 for status in Services.COLLATED_STATUSES})
+        agg_statuses_counter = defaultdict(lambda: dict.fromkeys(Services.COLLATED_STATUSES, 0))
         active_tag = [] if active_tag is None else active_tag
         # Initialize `statuses_counter`: every value is a defaultdict initialized with the correct
         # keys, which depends on the `collate_status_tags_per_host` option
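
Because the shared value sits inside a lambda, the defaultdict still builds a fresh status dict for every missing key, so aggregated counts for different backends never alias each other; a sketch with a stand-in for Services.COLLATED_STATUSES:

>>> from collections import defaultdict
>>> COLLATED_STATUSES = ('UP', 'DOWN')
>>> agg_statuses = defaultdict(lambda: dict.fromkeys(COLLATED_STATUSES, 0))
>>> agg_statuses['backend-a']['UP'] += 1
>>> agg_statuses['backend-b']
{'UP': 0, 'DOWN': 0}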

http_check/tests/conftest.py

Lines changed: 1 addition & 1 deletion
@@ -43,7 +43,7 @@ def call_endpoint(url):
 
 @pytest.fixture(scope='session')
 def mock_local_http_dns():
-    mapping = {x: ('127.0.0.1', 443) for x in MOCKED_HOSTS}
+    mapping = dict.fromkeys(MOCKED_HOSTS, ('127.0.0.1', 443))
     with mock_local(mapping):
         yield
 
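
One general caveat with dict.fromkeys: it stores a single shared value object for every key. That is safe here because the ('127.0.0.1', 443) tuple is immutable, but the same rewrite with a mutable value would alias it across keys; a toy example of the pitfall:

>>> aliased = dict.fromkeys(['a', 'b'], [])
>>> aliased['a'].append(1)
>>> aliased['b']
[1]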

kubelet/datadog_checks/kubelet/kubelet.py

Lines changed: 2 additions & 2 deletions
@@ -228,13 +228,13 @@ def __init__(self, name, init_config, instances):
 
         self.probes_scraper_config = self.get_scraper_config(probes_instance)
 
-        counter_transformers = {k: self.send_always_counter for k in self.COUNTER_METRICS}
+        counter_transformers = dict.fromkeys(self.COUNTER_METRICS, self.send_always_counter)
 
         histogram_transformers = {
             k: self._histogram_from_seconds_to_microseconds(v) for k, v in TRANSFORM_VALUE_HISTOGRAMS.items()
         }
 
-        volume_metric_transformers = {k: self.append_pod_tags_to_volume_metrics for k in self.VOLUME_METRICS}
+        volume_metric_transformers = dict.fromkeys(self.VOLUME_METRICS, self.append_pod_tags_to_volume_metrics)
 
         self.transformers = {}
         for d in [
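
Both rewritten lines map every metric name to the same bound method, which dict.fromkeys expresses directly; the histogram map stays a comprehension because its value is computed from each key. A sketch of the difference with hypothetical names:

>>> shared = dict.fromkeys(['metric_a', 'metric_b'], print)   # one shared callable for all keys
>>> per_key = {name: len(name) for name in ['metric_a', 'metric_bb']}   # value depends on the key
>>> per_key
{'metric_a': 8, 'metric_bb': 9}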

kubelet/datadog_checks/kubelet/prometheus.py

Lines changed: 1 addition & 1 deletion
@@ -367,7 +367,7 @@ def _process_usage_metric(self, m_name, metric, cache, scraper_config, labels=No
             labels = []
 
         # track containers that still exist in the cache
-        seen_keys = {k: False for k in cache}
+        seen_keys = dict.fromkeys(cache, False)
 
         samples = self._sum_values_by_context(metric, self._get_entity_id_if_container_metric)
         for c_id, sample in samples.items():

mapreduce/tests/common.py

Lines changed: 1 addition & 1 deletion
@@ -82,7 +82,7 @@ def setup_mapreduce():
 
 @contextmanager
 def mock_local_mapreduce_dns():
-    mapping = {x: ('127.0.0.1', None) for x in MOCKED_E2E_HOSTS}
+    mapping = dict.fromkeys(MOCKED_E2E_HOSTS, ('127.0.0.1', None))
     with mock_local(mapping):
         yield
 

mongo/datadog_checks/mongo/discovery.py

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ def __init__(self, check):
 
         super(MongoDBDatabaseAutodiscovery, self).__init__(
             self._list_databases,
-            include={db: 0 for db in self._autodiscovery_config.get("include", [".*"])},
+            include=dict.fromkeys(self._autodiscovery_config.get("include", [".*"]), 0),
             exclude=self._autodiscovery_config.get("exclude"),
             interval=self._autodiscovery_config.get('refresh_interval', DEFAULT_REFRESH_INTERVAL),
         )
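
dict.fromkeys keeps the order of the iterable it is given (dicts preserve insertion order since Python 3.7), so the precedence of the include patterns is unchanged; for instance, with illustrative patterns:

>>> dict.fromkeys(["db1.*", "db2.*"], 0)
{'db1.*': 0, 'db2.*': 0}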

network/datadog_checks/network/network.py

Lines changed: 1 addition & 1 deletion
@@ -317,7 +317,7 @@ def get_net_proc_base_location(proc_location):
         return net_proc_base_location
 
     def _get_metrics(self):
-        return {val: 0 for val in self.cx_state_gauge.values()}
+        return dict.fromkeys(self.cx_state_gauge.values(), 0)
 
     def parse_cx_state(self, lines, tcp_states, state_col, protocol=None, ip_version=None):
         """

process/tests/test_process.py

Lines changed: 1 addition & 1 deletion
@@ -168,7 +168,7 @@ def mock_psutil_wrapper(method, accessors):
     if accessors is None:
         result = 0
     else:
-        result = {accessor: 0 for accessor in accessors}
+        result = dict.fromkeys(accessors, 0)
     return result
 

rabbitmq/datadog_checks/rabbitmq/rabbitmq.py

Lines changed: 1 addition & 1 deletion
@@ -490,7 +490,7 @@ def get_connections_stat(self, instance, base_url, object_type, vhosts, limit_vh
         if grab_all_data or not len(data):
             data = self._get_data(urljoin(base_url, object_type))
 
-        stats = {vhost: 0 for vhost in vhosts}
+        stats = dict.fromkeys(vhosts, 0)
         connection_states = defaultdict(int)
         for conn in data:
             if conn['vhost'] in vhosts:

teleport/datadog_checks/teleport/metrics.py

Lines changed: 6 additions & 6 deletions
@@ -208,10 +208,10 @@
 }
 
 METRIC_MAP_BY_SERVICE = {
-    **{metric: "teleport" for metric in COMMON_METRICS_MAP.keys()},
-    **{metric: "proxy" for metric in PROXY_METRICS_MAP.keys()},
-    **{metric: "auth" for metric in AUTH_METRICS_MAP.keys()},
-    **{metric: "ssh" for metric in SSH_METRICS_MAP.keys()},
-    **{metric: "kubernetes" for metric in KUBERNETES_METRICS_MAP.keys()},
-    **{metric: "database" for metric in DATABASE_METRICS_MAP.keys()},
+    **dict.fromkeys(COMMON_METRICS_MAP.keys(), "teleport"),
+    **dict.fromkeys(PROXY_METRICS_MAP.keys(), "proxy"),
+    **dict.fromkeys(AUTH_METRICS_MAP.keys(), "auth"),
+    **dict.fromkeys(SSH_METRICS_MAP.keys(), "ssh"),
+    **dict.fromkeys(KUBERNETES_METRICS_MAP.keys(), "kubernetes"),
+    **dict.fromkeys(DATABASE_METRICS_MAP.keys(), "database"),
 }
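
Each dict.fromkeys call is still merged into the literal with ** unpacking, so the combined map behaves exactly as before: if a metric name appeared in more than one source map, the later entry would win. A sketch with made-up metric names:

>>> a = dict.fromkeys(['common_metric', 'shared_metric'], 'teleport')
>>> b = dict.fromkeys(['shared_metric'], 'proxy')
>>> {**a, **b}
{'common_metric': 'teleport', 'shared_metric': 'proxy'}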

temporal/scripts/generate_metadata.py

Lines changed: 1 addition & 1 deletion
@@ -41,7 +41,7 @@ def main():
 
     def append_metric_metadata(metric_name, metric_type='count', unit_name=None):
         qualified_metric_name = f'temporal.server.{metric_name}'
-        metric_meta = {k: '' for k in metadata_fields}
+        metric_meta = dict.fromkeys(metadata_fields, '')
         metric_meta['orientation'] = 0
        metric_meta.update(previous_metadata.get(qualified_metric_name, {}))
         metric_meta['integration'] = 'temporal'

torchserve/datadog_checks/torchserve/model_discovery.py

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@ def __init__(self, check, limit=None, include=None, exclude=None, interval=None)
         super().__init__(
             self.get_models,
             limit=limit,
-            include={pattern: None for pattern in include} if include else None,
+            include=dict.fromkeys(include) if include else None,
             exclude=exclude,
             interval=interval,
             key=lambda n: n.get("modelName"),
