Skip to content

Commit 650623b

Browse files
authored
Change S3 Tracker Logging to DEBUG (#522)
The S3 tracker had excessive logging. This change modifies the messages to disambiguate the S3 client and also moves them to the DEBUG level. If users want to see them, they now just need to adjust their logging settings.
1 parent 103fdf5 commit 650623b

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

burr/tracking/s3client.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -215,7 +215,7 @@ def thread(self):
215215

216216
while self.running:
217217
try:
218-
logger.info(f"Checking for new data to flush -- batch is of size: {len(batch)}")
218+
logger.debug(f"Checking for new data to flush -- s3 batch is of size: {len(batch)}")
219219
# Wait up to flush_interval for new data
220220
item = self.log_queue.get(timeout=self.flush_interval)
221221
# signal that we're done
@@ -229,14 +229,14 @@ def thread(self):
229229
len(batch) >= self.max_batch_size
230230
or (time.time() - last_flush_time) >= self.flush_interval
231231
):
232-
logger.info(f"Flushing batch with {len(batch)} events")
232+
logger.debug(f"Flushing s3 batch with {len(batch)} events")
233233
self.log_events(batch)
234234
batch = []
235235
last_flush_time = time.time()
236236
except queue.Empty:
237237
# Flush on timeout if there's any data
238238
if batch:
239-
logger.info(f"Flushing batch on queue empty with {len(batch)} events")
239+
logger.debug(f"Flushing s3 batch on queue empty with {len(batch)} events")
240240
self.log_events(batch)
241241
batch = []
242242
last_flush_time = time.time()

0 commit comments

Comments
 (0)