@@ -187,20 +187,18 @@ def get_bandwidth(data, uid, provider, model):
 def load_entire_questions():
     # Asynchronous function to fetch a URL
     async def fetch(session, url):
-        async with session.get(url) as response:
-            try:
+        async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=60)) as session:
+            async with session.get(url) as response:
                 return await response.json()
-            except Exception as err:
-                pass

     # Asynchronous function to gather multiple HTTP requests
     async def gather_requests(urls):
-        async with aiohttp.ClientSession() as session:
-            tasks = []
-            for url in urls:
-                tasks.append(fetch(session, url))  # Create a task for each URL
-            results = await asyncio.gather(*tasks)  # Run all tasks concurrently
-            return results
+        # async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=30)) as session:
+        tasks = []
+        for url in urls:
+            tasks.append(fetch(None, url))
+        results = await asyncio.gather(*tasks, return_exceptions=True)  # Run all tasks concurrently
+        return results

     # Main function to run the event loop
     def main(urls):
@@ -215,12 +213,11 @@ def main(urls):
     responses = main(urls)
     queries = []
     for response in responses:
-        if response is None:
+        if response is None or isinstance(response, Exception):
             continue
         for row in response.get('rows', []):
             query = row['row']['query']
             queries.append(query)
-
     return queries
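For context, below is a minimal standalone sketch of the pattern this diff moves to: each fetch opens its own ClientSession with an overall timeout, asyncio.gather runs with return_exceptions=True so one failed request cannot take down the batch, and the caller skips results that came back as exceptions. The URLs are illustrative placeholders, and the unused session parameter from the diff is dropped here for clarity; this is not the repository's exact code.

import asyncio
import aiohttp

# Illustrative endpoints, not the repository's actual URLs.
URLS = ["https://httpbin.org/json", "https://httpbin.org/status/500"]

async def fetch(url):
    # Each request gets its own session with an overall timeout,
    # mirroring the per-fetch session introduced in the diff.
    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=60)) as session:
        async with session.get(url) as response:
            return await response.json()

async def gather_requests(urls):
    tasks = [fetch(url) for url in urls]
    # return_exceptions=True keeps one failing request from cancelling the rest;
    # failures are returned as exception objects instead of being raised.
    return await asyncio.gather(*tasks, return_exceptions=True)

def main(urls):
    return asyncio.run(gather_requests(urls))

if __name__ == "__main__":
    for result in main(URLS):
        # Skip failed requests, matching the isinstance(response, Exception)
        # check added in the second hunk of the diff.
        if result is None or isinstance(result, Exception):
            continue
        print(result)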