@@ -26,27 +26,23 @@ async def fetch_error_message(code, session):
 async def main():
     ERROR_CODE_RANGE = range(0, 10000000)
     OUTPUT_FILENAME = 'ests-errors.csv'
-    BATCH_SIZE = 10000  # Adjust this value based on your available memory
-
-    results = []
+    BATCH_SIZE = 1000  # Adjust this value based on your available memory
 
     async with aiohttp.TCPConnector(limit_per_host=50) as connector, aiohttp.ClientSession(connector=connector) as session:
-        for i in range(0, len(ERROR_CODE_RANGE), BATCH_SIZE):
-            batch = ERROR_CODE_RANGE[i:i + BATCH_SIZE]
-            tasks = [fetch_error_message(code, session) for code in batch]
-            for result in await asyncio.gather(*tasks):
-                if result:
-                    error_code, message, remediation = result
-                    results.append((error_code, message, remediation))
-
-    # Sort the results by error code
-    results.sort(key=lambda x: x[0])
-
-    # Write the sorted results to the CSV file
-    with open(OUTPUT_FILENAME, mode='w', newline='') as file:
-        writer = csv.writer(file)
-        writer.writerow(['Code', 'Message', 'Remediation'])
-        writer.writerows(results)
+        with open(OUTPUT_FILENAME, mode='w', newline='') as file:
+            writer = csv.writer(file)
+            writer.writerow(['Code', 'Message', 'Remediation'])
+            for i in range(0, len(ERROR_CODE_RANGE), BATCH_SIZE):
+                results = []
+                batch = ERROR_CODE_RANGE[i:i + BATCH_SIZE]
+                tasks = [fetch_error_message(code, session) for code in batch]
+                for result in await asyncio.gather(*tasks):
+                    if result:
+                        error_code, message, remediation = result
+                        results.append((error_code, message, remediation))
+                # Sort the results by error code
+                results.sort(key=lambda x: x[0])
+                writer.writerows(results)
 
 
 if __name__ == '__main__':
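For context, the structural change in this hunk is to stream each batch to the CSV as soon as it has been gathered, instead of accumulating every result in memory and writing once at the end. The following is a minimal, self-contained sketch of that pattern; fake_lookup, demo.csv, the 0-99 code range, and the batch size of 25 are illustrative stand-ins and not part of the PR, and the real fetch_error_message coroutine (which takes an aiohttp session) is not shown in this hunk.

import asyncio
import csv


async def fake_lookup(code):
    # Illustrative stand-in for fetch_error_message(code, session): treat even
    # codes as known and return None for everything else.
    await asyncio.sleep(0)
    if code % 2 == 0:
        return code, f'message for {code}', f'remediation for {code}'
    return None


async def main():
    codes = range(0, 100)   # stand-in for ERROR_CODE_RANGE
    batch_size = 25         # stand-in for BATCH_SIZE

    with open('demo.csv', mode='w', newline='') as file:
        writer = csv.writer(file)
        writer.writerow(['Code', 'Message', 'Remediation'])
        for i in range(0, len(codes), batch_size):
            batch = codes[i:i + batch_size]
            tasks = [fake_lookup(code) for code in batch]
            results = [r for r in await asyncio.gather(*tasks) if r]
            results.sort(key=lambda x: x[0])  # sorted within this batch only
            writer.writerows(results)         # flush the batch before fetching the next one


if __name__ == '__main__':
    asyncio.run(main())

One consequence of this layout, visible in the sketch as well, is that sorting happens per batch, so the output file is ordered within each batch rather than globally.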