@@ -96,7 +96,7 @@ def get_aliases(url,type):
     # create alias dictionary
     try:
         aliases = collections.defaultdict(list)
-        for page in range(1, 5):
+        for page in range(1, 1001):
             # Update the URL with the current page
             url = f'{url}&page={page}'
             # Fetch the JSON data
@@ -127,7 +127,7 @@ def get_aliases(url,type):
 if "d" in boards:
     dan_tags = {}
     try:
-        for page in range(1, 5):
+        for page in range(1, 1001):
             # Update the URL with the current page
             url = f'{base_url}&page={page}'
             # Fetch the JSON data
@@ -162,7 +162,7 @@ def get_aliases(url,type):
 if "e" in boards:
     e6_tags = {}
     try:
-        for page in range(1, 2):
+        for page in range(1, 1001):
             # Update the URL with the current page
             url = f'{e6_base_url}&page={page}'
             # Fetch the JSON data
@@ -189,7 +189,7 @@ def get_aliases(url,type):
     except Complete:
         print(f'All tags with {minimum_count} posts or greater have been scraped.')
 
-# e6 tags are fucked, a proper solution would take ~10 hours per list and I'm not going that far for furries
+# e6 tags are fucked, a proper solution would take ~10 hours to run per list and I'm not going that far for furries
 #if "e" in boards:
 #    e6_aliases = get_aliases(e6_alias_url, "e")
 #    backdate(e6_tags,e6_aliases,max_date)
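
For reference, the pattern all three hunks rely on: the new ceiling of 1001 is a safety cap rather than an expected page count, because the scraper exits early by raising the Complete exception visible in the last hunk. Below is a minimal sketch of that control flow, assuming the boorus return tags sorted by post count in descending order and that Complete is a custom exception defined elsewhere in the script; scrape_tags, the requests call, and the JSON keys are illustrative stand-ins, not the script's actual code.

import requests

class Complete(Exception):
    """Raised once every remaining tag falls below the post-count floor."""

def scrape_tags(base_url, minimum_count):
    # Hypothetical stand-in for the script's per-board scrape loops.
    tags = {}
    try:
        # 1001 is a generous upper bound; the loop normally exits early
        # by raising Complete long before the cap is reached.
        for page in range(1, 1001):
            url = f'{base_url}&page={page}'
            data = requests.get(url, timeout=30).json()
            if not data:
                raise Complete  # empty page: the API has nothing left
            for tag in data:
                # Assumes results are sorted by post count, descending.
                if tag.get('post_count', 0) < minimum_count:
                    raise Complete
                tags[tag['name']] = tag['post_count']
    except Complete:
        print(f'All tags with {minimum_count} posts or greater have been scraped.')
    return tags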