forked from Xerbo/furaffinity-dl
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathfuraffinity-dl.py
More file actions
186 lines (160 loc) · 5.75 KB
/
furaffinity-dl.py
File metadata and controls
186 lines (160 loc) · 5.75 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
#!/usr/bin/python3
import contextlib
import os
from time import sleep
from bs4 import BeautifulSoup
import Modules.config as config
from Modules.download import download
from Modules.functions import check_filter
from Modules.functions import DownloadComplete
from Modules.functions import login
from Modules.functions import next_button
from Modules.functions import requests_retry_session
from Modules.functions import system_message_handler
from Modules.index import check_file
from Modules.index import start_indexing
# Threading support: a shared queue feeds image URLs to download workers
import threading
import queue
# Shared work queue: main() enqueues image URLs, worker threads consume them.
q = queue.Queue()
# Live worker Thread objects, populated in the __main__ block when threading is enabled.
workers = []
def worker():
    """Thread target: consume image URLs from the shared queue and download them.

    Runs until the "shutdown" sentinel is received. ``q.task_done()`` is
    called in a ``finally`` block so that an exception raised inside
    ``download()`` cannot leave the queue's unfinished-task counter
    unbalanced, which would deadlock the ``q.join()`` call in ``main()``.
    """
    while True:
        item = q.get()
        if item == "shutdown":
            # Acknowledge the sentinel too, so join() accounting stays correct.
            q.task_done()
            break
        try:
            download(item)
        finally:
            q.task_done()
def main():
    """Loop over and download all images on the configured gallery page(s).

    Relies on the module-level ``download_url`` (and, when ``config.check``
    triggers its message, ``username``) being set by the caller before
    invocation. Pages are walked until ``config.stop`` is reached, the
    gallery ends, or ``DownloadComplete`` is raised.
    """
    page_num = config.start
    with contextlib.suppress(DownloadComplete):
        while True:
            if config.stop == page_num:
                print(
                    f'{config.WARN_COLOR}Reached page "{config.stop}", stopping.{config.END}'
                )
                break
            page_url = f"{download_url}/{page_num}"
            response = requests_retry_session().get(page_url)
            s = BeautifulSoup(response.text, "html.parser")
            # System messages (e.g. account warnings) abort the run.
            if s.find(class_="notice-message") is not None:
                system_message_handler(s)
            # End of gallery marker.
            if s.find(id="no-images") is not None:
                print(f"{config.SUCCESS_COLOR}End of gallery{config.END}")
                break
            # Download all images on the page.
            for img in s.findAll("figure"):
                title = img.find("figcaption").contents[0].text
                img_url = img.find("a").attrs.get("href")
                if config.submission_filter is True and check_filter(title) is True:
                    print(
                        f'{config.WARN_COLOR}"{title}" was filtered and will not be '
                        f"downloaded - {config.BASE_URL}{img_url}{config.END}"
                    )
                    continue
                if config.dont_redownload is True and check_file(img_url) is True:
                    if config.check is True:
                        # --check mode: stop as soon as an already-indexed file is seen.
                        print(
                            f'{config.SUCCESS_COLOR}Downloaded all recent files of '
                            f'"{username}"{config.END}'
                        )
                        raise DownloadComplete
                    print(
                        f'{config.WARN_COLOR}Skipping "{title}" since '
                        f"it's already downloaded{config.END}"
                    )
                    continue
                if config.disable_threading:
                    download(img_url)
                else:
                    q.put(img_url)
                sleep(config.interval)
            # Wait for the worker threads to drain this page before paginating.
            q.join()
            page_num = next_button(page_url)
    if not config.disable_threading:
        # NOTE(review): workers are shut down here, so a *second* call to
        # main() (e.g. multiple usernames) enqueues work with no live workers
        # and hangs on q.join() — confirm and consider moving shutdown to the
        # __main__ block.
        for _ in range(config.num_threads):
            q.put("shutdown")
        for t in workers:
            t.join()
if __name__ == "__main__":
if config.login is True:
login()
exit()
if config.index is True:
if os.path.isfile(f"{config.output_folder}/index.idx"):
os.remove(f"{config.output_folder}/index.idx")
start_indexing(config.output_folder)
print(f"{config.SUCCESS_COLOR}indexing finished{config.END}")
exit()
one_time_response = requests_retry_session().get(config.BASE_URL)
one_time_s = BeautifulSoup(one_time_response.text, "html.parser")
if one_time_s.find(class_="loggedin_user_avatar") is not None:
account_username = one_time_s.find(class_="loggedin_user_avatar").attrs.get(
"alt"
)
print(
f'{config.SUCCESS_COLOR}Logged in as \
"{account_username}"{config.END}'
)
else:
print(
f"{config.WARN_COLOR}Not logged in, NSFW content \
is inaccessible{config.END}"
)
if config.download is not None:
download(f"/view/{config.download}/")
exit()
if not config.disable_threading:
stop_threads = False
for id in range(config.num_threads):
print(id, 'started thread')
tmp = threading.Thread(target=worker, daemon=False)
workers.append(tmp)
tmp.start()
if config.submissions is True:
download_url = f"{config.BASE_URL}/msg/submissions"
main()
print(
f"{config.SUCCESS_COLOR}Finished \
downloading submissions{config.END}"
)
exit()
if config.folder is not None:
folder = config.folder.split("/")
download_url = (
f"{config.BASE_URL}/gallery/{config.username}/folder/{config.folder}"
)
main()
print(
f'{config.SUCCESS_COLOR}Finished \
downloading "{config.folder[1]}"{config.END}'
)
exit()
if config.category not in ["gallery", "scraps", "favorites"]:
print(
f"{config.ERROR_COLOR}Please enter a valid category [gallery/scraps/favorites] {config.END}"
)
exit()
if not config.username:
print("Not enough arguments")
config.parser.print_help()
os._exit(1)
for username in config.username:
username = username.split("#")[0].translate(
str.maketrans(config.username_replace_chars)
)
if username != "":
print(f'{config.SUCCESS_COLOR}Now downloading "{username}"{config.END}')
download_url = f"{config.BASE_URL}/{config.category}/{username}"
print(f"Downloading page {config.start} - {download_url}/{config.start}")
main()
print(
f'{config.SUCCESS_COLOR}Finished \
downloading "{username}"{config.END}'
)