Allow downloading of nsfw (not sketchy) images

This commit is contained in:
Imre Kerr
2016-06-29 15:41:44 +02:00
parent 8366646c4e
commit 70803850de

View File

@@ -9,6 +9,7 @@
######################################################## ########################################################
import os import os
import getpass
import bs4 import bs4
import re import re
import requests import requests
@@ -17,6 +18,12 @@ import time
os.makedirs('Wallhaven', exist_ok=True) os.makedirs('Wallhaven', exist_ok=True)
def login():
    """Prompt for Wallhaven credentials and return the session cookies.

    NSFW wallpapers are only served to authenticated users, so the
    cookie jar returned here must be sent with every subsequent request
    for restricted content.

    Returns:
        requests.cookies.RequestsCookieJar: cookies from the login
        response (may be empty if authentication failed).
    """
    print('NSFW images require login')
    username = input('Enter username: ')
    # getpass keeps the password from being echoed to the terminal.
    password = getpass.getpass('Enter password: ')
    req = requests.post('https://alpha.wallhaven.cc/auth/login',
                        data={'username': username, 'password': password})
    # A failed login would otherwise go unnoticed and every NSFW download
    # afterwards would silently come back empty. Warn on a non-2xx reply,
    # but still return the cookie jar so the caller's flow is unchanged.
    if not req.ok:
        print('Warning: login request returned HTTP ' + str(req.status_code)
              + '; NSFW downloads may not work')
    return req.cookies
def choice(): def choice():
print('''**************************************************************** print('''****************************************************************
@@ -55,30 +62,32 @@ def choice():
Purity Codes Purity Codes
sfw - For 'Safe For Work' sfw - For 'Safe For Work'
sketchy - For 'Sketchy'
nsfw - For 'Not Safe For Work' nsfw - For 'Not Safe For Work'
both - For both 'SFW' and 'NSFW' ws - For 'SFW' and 'Sketchy'
wn - for 'SFW' and 'NSFW'
sn - For 'Sketchy' and 'NSFW'
all - For 'SFW', 'Sketchy' and 'NSFW'
**************************************************************** ****************************************************************
''') ''')
pcode = input('Enter Purity: ') pcode = input('Enter Purity: ')
SFW = '100' ptags = {'sfw':'100', 'sketchy':'010', 'nsfw':'001', 'ws':'110', 'wn':'101', 'sn':'011', 'all':'111'}
NSFW = '010' ptag = ptags[pcode]
BOTH = '110'
if pcode.lower() == 'sfw': if pcode in ['nsfw', 'wn', 'sn', 'all']:
ptag = SFW cookies = login()
elif pcode.lower() == "nsfw": else:
ptag = NSFW cookies = dict()
elif pcode.lower() == "both":
ptag = BOTH
CATURL = 'https://alpha.wallhaven.cc/search?categories=' + \ CATURL = 'https://alpha.wallhaven.cc/search?categories=' + \
ctag + '&purity=' + ptag + '&page=' ctag + '&purity=' + ptag + '&page='
return CATURL return (CATURL, cookies)
def latest(): def latest():
print('Downloading latest') print('Downloading latest')
latesturl = 'https://alpha.wallhaven.cc/latest?page=' latesturl = 'https://alpha.wallhaven.cc/latest?page='
return latesturl return (latesturl, dict())
def main(): def main():
@@ -89,15 +98,15 @@ def main():
Enter choice: ''') Enter choice: ''')
if Choice.lower() == 'yes': if Choice.lower() == 'yes':
BASEURL = choice() BASEURL, cookies = choice()
else: else:
BASEURL = latest() BASEURL, cookies = latest()
pgid = int(input('How Many pages you want to Download: ')) pgid = int(input('How Many pages you want to Download: '))
print('Number of Wallpapers to Download: ' + str(24 * pgid)) print('Number of Wallpapers to Download: ' + str(24 * pgid))
for i in range(1, pgid + 1): for i in range(1, pgid + 1):
url = BASEURL + str(i) url = BASEURL + str(i)
urlreq = requests.get(url) urlreq = requests.get(url, cookies=cookies)
soup = bs4.BeautifulSoup(urlreq.text, 'lxml') soup = bs4.BeautifulSoup(urlreq.text, 'lxml')
soupid = soup.findAll('a', {'class': 'preview'}) soupid = soup.findAll('a', {'class': 'preview'})
res = re.compile(r'\d+') res = re.compile(r'\d+')
@@ -108,7 +117,7 @@ def main():
i] i]
for ext in imgext: for ext in imgext:
iurl = url + ext iurl = url + ext
imgreq = requests.get(iurl) imgreq = requests.get(iurl, cookies=cookies)
if imgreq.status_code == 200: if imgreq.status_code == 200:
print('Downloading: ' + iurl) print('Downloading: ' + iurl)
with open(os.path.join('Wallhaven', os.path.basename(iurl)), 'ab') as imageFile: with open(os.path.join('Wallhaven', os.path.basename(iurl)), 'ab') as imageFile: