# Licensed under the WTFPL License

import os
import time
import aiohttp
import re
import asyncio
from urllib.parse import urlparse

url = 'https://storage.googleapis.com/panels-api/data/20240916/media-1a-i-p~s'
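# The endpoint above is fetched by main(), which expects a JSON response with a
# top-level "data" object whose entries may carry a "dhd" key holding an image URL.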


async def delay(ms):
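    """Sleep for ms milliseconds."""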
    await asyncio.sleep(ms / 1000)


async def download_image(session, image_url, file_path):
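    """Download a single image and write it to file_path; log and swallow any error."""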
    try:
        async with session.get(image_url) as response:
            if response.status != 200:
                raise Exception(f"Failed to download image: {response.status}")
            content = await response.read()
            with open(file_path, 'wb') as f:
                f.write(content)
    except Exception as e:
        print(f"Error downloading image: {str(e)}")


async def main():
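    """Fetch the wallpaper manifest and download each 'dhd' image into downloads/<artist>/."""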
    try:
        # Disable SSL certificate verification (aiohttp's older verify_ssl= flag is deprecated in favor of ssl=)
        async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False)) as session:
            async with session.get(url) as response:
                if response.status != 200:
                    raise Exception(f"⛔ Failed to fetch JSON file: {response.status}")
                json_data = await response.json()
                data = json_data.get('data')

                if not data:
                    raise Exception('⛔ JSON does not have a "data" property at its root.')

                download_dir = os.path.join(os.getcwd(), 'downloads')
                if not os.path.exists(download_dir):
                    os.makedirs(download_dir)
                    print(f"📁 Created directory: {download_dir}")

                # Each entry's "dhd" URL is expected to look like
                # .../content/<prefix>~<artist>_<...>/<file name>?<query>; entries that
                # don't match this pattern are skipped.
                for key, subproperty in data.items():
                    if subproperty and subproperty.get('dhd'):
                        image_url = subproperty['dhd']
                        match = re.search(r'/content/([^/]+)/', image_url)  # Extract artist name from URL
                        if not match:
                            continue  # No /content/<artist>/ segment; skip this entry
                        artist_name = match.group(1)
                        sanitized_artist_name = artist_name.split('_')[0].split('~')[1]
                        print(f"🎨 Sanitized artist name: {sanitized_artist_name}")
                        artist_dir = os.path.join(download_dir, sanitized_artist_name)
                        if not os.path.exists(artist_dir):
                            os.makedirs(artist_dir)
                            print(f"📁 Created artist directory: {artist_dir}")

                        file_name_match = re.search(r'/([^/]+)\?', image_url)  # Extract file name from URL
                        if not file_name_match:
                            continue  # No file name before the query string; skip this entry
                        raw_file_name = file_name_match.group(1)
                        file_name = raw_file_name.split('.')[0]
                        file_extension = raw_file_name.split('.')[-1]
                        sanitized_file_name = file_name.replace('~', ' ')
                        file_path = os.path.join(artist_dir, f"{sanitized_file_name}.{file_extension}")
                        # print(f"📄 File path: {file_path}")

                        await download_image(session, image_url, file_path)
                        print(f"🖼️ Saved image to {file_path}")

                        await delay(250)  # Wait 250 ms between downloads
    except Exception as e:
        print(f"Error: {str(e)}")


def ascii_art():
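    """Print the ASCII-art banner and an opening message."""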
    print("""
/$$ /$$ /$$ /$$ /$$$$$$$ /$$$$$$ /$$$$$$$
| $$$ /$$$| $$ /$$/| $$__ $$ /$$__ $$| $$__ $$
| $$$$ /$$$$| $$ /$$/ | $$ \\ $$| $$ \\__/| $$ \\ $$
| $$ $$/$$ $$| $$$$$/ | $$$$$$$ | $$$$$$ | $$ | $$
| $$ $$$| $$| $$ $$ | $$__ $$ \\____ $$| $$ | $$
| $$\\ $ | $$| $$\\ $$ | $$ \\ $$ /$$ \\ $$| $$ | $$
| $$ \\/ | $$| $$ \\ $$| $$$$$$$/| $$$$$$/| $$$$$$$/
|__/ |__/|__/ \\__/|_______/ \\______/ |_______/""")
    print("")
    print("🤑 Starting downloads from your favorite sellout grifter's wallpaper app...")


if __name__ == "__main__":
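    # Show the banner, pause briefly, then run the async downloader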
    ascii_art()
    time.sleep(5)
    asyncio.run(main())