Update mkbsd.py

ninjat 2024-09-26 15:21:33 +12:00 committed by GitHub
parent 82e50c64f0
commit bda9d557f6

@@ -1,61 +1,55 @@
-# Licensed under the WTFPL License
-
+import requests
 import os
-import time
-import aiohttp
-import asyncio
-from urllib.parse import urlparse
+import time  # kept: time.sleep() is still called under __main__ below
+import json
+from concurrent.futures import ThreadPoolExecutor
 
-url = 'https://storage.googleapis.com/panels-api/data/20240916/media-1a-i-p~s'
+# URL of the JSON data
+all_url = "https://storage.googleapis.com/panels-cdn/data/20240730/all.json"
 
-async def delay(ms):
-    await asyncio.sleep(ms / 1000)
-
-async def download_image(session, image_url, file_path):
-    try:
-        async with session.get(image_url) as response:
-            if response.status != 200:
-                raise Exception(f"Failed to download image: {response.status}")
-            content = await response.read()
-            with open(file_path, 'wb') as f:
-                f.write(content)
-    except Exception as e:
-        print(f"Error downloading image: {str(e)}")
+# Download one URL into the downloads/ folder, skipping files already on disk
+def download_url(url):
+    file_name = os.path.basename(url)
+    file_path = os.path.join("downloads", file_name)
+
+    if not os.path.exists(file_path):
+        print(f"Downloading {url}")
+        response = requests.get(url)
+        with open(file_path, 'wb') as file:
+            file.write(response.content)
+    else:
+        print(f"Skipping {url}")
 
-async def main():
-    try:
-        async with aiohttp.ClientSession() as session:
-            async with session.get(url) as response:
-                if response.status != 200:
-                    raise Exception(f"⛔ Failed to fetch JSON file: {response.status}")
-                json_data = await response.json()
-                data = json_data.get('data')
-                if not data:
-                    raise Exception('⛔ JSON does not have a "data" property at its root.')
-
-                download_dir = os.path.join(os.getcwd(), 'downloads')
-                if not os.path.exists(download_dir):
-                    os.makedirs(download_dir)
-                    print(f"📁 Created directory: {download_dir}")
-
-                file_index = 1
-                for key, subproperty in data.items():
-                    if subproperty and subproperty.get('dhd'):
-                        image_url = subproperty['dhd']
-                        print(f"🔍 Found image URL!")
-                        parsed_url = urlparse(image_url)
-                        ext = os.path.splitext(parsed_url.path)[-1] or '.jpg'
-                        filename = f"{file_index}{ext}"
-                        file_path = os.path.join(download_dir, filename)
-
-                        await download_image(session, image_url, file_path)
-                        print(f"🖼️ Saved image to {file_path}")
-
-                        file_index += 1
-                        await delay(250)
-
-    except Exception as e:
-        print(f"Error: {str(e)}")
+# Recursive function to extract URLs from the JSON structure
+def extract_urls(element, urls):
+    if isinstance(element, dict):
+        for key, value in element.items():
+            if key == "url":
+                urls.append(value)
+            else:
+                extract_urls(value, urls)
+    elif isinstance(element, list):
+        for item in element:
+            extract_urls(item, urls)
+
+# Main function to process the JSON and download files
+def main():
+    # Fetch the JSON data
+    response = requests.get(all_url)
+    json_data = response.json()
+
+    # Extract URLs
+    urls = []
+    extract_urls(json_data, urls)
+    print(f"Found {len(urls)} URLs")
+
+    # Ensure the 'downloads' directory exists
+    if not os.path.exists("downloads"):
+        os.makedirs("downloads")
+
+    # Download files in parallel (max 10 threads)
+    with ThreadPoolExecutor(max_workers=10) as executor:
+        executor.map(download_url, urls)
 
 def ascii_art():
     print("""
@@ -73,4 +67,4 @@ def ascii_art():
 if __name__ == "__main__":
     ascii_art()
     time.sleep(5)
-    asyncio.run(main())
+    main()
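
Two properties of the rewritten script are worth illustrating. The sketch below is not part of the commit: the sample payload is invented and only mimics the nested shape of the panels-cdn JSON, and fake_download is a hypothetical stand-in for download_url so the example runs offline. It shows how extract_urls walks arbitrarily nested dicts and lists collecting values stored under "url" keys, and it consumes the iterator returned by executor.map(); the committed main() discards that iterator, which also discards any exception a worker raised, so downloads still run but failures pass silently.

# Minimal sketch (not part of the commit). `sample` is an invented
# payload that only mimics the shape of the panels-cdn JSON;
# `fake_download` is a hypothetical stand-in for download_url.
from concurrent.futures import ThreadPoolExecutor

def extract_urls(element, urls):
    # Same traversal as the committed helper: collect every value
    # stored under a "url" key, recursing through dicts and lists.
    if isinstance(element, dict):
        for key, value in element.items():
            if key == "url":
                urls.append(value)
            else:
                extract_urls(value, urls)
    elif isinstance(element, list):
        for item in element:
            extract_urls(item, urls)

sample = {
    "wallpapers": [
        {"name": "one", "url": "https://example.com/one.jpg"},
        {"variants": [{"url": "https://example.com/two.jpg"}]},
    ]
}

urls = []
extract_urls(sample, urls)
print(urls)  # ['https://example.com/one.jpg', 'https://example.com/two.jpg']

def fake_download(url):
    # A worker that raised an exception here would go unnoticed under
    # a bare executor.map(...); iterating the results, as below,
    # re-raises it in the caller.
    return f"saved {url}"

with ThreadPoolExecutor(max_workers=10) as executor:
    for result in executor.map(fake_download, urls):
        print(result)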