Finished product. Works, basically. Need error handling next.

This commit is contained in:
sin 2025-04-01 22:04:27 +00:00
parent 55cff8e3e3
commit 632a8d7986
1 changed file with 27 additions and 32 deletions

View File

@ -1,36 +1,31 @@
import os

import feedparser
import requests
def download_attachment(url, filename):
    """Download *url* and save the response body to *filename*.

    Prints a status line on success; on an HTTP error or a filesystem
    failure it prints diagnostics instead of raising, so one bad entry
    does not abort the feed loop in main().
    """
    # Timeout keeps a single unresponsive host from hanging the whole run.
    response = requests.get(url, timeout=30)
    if response.status_code == 200:
        try:
            with open(filename, 'wb') as f:
                f.write(response.content)
            print(f"Downloaded {filename}")
        except OSError:
            # Narrowed from a bare `except:` so Ctrl-C and genuine bugs
            # still propagate; only file-write failures are reported here.
            print("Download Failure")
            print(url)
            print(filename)
    else:
        print(f"Failed to download {url}")
def main():
    """Prompt for an RSS feed URL and an entry count, then download every
    link attachment from the first N entries into the current directory."""
    feed_url = input("Enter the RSS feed URL: ")
    num_entries = int(input("Enter the number of entries to process: "))
    feed = feedparser.parse(feed_url)
    for entry in feed.entries[:num_entries]:
        # Some feed entries carry no links element at all.
        if not hasattr(entry, 'links'):
            continue
        for link in entry.links:
            url = getattr(link, 'href', None)
            if not url:
                continue  # link without an href — nothing to fetch
            # Local filename = last path segment; skip URLs ending in "/"
            # which would otherwise produce an empty filename.
            name = url.split("/")[-1]
            if name:
                download_attachment(url, name)
# Run the interactive downloader only when executed as a script,
# not when this module is imported.
if __name__ == "__main__":
    main()