Finished product; it basically works. Error handling is the next step.

This commit is contained in:
sin 2025-04-01 22:04:27 +00:00
parent 55cff8e3e3
commit 632a8d7986
1 changed file with 27 additions and 32 deletions

View File

@ -1,36 +1,31 @@
import feedparser
import os
import requests
import os
import requests
def download_attachment(url, filename):
    """Download *url* and write the response body to *filename*.

    Prints a status line on success or on a non-200 HTTP response.
    Does not raise for HTTP errors; local write errors propagate.
    """
    response = requests.get(url)
    if response.status_code == 200:
        with open(filename, 'wb') as f:
            f.write(response.content)
        # Restored f-string placeholder: the rendered diff showed the
        # literal "(unknown)" where the saved filename should appear.
        print(f"Downloaded {filename}")
    else:
        print(f"Failed to download {url}")
def download_attachment(url, filename):
    """Download *url* and write the response body to *filename*.

    Prints a status line on success; on a non-200 HTTP response or a
    local write failure the problem is reported and the function
    returns without raising.
    """
    response = requests.get(url)
    if response.status_code == 200:
        try:
            with open(filename, 'wb') as f:
                f.write(response.content)
            # Restored f-string placeholder: the rendered diff showed the
            # literal "(unknown)" where the saved filename should appear.
            print(f"Downloaded {filename}")
        except OSError:
            # Narrowed from a bare `except:` so KeyboardInterrupt,
            # SystemExit, and programming errors are not silently
            # swallowed — only filesystem write failures are expected.
            print("Download Failure")
            print(url)
            print(filename)
    else:
        print(f"Failed to download {url}")
def create_downloads_folder():
    """Ensure a local "Downloads" directory exists, creating it if absent."""
    target = "Downloads"
    if os.path.exists(target):
        return  # already there: nothing to do, nothing to report
    os.makedirs(target)
    print(f"Created folder: {target}")
def main():
    """Prompt for an RSS feed URL and a count N, then download every
    link attachment from the first N entries into the current directory.
    """
    feed_url = input("Enter the RSS feed URL: ")
    num_entries = int(input("Enter the number of entries to process: "))
    parsed = feedparser.parse(feed_url)
    for entry in parsed.entries[:num_entries]:
        if not hasattr(entry, 'links'):
            continue  # entry carries no links — skip it
        for link in entry.links:
            attachment_url = link.href
            # last path segment doubles as the local filename
            attachment_name = str(attachment_url.split("/")[-1])
            download_attachment(attachment_url, attachment_name)
def main():
    """Prompt for an RSS feed URL and a count N, then download PDF,
    zip, and rar attachments from the first N entries into the
    "Downloads" folder (created if needed).
    """
    feed_url = input("Enter the RSS feed URL: ")
    num_entries = int(input("Enter the number of entries to process: "))
    create_downloads_folder()
    feed = feedparser.parse(feed_url)
    for entry in feed.entries[:num_entries]:
        if hasattr(entry, 'links'):
            for link in entry.links:
                # only fetch attachment-like content types
                if link.type == 'application/pdf' or link.type.endswith('/zip') or link.type.endswith('/rar'):
                    filename = os.path.basename(link.href)
                    # BUG FIX: arguments were swapped — download_attachment
                    # expects (url, filename); the old call tried to fetch
                    # the local path as a URL and write to the URL string.
                    download_attachment(link.href, os.path.join("Downloads", filename))
# Entry-point guard: run the downloader only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()
# Entry-point guard: run the downloader only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()