I've written a script in Python to scrape the titles of the first five posts from a webpage, write each title to its own text file, and place those files in five different subfolders within a desktop folder named DataStorage.

Currently, my script can parse the titles of five posts and write them to five different text files, all placed directly in the desktop folder DataStorage.

How can I create five different subfolders within the main folder and put each text file in its corresponding subfolder?
This is my attempt so far:
import os
import requests
from bs4 import BeautifulSoup
url = "https://stackoverflow.com/questions/tagged/web-scraping"
dirf = r"C:\Users\WCS\Desktop\DataStorage"  # The main folder on the desktop
if not os.path.exists(dirf):
    os.makedirs(dirf)
os.chdir(dirf)
res = requests.get(url)
soup = BeautifulSoup(res.text, "lxml")
for item in soup.select(".summary .question-hyperlink")[:5]:
    filename = item.text.split(" ")[0]
    with open(filename + '.txt', 'w', encoding='utf-8') as filename:
        filename.write(item.text)
The following might work. The key change is to build a per-title subfolder path with os.path.join, create the subfolder with os.makedirs, and then write each text file inside it:
import os
import requests
from bs4 import BeautifulSoup

url = "https://stackoverflow.com/questions/tagged/web-scraping"
dirf = r"C:\Users\WCS\Desktop\DataStorage"  # The main folder on the desktop

if not os.path.exists(dirf):
    os.makedirs(dirf)
os.chdir(dirf)

res = requests.get(url)
soup = BeautifulSoup(res.text, "lxml")

for item in soup.select(".summary .question-hyperlink")[:5]:
    filename = item.text.split(" ")[0]
    # Create one subfolder per title inside the main folder
    curr_dir = os.path.join(dirf, filename)
    os.makedirs(curr_dir, exist_ok=True)  # exist_ok avoids an error if the subfolder already exists
    # Write the full title into a text file inside that subfolder
    filepath = os.path.join(curr_dir, filename + '.txt')
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write(item.text)
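If you prefer pathlib, the same directory handling can be written a bit more compactly. This is only a minimal sketch under the same assumptions as above (the Desktop path and the CSS selectors); the re.sub call is an illustrative extra step, since a title's first word can contain characters that are not valid in Windows folder names:

import re
import requests
from bs4 import BeautifulSoup
from pathlib import Path

url = "https://stackoverflow.com/questions/tagged/web-scraping"
base_dir = Path.home() / "Desktop" / "DataStorage"  # assumed Desktop location, same as dirf above

res = requests.get(url)
soup = BeautifulSoup(res.text, "lxml")

for item in soup.select(".summary .question-hyperlink")[:5]:
    title = item.text
    # Strip characters that Windows does not allow in folder names (illustrative only)
    folder_name = re.sub(r'[\\/:*?"<>|]', '', title.split(" ")[0])
    sub_dir = base_dir / folder_name
    sub_dir.mkdir(parents=True, exist_ok=True)  # creates DataStorage and the subfolder in one call
    (sub_dir / (folder_name + '.txt')).write_text(title, encoding='utf-8')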