Search code examples
python · beautifulsoup · urllib

How to access webpages using Python via a proxy


I am writing a small program that fetches all hyperlinks from a webpage, given its URL. However, it seems the network I am on uses a proxy, so the program is unable to fetch anything. My code:

import sys
import urllib
import urlparse

from bs4 import BeautifulSoup
def process(url):
    """Fetch *url*, resolve every ``<a href>`` link against it, print each
    absolute link, and write them (one per line) to ``s.txt``.

    NOTE(review): ``urllib.urlopen`` does not handle authenticated proxies
    by itself; proxy settings may need to be configured separately.
    """
    page = urllib.urlopen(url)
    try:
        text = page.read()
    finally:
        # Close the connection even if read() raises.
        page.close()
    # Name the parser explicitly so the result does not depend on which
    # optional parsers happen to be installed.
    soup = BeautifulSoup(text, 'html.parser')
    # 'outfile', not 'file': avoid shadowing the builtin.
    with open('s.txt', 'w') as outfile:
        for tag in soup.findAll('a', href=True):
            # Turn relative links into absolute ones against the page URL.
            tag['href'] = urlparse.urljoin(url, tag['href'])
            print(tag['href'])
            # Write the link first, then the newline — the original wrote
            # '\n' before each link, leaving a leading blank line in s.txt
            # and no trailing newline.
            outfile.write(tag['href'])
            outfile.write('\n')


def main():
    """Entry point: process every URL given on the command line.

    Exits with status 1 if no URL argument was supplied.
    """
    if len(sys.argv) == 1:
        # Usage errors belong on stderr, not stdout, so redirected output
        # stays clean; exit non-zero so callers can detect the failure.
        sys.stderr.write('No url !!\n')
        sys.exit(1)
    for url in sys.argv[1:]:
        process(url)

Solution

  • You could use the requests module instead; it supports proxies directly via a simple dictionary.

    import requests
    
    # Map URL scheme -> proxy URL; requests routes matching requests
    # through the given proxy.
    proxies = { 'http': 'http://host/' } 
    # or if it requires authentication 'http://user:pass@host/' instead
    
    # NOTE(review): `url` is assumed to come from the surrounding program
    # (e.g. the process() function above) — this snippet is not standalone.
    r = requests.get(url, proxies=proxies)
    # Decoded response body as text (requests picks the encoding).
    text = r.text