I would like to iterate over a list of API request URLs that differ only in their GET query variables (x1, ..., xn).
from urllib2 import urlopen, Request
import pandas as pd

var = [x[0], ..., x[n]]
url_list = ['http://BLAH/get?var[0]&fmt=csv', 'http://BLAH/get?var[1]&fmt=csv', ..., 'http://BLAH/get?var[n]&fmt=csv']

j = 0
while j < len(url_list):
    req = Request(url_list[j])
    response = urlopen(req)
    df = pd.read_csv(response)
    print df
    j = j + 1
I have tried del req and response.close(), but my code still produces the conflict error.
File "C:\Users\Anaconda\lib\urllib2.py", line 127, in urlopen
return _opener.open(url, data, timeout)
File "C:\Users\Anaconda\lib\urllib2.py", line 410, in open
response = meth(req, response)
File "C:\Users\Anaconda\lib\urllib2.py", line 523, in http_response
'http', request, response, code, msg, hdrs)
File "C:\Users\nfitzsimons\Anaconda\lib\urllib2.py", line 448, in error
return self._call_chain(*args)
File "C:\Users\Anaconda\lib\urllib2.py", line 382, in _call_chain
result = func(*args)
File "C:\Users\Anaconda\lib\urllib2.py", line 531, in http_error_default
raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
rllib2.HTTPError: HTTP Error 409: Conflict
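To at least see which request the server is rejecting, I can wrap the call in a try/except around urllib2's HTTPError. This is just a rough sketch of my loop, using the same placeholder url_list as above:

from urllib2 import urlopen, Request, HTTPError
import pandas as pd

for url in url_list:
    try:
        response = urlopen(Request(url))
    except HTTPError as err:
        # shows which URL the server rejected and with which status code
        print url, err.code
    else:
        print pd.read_csv(response)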
Does anybody have any suggestions?
If you're just trying to iterate over a list of URLs and print the contents of each successful response, why not try this?
#!/usr/bin/env python
try:
    import requests
except ImportError as err:
    # stop here rather than carry on without the library
    raise SystemExit("Whoops, you're missing " + str(err))

urls = []  # fill in your request URLs here

for url in urls:
    response = requests.get(url)
    if response.status_code == 200:  # successful request
        print(response.content)
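Since the URLs in your question only differ in one query variable, you could also let requests build the query string for you via its params argument and read each CSV response straight into pandas. The sketch below assumes the endpoint is http://BLAH/get and that the value is passed as a parameter named var, both of which are placeholders taken from the question:

import requests
import pandas as pd
from io import StringIO

values = ['x1', 'x2', 'x3']  # placeholder values for the query variable

for value in values:
    # requests URL-encodes the params dict into ?var=...&fmt=csv
    response = requests.get('http://BLAH/get', params={'var': value, 'fmt': 'csv'})
    if response.status_code == 200:
        # parse the CSV body of the response into a DataFrame
        df = pd.read_csv(StringIO(response.text))
        print(df)
    else:
        print('Request for var={} failed with status {}'.format(value, response.status_code))

If some values still come back with a 409, printing response.text for those requests may reveal what the server considers the conflict to be.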