Tags: python, asynchronous, aiohttp

Aiohttp not performing any requests


First of all, here's the code:

import random
import asyncio
from aiohttp import ClientSession
import csv

headers =[]
def extractsites(file):
    sites = []
    readfile = open(file, "r")
    reader = csv.reader(readfile, delimiter=",")
    raw = list(reader)
    for a in raw:
        sites.append((a[1]))
    return sites

async def bound_fetch(sem, url):
    async with sem:
        print("doing request for "+ url)
        async with ClientSession() as session:
            async with session.get(url) as response:
                responseheader = await response.headers
                print(headers)


async def run():
    urls = extractsites("cisco-umbrella.csv")
    tasks = []
    sem = asyncio.Semaphore(100)
    for i in urls:
        task = asyncio.ensure_future(bound_fetch(sem, "http://"+i))
        tasks.append(task)
    headers = await asyncio.wait(*tasks)
    print(headers)


def main():
    loop = asyncio.get_event_loop()
    future = asyncio.ensure_future(run())
    loop.run_until_complete(future)

if __name__ == '__main__':
    main()

As per my last question, I'm following this blog post: https://pawelmhm.github.io/asyncio/python/aiohttp/2016/04/22/asyncio-aiohttp.html

I tried to adapt my code as closely as possible to the example implementation, but this code is still not making any requests or printing the headers in bound_fetch as I want.

Can somebody spot what's wrong with this code?


Solution

  • response.headers is a regular property; there is no need to put await in front of it.

  • asyncio.wait, on the other hand, expects an iterable of futures (not unpacked with *) and returns a (done, pending) pair. It looks like you should replace the await asyncio.wait(*tasks) call with await asyncio.gather(*tasks) (see the gather docs), as shown in the sketch below.
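
Applying both fixes, a minimal sketch of bound_fetch and run could look like the following. It keeps the rest of the question's code (imports, extractsites, main) unchanged, and assumes print(responseheader) is what was intended inside bound_fetch, since the original prints the empty module-level headers list instead:

async def bound_fetch(sem, url):
    async with sem:
        print("doing request for " + url)
        async with ClientSession() as session:
            async with session.get(url) as response:
                # headers is a plain property, so no await here
                responseheader = response.headers
                print(responseheader)
                return responseheader


async def run():
    urls = extractsites("cisco-umbrella.csv")
    sem = asyncio.Semaphore(100)
    tasks = [asyncio.ensure_future(bound_fetch(sem, "http://" + i)) for i in urls]
    # gather takes the futures unpacked and returns their results in order
    headers = await asyncio.gather(*tasks)
    print(headers)

Creating a new ClientSession per request is kept here only to stay close to the original code; aiohttp's documentation generally recommends reusing a single session across requests.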