python, multithreading, python-requests, readlines

How can I make a request for every token from a txt file in Python?


I have a text file called tokens.txt.

Ex: 12463,4126,6343,6345.

I want to send a POST request with each token, using multithreading.

For some reason, my code only picks up the last token from the txt file and uses only that one.

import requests
from concurrent.futures import ThreadPoolExecutor, as_completed
from time import time

url_list = [
    "https://www.google.com/api/"
]
file_lines = open("tokens.txt", "r").readlines()
for line in file_lines:
    tokens = {
        'Token':line.replace('/n','')
        }

def makerequest(url):
    while True:
        html = requests.post(url,stream=True, data=tokens)
        print(tokens)
        return html.content

start = time()

processes = []
with ThreadPoolExecutor(max_workers=200) as executor:
    for url in url_list:
        processes.append(executor.submit(makerequest, url))

for task in as_completed(processes):
    print(task.result())


print(f'Time taken: {time() - start}')

How can I send a request for each token?


Solution

  • In your case, the for loop overwrites tokens on every iteration, so after the loop tokens = {"Token": <last_token>} and only the last token is ever sent.

    Modify your code like this so that one request is sent for each token:

    import requests

    url_list = [
        "https://www.google.com/api/"
    ]

    # A set is better than a list here: it guarantees only one request per
    # token even if tokens.txt contains duplicate lines.
    tokens = set()
    with open("tokens.txt", "r") as f:
        for line in f:
            tokens.add(line.strip())

    def makerequest(url):
        for token in tokens:
            # build a fresh payload for each request instead of mutating a shared dict
            html = requests.post(url, stream=True, data={"Token": token})
            print(token)
            # do something with html here
            # don't return or break
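    This loop sends the requests sequentially from a single task. If you also want the thread pool to parallelize across tokens, a minimal sketch along the same lines (assuming the same tokens.txt, the placeholder endpoint from the question, and an arbitrary worker count) is to submit one task per token:

    import requests
    from concurrent.futures import ThreadPoolExecutor, as_completed

    URL = "https://www.google.com/api/"  # placeholder endpoint from the question

    # Read the tokens once; the set comprehension drops duplicates and blank lines.
    with open("tokens.txt", "r") as f:
        tokens = {line.strip() for line in f if line.strip()}

    def makerequest(url, token):
        # Each task gets its own token, so no shared dict is mutated across threads.
        response = requests.post(url, stream=True, data={"Token": token})
        return token, response.content

    with ThreadPoolExecutor(max_workers=20) as executor:  # worker count is an arbitrary choice
        futures = [executor.submit(makerequest, URL, token) for token in tokens]
        for future in as_completed(futures):
            token, content = future.result()
            print(token)
            # do something with content here

    With this layout, as_completed hands you each response as it finishes, and the payload is built inside the task, so nothing is shared between threads.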