I'm trying to mock some AWS functionality that is integrated into a POST request handler. Initially I tried with Python's `unittest.mock` library; however, the calls are still made to AWS, including the two-way authentication.
The mock lib code looks like this.
Test file
from unittest import mock
from fastapi.testclient import TestClient
# BUG FIX: patch where the name is *looked up*, not where it is defined.
# The route module does `from utils.io.aws import upload_audio_to_s3`,
# which binds its own module-level reference to the real function, so
# patching "utils.io.aws.upload_audio_to_s3" never intercepts the call
# and the test still hits AWS. Patch the route module's attribute instead.
# TODO(review): replace "server" with the actual import path of the
# module that defines the /mock/test route.
@mock.patch("server.upload_audio_to_s3", return_value="test_1.wav")
def test_dummy_code(mock_upload):
    """POST a wav file to /mock/test and check the mocked S3 path is echoed back."""
    with open("tests/files/test.wav", "rb") as f:
        values = {"language_code": "nl-EN"}
        response = client.post("/mock/test", files={"file": f}, data=values)
    assert response.json() == {"status": "test_1.wav"}
    # Confirm the route really went through the patched function.
    mock_upload.assert_called_once()
Server file with post
from fastapi import APIRouter, File, Form, UploadFile
from utils.io.aws import upload_audio_to_s3
router = APIRouter()
@router.post("/mock/test")
async def get_mocking(file: UploadFile = File(...)):
    """Demo upload endpoint used to exercise the S3 mocking setup.

    NOTE(review): the received upload is only printed; a fixed local file
    name ("test.wav") is forwarded to S3 — presumably intentional for this
    mock route (see the commented-out process_file call). Confirm before
    reusing in production.
    """
    print(file)
    print("Files recieved")  # original message kept byte-for-byte (typo included)
    # s3_file_path = process_file(file)
    uploaded_key = upload_audio_to_s3(
        "test.wav", SOUND_FILE_EXTENSION, S3_FOLDER_NAME, BUCKET_NAME
    )
    print(uploaded_key)
    return {"status": uploaded_key}
aws file
import boto3
def connect_to_s3(region_name=REGION):
    """Open boto3 S3 handles for *region_name*.

    Returns:
        tuple: ``(s3_client, s3_resource)`` for the given region.
    """
    client = boto3.client("s3", region_name=region_name)
    resource = boto3.resource("s3", region_name=region_name)
    # MFA fallback kept for reference (disabled in the original source):
    # if not _validate_access(client):
    #     # logging.info("Access denied. Trying with MFA")
    #     client, resource = _connect_to_s3_mfa()
    return client, resource
def upload_file_to_s3(
    file_path: str,
    filename: str,
    directory: str = None,
    bucket_name: str = "audio",
    s3_resource=None,
):
    """Upload a local file to S3 and return the key it was stored under.

    Args:
        file_path: Path of the local file to upload.
        filename: Object name (key) to store the file as.
        directory: Optional folder prefix inside the bucket.
        bucket_name: Target bucket name (defaults to "audio").
        s3_resource: An open boto3 S3 resource (e.g. from connect_to_s3).

    Returns:
        str: The S3 key — ``filename`` or ``directory/filename``.
    """
    # BUG FIX: the original f-strings never interpolated `filename`,
    # so every upload targeted a garbled constant key. Build the key
    # from the parameters explicitly.
    if directory:
        s3_file_path = f"{directory}/{filename}"
    else:
        s3_file_path = f"{filename}"
    bucket = s3_resource.Bucket(bucket_name)
    bucket.upload_file(file_path, s3_file_path)
    return s3_file_path
def upload_audio_to_s3(audio_file_path, file_ext, folder_name, bucket_name):
    """Store *audio_file_path* in S3 under a randomly generated name.

    The object is placed inside ``<folder_name>/batch`` with a random
    file name plus *file_ext*. Returns the resulting S3 key.
    """
    _client, resource = connect_to_s3()
    random_name = create_random_file("temp")
    target_key = f"{random_name}{file_ext}"
    batch_folder = f"{folder_name}/batch"
    return upload_file_to_s3(
        audio_file_path, target_key, batch_folder, bucket_name, resource
    )
I'm really looking for a solution that works for integrated S3 functions, preferably with moto. Thanks!
Adding the moto `@mock_s3` decorator and initializing the client, resource and bucket before issuing the request did the trick. Beware of the bucket name — it must match the one the route uploads to! Here is the part of the code that made it work.
@mock_s3
def test_dummy_code():
    """End-to-end test of the upload route against moto's in-memory S3."""
    s3_client, _resource = connect_to_s3('us-east-1')
    # Moto starts with an empty virtual AWS account, so the bucket the
    # route uploads into must be created up front.
    s3_client.create_bucket(Bucket=BUCKET_NAME)
    payload = {"language_code": "nl-NL"}
    with open("tests/files/test.wav", "rb") as audio:
        response = client.post("/mock/test", files={"file": audio}, data=payload)
    assert response.status_code == 200