python, amazon-web-services, amazon-s3, gzip, boto

GzipFile not supported by S3?


I am trying to iterate through some file paths so that I can gzip each file individually. Each item in testList is a path string like /tmp/File.

After gzipping them, I want to upload each gzip file to S3:

import boto3
import gzip
import shutil
import zipfile


s3_resource = boto3.resource('s3')
bucket = s3_resource.Bucket('testunzipping')

with zipfile.ZipFile('/tmp/DataPump_10000838.zip', 'r') as zip_ref:
    testList = []
    for i in zip_ref.namelist():
        if not i.startswith("__MACOSX/"):
            val = '/tmp/'+i
            testList.append(val)
            
    testList.remove(testList[0])

    for i in testList:
        fileName = i.replace("/tmp/DataPump_10000838/", "") 
        fileName2 = i + '.gz'
        with open(i, 'rb') as f_in:
            with gzip.open(fileName2, 'wb') as f_out:
                shutil.copyfileobj(f_in, f_out)
            gzip_object = gzip.compress(f_out)
            bucket.upload_fileobj(f_out, fileName, ExtraArgs={'ContentType': "text/plain", 'ContentEncoding':'gzip'})

However, currently, the last line gives me this error:

Response
{
  "errorMessage": "Input <gzip on 0x7fd53bc53fa0> of type: <class 'gzip.GzipFile'> is not supported.",
  "errorType": "RuntimeError",
  "requestId": "",
  "stackTrace": [
    "  File \"/var/lang/lib/python3.9/importlib/__init__.py\", line 127, in import_module\n    return _bootstrap._gcd_import(name[level:], package, level)\n",
    "  File \"<frozen importlib._bootstrap>\", line 1030, in _gcd_import\n",
    "  File \"<frozen importlib._bootstrap>\", line 1007, in _find_and_load\n",
    "  File \"<frozen importlib._bootstrap>\", line 986, in _find_and_load_unlocked\n",
    "  File \"<frozen importlib._bootstrap>\", line 680, in _load_unlocked\n",
    "  File \"<frozen importlib._bootstrap_external>\", line 850, in exec_module\n",
    "  File \"<frozen importlib._bootstrap>\", line 228, in _call_with_frames_removed\n",
    "  File \"/var/task/lambda_function.py\", line 50, in <module>\n    bucket.upload_fileobj(f_out, fileName, ExtraArgs={'ContentType': \"text/plain\", 'ContentEncoding':'gzip'})\n",
    "  File \"/var/runtime/boto3/s3/inject.py\", line 579, in bucket_upload_fileobj\n    return self.meta.client.upload_fileobj(\n",
    "  File \"/var/runtime/boto3/s3/inject.py\", line 539, in upload_fileobj\n    return future.result()\n",
    "  File \"/var/runtime/s3transfer/futures.py\", line 106, in result\n    return self._coordinator.result()\n",
    "  File \"/var/runtime/s3transfer/futures.py\", line 265, in result\n    raise self._exception\n",
    "  File \"/var/runtime/s3transfer/tasks.py\", line 255, in _main\n    self._submit(transfer_future=transfer_future, **kwargs)\n",
    "  File \"/var/runtime/s3transfer/upload.py\", line 545, in _submit\n    upload_input_manager = self._get_upload_input_manager_cls(\n",
    "  File \"/var/runtime/s3transfer/upload.py\", line 521, in _get_upload_input_manager_cls\n    raise RuntimeError(\n"
  ]
}

How else can I upload my f_out object to the S3 bucket? Does S3/boto not support gzip files? I also tried ExtraArgs={'ContentType': "application/gzip"} but got the same error.


Solution

  • The error comes from s3transfer: upload_fileobj needs a readable file-like object, but f_out is a GzipFile that was opened for writing (and is already closed once its with block exits), so the transfer manager rejects it. Assuming each file fits into memory, you can simply compress the data in memory and wrap it in a BytesIO for the S3 API to read:

    import boto3
    import gzip
    import io


    s3_resource = boto3.resource("s3")
    bucket = s3_resource.Bucket("testunzipping")
    for i in testList:
        fileName = i.replace("/tmp/DataPump_10000838/", "")
        with open(i, "rb") as f_in:
            # gzip.compress returns bytes; BytesIO turns them into a readable stream
            gzipped_content = gzip.compress(f_in.read())
            bucket.upload_fileobj(
                io.BytesIO(gzipped_content),
                fileName,
                ExtraArgs={"ContentType": "text/plain", "ContentEncoding": "gzip"},
            )
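
    Since gzip.compress already returns a bytes object, a variation (a sketch reusing the same bucket and key names as above) is to skip the BytesIO wrapper and call put_object on a plain client, which accepts raw bytes as the Body:

    import boto3
    import gzip


    s3 = boto3.client("s3")
    for i in testList:
        fileName = i.replace("/tmp/DataPump_10000838/", "")
        with open(i, "rb") as f_in:
            # put_object accepts bytes directly, so no file-like wrapper is needed
            s3.put_object(
                Bucket="testunzipping",
                Key=fileName,
                Body=gzip.compress(f_in.read()),
                ContentType="text/plain",
                ContentEncoding="gzip",
            )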
    

    If that's not the case, you can use a tempfile to compress the data to disk first:

    import boto3
    import gzip
    import shutil
    import tempfile


    s3_resource = boto3.resource("s3")
    bucket = s3_resource.Bucket("testunzipping")
    for i in testList:
        fileName = i.replace("/tmp/DataPump_10000838/", "")
        with tempfile.TemporaryFile() as tmpf:
            # stream the source file through GzipFile into the temp file
            with open(i, "rb") as f_in, gzip.GzipFile(mode="wb", fileobj=tmpf) as gzf:
                shutil.copyfileobj(f_in, gzf)
            # rewind so upload_fileobj reads the compressed data from the start
            tmpf.seek(0)
            bucket.upload_fileobj(
                tmpf,
                fileName,
                ExtraArgs={"ContentType": "text/plain", "ContentEncoding": "gzip"},
            )
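
    Either way, a quick sanity check (a sketch assuming the bucket and key names above) is to download one of the objects and decompress it:

    import boto3
    import gzip


    s3 = boto3.client("s3")
    obj = s3.get_object(Bucket="testunzipping", Key=fileName)
    # the stored body is gzip-compressed; decompress to recover the original bytes
    original = gzip.decompress(obj["Body"].read())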