I'm using Peewee with PyMySQL and I'm getting stuck on the 64k blob size limit when trying to use the CompressedField from the playhouse module... The following code gives me truncated data for the second test:
from peewee import *
from playhouse.db_url import connect
from playhouse.fields import CompressedField

db = connect("mysql://me:pass@IP/test_db")

class Compress(Model):
    name = CharField()
    cmprssd_data = CompressedField()

    class Meta:
        database = db

db.connect()
db.create_tables([Compress], safe=True)

short_str = "".zfill(200)
di_test1 = {"name": "first", "cmprssd_data": short_str}
test1 = Compress(**di_test1)
test1.save()

long_str = "".zfill(200000000)
di_test2 = {"name": "second", "cmprssd_data": long_str}
test2 = Compress(**di_test2)
test2.save()
I tried updating 'max_allowed_packet' to 1073741824 in both MySQL and pymysql, but this did not change anything.
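For reference, this is a minimal sketch of how I check the effective server-side value (the connection details are the same placeholders as above); if the server still reports a small number here, the setting was not actually picked up:

import pymysql

# Placeholder credentials matching the connection string above.
conn = pymysql.connect(host="IP", user="me", password="pass", database="test_db")
with conn.cursor() as cur:
    # The server-side limit that actually applies to this session.
    cur.execute("SHOW VARIABLES LIKE 'max_allowed_packet'")
    print(cur.fetchone())  # e.g. ('max_allowed_packet', '67108864')
conn.close()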
By the way, as I think it's the same problem, using the long_str with a PickledField gives me a broken pipe error.
Is there a way of telling peewee to work with longblob? (Or does the problem come from somewhere else?)
I also found a thread on the broken pipe problem with pymysql, but I don't know how to tell a Peewee model to do that chunking on this specific field...
This can be accomplished with a custom field:
from peewee import *
from playhouse.fields import CompressedField

# Tell the database that the longblob column type exists.
db = MySQLDatabase('test', host='127.0.0.1', user='root',
                   fields={'longblob': 'longblob'})

# Create a custom field, in this case subclassing the existing field.
class LongCompressedField(CompressedField):
    db_field = 'longblob'  # <-- this matches our dictionary key above

class MyModel(Model):
    val = LongCompressedField()

    class Meta:
        db_table = 'test'
        database = db

db.connect()
db.create_table(MyModel, safe=True)

m = MyModel(val=''.zfill(200000000))
m.save()
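As a quick sanity check (just a sketch, assuming the insert above succeeded and the decompressed value comes back with its original length), you can read the row back and compare lengths to confirm nothing was truncated:

# Re-fetch the row and confirm the full value round-trips.
stored = MyModel.get(MyModel.id == m.id)
assert len(stored.val) == 200000000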