S3: Consistently use compresslevel=9 for gzipped content
Closes #572
jschneier committed Aug 30, 2018
1 parent 1325034 commit 3978c40
Showing 2 changed files with 2 additions and 2 deletions.
storages/backends/s3boto.py (1 addition, 1 deletion)
@@ -356,7 +356,7 @@ def _compress_content(self, content):
         # This means each time a file is compressed it changes even if the other contents don't change
         # For S3 this defeats detection of changes using MD5 sums on gzipped files
         # Fixing the mtime at 0.0 at compression time avoids this problem
-        zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf, mtime=0.0)
+        zfile = GzipFile(mode='wb', fileobj=zbuf, mtime=0.0)
         try:
             zfile.write(force_bytes(content.read()))
         finally:
storages/backends/s3boto3.py (1 addition, 1 deletion)
@@ -435,7 +435,7 @@ def _compress_content(self, content):
         # This means each time a file is compressed it changes even if the other contents don't change
         # For S3 this defeats detection of changes using MD5 sums on gzipped files
         # Fixing the mtime at 0.0 at compression time avoids this problem
-        zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf, mtime=0.0)
+        zfile = GzipFile(mode='wb', fileobj=zbuf, mtime=0.0)
         try:
             zfile.write(force_bytes(content.read()))
         finally:
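
For context, a minimal standalone sketch (not part of the commit) of what the change relies on: the standard-library GzipFile defaults to compresslevel=9, so dropping the explicit compresslevel=6 argument moves both backends to level 9, and pinning mtime=0.0 keeps the compressed bytes deterministic so MD5 comparisons against S3 only change when the underlying content changes. The gzip_bytes helper below is hypothetical, for illustration only.

    # Sketch using only the standard library; gzip_bytes is a hypothetical helper.
    import hashlib
    import io
    from gzip import GzipFile

    def gzip_bytes(data):
        zbuf = io.BytesIO()
        # No explicit compresslevel: GzipFile's default is 9, matching the commit title.
        # mtime=0.0 fixes the gzip header timestamp so the output is reproducible.
        zfile = GzipFile(mode='wb', fileobj=zbuf, mtime=0.0)
        try:
            zfile.write(data)
        finally:
            zfile.close()
        return zbuf.getvalue()

    payload = b'example file contents'
    first = hashlib.md5(gzip_bytes(payload)).hexdigest()
    second = hashlib.md5(gzip_bytes(payload)).hexdigest()
    assert first == second  # same input -> same gzipped bytes -> same MD5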
