嘿，我正在尝试执行一个简单的 manage.py collectstatic，
将静态文件上传到 AWS 中国区域的 S3。我遇到的问题是：进程中途停止，并出现如下错误：
Traceback (most recent call last):
File "/srv/bnn/env/bin/manage.py", line 6, in <module>
exec(compile(open(__file__).read(), __file__, 'exec'))
File "/srv/bnn/content/scripts/manage.py", line 12, in <module>
execute_from_command_line(argv)
File "/srv/bnn/env/lib/python3.5/site-packages/django/core/management/__init__.py", line 363, in execute_from_command_line
utility.execute()
File "/srv/bnn/env/lib/python3.5/site-packages/django/core/management/__init__.py", line 355, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/srv/bnn/env/lib/python3.5/site-packages/django/core/management/base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "/srv/bnn/env/lib/python3.5/site-packages/django/core/management/base.py", line 330, in execute
output = self.handle(*args, **options)
File "/srv/bnn/env/lib/python3.5/site-packages/django/contrib/staticfiles/management/commands/collectstatic.py", line 199, in handle
collected = self.collect()
File "/srv/bnn/env/lib/python3.5/site-packages/django/contrib/staticfiles/management/commands/collectstatic.py", line 139, in collect
for original_path, processed_path, processed in processor:
File "/srv/bnn/env/lib/python3.5/site-packages/pipeline/storage.py", line 62, in post_process
for name, hashed_name, processed in super_class.post_process(paths.copy(), dry_run, **options):
File "/srv/bnn/env/lib/python3.5/site-packages/pipeline/storage.py", line 39, in post_process
for name, hashed_name, processed in super_class.post_process(paths.copy(), dry_run, **options):
File "/srv/bnn/env/lib/python3.5/site-packages/django/contrib/staticfiles/storage.py", line 414, in post_process
for post_processed in all_post_processed:
File "/srv/bnn/env/lib/python3.5/site-packages/django/contrib/staticfiles/storage.py", line 246, in post_process
for name, hashed_name, processed, _ in self._post_process(paths, adjustable_paths, hashed_files):
File "/srv/bnn/env/lib/python3.5/site-packages/django/contrib/staticfiles/storage.py", line 312, in _post_process
hashed_name = self.hashed_name(name, content_file)
File "/srv/bnn/env/lib/python3.5/site-packages/django/contrib/staticfiles/storage.py", line 109, in hashed_name
file_hash = self.file_hash(clean_name, content)
File "/srv/bnn/env/lib/python3.5/site-packages/django/contrib/staticfiles/storage.py", line 86, in file_hash
for chunk in content.chunks():
File "/srv/bnn/env/lib/python3.5/site-packages/django/core/files/base.py", line 76, in chunks
self.seek(0)
ValueError: I/O operation on closed file.
不过有一部分文件确实成功上传到了 S3。我也尝试在那台 EC2 实例上直接安装 boto，同样遇到了超时错误。
所以我的问题是：如何修改 Django 的超时设置，让 collectstatic 不再因为超时错误而中途停止？
答案 0（得分：0）
通过自定义 Storage 类解决了此问题。
在custom_storages.py中:
from django.contrib.staticfiles.storage import ManifestFilesMixin
from storages.backends.s3boto3 import S3Boto3Storage, SpooledTemporaryFile
import os
class CustomS3Boto3Storage(ManifestFilesMixin, S3Boto3Storage):
    """S3 static-files storage that survives boto3 closing the upload stream.

    ``collectstatic`` post-processing re-reads each file after it has been
    saved, but boto3 closes the file object it was handed during upload,
    which produces ``ValueError: I/O operation on closed file``.  We work
    around that by uploading a disposable copy of the content, so boto3 may
    close the copy while the original stays open for Django.
    """

    def _save_content(self, obj, content, parameters):
        """Upload a clone of *content* so boto3 cannot close the original.

        :param obj: boto3 ``Object`` resource being written to S3.
        :param content: Django ``File`` whose bytes are uploaded; left open
            for later re-reading by the manifest post-processor.
        :param parameters: extra upload parameters forwarded unchanged.
        """
        # Rewind the original so the copy starts from byte 0.
        content.seek(0, os.SEEK_SET)
        # Spills to disk once it grows past the default threshold, so large
        # assets do not need to fit in memory.
        content_autoclose = SpooledTemporaryFile()
        content_autoclose.write(content.read())
        # Rewind the copy: boto3 uploads from the current file position, and
        # without this seek the upload would send zero bytes (the position is
        # at EOF right after the write above).
        content_autoclose.seek(0, os.SEEK_SET)
        # boto3 auto-closes content_autoclose when the upload finishes.
        super(CustomS3Boto3Storage, self)._save_content(obj, content_autoclose, parameters)
        # Defensive cleanup in case an upstream fix stops auto-closing it.
        if not content_autoclose.closed:
            content_autoclose.close()
class StaticStorage(CustomS3Boto3Storage):
    # Bucket key prefix under which collected static files are stored.
    # NOTE(review): `settings` is referenced here but never imported in this
    # snippet — add `from django.conf import settings` at the top of
    # custom_storages.py, otherwise importing this module raises NameError.
    location = settings.STATICFILES_LOCATION
在您的设置中:
STATICFILES_LOCATION = 'static'
STATICFILES_STORAGE = 'custom_storages.StaticStorage'
此方案归功于 charlesthk。
此github issue有很多有关此问题的有用信息。