django-storages EndpointConnectionError
Sorry to bother you, but I think I'm missing something and I can't find the solution. When I run collectstatic, I get the following error:
botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL: "http://localhost:1212/test/static/gis/css/ol3.css"
Here is the relevant setup:
docker-compose.yaml
. . .
  s3server:
    image: scality/s3server:latest
    restart: unless-stopped
    ports:
      - "1212:8000"
    volumes:
      - s3data:/usr/src/app/localData
      - s3metadata:/usr/src/app/localMetadata
    environment:
      SCALITY_ACCESS_KEY_ID: newAccessKey
      SCALITY_SECRET_ACCESS_KEY: newSecretKey
      SSL: "FALSE"
settings.py
# AWS settings
AWS_ACCESS_KEY_ID = env.str('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env.str('AWS_SECRET_ACCESS_KEY')
AWS_S3_REGION_NAME = env.str('AWS_S3_REGION_NAME')
AWS_STORAGE_BUCKET_NAME = env.str('AWS_STORAGE_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = env.str('AWS_S3_ENDPOINT_URL')
AWS_DEFAULT_ACL = None
AWS_S3_OBJECT_PARAMETERS = {
    'CacheControl': 'max-age=86400',
}
AWS_QUERYSTRING_AUTH = False
# s3 static settings
AWS_STATIC_LOCATION = 'static'
STATIC_URL = f'{AWS_S3_ENDPOINT_URL}/{AWS_STATIC_LOCATION}/'
STATICFILES_STORAGE = 'backend.storages.StaticStorage'
# s3 media settings
AWS_MEDIA_LOCATION = 'media'
MEDIA_URL = f'{AWS_S3_ENDPOINT_URL}/{AWS_MEDIA_LOCATION}/'
DEFAULT_FILE_STORAGE = 'backend.storages.PublicMediaStorage'
dev.env
AWS_STORAGE_BUCKET_NAME=test
AWS_ACCESS_KEY_ID=newAccessKey
AWS_SECRET_ACCESS_KEY=newSecretKey
AWS_S3_REGION_NAME=us-east-1
AWS_S3_ENDPOINT_URL=http://localhost:1212
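For context, the URL in the traceback is just the path-style address that boto3 builds from these values. A simplified illustration (not actual boto3 internals):

# Rough sketch of how the failing URL is assembled from the settings above
# (path-style addressing: endpoint / bucket / key).
endpoint = "http://localhost:1212"   # AWS_S3_ENDPOINT_URL
bucket = "test"                      # AWS_STORAGE_BUCKET_NAME
key = "static/gis/css/ol3.css"       # AWS_STATIC_LOCATION + collected file path
print(f"{endpoint}/{bucket}/{key}")  # -> http://localhost:1212/test/static/gis/css/ol3.css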
backend/storages.py
from django.conf import settings
from storages.backends.s3boto3 import S3Boto3Storage


class StaticStorage(S3Boto3Storage):
    location = settings.AWS_STATIC_LOCATION
    default_acl = "public-read"


class PublicMediaStorage(S3Boto3Storage):
    location = settings.AWS_MEDIA_LOCATION
    default_acl = "public-read"
    file_overwrite = False
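For reference, a minimal way to reproduce the failure outside collectstatic is a sketch like the following, assuming the project package is backend and Django settings are loaded (e.g. inside python manage.py shell):

from django.core.files.base import ContentFile

from backend.storages import StaticStorage

# This performs a real PUT against AWS_S3_ENDPOINT_URL and raises
# EndpointConnectionError if the endpoint cannot be reached.
storage = StaticStorage()
name = storage.save("connectivity_check.txt", ContentFile(b"ok"))
print(storage.url(name))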
What I really don't understand is why the following script works just fine:
script.py
import logging

import boto3
from botocore.exceptions import ClientError

s3_client = boto3.client(
    's3',
    aws_access_key_id="newAccessKey",
    aws_secret_access_key="newSecretKey",
    endpoint_url='http://localhost:1212',
    region_name="us-east-1",
)


def create_bucket(bucket_name, region="us-east-1"):
    try:
        s3_client.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': region},
        )
    except ClientError as e:
        logging.error(e)
        return False
    return True


if __name__ == "__main__":
    create_bucket("test", region="us-east-1")
    response = s3_client.list_buckets()

    # Output the bucket names
    print('Existing buckets:')
    for bucket in response['Buckets']:
        print(f'  {bucket["Name"]}')

    response = s3_client.upload_file(
        "backend/tests/test_image.jpg",
        "test",
        "static/test_image",
    )
    s3_client.download_file('test', 'static/test_image', 'toto.jpg')
Well, inside a container, localhost is obviously not the other service. The script works because it runs on the host, where port 1212 is published. I changed AWS_S3_ENDPOINT_URL=http://localhost:1212 to AWS_S3_ENDPOINT_URL=http://s3server:8000 and exposed port 8000 of the s3server service in the compose file. The last step to make it work was to add "s3server": "us-east-1" to the config.json mounted into the Scality server.
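Concretely, the changes look roughly like this (a sketch; I believe the host-to-region mapping lives in the restEndpoints section of the Scality CloudServer config.json, but check the version shipped in your image):

dev.env
AWS_S3_ENDPOINT_URL=http://s3server:8000

docker-compose.yaml
  s3server:
    . . .
    expose:
      - "8000"

config.json (mounted into the s3server container)
"restEndpoints": {
    "localhost": "us-east-1",
    "s3server": "us-east-1"
}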