fix(storage): use centralized config management (#9620)

Author:     -LAN-
Date:       2024-10-22 14:04:59 +08:00 (committed by GitHub)
Commit:     b14d59e977 (parent 5f12c17355)
GPG Key ID: B5690EEEBB952194 (no known key found for this signature)

2 changed files with 20 additions and 17 deletions
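
The change replaces per-instance self.app.config lookups with the shared dify_config object. As a reference point, the sketch below shows the centralized-config pattern being adopted, assuming a Pydantic-style settings class; the field names match the diff, but the class name, base class, and defaults are illustrative assumptions, not the project's exact definition.

# Sketch of the centralized-config pattern (assumed Pydantic-style settings).
from typing import Optional

from pydantic_settings import BaseSettings


class DifyConfigSketch(BaseSettings):
    # S3 settings read by AwsS3Storage
    S3_BUCKET_NAME: Optional[str] = None
    S3_USE_AWS_MANAGED_IAM: bool = False
    S3_REGION: Optional[str] = None
    S3_ACCESS_KEY: Optional[str] = None
    S3_SECRET_KEY: Optional[str] = None
    S3_ENDPOINT: Optional[str] = None
    S3_ADDRESS_STYLE: str = "auto"

    # Supabase settings read by SupabaseStorage
    SUPABASE_URL: Optional[str] = None
    SUPABASE_API_KEY: Optional[str] = None
    SUPABASE_BUCKET_NAME: Optional[str] = None


# pydantic-settings populates the fields from environment variables / .env.
dify_config = DifyConfigSketch()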

extensions/storage/aws_s3_storage.py

@@ -5,6 +5,7 @@ import boto3
 from botocore.client import Config
 from botocore.exceptions import ClientError
 
+from configs import dify_config
 from extensions.storage.base_storage import BaseStorage
 
 logger = logging.getLogger(__name__)
@@ -15,24 +16,23 @@ class AwsS3Storage(BaseStorage):
     def __init__(self):
         super().__init__()
-        app_config = self.app.config
-        self.bucket_name = app_config.get("S3_BUCKET_NAME")
-        if app_config.get("S3_USE_AWS_MANAGED_IAM"):
+        self.bucket_name = dify_config.S3_BUCKET_NAME
+        if dify_config.S3_USE_AWS_MANAGED_IAM:
             logger.info("Using AWS managed IAM role for S3")
             session = boto3.Session()
-            region_name = app_config.get("S3_REGION")
+            region_name = dify_config.S3_REGION
             self.client = session.client(service_name="s3", region_name=region_name)
         else:
             logger.info("Using ak and sk for S3")
             self.client = boto3.client(
                 "s3",
-                aws_secret_access_key=app_config.get("S3_SECRET_KEY"),
-                aws_access_key_id=app_config.get("S3_ACCESS_KEY"),
-                endpoint_url=app_config.get("S3_ENDPOINT"),
-                region_name=app_config.get("S3_REGION"),
-                config=Config(s3={"addressing_style": app_config.get("S3_ADDRESS_STYLE")}),
+                aws_secret_access_key=dify_config.S3_SECRET_KEY,
+                aws_access_key_id=dify_config.S3_ACCESS_KEY,
+                endpoint_url=dify_config.S3_ENDPOINT,
+                region_name=dify_config.S3_REGION,
+                config=Config(s3={"addressing_style": dify_config.S3_ADDRESS_STYLE}),
             )
 
         # create bucket
         try:
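
The hunk cuts off at the try: block that follows the # create bucket comment. A common boto3 idiom for that step is sketched below; ensure_bucket is a hypothetical helper written for illustration, an assumption about the pattern rather than the file's verbatim code.

# Sketch: create the bucket only if a HEAD check says it is missing.
import boto3
from botocore.exceptions import ClientError


def ensure_bucket(client, bucket_name: str) -> None:
    # Hypothetical helper, not part of the diff above.
    try:
        client.head_bucket(Bucket=bucket_name)
    except ClientError as e:
        # head_bucket raises ClientError with code "404" for a missing bucket
        if e.response["Error"]["Code"] == "404":
            client.create_bucket(Bucket=bucket_name)
        else:
            # permission or networking failures should surface to the caller
            raise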

extensions/storage/supabase_storage.py

@@ -4,6 +4,7 @@ from pathlib import Path
 from supabase import Client
 
+from configs import dify_config
 from extensions.storage.base_storage import BaseStorage
@@ -12,14 +13,16 @@ class SupabaseStorage(BaseStorage):
     def __init__(self):
         super().__init__()
-        app_config = self.app.config
-        self.bucket_name = app_config.get("SUPABASE_BUCKET_NAME")
-        self.client = Client(
-            supabase_url=app_config.get("SUPABASE_URL"), supabase_key=app_config.get("SUPABASE_API_KEY")
-        )
-        self.create_bucket(
-            id=app_config.get("SUPABASE_BUCKET_NAME"), bucket_name=app_config.get("SUPABASE_BUCKET_NAME")
-        )
+        if dify_config.SUPABASE_URL is None:
+            raise ValueError("SUPABASE_URL is not set")
+        if dify_config.SUPABASE_API_KEY is None:
+            raise ValueError("SUPABASE_API_KEY is not set")
+        if dify_config.SUPABASE_BUCKET_NAME is None:
+            raise ValueError("SUPABASE_BUCKET_NAME is not set")
+        self.bucket_name = dify_config.SUPABASE_BUCKET_NAME
+        self.client = Client(supabase_url=dify_config.SUPABASE_URL, supabase_key=dify_config.SUPABASE_API_KEY)
+        self.create_bucket(id=dify_config.SUPABASE_BUCKET_NAME, bucket_name=dify_config.SUPABASE_BUCKET_NAME)
 
     def create_bucket(self, id, bucket_name):
         if not self.bucket_exists():
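
Beyond swapping the config source, the Supabase rewrite adds fail-fast validation: construction now raises immediately when a required setting is missing instead of passing None into the client. A minimal test sketch of that behavior follows, assuming the module path extensions.storage.supabase_storage (derived from the imports in the diff) and that SupabaseStorage can be constructed in isolation.

# Sketch: the new guards should fail fast when config is missing.
from unittest.mock import patch

import pytest

from configs import dify_config
from extensions.storage.supabase_storage import SupabaseStorage  # assumed path


def test_missing_supabase_url_raises():
    # Temporarily blank out the setting; patch.object restores it afterwards.
    with patch.object(dify_config, "SUPABASE_URL", None):
        with pytest.raises(ValueError, match="SUPABASE_URL is not set"):
            SupabaseStorage()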