Initial commit

commit ac715a8b88
2025-10-14 14:17:21 +08:00
35011 changed files with 3834178 additions and 0 deletions


@@ -0,0 +1,54 @@
import posixpath
from collections.abc import Generator
import oss2 as aliyun_s3 # type: ignore
from configs import dify_config
from extensions.storage.base_storage import BaseStorage
class AliyunOssStorage(BaseStorage):
"""Implementation for Aliyun OSS storage."""
def __init__(self):
super().__init__()
self.bucket_name = dify_config.ALIYUN_OSS_BUCKET_NAME
self.folder = dify_config.ALIYUN_OSS_PATH
oss_auth_method = aliyun_s3.Auth
region = None
if dify_config.ALIYUN_OSS_AUTH_VERSION == "v4":
oss_auth_method = aliyun_s3.AuthV4
region = dify_config.ALIYUN_OSS_REGION
oss_auth = oss_auth_method(dify_config.ALIYUN_OSS_ACCESS_KEY, dify_config.ALIYUN_OSS_SECRET_KEY)
self.client = aliyun_s3.Bucket(
oss_auth,
dify_config.ALIYUN_OSS_ENDPOINT,
self.bucket_name,
connect_timeout=30,
region=region,
)
def save(self, filename, data):
self.client.put_object(self.__wrapper_folder_filename(filename), data)
def load_once(self, filename: str) -> bytes:
obj = self.client.get_object(self.__wrapper_folder_filename(filename))
data: bytes = obj.read()
return data
def load_stream(self, filename: str) -> Generator:
obj = self.client.get_object(self.__wrapper_folder_filename(filename))
while chunk := obj.read(4096):
yield chunk
def download(self, filename: str, target_filepath):
self.client.get_object_to_file(self.__wrapper_folder_filename(filename), target_filepath)
def exists(self, filename: str):
return self.client.object_exists(self.__wrapper_folder_filename(filename))
def delete(self, filename: str):
self.client.delete_object(self.__wrapper_folder_filename(filename))
def __wrapper_folder_filename(self, filename: str) -> str:
return posixpath.join(self.folder, filename) if self.folder else filename
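
Object keys are joined with posixpath rather than os.path, so the configured folder prefix always uses forward slashes regardless of the host OS. A minimal standalone sketch of the same joining rule (the helper name and values are illustrative, not part of the file above):

import posixpath
def wrap_key(folder: str, filename: str) -> str:
    # mirrors __wrapper_folder_filename: prefix only when a folder is configured
    return posixpath.join(folder, filename) if folder else filename
assert wrap_key("dify", "upload/a.txt") == "dify/upload/a.txt"
assert wrap_key("", "upload/a.txt") == "upload/a.txt"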


@@ -0,0 +1,87 @@
import logging
from collections.abc import Generator
import boto3 # type: ignore
from botocore.client import Config # type: ignore
from botocore.exceptions import ClientError # type: ignore
from configs import dify_config
from extensions.storage.base_storage import BaseStorage
logger = logging.getLogger(__name__)
class AwsS3Storage(BaseStorage):
"""Implementation for Amazon Web Services S3 storage."""
def __init__(self):
super().__init__()
self.bucket_name = dify_config.S3_BUCKET_NAME
if dify_config.S3_USE_AWS_MANAGED_IAM:
logger.info("Using AWS managed IAM role for S3")
session = boto3.Session()
region_name = dify_config.S3_REGION
self.client = session.client(service_name="s3", region_name=region_name)
else:
logger.info("Using ak and sk for S3")
self.client = boto3.client(
"s3",
aws_secret_access_key=dify_config.S3_SECRET_KEY,
aws_access_key_id=dify_config.S3_ACCESS_KEY,
endpoint_url=dify_config.S3_ENDPOINT,
region_name=dify_config.S3_REGION,
config=Config(s3={"addressing_style": dify_config.S3_ADDRESS_STYLE}),
)
        # ensure the bucket exists
        try:
            self.client.head_bucket(Bucket=self.bucket_name)
        except ClientError as e:
            # if the bucket does not exist, create it
            if e.response["Error"]["Code"] == "404":
                self.client.create_bucket(Bucket=self.bucket_name)
            # a 403 means the bucket exists but is not accessible to this principal; skip creation
            elif e.response["Error"]["Code"] == "403":
                pass
else:
# other error, raise exception
raise
def save(self, filename, data):
self.client.put_object(Bucket=self.bucket_name, Key=filename, Body=data)
def load_once(self, filename: str) -> bytes:
try:
data: bytes = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read()
except ClientError as ex:
if ex.response["Error"]["Code"] == "NoSuchKey":
raise FileNotFoundError("File not found")
else:
raise
return data
def load_stream(self, filename: str) -> Generator:
try:
response = self.client.get_object(Bucket=self.bucket_name, Key=filename)
yield from response["Body"].iter_chunks()
except ClientError as ex:
if ex.response["Error"]["Code"] == "NoSuchKey":
raise FileNotFoundError("file not found")
elif "reached max retries" in str(ex):
raise ValueError("please do not request the same file too frequently")
else:
raise
def download(self, filename, target_filepath):
self.client.download_file(self.bucket_name, filename, target_filepath)
def exists(self, filename):
try:
self.client.head_object(Bucket=self.bucket_name, Key=filename)
return True
        except ClientError:
            return False
def delete(self, filename):
self.client.delete_object(Bucket=self.bucket_name, Key=filename)
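
Because load_once and load_stream translate NoSuchKey into FileNotFoundError, callers can handle missing objects without importing botocore themselves. A hedged usage sketch (the key is hypothetical, and constructing AwsS3Storage assumes dify_config is loaded):

storage = AwsS3Storage()
try:
    content = storage.load_once("datasets/example.txt")
except FileNotFoundError:
    content = b""  # fall back when the object does not exist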


@@ -0,0 +1,84 @@
from collections.abc import Generator
from datetime import UTC, datetime, timedelta
from typing import Optional
from azure.identity import ChainedTokenCredential, DefaultAzureCredential
from azure.storage.blob import AccountSasPermissions, BlobServiceClient, ResourceTypes, generate_account_sas
from configs import dify_config
from extensions.ext_redis import redis_client
from extensions.storage.base_storage import BaseStorage
class AzureBlobStorage(BaseStorage):
"""Implementation for Azure Blob storage."""
def __init__(self):
super().__init__()
self.bucket_name = dify_config.AZURE_BLOB_CONTAINER_NAME
self.account_url = dify_config.AZURE_BLOB_ACCOUNT_URL
self.account_name = dify_config.AZURE_BLOB_ACCOUNT_NAME
self.account_key = dify_config.AZURE_BLOB_ACCOUNT_KEY
        self.credential: Optional[ChainedTokenCredential] = None
        if self.account_key == "managedidentity":
            self.credential = DefaultAzureCredential()
def save(self, filename, data):
client = self._sync_client()
blob_container = client.get_container_client(container=self.bucket_name)
blob_container.upload_blob(filename, data)
def load_once(self, filename: str) -> bytes:
client = self._sync_client()
        container = client.get_container_client(container=self.bucket_name)
        blob = container.get_blob_client(blob=filename)
data: bytes = blob.download_blob().readall()
return data
def load_stream(self, filename: str) -> Generator:
client = self._sync_client()
blob = client.get_blob_client(container=self.bucket_name, blob=filename)
blob_data = blob.download_blob()
yield from blob_data.chunks()
def download(self, filename, target_filepath):
client = self._sync_client()
blob = client.get_blob_client(container=self.bucket_name, blob=filename)
with open(target_filepath, "wb") as my_blob:
blob_data = blob.download_blob()
blob_data.readinto(my_blob)
def exists(self, filename):
client = self._sync_client()
blob = client.get_blob_client(container=self.bucket_name, blob=filename)
return blob.exists()
def delete(self, filename):
client = self._sync_client()
blob_container = client.get_container_client(container=self.bucket_name)
blob_container.delete_blob(filename)
def _sync_client(self):
if self.account_key == "managedidentity":
return BlobServiceClient(account_url=self.account_url, credential=self.credential) # type: ignore
cache_key = "azure_blob_sas_token_{}_{}".format(self.account_name, self.account_key)
cache_result = redis_client.get(cache_key)
if cache_result is not None:
sas_token = cache_result.decode("utf-8")
else:
sas_token = generate_account_sas(
account_name=self.account_name or "",
account_key=self.account_key or "",
resource_types=ResourceTypes(service=True, container=True, object=True),
permission=AccountSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
expiry=datetime.now(UTC).replace(tzinfo=None) + timedelta(hours=1),
)
redis_client.set(cache_key, sas_token, ex=3000)
return BlobServiceClient(account_url=self.account_url or "", credential=sas_token)


@@ -0,0 +1,57 @@
import base64
import hashlib
from collections.abc import Generator
from baidubce.auth.bce_credentials import BceCredentials # type: ignore
from baidubce.bce_client_configuration import BceClientConfiguration # type: ignore
from baidubce.services.bos.bos_client import BosClient # type: ignore
from configs import dify_config
from extensions.storage.base_storage import BaseStorage
class BaiduObsStorage(BaseStorage):
"""Implementation for Baidu OBS storage."""
def __init__(self):
super().__init__()
self.bucket_name = dify_config.BAIDU_OBS_BUCKET_NAME
client_config = BceClientConfiguration(
credentials=BceCredentials(
access_key_id=dify_config.BAIDU_OBS_ACCESS_KEY,
secret_access_key=dify_config.BAIDU_OBS_SECRET_KEY,
),
endpoint=dify_config.BAIDU_OBS_ENDPOINT,
)
self.client = BosClient(config=client_config)
def save(self, filename, data):
md5 = hashlib.md5()
md5.update(data)
content_md5 = base64.standard_b64encode(md5.digest())
self.client.put_object(
bucket_name=self.bucket_name, key=filename, data=data, content_length=len(data), content_md5=content_md5
)
def load_once(self, filename: str) -> bytes:
response = self.client.get_object(bucket_name=self.bucket_name, key=filename)
data: bytes = response.data.read()
return data
def load_stream(self, filename: str) -> Generator:
response = self.client.get_object(bucket_name=self.bucket_name, key=filename).data
while chunk := response.read(4096):
yield chunk
def download(self, filename, target_filepath):
self.client.get_object_to_file(bucket_name=self.bucket_name, key=filename, file_name=target_filepath)
def exists(self, filename):
res = self.client.get_object_meta_data(bucket_name=self.bucket_name, key=filename)
        return res is not None
def delete(self, filename):
self.client.delete_object(bucket_name=self.bucket_name, key=filename)
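
put_object requires a base64-encoded MD5 digest of the payload, which is why save computes content_md5 by hand. The digest step in isolation, runnable as-is:

import base64
import hashlib
payload = b"hello"
content_md5 = base64.standard_b64encode(hashlib.md5(payload).digest())
assert content_md5 == b"XUFAKrxLKna5cZ2REBfFkg=="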


@@ -0,0 +1,40 @@
"""Abstract interface for file storage implementations."""
from abc import ABC, abstractmethod
from collections.abc import Generator
class BaseStorage(ABC):
"""Interface for file storage."""
@abstractmethod
def save(self, filename, data):
raise NotImplementedError
@abstractmethod
def load_once(self, filename: str) -> bytes:
raise NotImplementedError
@abstractmethod
def load_stream(self, filename: str) -> Generator:
raise NotImplementedError
@abstractmethod
def download(self, filename, target_filepath):
raise NotImplementedError
@abstractmethod
def exists(self, filename):
raise NotImplementedError
@abstractmethod
def delete(self, filename):
raise NotImplementedError
def scan(self, path, files=True, directories=False) -> list[str]:
"""
Scan files and directories in the given path.
This method is implemented only in some storage backends.
If a storage backend doesn't support scanning, it will raise NotImplementedError.
"""
raise NotImplementedError("This storage backend doesn't support scanning")


@@ -0,0 +1,60 @@
import base64
import io
import json
from collections.abc import Generator
from google.cloud import storage as google_cloud_storage # type: ignore
from configs import dify_config
from extensions.storage.base_storage import BaseStorage
class GoogleCloudStorage(BaseStorage):
"""Implementation for Google Cloud storage."""
def __init__(self):
super().__init__()
self.bucket_name = dify_config.GOOGLE_STORAGE_BUCKET_NAME
service_account_json_str = dify_config.GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64
# if service_account_json_str is empty, use Application Default Credentials
if service_account_json_str:
service_account_json = base64.b64decode(service_account_json_str).decode("utf-8")
# convert str to object
service_account_obj = json.loads(service_account_json)
self.client = google_cloud_storage.Client.from_service_account_info(service_account_obj)
else:
self.client = google_cloud_storage.Client()
def save(self, filename, data):
bucket = self.client.get_bucket(self.bucket_name)
blob = bucket.blob(filename)
with io.BytesIO(data) as stream:
blob.upload_from_file(stream)
    def load_once(self, filename: str) -> bytes:
        bucket = self.client.get_bucket(self.bucket_name)
        blob = bucket.get_blob(filename)
        # get_blob returns None for a missing object, so guard before dereferencing
        if blob is None:
            raise FileNotFoundError("File not found")
        data: bytes = blob.download_as_bytes()
        return data
    def load_stream(self, filename: str) -> Generator:
        bucket = self.client.get_bucket(self.bucket_name)
        blob = bucket.get_blob(filename)
        if blob is None:
            raise FileNotFoundError("File not found")
        with blob.open(mode="rb") as blob_stream:
            while chunk := blob_stream.read(4096):
                yield chunk
    def download(self, filename, target_filepath):
        bucket = self.client.get_bucket(self.bucket_name)
        blob = bucket.get_blob(filename)
        if blob is None:
            raise FileNotFoundError("File not found")
        blob.download_to_filename(target_filepath)
def exists(self, filename):
bucket = self.client.get_bucket(self.bucket_name)
blob = bucket.blob(filename)
return blob.exists()
def delete(self, filename):
bucket = self.client.get_bucket(self.bucket_name)
bucket.delete_blob(filename)


@@ -0,0 +1,51 @@
from collections.abc import Generator
from obs import ObsClient # type: ignore
from configs import dify_config
from extensions.storage.base_storage import BaseStorage
class HuaweiObsStorage(BaseStorage):
"""Implementation for Huawei OBS storage."""
def __init__(self):
super().__init__()
self.bucket_name = dify_config.HUAWEI_OBS_BUCKET_NAME
self.client = ObsClient(
access_key_id=dify_config.HUAWEI_OBS_ACCESS_KEY,
secret_access_key=dify_config.HUAWEI_OBS_SECRET_KEY,
server=dify_config.HUAWEI_OBS_SERVER,
)
def save(self, filename, data):
self.client.putObject(bucketName=self.bucket_name, objectKey=filename, content=data)
def load_once(self, filename: str) -> bytes:
data: bytes = self.client.getObject(bucketName=self.bucket_name, objectKey=filename)["body"].response.read()
return data
def load_stream(self, filename: str) -> Generator:
response = self.client.getObject(bucketName=self.bucket_name, objectKey=filename)["body"].response
while chunk := response.read(4096):
yield chunk
def download(self, filename, target_filepath):
self.client.getObject(bucketName=self.bucket_name, objectKey=filename, downloadPath=target_filepath)
def exists(self, filename):
        return self._get_meta(filename) is not None
def delete(self, filename):
self.client.deleteObject(bucketName=self.bucket_name, objectKey=filename)
def _get_meta(self, filename):
res = self.client.getObjectMetadata(bucketName=self.bucket_name, objectKey=filename)
if res.status < 300:
return res
else:
return None


@@ -0,0 +1,99 @@
import logging
import os
from collections.abc import Generator
from pathlib import Path
import opendal # type: ignore[import]
from dotenv import dotenv_values
from extensions.storage.base_storage import BaseStorage
logger = logging.getLogger(__name__)
def _get_opendal_kwargs(*, scheme: str, env_file_path: str = ".env", prefix: str = "OPENDAL_"):
kwargs = {}
config_prefix = prefix + scheme.upper() + "_"
for key, value in os.environ.items():
if key.startswith(config_prefix):
kwargs[key[len(config_prefix) :].lower()] = value
file_env_vars: dict = dotenv_values(env_file_path) or {}
for key, value in file_env_vars.items():
if key.startswith(config_prefix) and key[len(config_prefix) :].lower() not in kwargs and value:
kwargs[key[len(config_prefix) :].lower()] = value
return kwargs
class OpenDALStorage(BaseStorage):
def __init__(self, scheme: str, **kwargs):
kwargs = kwargs or _get_opendal_kwargs(scheme=scheme)
if scheme == "fs":
root = kwargs.get("root", "storage")
Path(root).mkdir(parents=True, exist_ok=True)
self.op = opendal.Operator(scheme=scheme, **kwargs) # type: ignore
logger.debug(f"opendal operator created with scheme {scheme}")
retry_layer = opendal.layers.RetryLayer(max_times=3, factor=2.0, jitter=True)
self.op = self.op.layer(retry_layer)
logger.debug("added retry layer to opendal operator")
def save(self, filename: str, data: bytes) -> None:
self.op.write(path=filename, bs=data)
logger.debug(f"file {filename} saved")
def load_once(self, filename: str) -> bytes:
if not self.exists(filename):
raise FileNotFoundError("File not found")
content: bytes = self.op.read(path=filename)
logger.debug(f"file {filename} loaded")
return content
def load_stream(self, filename: str) -> Generator:
if not self.exists(filename):
raise FileNotFoundError("File not found")
batch_size = 4096
file = self.op.open(path=filename, mode="rb")
while chunk := file.read(batch_size):
yield chunk
logger.debug(f"file {filename} loaded as stream")
def download(self, filename: str, target_filepath: str):
if not self.exists(filename):
raise FileNotFoundError("File not found")
with Path(target_filepath).open("wb") as f:
f.write(self.op.read(path=filename))
logger.debug(f"file {filename} downloaded to {target_filepath}")
def exists(self, filename: str) -> bool:
res: bool = self.op.exists(path=filename)
return res
def delete(self, filename: str):
if self.exists(filename):
self.op.delete(path=filename)
logger.debug(f"file {filename} deleted")
return
logger.debug(f"file {filename} not found, skip delete")
def scan(self, path: str, files: bool = True, directories: bool = False) -> list[str]:
if not self.exists(path):
raise FileNotFoundError("Path not found")
all_files = self.op.scan(path=path)
if files and directories:
logger.debug(f"files and directories on {path} scanned")
return [f.path for f in all_files]
if files:
logger.debug(f"files on {path} scanned")
return [f.path for f in all_files if not f.path.endswith("/")]
elif directories:
logger.debug(f"directories on {path} scanned")
return [f.path for f in all_files if f.path.endswith("/")]
else:
raise ValueError("At least one of files or directories must be True")


@@ -0,0 +1,59 @@
from collections.abc import Generator
import boto3 # type: ignore
from botocore.exceptions import ClientError # type: ignore
from configs import dify_config
from extensions.storage.base_storage import BaseStorage
class OracleOCIStorage(BaseStorage):
"""Implementation for Oracle OCI storage."""
def __init__(self):
super().__init__()
self.bucket_name = dify_config.OCI_BUCKET_NAME
self.client = boto3.client(
"s3",
aws_secret_access_key=dify_config.OCI_SECRET_KEY,
aws_access_key_id=dify_config.OCI_ACCESS_KEY,
endpoint_url=dify_config.OCI_ENDPOINT,
region_name=dify_config.OCI_REGION,
)
def save(self, filename, data):
self.client.put_object(Bucket=self.bucket_name, Key=filename, Body=data)
def load_once(self, filename: str) -> bytes:
try:
data: bytes = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read()
except ClientError as ex:
if ex.response["Error"]["Code"] == "NoSuchKey":
raise FileNotFoundError("File not found")
else:
raise
return data
def load_stream(self, filename: str) -> Generator:
try:
response = self.client.get_object(Bucket=self.bucket_name, Key=filename)
yield from response["Body"].iter_chunks()
except ClientError as ex:
if ex.response["Error"]["Code"] == "NoSuchKey":
raise FileNotFoundError("File not found")
else:
raise
def download(self, filename, target_filepath):
self.client.download_file(self.bucket_name, filename, target_filepath)
def exists(self, filename):
try:
self.client.head_object(Bucket=self.bucket_name, Key=filename)
return True
        except ClientError:
            return False
def delete(self, filename):
self.client.delete_object(Bucket=self.bucket_name, Key=filename)


@@ -0,0 +1,16 @@
from enum import StrEnum
class StorageType(StrEnum):
ALIYUN_OSS = "aliyun-oss"
AZURE_BLOB = "azure-blob"
BAIDU_OBS = "baidu-obs"
GOOGLE_STORAGE = "google-storage"
HUAWEI_OBS = "huawei-obs"
LOCAL = "local"
OCI_STORAGE = "oci-storage"
OPENDAL = "opendal"
S3 = "s3"
TENCENT_COS = "tencent-cos"
VOLCENGINE_TOS = "volcengine-tos"
SUPBASE = "supabase"


@@ -0,0 +1,59 @@
import io
from collections.abc import Generator
from pathlib import Path
from supabase import Client
from configs import dify_config
from extensions.storage.base_storage import BaseStorage
class SupabaseStorage(BaseStorage):
"""Implementation for supabase obs storage."""
def __init__(self):
super().__init__()
if dify_config.SUPABASE_URL is None:
raise ValueError("SUPABASE_URL is not set")
if dify_config.SUPABASE_API_KEY is None:
raise ValueError("SUPABASE_API_KEY is not set")
if dify_config.SUPABASE_BUCKET_NAME is None:
raise ValueError("SUPABASE_BUCKET_NAME is not set")
self.bucket_name = dify_config.SUPABASE_BUCKET_NAME
self.client = Client(supabase_url=dify_config.SUPABASE_URL, supabase_key=dify_config.SUPABASE_API_KEY)
self.create_bucket(id=dify_config.SUPABASE_BUCKET_NAME, bucket_name=dify_config.SUPABASE_BUCKET_NAME)
def create_bucket(self, id, bucket_name):
if not self.bucket_exists():
self.client.storage.create_bucket(id=id, name=bucket_name)
def save(self, filename, data):
self.client.storage.from_(self.bucket_name).upload(filename, data)
def load_once(self, filename: str) -> bytes:
content: bytes = self.client.storage.from_(self.bucket_name).download(filename)
return content
def load_stream(self, filename: str) -> Generator:
result = self.client.storage.from_(self.bucket_name).download(filename)
byte_stream = io.BytesIO(result)
while chunk := byte_stream.read(4096): # Read in chunks of 4KB
yield chunk
def download(self, filename, target_filepath):
result = self.client.storage.from_(self.bucket_name).download(filename)
Path(target_filepath).write_bytes(result)
    def exists(self, filename):
        result = self.client.storage.from_(self.bucket_name).list(filename)
        # list() returns a plain list of entries; a non-empty result means the prefix exists
        return len(result) > 0
def delete(self, filename):
self.client.storage.from_(self.bucket_name).remove(filename)
def bucket_exists(self):
buckets = self.client.storage.list_buckets()
return any(bucket.name == self.bucket_name for bucket in buckets)


@@ -0,0 +1,43 @@
from collections.abc import Generator
from qcloud_cos import CosConfig, CosS3Client # type: ignore
from configs import dify_config
from extensions.storage.base_storage import BaseStorage
class TencentCosStorage(BaseStorage):
"""Implementation for Tencent Cloud COS storage."""
def __init__(self):
super().__init__()
self.bucket_name = dify_config.TENCENT_COS_BUCKET_NAME
config = CosConfig(
Region=dify_config.TENCENT_COS_REGION,
SecretId=dify_config.TENCENT_COS_SECRET_ID,
SecretKey=dify_config.TENCENT_COS_SECRET_KEY,
Scheme=dify_config.TENCENT_COS_SCHEME,
)
self.client = CosS3Client(config)
def save(self, filename, data):
self.client.put_object(Bucket=self.bucket_name, Body=data, Key=filename)
def load_once(self, filename: str) -> bytes:
data: bytes = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].get_raw_stream().read()
return data
def load_stream(self, filename: str) -> Generator:
response = self.client.get_object(Bucket=self.bucket_name, Key=filename)
yield from response["Body"].get_stream(chunk_size=4096)
def download(self, filename, target_filepath):
response = self.client.get_object(Bucket=self.bucket_name, Key=filename)
response["Body"].get_stream_to_file(target_filepath)
def exists(self, filename):
return self.client.object_exists(Bucket=self.bucket_name, Key=filename)
def delete(self, filename):
self.client.delete_object(Bucket=self.bucket_name, Key=filename)


@@ -0,0 +1,46 @@
from collections.abc import Generator
import tos # type: ignore
from configs import dify_config
from extensions.storage.base_storage import BaseStorage
class VolcengineTosStorage(BaseStorage):
"""Implementation for Volcengine TOS storage."""
def __init__(self):
super().__init__()
self.bucket_name = dify_config.VOLCENGINE_TOS_BUCKET_NAME
self.client = tos.TosClientV2(
ak=dify_config.VOLCENGINE_TOS_ACCESS_KEY,
sk=dify_config.VOLCENGINE_TOS_SECRET_KEY,
endpoint=dify_config.VOLCENGINE_TOS_ENDPOINT,
region=dify_config.VOLCENGINE_TOS_REGION,
)
def save(self, filename, data):
self.client.put_object(bucket=self.bucket_name, key=filename, content=data)
def load_once(self, filename: str) -> bytes:
data = self.client.get_object(bucket=self.bucket_name, key=filename).read()
if not isinstance(data, bytes):
raise TypeError("Expected bytes, got {}".format(type(data).__name__))
return data
def load_stream(self, filename: str) -> Generator:
response = self.client.get_object(bucket=self.bucket_name, key=filename)
while chunk := response.read(4096):
yield chunk
def download(self, filename, target_filepath):
self.client.get_object_to_file(bucket=self.bucket_name, key=filename, file_path=target_filepath)
def exists(self, filename):
res = self.client.head_object(bucket=self.bucket_name, key=filename)
        return res.status_code == 200
def delete(self, filename):
self.client.delete_object(bucket=self.bucket_name, key=filename)
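
Since every backend exposes the same BaseStorage surface, a round-trip smoke test works unchanged against any of them. A sketch under that assumption (the key name is hypothetical):

from extensions.storage.base_storage import BaseStorage
def smoke_test(storage: BaseStorage) -> None:
    key = "smoke/test.txt"
    storage.save(key, b"payload")
    assert storage.exists(key)
    assert storage.load_once(key) == b"payload"
    assert b"".join(storage.load_stream(key)) == b"payload"
    storage.delete(key)
    assert not storage.exists(key)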