Initial commit - cleaned repository
This commit is contained in:
333
services/files/backend/minio_client.py
Normal file
333
services/files/backend/minio_client.py
Normal file
@ -0,0 +1,333 @@
|
||||
"""
|
||||
MinIO Client for S3-compatible object storage
|
||||
"""
|
||||
import asyncio
import io
import json
import logging
from datetime import timedelta
from typing import Optional, Dict, Any, List

from minio import Minio
from minio.error import S3Error
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class MinIOManager:
    """Thin async facade over the blocking MinIO SDK.

    Holds connection settings; the actual client is created lazily by
    ``initialize()``, so constructing a manager performs no network I/O.
    """

    def __init__(self, endpoint: str, access_key: str, secret_key: str, secure: bool = False):
        # Connection parameters, kept for reconnect/diagnostics.
        self.endpoint = endpoint
        self.access_key = access_key
        self.secret_key = secret_key
        self.secure = secure
        # Populated by initialize(); None until then.
        self.client = None
        self.is_connected = False
async def initialize(self):
|
||||
"""Initialize MinIO client and create default buckets"""
|
||||
try:
|
||||
self.client = Minio(
|
||||
self.endpoint,
|
||||
access_key=self.access_key,
|
||||
secret_key=self.secret_key,
|
||||
secure=self.secure
|
||||
)
|
||||
|
||||
# Create default buckets
|
||||
default_buckets = ["default", "public", "thumbnails", "temp"]
|
||||
for bucket in default_buckets:
|
||||
await self.create_bucket(bucket, public=(bucket == "public"))
|
||||
|
||||
self.is_connected = True
|
||||
logger.info(f"Connected to MinIO at {self.endpoint}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to initialize MinIO: {e}")
|
||||
self.is_connected = False
|
||||
raise
|
||||
|
||||
async def create_bucket(self, bucket_name: str, public: bool = False):
|
||||
"""Create a new bucket"""
|
||||
try:
|
||||
# Run in executor to avoid blocking
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
# Check if bucket exists
|
||||
exists = await loop.run_in_executor(
|
||||
None,
|
||||
self.client.bucket_exists,
|
||||
bucket_name
|
||||
)
|
||||
|
||||
if not exists:
|
||||
await loop.run_in_executor(
|
||||
None,
|
||||
self.client.make_bucket,
|
||||
bucket_name
|
||||
)
|
||||
logger.info(f"Created bucket: {bucket_name}")
|
||||
|
||||
# Set bucket policy if public
|
||||
if public:
|
||||
policy = {
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Principal": {"AWS": ["*"]},
|
||||
"Action": ["s3:GetObject"],
|
||||
"Resource": [f"arn:aws:s3:::{bucket_name}/*"]
|
||||
}
|
||||
]
|
||||
}
|
||||
import json
|
||||
await loop.run_in_executor(
|
||||
None,
|
||||
self.client.set_bucket_policy,
|
||||
bucket_name,
|
||||
json.dumps(policy)
|
||||
)
|
||||
logger.info(f"Set public policy for bucket: {bucket_name}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create bucket {bucket_name}: {e}")
|
||||
raise
|
||||
|
||||
async def upload_file(self, bucket: str, object_name: str, file_data: bytes,
|
||||
content_type: str = "application/octet-stream",
|
||||
metadata: Optional[Dict[str, str]] = None):
|
||||
"""Upload a file to MinIO"""
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
# Convert bytes to BytesIO
|
||||
file_stream = io.BytesIO(file_data)
|
||||
length = len(file_data)
|
||||
|
||||
# Upload file
|
||||
result = await loop.run_in_executor(
|
||||
None,
|
||||
self.client.put_object,
|
||||
bucket,
|
||||
object_name,
|
||||
file_stream,
|
||||
length,
|
||||
content_type,
|
||||
metadata
|
||||
)
|
||||
|
||||
logger.info(f"Uploaded {object_name} to {bucket}")
|
||||
return {
|
||||
"bucket": bucket,
|
||||
"object_name": object_name,
|
||||
"etag": result.etag,
|
||||
"version_id": result.version_id
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to upload file: {e}")
|
||||
raise
|
||||
|
||||
async def get_file(self, bucket: str, object_name: str) -> io.BytesIO:
|
||||
"""Get a file from MinIO"""
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
# Get object
|
||||
response = await loop.run_in_executor(
|
||||
None,
|
||||
self.client.get_object,
|
||||
bucket,
|
||||
object_name
|
||||
)
|
||||
|
||||
# Read data
|
||||
data = response.read()
|
||||
response.close()
|
||||
response.release_conn()
|
||||
|
||||
return io.BytesIO(data)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get file: {e}")
|
||||
raise
|
||||
|
||||
async def delete_file(self, bucket: str, object_name: str):
|
||||
"""Delete a file from MinIO"""
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
await loop.run_in_executor(
|
||||
None,
|
||||
self.client.remove_object,
|
||||
bucket,
|
||||
object_name
|
||||
)
|
||||
|
||||
logger.info(f"Deleted {object_name} from {bucket}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete file: {e}")
|
||||
raise
|
||||
|
||||
async def list_files(self, bucket: str, prefix: Optional[str] = None,
|
||||
recursive: bool = True) -> List[Dict[str, Any]]:
|
||||
"""List files in a bucket"""
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
objects = await loop.run_in_executor(
|
||||
None,
|
||||
lambda: list(self.client.list_objects(
|
||||
bucket,
|
||||
prefix=prefix,
|
||||
recursive=recursive
|
||||
))
|
||||
)
|
||||
|
||||
files = []
|
||||
for obj in objects:
|
||||
files.append({
|
||||
"name": obj.object_name,
|
||||
"size": obj.size,
|
||||
"last_modified": obj.last_modified,
|
||||
"etag": obj.etag,
|
||||
"content_type": obj.content_type
|
||||
})
|
||||
|
||||
return files
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to list files: {e}")
|
||||
raise
|
||||
|
||||
async def get_file_info(self, bucket: str, object_name: str) -> Dict[str, Any]:
|
||||
"""Get file information"""
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
stat = await loop.run_in_executor(
|
||||
None,
|
||||
self.client.stat_object,
|
||||
bucket,
|
||||
object_name
|
||||
)
|
||||
|
||||
return {
|
||||
"size": stat.size,
|
||||
"etag": stat.etag,
|
||||
"content_type": stat.content_type,
|
||||
"last_modified": stat.last_modified,
|
||||
"metadata": stat.metadata
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get file info: {e}")
|
||||
raise
|
||||
|
||||
async def generate_presigned_download_url(self, bucket: str, object_name: str,
|
||||
expires_in: int = 3600) -> str:
|
||||
"""Generate a presigned URL for downloading"""
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
url = await loop.run_in_executor(
|
||||
None,
|
||||
self.client.presigned_get_object,
|
||||
bucket,
|
||||
object_name,
|
||||
timedelta(seconds=expires_in)
|
||||
)
|
||||
|
||||
return url
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to generate presigned URL: {e}")
|
||||
raise
|
||||
|
||||
async def generate_presigned_upload_url(self, bucket: str, object_name: str,
|
||||
expires_in: int = 3600) -> str:
|
||||
"""Generate a presigned URL for uploading"""
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
url = await loop.run_in_executor(
|
||||
None,
|
||||
self.client.presigned_put_object,
|
||||
bucket,
|
||||
object_name,
|
||||
timedelta(seconds=expires_in)
|
||||
)
|
||||
|
||||
return url
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to generate presigned upload URL: {e}")
|
||||
raise
|
||||
|
||||
async def copy_file(self, source_bucket: str, source_object: str,
|
||||
dest_bucket: str, dest_object: str):
|
||||
"""Copy a file within MinIO"""
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
await loop.run_in_executor(
|
||||
None,
|
||||
self.client.copy_object,
|
||||
dest_bucket,
|
||||
dest_object,
|
||||
f"/{source_bucket}/{source_object}"
|
||||
)
|
||||
|
||||
logger.info(f"Copied {source_object} to {dest_object}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to copy file: {e}")
|
||||
raise
|
||||
|
||||
async def list_buckets(self) -> List[str]:
|
||||
"""List all buckets"""
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
buckets = await loop.run_in_executor(
|
||||
None,
|
||||
self.client.list_buckets
|
||||
)
|
||||
|
||||
return [bucket.name for bucket in buckets]
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to list buckets: {e}")
|
||||
raise
|
||||
|
||||
async def get_storage_stats(self) -> Dict[str, Any]:
|
||||
"""Get storage statistics"""
|
||||
try:
|
||||
buckets = await self.list_buckets()
|
||||
|
||||
stats = {
|
||||
"buckets": buckets,
|
||||
"bucket_count": len(buckets),
|
||||
"bucket_stats": {}
|
||||
}
|
||||
|
||||
# Get stats for each bucket
|
||||
for bucket in buckets:
|
||||
files = await self.list_files(bucket)
|
||||
total_size = sum(f["size"] for f in files)
|
||||
stats["bucket_stats"][bucket] = {
|
||||
"file_count": len(files),
|
||||
"total_size": total_size
|
||||
}
|
||||
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get storage stats: {e}")
|
||||
raise
|
||||
|
||||
async def check_file_exists(self, bucket: str, object_name: str) -> bool:
|
||||
"""Check if a file exists"""
|
||||
try:
|
||||
await self.get_file_info(bucket, object_name)
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
Reference in New Issue
Block a user