add bucket info in values
All checks were successful
Build and Publish / build-release (push) Successful in 1m8s

2026-04-24 14:38:05 -05:00
parent c77845852e
commit 2018f5a5f6
3 changed files with 82 additions and 46 deletions

View File

@@ -1,7 +1,10 @@
 from pydantic_settings import BaseSettings
 
 class Settings(BaseSettings):
-    # S3 settings
+    # COSI settings (preferred)
+    cosi_bucket_info_path: str = ""
+
+    # S3 settings (fallback)
     s3_endpoint: str = "http://localhost:9000"
     s3_access_key_id: str = "minioadmin"
     s3_secret_key: str = "minioadmin"
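
Note: the new cosi_bucket_info_path setting points at a COSI BucketInfo document mounted into the pod. A minimal sketch of the JSON shape the storage module expects (field names taken from the storage.py changes below; the concrete values are placeholders, not from this repo):

    # Hypothetical BucketInfo contents; only the fields storage.py reads are shown.
    bucket_info_example = {
        "spec": {
            "bucketName": "document-bucket-abc123",
            "secretS3": {
                "endpoint": "https://s3.example.local",
                "accessKeyID": "EXAMPLE_ACCESS_KEY",
                "accessSecretKey": "EXAMPLE_SECRET_KEY",
            },
        },
    }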

View File

@@ -1,6 +1,7 @@
 import boto3
-import tempfile
+import json
 import os
+import tempfile
 from botocore.client import Config
 from fastapi import UploadFile
 from app.config import settings
@@ -8,7 +9,36 @@ from app.logger import get_logger
 
 logger = get_logger(__name__)
 
+
+def read_bucket_info() -> dict:
+    if not settings.cosi_bucket_info_path:
+        raise ValueError("COSI_BUCKET_INFO_PATH not set")
+    with open(settings.cosi_bucket_info_path, "r") as f:
+        return json.load(f)
+
+
+def get_cosi_s3_config() -> dict:
+    bucket_info = read_bucket_info()
+    s3_conf = bucket_info["spec"]["secretS3"]
+    return {
+        "endpoint": s3_conf["endpoint"],
+        "access_key": s3_conf["accessKeyID"],
+        "secret_key": s3_conf["accessSecretKey"],
+        "bucket": bucket_info["spec"]["bucketName"]
+    }
+
+
 def get_client():
+    if settings.cosi_bucket_info_path:
+        cosi_config = get_cosi_s3_config()
+        return boto3.client(
+            "s3",
+            endpoint_url=cosi_config["endpoint"],
+            aws_access_key_id=cosi_config["access_key"],
+            aws_secret_access_key=cosi_config["secret_key"],
+            config=Config(signature_version="s3v4"),
+            region_name="us-east-1"
+        )
     return boto3.client(
         "s3",
         endpoint_url=settings.s3_endpoint,
@@ -18,39 +48,42 @@ def get_client():
         region_name=settings.s3_region
     )
 
-def ensure_bucket_exists() -> None:
-    """Ensure the S3 bucket exists, create it if it doesn't exist.
-
-    Raises:
-        Exception: If bucket creation fails (service will fail to start)
-    """
+
+def get_bucket_name() -> str:
+    if settings.cosi_bucket_info_path:
+        return get_cosi_s3_config()["bucket"]
+    return settings.s3_bucket
+
+
+def ensure_bucket_exists() -> None:
+    bucket_name = get_bucket_name()
     client = get_client()
     try:
-        client.head_bucket(Bucket=settings.s3_bucket)
-        logger.info(f"Bucket '{settings.s3_bucket}' already exists")
+        client.head_bucket(Bucket=bucket_name)
+        logger.info(f"Bucket '{bucket_name}' already exists")
     except client.exceptions.ClientError as e:
         error_code = e.response['Error']['Code']
         if error_code == '404':
             try:
                 client.create_bucket(
-                    Bucket=settings.s3_bucket,
+                    Bucket=bucket_name,
                     CreateBucketConfiguration={
-                        'LocationConstraint': settings.s3_region
+                        'LocationConstraint': 'us-east-1'
                     }
                 )
-                logger.info(f"Created bucket '{settings.s3_bucket}'")
+                logger.info(f"Created bucket '{bucket_name}'")
             except Exception as create_error:
-                logger.error(f"Failed to create bucket '{settings.s3_bucket}': {create_error}")
+                logger.error(f"Failed to create bucket '{bucket_name}': {create_error}")
                 raise
         else:
             logger.error(f"Error checking bucket: {e}")
             raise
 
+
 def upload_file(file: UploadFile, s3_key: str, content_type: str, metadata: dict = None) -> str:
-    """Upload file to S3 with metadata"""
+    bucket_name = get_bucket_name()
     client = get_client()
 
-    # Read file content
     file.file.seek(0, os.SEEK_END)
     file_size = file.file.tell()
     file.file.seek(0)
@@ -62,7 +95,7 @@ def upload_file(file: UploadFile, s3_key: str, content_type: str, metadata: dict
         extra_args["Metadata"] = metadata
 
     client.put_object(
-        Bucket=settings.s3_bucket,
+        Bucket=bucket_name,
         Key=s3_key,
         Body=file_content,
         ContentLength=file_size,
@@ -71,40 +104,45 @@ def upload_file(file: UploadFile, s3_key: str, content_type: str, metadata: dict
     )
     return s3_key
 
+
 def delete_file(s3_key: str) -> None:
-    """Delete file from S3"""
+    bucket_name = get_bucket_name()
     client = get_client()
-    client.delete_object(Bucket=settings.s3_bucket, Key=s3_key)
+    client.delete_object(Bucket=bucket_name, Key=s3_key)
 
+
 def file_exists(s3_key: str) -> bool:
-    """Check if file exists in S3"""
+    bucket_name = get_bucket_name()
     client = get_client()
     try:
-        client.head_object(Bucket=settings.s3_bucket, Key=s3_key)
+        client.head_object(Bucket=bucket_name, Key=s3_key)
         return True
     except client.exceptions.ClientError:
         return False
 
+
 def get_file_metadata(s3_key: str) -> dict:
-    """Get file metadata from S3"""
+    bucket_name = get_bucket_name()
    client = get_client()
-    response = client.head_object(Bucket=settings.s3_bucket, Key=s3_key)
+    response = client.head_object(Bucket=bucket_name, Key=s3_key)
     return response.get("Metadata", {})
 
+
 def download_to_temp(s3_key: str) -> str:
-    """Download file from S3 to temp file"""
+    bucket_name = get_bucket_name()
     client = get_client()
     suffix = os.path.splitext(s3_key)[-1] or ".tmp"
     tmp = tempfile.NamedTemporaryFile(delete=False, suffix=suffix)
-    client.download_fileobj(settings.s3_bucket, s3_key, tmp)
+    client.download_fileobj(bucket_name, s3_key, tmp)
     tmp.close()
     return tmp.name
 
+
 def presigned_download_url(s3_key: str, expires_in: int = 3600) -> str:
-    """Generate presigned download URL"""
+    bucket_name = get_bucket_name()
     client = get_client()
     return client.generate_presigned_url(
         "get_object",
-        Params={"Bucket": settings.s3_bucket, "Key": s3_key},
+        Params={"Bucket": bucket_name, "Key": s3_key},
         ExpiresIn=expires_in
     )
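
Note: a rough usage sketch of the new fallback order in storage.py — if cosi_bucket_info_path is set, the bucket name and credentials come from the mounted BucketInfo document, otherwise the existing s3_* settings are used. The module path, temp file, and sample values below are illustrative assumptions; only get_bucket_name() and get_cosi_s3_config() come from the diff above.

    import json
    import tempfile

    from app.config import settings
    from app import storage  # assumed module path for the file changed above

    # Stand-in BucketInfo document reusing the MinIO defaults from config.py
    # and the old S3_BUCKET name from the Helm values.
    fake_bucket_info = {
        "spec": {
            "bucketName": "document-bucket",
            "secretS3": {
                "endpoint": "http://localhost:9000",
                "accessKeyID": "minioadmin",
                "accessSecretKey": "minioadmin",
            },
        },
    }
    with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as f:
        json.dump(fake_bucket_info, f)

    settings.cosi_bucket_info_path = f.name
    print(storage.get_bucket_name())      # -> "document-bucket"
    print(storage.get_cosi_s3_config())   # endpoint and keys parsed from the file

    # With cosi_bucket_info_path left empty, get_bucket_name() returns
    # settings.s3_bucket and get_client() uses the s3_* settings instead.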

View File

@@ -11,22 +11,8 @@ controllers:
         env:
           LOG_LEVEL: info
           PORT: "8082"
-          S3_ENDPOINT:
-            value: "https://dev.s3.corredorconect.com/"
-          S3_ACCESS_KEY_ID:
-            valueFrom:
-              secretKeyRef:
-                name: 'document-service-s3-credentials'
-                key: rootAccessKeyId
-          S3_SECRET_KEY:
-            valueFrom:
-              secretKeyRef:
-                name: 'document-service-s3-credentials'
-                key: rootSecretAccessKey
-          S3_BUCKET:
-            value: "document-bucket"
-          S3_REGION:
-            value: "us-east-1"
+          COSI_BUCKET_INFO_PATH:
+            value: "/var/run/secrets/cosi/BucketInfo"
         probes:
           liveness:
             enabled: true
@@ -56,6 +42,15 @@ service:
         port: 8082
         protocol: HTTP
 
+persistence:
+  cosi-bucket-info:
+    enabled: true
+    type: secret
+    name: document-service-s3-credentials
+    globalMounts:
+      - path: /var/run/secrets/cosi
+        readOnly: true
+
 rawResources:
   bucket:
     enabled: true
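
Note: with this values change the container no longer receives S3_* variables; pydantic-settings maps COSI_BUCKET_INFO_PATH from the environment onto Settings.cosi_bucket_info_path, so the chart only has to mount the Secret and export the path. A minimal sketch of that wiring (assumes the Settings class from app/config.py above is importable):

    import os

    from app.config import Settings

    # Simulate the environment the chart now provides to the container.
    os.environ["COSI_BUCKET_INFO_PATH"] = "/var/run/secrets/cosi/BucketInfo"

    settings = Settings()
    assert settings.cosi_bucket_info_path == "/var/run/secrets/cosi/BucketInfo"

    # If the variable is unset, the field defaults to "" and storage.py falls
    # back to the s3_* settings.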