This commit is contained in:
Simone Scarduzio
2025-09-25 16:58:43 +02:00
parent 3b580a4070
commit f1cdc10fd5
4 changed files with 12 additions and 25 deletions

View File

@@ -200,21 +200,12 @@ class LoggingMetricsAdapter(MetricsPort):
def increment(self, name: str, value: int = 1, tags: dict[str, str] | None = None) -> None:
    """Log a counter increment as a structured METRIC:INCREMENT line.

    Args:
        name: Metric name.
        value: Amount to increment by (defaults to 1).
        tags: Optional metric tags; logged as an empty dict when None.
    """
    # Fix: the pasted diff left both the old multi-line logger.log call and the
    # new single-line call in place, so every increment was logged twice.
    # Keep exactly one emission (the post-refactor single-line form).
    logger.log(self.log_level, f"METRIC:INCREMENT {name}={value} tags={tags or {}}")
def gauge(self, name: str, value: float, tags: dict[str, str] | None = None) -> None:
    """Log a gauge observation as a structured METRIC:GAUGE line.

    Args:
        name: Metric name.
        value: Gauge value; formatted to two decimal places.
        tags: Optional metric tags; logged as an empty dict when None.
    """
    # Fix: diff residue left the old multi-line logger.log call alongside the
    # new single-line one, double-logging every gauge value. Emit once.
    logger.log(self.log_level, f"METRIC:GAUGE {name}={value:.2f} tags={tags or {}}")
def timing(self, name: str, value: float, tags: dict[str, str] | None = None) -> None:
    """Log a timing observation as a structured METRIC:TIMING line.

    Args:
        name: Metric name.
        value: Elapsed time in milliseconds; formatted to two decimal places.
        tags: Optional metric tags; logged as an empty dict when None.
    """
    # Fix: diff residue left the old multi-line logger.log call alongside the
    # new single-line one, double-logging every timing sample. Emit once.
    logger.log(self.log_level, f"METRIC:TIMING {name}={value:.2f}ms tags={tags or {}}")

View File

@@ -61,6 +61,7 @@ def create_service(
if metrics_type == "cloudwatch":
# Import here to avoid dependency if not used
from ...adapters.metrics_cloudwatch import CloudWatchMetricsAdapter
metrics = CloudWatchMetricsAdapter(
namespace=os.environ.get("DG_METRICS_NAMESPACE", "DeltaGlider"),
region=region,
@@ -68,6 +69,7 @@ def create_service(
)
elif metrics_type == "logging":
from ...adapters.metrics_cloudwatch import LoggingMetricsAdapter
metrics = LoggingMetricsAdapter(log_level=log_level)
else:
metrics = NoopMetricsAdapter()

View File

@@ -281,7 +281,7 @@ class DeltaGliderClient:
# Use storage adapter's list_objects method if available
from .adapters.storage_s3 import S3StorageAdapter
if hasattr(self.service.storage, 'list_objects'):
if hasattr(self.service.storage, "list_objects"):
# Use list_objects method if available
result = self.service.storage.list_objects(
bucket=Bucket,
@@ -1072,7 +1072,7 @@ class DeltaGliderClient:
storage_adapter = self.service.storage
# Check if storage adapter has boto3 client
if hasattr(storage_adapter, 'client'):
if hasattr(storage_adapter, "client"):
try:
# Use boto3's native presigned URL generation
url = storage_adapter.client.generate_presigned_url(
@@ -1095,9 +1095,7 @@ class DeltaGliderClient:
base_url = f"https://{bucket}.s3.amazonaws.com"
# Warning: This is not a real presigned URL, just a placeholder
self.service.logger.warning(
"Using placeholder presigned URL - not suitable for production"
)
self.service.logger.warning("Using placeholder presigned URL - not suitable for production")
return f"{base_url}/{key}?expires={ExpiresIn}"
def generate_presigned_post(
@@ -1123,7 +1121,7 @@ class DeltaGliderClient:
storage_adapter = self.service.storage
# Check if storage adapter has boto3 client
if hasattr(storage_adapter, 'client'):
if hasattr(storage_adapter, "client"):
try:
# Use boto3's native presigned POST generation
response = storage_adapter.client.generate_presigned_post(
@@ -1148,7 +1146,7 @@ class DeltaGliderClient:
"fields": {
"key": Key,
**(Fields or {}),
}
},
}
def _parse_tagging(self, tagging: str) -> dict[str, str]:

View File

@@ -148,9 +148,7 @@ class TestBoto3Compatibility:
def test_put_object_with_bytes(self, client):
    """Test put_object with byte data."""
    # Fix: the pasted diff kept both the old multi-line put_object call and the
    # new single-line call, uploading the object twice per test run. Call once.
    response = client.put_object(Bucket="test-bucket", Key="test.txt", Body=b"Hello World")
    assert "ETag" in response
    assert response["ResponseMetadata"]["HTTPStatusCode"] == 200
@@ -161,9 +159,7 @@ class TestBoto3Compatibility:
def test_put_object_with_string(self, client):
"""Test put_object with string data."""
response = client.put_object(
Bucket="test-bucket", Key="test2.txt", Body="Hello String"
)
response = client.put_object(Bucket="test-bucket", Key="test2.txt", Body="Hello String")
assert "ETag" in response
obj = client.service.storage.objects["test-bucket/test2.txt"]