feat: project foundation — monorepo setup, shared config, redis streams, telemetry

- pyproject.toml with core deps and optional dep groups per service
- shared/config.py: Pydantic BaseSettings with TRADING_ env prefix
- shared/redis_streams.py: StreamPublisher/StreamConsumer wrappers
- shared/telemetry.py: OpenTelemetry + Prometheus metric export
- tests for Redis Streams helpers (5 passing)
This commit is contained in:
Viktor Barzin 2026-02-22 15:13:26 +00:00
parent 0ac9884b89
commit ae5b3f89d1
No known key found for this signature in database
GPG key ID: 0EB088298288D958
7 changed files with 248 additions and 0 deletions

0
shared/__init__.py Normal file
View file

19
shared/config.py Normal file
View file

@ -0,0 +1,19 @@
"""Shared configuration for all trading bot services."""
from pydantic_settings import BaseSettings
class BaseConfig(BaseSettings):
    """Base configuration shared across all services.
    All settings can be overridden via environment variables
    prefixed with ``TRADING_``.
    """
    # Async SQLAlchemy DSN (asyncpg driver) for the shared Postgres database.
    database_url: str = "postgresql+asyncpg://trading:trading@localhost:5432/trading"
    # Redis connection URL consumed by the Streams helpers in shared/redis_streams.py.
    redis_url: str = "redis://localhost:6379/0"
    # Root logging level name, e.g. "DEBUG", "INFO", "WARNING".
    log_level: str = "INFO"
    # Service name passed to the OpenTelemetry meter (see shared/telemetry.py).
    otel_service_name: str = "trading-bot"
    # TCP port for the Prometheus scrape endpoint started by setup_telemetry().
    otel_metrics_port: int = 9090
    # Pydantic v2 settings config: env vars are looked up as TRADING_<FIELD>.
    model_config = {"env_prefix": "TRADING_"}

68
shared/redis_streams.py Normal file
View file

@ -0,0 +1,68 @@
"""Thin wrappers around redis-py Streams for publish/consume with JSON serialization."""
import json
import logging
from typing import AsyncIterator
from redis.asyncio import Redis
logger = logging.getLogger(__name__)
class StreamPublisher:
    """Appends JSON-serialized payloads to a single Redis Stream."""

    def __init__(self, redis: Redis, stream: str) -> None:
        # Hold on to the client and the target stream key.
        self.redis = redis
        self.stream = stream

    async def publish(self, data: dict) -> str:
        """XADD *data*, JSON-encoded under the ``data`` field, to the stream.

        Returns the entry ID that Redis assigned.
        """
        payload = {"data": json.dumps(data)}
        entry_id = await self.redis.xadd(self.stream, payload)
        logger.debug("Published to %s: %s", self.stream, entry_id)
        return entry_id
class StreamConsumer:
    """Consumes JSON-encoded messages from a Redis Stream using consumer groups."""

    def __init__(self, redis: Redis, stream: str, group: str, consumer: str) -> None:
        self.redis = redis
        self.stream = stream
        self.group = group
        self.consumer = consumer

    async def ensure_group(self) -> None:
        """Create the consumer group if it does not already exist.

        Raises any Redis error other than the expected "BUSYGROUP"
        reply (group already exists, normal on restart).
        """
        try:
            await self.redis.xgroup_create(self.stream, self.group, id="0", mkstream=True)
            logger.info("Created consumer group %s on %s", self.group, self.stream)
        except Exception as exc:
            # Only the "group already exists" reply is expected here.  A bare
            # swallow would also hide connection failures or a WRONGTYPE key,
            # so anything that is not BUSYGROUP is re-raised.
            if "BUSYGROUP" not in str(exc):
                raise

    async def consume(
        self, batch_size: int = 10, block_ms: int = 5000
    ) -> AsyncIterator[tuple[str, dict]]:
        """Yield ``(msg_id, data)`` tuples from the stream.

        Messages are acknowledged immediately after yielding so they
        won't be redelivered to this consumer.

        Args:
            batch_size: maximum entries fetched per XREADGROUP call.
            block_ms: how long (milliseconds) XREADGROUP blocks waiting
                for new entries before returning an empty batch.
        """
        await self.ensure_group()
        while True:
            messages = await self.redis.xreadgroup(
                self.group,
                self.consumer,
                {self.stream: ">"},
                count=batch_size,
                block=block_ms,
            )
            for _stream_name, entries in messages:
                for msg_id, fields in entries:
                    # Field keys are bytes by default, but str when the client
                    # was built with decode_responses=True — accept both.
                    raw = fields[b"data"] if b"data" in fields else fields["data"]
                    data = json.loads(raw)
                    yield msg_id, data
                    await self.redis.xack(self.stream, self.group, msg_id)

21
shared/telemetry.py Normal file
View file

@ -0,0 +1,21 @@
"""OpenTelemetry setup with Prometheus metric export."""
from opentelemetry import metrics
from opentelemetry.exporter.prometheus import PrometheusMetricReader
from opentelemetry.sdk.metrics import MeterProvider
from prometheus_client import start_http_server
def setup_telemetry(service_name: str, metrics_port: int = 9090) -> metrics.Meter:
    """Configure OpenTelemetry metrics with a Prometheus export path.

    Installs a global ``MeterProvider`` backed by a Prometheus reader and
    starts an HTTP server on *metrics_port* so an external Prometheus
    instance can scrape ``/metrics``.

    Args:
        service_name: name under which the returned meter is registered.
        metrics_port: TCP port for the Prometheus scrape endpoint.

    Returns:
        A ``Meter`` that services use to create counters, histograms, etc.
    """
    prometheus_reader = PrometheusMetricReader()
    metrics.set_meter_provider(MeterProvider(metric_readers=[prometheus_reader]))
    # Expose /metrics for external scrapes.
    start_http_server(metrics_port)
    return metrics.get_meter(service_name)