382 lines
11 KiB
Python
382 lines
11 KiB
Python
"""
|
|
Main entry for FastAPI application
|
|
Integrates webhook handling, deduplication, queue management, and related services
|
|
"""
|
|
|
|
import asyncio
|
|
from contextlib import asynccontextmanager
|
|
from typing import Dict, Any
|
|
import structlog
|
|
from fastapi import FastAPI, Request, HTTPException, Depends
|
|
from fastapi.middleware.cors import CORSMiddleware
|
|
from fastapi.responses import JSONResponse, Response
|
|
from redis import asyncio as aioredis
|
|
from prometheus_client import Counter, Histogram, Gauge, generate_latest, CONTENT_TYPE_LATEST
|
|
|
|
from app.config import get_settings
|
|
from app.services.dedup_service import DeduplicationService
|
|
from app.services.jenkins_service import JenkinsService
|
|
from app.services.webhook_service import WebhookService
|
|
from app.tasks.jenkins_tasks import get_celery_app
|
|
# Route imports will be dynamically handled at runtime
|
|
|
|
# Configure structured logging
|
|
structlog.configure(
|
|
processors=[
|
|
structlog.stdlib.filter_by_level,
|
|
structlog.stdlib.add_logger_name,
|
|
structlog.stdlib.add_log_level,
|
|
structlog.stdlib.PositionalArgumentsFormatter(),
|
|
structlog.processors.TimeStamper(fmt="iso"),
|
|
structlog.processors.StackInfoRenderer(),
|
|
structlog.processors.format_exc_info,
|
|
structlog.processors.UnicodeDecoder(),
|
|
structlog.processors.JSONRenderer()
|
|
],
|
|
context_class=dict,
|
|
logger_factory=structlog.stdlib.LoggerFactory(),
|
|
wrapper_class=structlog.stdlib.BoundLogger,
|
|
cache_logger_on_first_use=True,
|
|
)
|
|
|
|
logger = structlog.get_logger()
|
|
|
|
# Monitoring metrics
|
|
WEBHOOK_REQUESTS_TOTAL = Counter(
|
|
"webhook_requests_total",
|
|
"Total number of webhook requests",
|
|
["status", "environment"]
|
|
)
|
|
|
|
WEBHOOK_REQUEST_DURATION = Histogram(
|
|
"webhook_request_duration_seconds",
|
|
"Webhook request duration in seconds",
|
|
["environment"]
|
|
)
|
|
|
|
QUEUE_SIZE = Gauge(
|
|
"queue_size",
|
|
"Current queue size",
|
|
["queue_type"]
|
|
)
|
|
|
|
DEDUP_HITS = Counter(
|
|
"dedup_hits_total",
|
|
"Total number of deduplication hits"
|
|
)
|
|
|
|
# Global service instances
|
|
dedup_service: DeduplicationService = None
|
|
jenkins_service: JenkinsService = None
|
|
webhook_service: WebhookService = None
|
|
celery_app = None
|
|
redis_client: aioredis.Redis = None
|
|
|
|
|
|
@asynccontextmanager
|
|
async def lifespan(app: FastAPI):
|
|
"""Application lifecycle management"""
|
|
global dedup_service, jenkins_service, webhook_service, celery_app, redis_client
|
|
|
|
# Initialize on startup
|
|
logger.info("Starting Gitea Webhook Ambassador")
|
|
|
|
try:
|
|
# Initialize Redis connection
|
|
settings = get_settings()
|
|
redis_client = aioredis.from_url(
|
|
settings.redis.url,
|
|
password=settings.redis.password,
|
|
db=settings.redis.db,
|
|
encoding="utf-8",
|
|
decode_responses=True
|
|
)
|
|
|
|
# Test Redis connection
|
|
await redis_client.ping()
|
|
logger.info("Redis connection established")
|
|
|
|
# Initialize Celery
|
|
celery_app = get_celery_app()
|
|
|
|
# Initialize services
|
|
dedup_service = DeduplicationService(redis_client)
|
|
jenkins_service = JenkinsService()
|
|
webhook_service = WebhookService(
|
|
dedup_service=dedup_service,
|
|
jenkins_service=jenkins_service,
|
|
celery_app=celery_app
|
|
)
|
|
|
|
logger.info("All services initialized successfully")
|
|
|
|
yield
|
|
|
|
except Exception as e:
|
|
logger.error("Failed to initialize services", error=str(e))
|
|
raise
|
|
|
|
finally:
|
|
# Cleanup on shutdown
|
|
logger.info("Shutting down Gitea Webhook Ambassador")
|
|
|
|
if redis_client:
|
|
await redis_client.close()
|
|
logger.info("Redis connection closed")
|
|
|
|
|
|
# Create FastAPI application
|
|
app = FastAPI(
|
|
title="Gitea Webhook Ambassador",
|
|
description="High-performance Gitea to Jenkins Webhook service",
|
|
version="1.0.0",
|
|
lifespan=lifespan
|
|
)
|
|
|
|
# Add CORS middleware
|
|
app.add_middleware(
|
|
CORSMiddleware,
|
|
allow_origins=["*"], # In production, restrict to specific domains
|
|
allow_credentials=True,
|
|
allow_methods=["*"],
|
|
allow_headers=["*"],
|
|
)
|
|
|
|
|
|
# Dependency injection
|
|
def get_dedup_service() -> DeduplicationService:
|
|
if dedup_service is None:
|
|
raise HTTPException(status_code=503, detail="Deduplication service not available")
|
|
return dedup_service
|
|
|
|
|
|
def get_webhook_service() -> WebhookService:
|
|
if webhook_service is None:
|
|
raise HTTPException(status_code=503, detail="Webhook service not available")
|
|
return webhook_service
|
|
|
|
|
|
def get_celery_app_dep():
|
|
if celery_app is None:
|
|
raise HTTPException(status_code=503, detail="Celery app not available")
|
|
return celery_app
|
|
|
|
|
|
# Middleware
|
|
@app.middleware("http")
|
|
async def log_requests(request: Request, call_next):
|
|
"""Request logging middleware"""
|
|
start_time = asyncio.get_event_loop().time()
|
|
|
|
# Log request start
|
|
logger.info("Request started",
|
|
method=request.method,
|
|
url=str(request.url),
|
|
client_ip=request.client.host if request.client else None)
|
|
|
|
try:
|
|
response = await call_next(request)
|
|
|
|
# Log request complete
|
|
process_time = asyncio.get_event_loop().time() - start_time
|
|
logger.info("Request completed",
|
|
method=request.method,
|
|
url=str(request.url),
|
|
status_code=response.status_code,
|
|
process_time=process_time)
|
|
|
|
return response
|
|
|
|
except Exception as e:
|
|
# Log request error
|
|
process_time = asyncio.get_event_loop().time() - start_time
|
|
logger.error("Request failed",
|
|
method=request.method,
|
|
url=str(request.url),
|
|
error=str(e),
|
|
process_time=process_time)
|
|
raise
|
|
|
|
|
|
@app.middleware("http")
|
|
async def add_security_headers(request: Request, call_next):
|
|
"""Add security headers"""
|
|
response = await call_next(request)
|
|
|
|
# Add security-related HTTP headers
|
|
response.headers["X-Content-Type-Options"] = "nosniff"
|
|
response.headers["X-Frame-Options"] = "DENY"
|
|
response.headers["X-XSS-Protection"] = "1; mode=block"
|
|
response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains"
|
|
|
|
return response
|
|
|
|
|
|
# Exception handler
|
|
@app.exception_handler(Exception)
|
|
async def global_exception_handler(request: Request, exc: Exception):
|
|
"""Global exception handler"""
|
|
logger.error("Unhandled exception",
|
|
method=request.method,
|
|
url=str(request.url),
|
|
error=str(exc),
|
|
exc_info=True)
|
|
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content={
|
|
"success": False,
|
|
"message": "Internal server error",
|
|
"error": str(exc) if get_settings().debug else "An unexpected error occurred"
|
|
}
|
|
)
|
|
|
|
|
|
# Health check endpoint
|
|
@app.get("/health")
|
|
async def health_check():
|
|
"""Basic health check"""
|
|
try:
|
|
# Check Redis connection
|
|
if redis_client:
|
|
await redis_client.ping()
|
|
redis_healthy = True
|
|
else:
|
|
redis_healthy = False
|
|
|
|
# Check Celery connection
|
|
if celery_app:
|
|
inspect = celery_app.control.inspect()
|
|
celery_healthy = bool(inspect.active() is not None)
|
|
else:
|
|
celery_healthy = False
|
|
|
|
return {
|
|
"status": "healthy" if redis_healthy and celery_healthy else "unhealthy",
|
|
"timestamp": asyncio.get_event_loop().time(),
|
|
"services": {
|
|
"redis": "healthy" if redis_healthy else "unhealthy",
|
|
"celery": "healthy" if celery_healthy else "unhealthy"
|
|
}
|
|
}
|
|
|
|
except Exception as e:
|
|
logger.error("Health check failed", error=str(e))
|
|
return JSONResponse(
|
|
status_code=503,
|
|
content={
|
|
"status": "unhealthy",
|
|
"error": str(e)
|
|
}
|
|
)
|
|
|
|
|
|
@app.get("/health/queue")
|
|
async def queue_health_check():
|
|
"""Queue health check"""
|
|
try:
|
|
if celery_app is None:
|
|
return JSONResponse(
|
|
status_code=503,
|
|
content={"status": "unhealthy", "error": "Celery not available"}
|
|
)
|
|
|
|
inspect = celery_app.control.inspect()
|
|
|
|
# Get queue stats
|
|
active = inspect.active()
|
|
reserved = inspect.reserved()
|
|
registered = inspect.registered()
|
|
|
|
active_count = sum(len(tasks) for tasks in (active or {}).values())
|
|
reserved_count = sum(len(tasks) for tasks in (reserved or {}).values())
|
|
worker_count = len(registered or {})
|
|
|
|
# Update monitoring metrics
|
|
QUEUE_SIZE.labels(queue_type="active").set(active_count)
|
|
QUEUE_SIZE.labels(queue_type="reserved").set(reserved_count)
|
|
|
|
return {
|
|
"status": "healthy",
|
|
"queue_stats": {
|
|
"active_tasks": active_count,
|
|
"queued_tasks": reserved_count,
|
|
"worker_count": worker_count,
|
|
"total_queue_length": active_count + reserved_count
|
|
}
|
|
}
|
|
|
|
except Exception as e:
|
|
logger.error("Queue health check failed", error=str(e))
|
|
return JSONResponse(
|
|
status_code=503,
|
|
content={
|
|
"status": "unhealthy",
|
|
"error": str(e)
|
|
}
|
|
)
|
|
|
|
|
|
# Metrics endpoint
|
|
@app.get("/metrics")
|
|
async def metrics():
|
|
"""Prometheus metrics endpoint"""
|
|
return Response(
|
|
content=generate_latest(),
|
|
media_type=CONTENT_TYPE_LATEST
|
|
)
|
|
|
|
|
|
# Include route modules
|
|
try:
|
|
from app.handlers import webhook, health, admin
|
|
|
|
app.include_router(
|
|
webhook.router,
|
|
prefix="/webhook",
|
|
tags=["webhook"]
|
|
)
|
|
|
|
app.include_router(
|
|
health.router,
|
|
prefix="/health",
|
|
tags=["health"]
|
|
)
|
|
|
|
app.include_router(
|
|
admin.router,
|
|
prefix="/admin",
|
|
tags=["admin"]
|
|
)
|
|
except ImportError as e:
|
|
# If module does not exist, log warning but do not interrupt app startup
|
|
logger.warning(f"Some handlers not available: {e}")
|
|
|
|
# Root path
|
|
@app.get("/")
|
|
async def root():
|
|
"""Root path"""
|
|
return {
|
|
"name": "Gitea Webhook Ambassador",
|
|
"version": "1.0.0",
|
|
"description": "High-performance Gitea to Jenkins Webhook service",
|
|
"endpoints": {
|
|
"webhook": "/webhook/gitea",
|
|
"health": "/health",
|
|
"metrics": "/metrics",
|
|
"admin": "/admin"
|
|
}
|
|
}
|
|
|
|
|
|
if __name__ == "__main__":
|
|
import uvicorn
|
|
|
|
settings = get_settings()
|
|
uvicorn.run(
|
|
"app.main:app",
|
|
host=settings.host,
|
|
port=settings.port,
|
|
reload=settings.debug,
|
|
log_level=settings.logging.level.lower()
|
|
) |