Compare commits

...

23 Commits

Author SHA1 Message Date
Mateusz Gruszczyński
2aee79e94e data in headers and other functions 2025-10-09 17:05:25 +02:00
Mateusz Gruszczyński
ca55b48d24 data in headers and other functions 2025-10-09 17:01:54 +02:00
Mateusz Gruszczyński
72c69c3d42 data in headers and other functions 2025-10-09 17:00:29 +02:00
Mateusz Gruszczyński
51378ce292 data in headers and other functions 2025-10-09 16:56:58 +02:00
Mateusz Gruszczyński
559ef2e2c0 data in headers and other functions 2025-10-09 16:55:56 +02:00
Mateusz Gruszczyński
941937354c data in headers and other functions 2025-10-09 16:54:54 +02:00
Mateusz Gruszczyński
73279cc5f6 data in headers and other functions 2025-10-09 16:49:27 +02:00
Mateusz Gruszczyński
2d267a1e9e data in headers and other functions 2025-10-09 16:46:28 +02:00
Mateusz Gruszczyński
039ec9e799 data in headers and other functions 2025-10-09 16:44:55 +02:00
Mateusz Gruszczyński
acd2657b5b data in headers and other functions 2025-10-09 16:43:15 +02:00
Mateusz Gruszczyński
eb137c87b0 data in headers and other functions 2025-10-09 16:40:56 +02:00
Mateusz Gruszczyński
cb109b63ae fix compose 2025-10-07 20:55:33 +02:00
Mateusz Gruszczyński
742e56b56d fix 2025-10-06 11:05:48 +02:00
Mateusz Gruszczyński
07a190d067 work behind a proxy 2025-10-06 10:03:25 +02:00
Mateusz Gruszczyński
926aa73357 work behind a proxy 2025-10-06 10:01:57 +02:00
Mateusz Gruszczyński
2a281f6b44 work behind a proxy 2025-10-06 09:47:26 +02:00
Mateusz Gruszczyński
0b010e8bef work behind a proxy 2025-10-06 09:40:04 +02:00
Mateusz Gruszczyński
203b816e8d work behind a proxy 2025-10-06 09:35:15 +02:00
Mateusz Gruszczyński
06b23dcd96 work behind a proxy 2025-10-06 09:32:14 +02:00
Mateusz Gruszczyński
feba31ce6f work behind a proxy 2025-10-06 09:26:02 +02:00
Mateusz Gruszczyński
cfdf38ce1d favicon 204 2025-10-06 09:02:34 +02:00
Mateusz Gruszczyński
6db9d9ccd4 logging for health 2025-10-06 09:00:00 +02:00
Mateusz Gruszczyński
95917a9178 logging for health 2025-10-06 08:57:16 +02:00
11 changed files with 343 additions and 84 deletions

View File

@@ -29,3 +29,6 @@ CACHE_MAXSIZE=4096
 # Log
 LOG_LEVEL=info
+
+# Proxy
+TRUSTED_PROXIES="10.0.0.0/8,127.0.0.1"

Dockerfile (View File)

@@ -5,4 +5,4 @@ RUN apt-get update && apt-get install -y build-essential libmaxminddb0 libmaxmin
 RUN pip install --no-cache-dir -r app/requirements.txt
 COPY . /app
 ENV PYTHONUNBUFFERED=1
-CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--log-config", "logging.yml"]
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--log-config", "logging.yml", "--proxy-headers", "--forwarded-allow-ips=*"]

app/api.py (View File)

@@ -1,54 +1,200 @@
-from fastapi import APIRouter, Request, Depends, HTTPException, status
+from fastapi import APIRouter, Request, Depends, HTTPException, status, Response
 from fastapi.security import HTTPBasic, HTTPBasicCredentials
 from .deps import get_geo
 from .config import settings
 from .geo import reload_provider
 import secrets
 import ipaddress
+import re
+import json
 
 router = APIRouter()
 security = HTTPBasic()
 
+VENDOR_SINGLE_IP_HEADERS = [
+    "cf-connecting-ip",     # Cloudflare
+    "true-client-ip",       # Akamai/F5
+    "x-cluster-client-ip",  # some load balancers
+    "x-real-ip",            # the classic (nginx/traefik)
+]
+
+
 def _check_admin(creds: HTTPBasicCredentials):
     user = settings.admin_user
     pwd = settings.admin_pass
     if not user or not pwd:
-        raise HTTPException(status_code=403, detail='admin credentials not configured')
+        raise HTTPException(status_code=403, detail="admin credentials not configured")
     # constant-time compare
-    if not (secrets.compare_digest(creds.username, user) and secrets.compare_digest(creds.password, pwd)):
-        raise HTTPException(status_code=401, detail='invalid credentials', headers={"WWW-Authenticate":"Basic"})
+    if not (
+        secrets.compare_digest(creds.username, user)
+        and secrets.compare_digest(creds.password, pwd)
+    ):
+        raise HTTPException(
+            status_code=401,
+            detail="invalid credentials",
+            headers={"WWW-Authenticate": "Basic"},
+        )
     return True
 
-@router.get('/ip')
-async def my_ip(request: Request, geo=Depends(get_geo)):
-    ip = request.client.host
-    # handle IPv6 mapped IPv4 like ::ffff:1.2.3.4
-    try:
-        ip = ip.split('%')[0]
-    except Exception:
-        pass
-    return geo.lookup(ip)
-
-@router.get('/ip/{ip_address}')
-async def ip_lookup(ip_address: str, geo=Depends(get_geo)):
-    # validate IP
-    try:
-        # allow zone index for IPv6 and strip it for validation
-        if '%' in ip_address:
-            addr = ip_address.split('%')[0]
-        else:
-            addr = ip_address
-        ipaddress.ip_address(addr)
-    except Exception:
-        raise HTTPException(status_code=400, detail='invalid IP address')
-    return geo.lookup(ip_address)
+
+def _normalize_ip_str(ip_raw: str) -> str | None:
+    """Strip the port, whitespace and any surrounding quotes."""
+    if not ip_raw:
+        return None
+    ip_raw = ip_raw.strip().strip('"').strip("'")
+    # strip the port, e.g. 1.2.3.4:5678
+    if ":" in ip_raw and ip_raw.count(":") == 1:
+        # most likely IPv4:port
+        ip_raw = ip_raw.split(":")[0]
+    # leave IPv6 %zone handling to the callers
+    return ip_raw
+
+
+def _is_ip_trusted(ip_str: str) -> bool:
+    try:
+        ip = ipaddress.ip_address(ip_str.split("%")[0])
+    except Exception:
+        return False
+    for net in settings.trusted_proxies:
+        try:
+            if ip in net:
+                return True
+        except Exception:
+            continue
+    return False
+
+
+def _extract_from_forwarded(header_value: str) -> list[str]:
+    # Forwarded: for=192.0.2.43, for="[2001:db8:cafe::17]";proto=http;by=...
+    ips = []
+    parts = re.split(r",\s*(?=[fF]or=)", header_value)
+    for part in parts:
+        m = re.search(r'for=(?P<val>"[^"]+"|[^;,\s]+)', part)
+        if m:
+            val = m.group("val").strip('"').strip("'")
+            ips.append(val)
+    return ips
+
+
+def geo_headers(data: dict) -> dict:
+    h = {}
+    country = data.get("country", {}).get("name") if data.get("country") else None
+    city = data.get("city")
+    ip_val = data.get("ip")
+    if ip_val and country:
+        h["X-IP-ADDRESS"] = ip_val
+        h["X-COUNTRY"] = country
+        if city:
+            h["X-CITY"] = city
+    return h
+
+
+def get_client_ip(request: Request) -> str:
+    """
+    Returns the client IP, taking into account:
+      - CF-Connecting-IP / True-Client-IP / X-Cluster-Client-Ip / X-Real-IP
+      - X-Forwarded-For (client, proxy1, proxy2 order)
+      - Forwarded: for=... (RFC 7239)
+    X-Forwarded-For logic:
+      - take the list of IPs
+      - strip trusted proxies from the RIGHT
+      - return the last remaining entry (if nothing is left, return the left-most one)
+    """
+    # 0) Vendor headers carrying a single IP (preferred if present and not a trusted proxy)
+    for h in VENDOR_SINGLE_IP_HEADERS:
+        v = request.headers.get(h)
+        if v:
+            ip = _normalize_ip_str(v)
+            if ip:
+                if not settings.trusted_proxies or not _is_ip_trusted(ip):
+                    return ip
+                # if the vendor header points at a trusted proxy, keep looking
+
+    # 1) X-Forwarded-For (client, proxy1, proxy2...)
+    xff = request.headers.get("x-forwarded-for")
+    if xff:
+        raw_ips = [p.strip() for p in xff.split(",") if p.strip()]
+        norm_ips = []
+        for raw in raw_ips:
+            v = _normalize_ip_str(raw)
+            if v:
+                norm_ips.append(v)
+        if norm_ips:
+            if settings.trusted_proxies:
+                # strip trusted hops from the RIGHT
+                tmp = norm_ips[:]
+                while tmp and _is_ip_trusted(tmp[-1]):
+                    tmp.pop()
+                if tmp:
+                    return tmp[-1]  # last untrusted hop = the client
+                # edge case: every hop is trusted, return the most client-side (left-most) one
+                return norm_ips[0]
+            else:
+                # with no trusted-proxy list, take the left-most entry
+                return norm_ips[0]
+
+    # 2) Forwarded (RFC 7239)
+    fwd = request.headers.get("forwarded")
+    if fwd:
+        fwd_ips = _extract_from_forwarded(fwd)
+        norm_ips = [_normalize_ip_str(ip) for ip in fwd_ips if _normalize_ip_str(ip)]
+        if norm_ips:
+            if settings.trusted_proxies:
+                tmp = norm_ips[:]
+                while tmp and _is_ip_trusted(tmp[-1]):
+                    tmp.pop()
+                if tmp:
+                    return tmp[-1]
+                return norm_ips[0]
+            return norm_ips[0]
+
+    # 3) Fallback: request.client.host
+    try:
+        host = request.client.host
+        if host:
+            return host.split("%")[0] if "%" in host else host
+    except Exception:
+        pass
+
+    return "0.0.0.0"
+
+
+@router.api_route('/ip', methods=["GET", "HEAD"])
+async def my_ip(request: Request, geo=Depends(get_geo)):
+    ip = get_client_ip(request)  # resolve the client IP
+    data = geo.lookup(ip)  # geo lookup
+    headers = geo_headers(data)
+    if request.method == "HEAD":
+        return Response(status_code=200, headers=headers)
+    body = json.dumps(data, ensure_ascii=False) + "\n"
+    return Response(content=body, media_type="application/json", headers=headers)
+
+
+@router.api_route('/ip/{ip_address}', methods=["GET", "HEAD"])
+async def ip_lookup(ip_address: str, request: Request, geo=Depends(get_geo)):
+    data = geo.lookup(ip_address)
+    headers = geo_headers(data)
+    if request.method == "HEAD":
+        return Response(status_code=200, headers=headers)
+    body = json.dumps(data, ensure_ascii=False) + "\n"
+    return Response(content=body, media_type="application/json", headers=headers)
+
 
-@router.post('/reload')
+@router.post("/reload")
 async def reload(creds: HTTPBasicCredentials = Depends(security)):
     _check_admin(creds)
     provider = reload_provider()
-    return {'reloaded': True, 'provider': type(provider).__name__}
+    return {"reloaded": True, "provider": type(provider).__name__}
 
-@router.get('/health')
+
+@router.get("/health")
 async def health():
-    return {'status':'ok'}
+    return {"status": "ok"}
+
+
+# from fastapi import Request
+# @router.get("/_debug/headers")
+# async def debug_headers(request: Request):
+#     return {"headers": dict(request.headers)}

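A minimal standalone sketch of the X-Forwarded-For handling added above (trusted hops are stripped from the right, the last remaining hop wins); the header values and the 10.0.0.0/8 trust list below are illustrative, not taken from the repository:

import ipaddress

# illustrative trust list, mirroring TRUSTED_PROXIES="10.0.0.0/8,127.0.0.1"
TRUSTED = [ipaddress.ip_network("10.0.0.0/8"), ipaddress.ip_network("127.0.0.1/32")]


def client_ip_from_xff(xff: str) -> str:
    """Right-to-left stripping of trusted proxies, as in get_client_ip()."""
    hops = [p.strip() for p in xff.split(",") if p.strip()]
    remaining = hops[:]
    while remaining and any(
        ipaddress.ip_address(remaining[-1]) in net for net in TRUSTED
    ):
        remaining.pop()
    # the last untrusted hop is treated as the client;
    # if every hop was trusted, fall back to the left-most entry
    return remaining[-1] if remaining else hops[0]


print(client_ip_from_xff("198.51.100.23, 10.0.0.5, 10.0.0.2"))  # -> 198.51.100.23
print(client_ip_from_xff("10.0.0.5, 10.0.0.2"))                 # -> 10.0.0.5

The real get_client_ip() additionally consults the vendor headers, the RFC 7239 Forwarded header, and request.client.host, in that order.
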
app/config.py (View File)

@@ -1,38 +1,70 @@
 import os
 from pydantic_settings import BaseSettings
 from dotenv import load_dotenv
+import ipaddress
 
 load_dotenv()
 
+
+def _parse_trusted_proxies(raw: str):
+    # raw: comma-separated list of IPs or CIDR ranges
+    items = [p.strip() for p in (raw or "").split(",") if p.strip()]
+    nets = []
+    for p in items:
+        try:
+            if "/" in p:
+                nets.append(ipaddress.ip_network(p, strict=False))
+            else:
+                # treat single IP as /32 or /128 network
+                ip = ipaddress.ip_address(p)
+                nets.append(
+                    ipaddress.ip_network(
+                        ip.exploded + ("/32" if ip.version == 4 else "/128")
+                    )
+                )
+        except Exception:
+            # ignore malformed entries
+            continue
+    return nets
+
+
 class Settings(BaseSettings):
-    geo_provider: str = os.getenv('GEO_PROVIDER', 'maxmind')
+    geo_provider: str = os.getenv("GEO_PROVIDER", "maxmind")
 
     # MaxMind
-    maxmind_account_id: str | None = os.getenv('MAXMIND_ACCOUNT_ID')
-    maxmind_license_key: str | None = os.getenv('MAXMIND_LICENSE_KEY')
-    maxmind_db_name: str = os.getenv('MAXMIND_DB_NAME', 'GeoLite2-City')
-    maxmind_db_path: str = os.getenv('MAXMIND_DB_PATH', '/data/GeoLite2-City.mmdb')
-    maxmind_download_url_template: str = os.getenv(
-        'MAXMIND_DOWNLOAD_URL_TEMPLATE',
-        'https://download.maxmind.com/app/geoip_download?edition_id={DBNAME}&license_key={LICENSE_KEY}&suffix=tar.gz'
-    )
-    maxmind_direct_db_url: str | None = os.getenv('MAXMIND_DIRECT_DB_URL')
-    maxmind_github_repo: str | None = os.getenv('MAXMIND_GITHUB_REPO')
-    github_token: str | None = os.getenv('GITHUB_TOKEN')
+    maxmind_account_id: str | None = os.getenv("MAXMIND_ACCOUNT_ID")
+    maxmind_license_key: str | None = os.getenv("MAXMIND_LICENSE_KEY")
+    maxmind_db_name: str = os.getenv("MAXMIND_DB_NAME", "GeoLite2-City")
+    maxmind_db_path: str = os.getenv("MAXMIND_DB_PATH", "/data/GeoLite2-City.mmdb")
+    maxmind_download_url_template: str | None = os.getenv(
+        "MAXMIND_DOWNLOAD_URL_TEMPLATE",
+        "https://download.maxmind.com/app/geoip_download?edition_id={DBNAME}&license_key={LICENSE_KEY}&suffix=tar.gz",
+    )
+    maxmind_direct_db_url: str | None = os.getenv("MAXMIND_DIRECT_DB_URL")
+    maxmind_github_repo: str | None = os.getenv("MAXMIND_GITHUB_REPO")
+    github_token: str | None = os.getenv("GITHUB_TOKEN")
 
     # IP2Location
-    ip2location_download_url: str | None = os.getenv('IP2LOCATION_DOWNLOAD_URL')
-    ip2location_db_path: str = os.getenv('IP2LOCATION_DB_PATH', '/data/IP2LOCATION.BIN')
+    ip2location_download_url: str | None = os.getenv("IP2LOCATION_DOWNLOAD_URL")
+    ip2location_db_path: str = os.getenv("IP2LOCATION_DB_PATH", "/data/IP2LOCATION.BIN")
 
-    update_interval_seconds: int = int(os.getenv('UPDATE_INTERVAL_SECONDS', '86400'))
-    host: str = os.getenv('HOST', '0.0.0.0')
-    port: int = int(os.getenv('PORT', '8000'))
-    log_level: str = os.getenv('LOG_LEVEL', 'info')
-    admin_user: str | None = os.getenv('ADMIN_USER')
-    admin_pass: str | None = os.getenv('ADMIN_PASS')
-    cache_maxsize: int = int(os.getenv('CACHE_MAXSIZE', '4096'))
+    update_interval_seconds: int = int(os.getenv("UPDATE_INTERVAL_SECONDS", "86400"))
+    host: str = os.getenv("HOST", "0.0.0.0")
+    port: int = int(os.getenv("PORT", "8000"))
+    log_level: str = os.getenv("LOG_LEVEL", "info")
+    admin_user: str | None = os.getenv("ADMIN_USER")
+    admin_pass: str | None = os.getenv("ADMIN_PASS")
+    cache_maxsize: int = int(os.getenv("CACHE_MAXSIZE", "4096"))
+
+    # New: list of trusted proxies (CIDR or IP), comma-separated
+    # Example: "127.0.0.1,10.0.0.0/8,192.168.1.5"
+    _trusted_proxies_raw: str | None = os.getenv("TRUSTED_PROXIES", "")
+
+    @property
+    def trusted_proxies(self):
+        return _parse_trusted_proxies(self._trusted_proxies_raw)
 
 settings = Settings()

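A quick illustration of what _parse_trusted_proxies is expected to produce; the sample TRUSTED_PROXIES value is hypothetical:

import ipaddress

raw = "127.0.0.1,10.0.0.0/8,192.168.1.5"  # hypothetical TRUSTED_PROXIES value
nets = []
for part in (p.strip() for p in raw.split(",") if p.strip()):
    if "/" in part:
        nets.append(ipaddress.ip_network(part, strict=False))
    else:
        # a bare IP becomes a host network, /32 for IPv4 and /128 for IPv6
        ip = ipaddress.ip_address(part)
        nets.append(ipaddress.ip_network(f"{ip.exploded}/{32 if ip.version == 4 else 128}"))

print(nets)
# [IPv4Network('127.0.0.1/32'), IPv4Network('10.0.0.0/8'), IPv4Network('192.168.1.5/32')]
print(any(ipaddress.ip_address("10.1.2.3") in n for n in nets))  # True -> a trusted hop
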
app/deps.py (View File)

@@ -1,6 +1,7 @@
 from functools import lru_cache
 from .geo import get_provider_instance
 
+
 @lru_cache()
 def get_geo():
     return get_provider_instance()

app/geo.py (View File)

@@ -8,6 +8,7 @@ from .config import settings
 try:
     import geoip2.database
     from geoip2.errors import AddressNotFoundError
+
     try:
         # geoip2<5
         from geoip2.errors import InvalidDatabaseError  # type: ignore
@@ -17,8 +18,10 @@ try:
 except Exception as e:
     print("Import geoip2 failed:", e)
     geoip2 = None
 
     # fallback aliases so the rest of the code keeps working
     class _TmpErr(Exception): ...
+
     AddressNotFoundError = _TmpErr
     InvalidDatabaseError = _TmpErr
@@ -67,8 +70,10 @@ class MaxMindGeo(GeoLookupBase):
     def _detect_db_type(self):
         """Tries to determine the database type from metadata, the file name, or probe queries."""
-        t = (getattr(self._reader, "metadata", None)
-             and getattr(self._reader.metadata, "database_type", "")) or ""
+        t = (
+            getattr(self._reader, "metadata", None)
+            and getattr(self._reader.metadata, "database_type", "")
+        ) or ""
         if t:
             return t.lower()
@@ -80,7 +85,7 @@ class MaxMindGeo(GeoLookupBase):
         probes = [
             ("city", self._reader.city),
             ("country", self._reader.country),
-            ("asn", self._reader.asn)
+            ("asn", self._reader.asn),
         ]
         test_ip = "1.1.1.1"
         for key, fn in probes:
@@ -107,7 +112,9 @@ class MaxMindGeo(GeoLookupBase):
             pass
         self._reader = geoip2.database.Reader(self.db_path)
         self._db_type = self._detect_db_type()
-        print(f"[MaxMindGeo] opened {self.db_path} type={self._db_type or 'unknown'}")
+        print(
+            f"[MaxMindGeo] opened {self.db_path} type={self._db_type or 'unknown'}"
+        )
 
     def _lookup_inner(self, ip: str):
         t = (self._db_type or "").lower()
@@ -117,7 +124,9 @@ class MaxMindGeo(GeoLookupBase):
"ip": ip, "ip": ip,
"asn": { "asn": {
"number": getattr(rec, "autonomous_system_number", None), "number": getattr(rec, "autonomous_system_number", None),
"organization": getattr(rec, "autonomous_system_organization", None), "organization": getattr(
rec, "autonomous_system_organization", None
),
}, },
"database_type": self._db_type, "database_type": self._db_type,
} }
@@ -145,7 +154,9 @@ class MaxMindGeo(GeoLookupBase):
"continent": getattr(rec.continent, "name", None), "continent": getattr(rec.continent, "name", None),
"database_type": self._db_type, "database_type": self._db_type,
} }
raise RuntimeError(f"Nieobsługiwany / niewykryty typ bazy: {self._db_type} (plik: {self.db_path})") raise RuntimeError(
f"Nieobsługiwany / niewykryty typ bazy: {self._db_type} (plik: {self.db_path})"
)
def lookup(self, ip: str): def lookup(self, ip: str):
if not self.is_valid_ip(ip): if not self.is_valid_ip(ip):
@@ -213,8 +224,12 @@ _provider_lock = threading.RLock()
 def _create_provider():
     provider = settings.geo_provider.lower()
     if provider == "ip2location":
-        return IP2LocationGeo(db_path=settings.ip2location_db_path, cache_maxsize=settings.cache_maxsize)
-    return MaxMindGeo(db_path=settings.maxmind_db_path, cache_maxsize=settings.cache_maxsize)
+        return IP2LocationGeo(
+            db_path=settings.ip2location_db_path, cache_maxsize=settings.cache_maxsize
+        )
+    return MaxMindGeo(
+        db_path=settings.maxmind_db_path, cache_maxsize=settings.cache_maxsize
+    )
 
 
 def get_provider_instance():

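For orientation, the dictionary shape that geo_headers() and the middleware in app/main.py read from geo.lookup(); the concrete values below are made up:

# hypothetical lookup() result for a City-type database
sample = {
    "ip": "203.0.113.7",
    "country": {"name": "Poland"},  # geo_headers() reads country["name"]
    "city": "Warsaw",
    "continent": "Europe",
    "database_type": "geolite2-city",
}
# geo_headers(sample) would then emit:
#   X-IP-ADDRESS: 203.0.113.7, X-COUNTRY: Poland, X-CITY: Warsaw
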
app/logging_filter.py (View File)

@@ -1,6 +1,10 @@
 import logging
 
-class IgnoreHealth(logging.Filter):
+
+class IgnoreHealthAndFavicon(logging.Filter):
+    def __init__(self, name: str = ""):
+        super().__init__(name)
+
     def filter(self, record: logging.LogRecord) -> bool:
         msg = record.getMessage()
-        return "/health" not in msg
+        return all(p not in msg for p in ["/health", "/favicon.ico"])

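A self-contained sketch of the same filtering idea, with an illustrative logger name and messages: records that mention /health or /favicon.ico are dropped, everything else is logged.

import logging


class IgnoreHealthAndFavicon(logging.Filter):
    def filter(self, record: logging.LogRecord) -> bool:
        msg = record.getMessage()
        return all(p not in msg for p in ["/health", "/favicon.ico"])


logging.basicConfig(level=logging.INFO, format="%(message)s")
access_log = logging.getLogger("demo.access")
access_log.addFilter(IgnoreHealthAndFavicon())

access_log.info('10.0.0.2 - "GET /health HTTP/1.1" 200')       # suppressed
access_log.info('10.0.0.2 - "GET /favicon.ico HTTP/1.1" 204')  # suppressed
access_log.info('203.0.113.7 - "GET /ip HTTP/1.1" 200')        # printed
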
app/main.py (View File)

@@ -1,10 +1,65 @@
-from fastapi import FastAPI
-from .api import router
+from fastapi import FastAPI, Request, Response
+from fastapi.responses import JSONResponse, PlainTextResponse
+from starlette.middleware.base import BaseHTTPMiddleware
+from .deps import get_geo
+from .api import get_client_ip, router
 from .config import settings
 import uvicorn
 
-app = FastAPI(title='IP Geo API')
+app = FastAPI(title="IP Geo API")
 app.include_router(router)
 
-if __name__ == '__main__':
-    uvicorn.run('app.main:app', host=settings.host, port=settings.port, log_level=settings.log_level)
+
+async def add_geo_headers(request, call_next):
+    ip = get_client_ip(request)
+    geo = get_geo()
+    data = geo.lookup(ip)
+    response: Response = await call_next(request)
+    country = data.get("country", {}).get("name") if data.get("country") else None
+    city = data.get("city")
+    ip_val = data.get("ip")
+    if ip_val and country:
+        response.headers["X-IP-ADDRESS"] = ip_val
+        response.headers["X-COUNTRY"] = country
+        if city:
+            response.headers["X-CITY"] = city
+    return response
+
+
+app.add_middleware(BaseHTTPMiddleware, dispatch=add_geo_headers)
+
+
+@app.get("/favicon.ico")
+async def favicon():
+    return Response(status_code=204)
+
+
+@app.api_route("/", methods=["GET", "HEAD"])
+async def root(request: Request):
+    ua = request.headers.get("user-agent", "").lower()
+    ip = get_client_ip(request).strip()
+    if any(x in ua for x in ["mozilla", "chrome", "safari", "edge", "firefox"]):
+        if request.method == "HEAD":
+            return Response(status_code=404)
+        return JSONResponse({"detail": "Not Found"}, status_code=404)
+    if request.method == "HEAD":
+        return Response(status_code=200)
+    return PlainTextResponse(ip + "\n")
+
+
+if __name__ == "__main__":
+    uvicorn.run(
+        "app.main:app",
+        host=settings.host,
+        port=settings.port,
+        log_level=settings.log_level,
+        proxy_headers=True,
+        forwarded_allow_ips="*",
+        # access_log=True
+    )

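Assuming the service is running locally on port 8000, the /ip endpoint and the new geo headers can be exercised like this (a sketch; host, port, and returned values depend on your deployment):

import json
import urllib.request

req = urllib.request.Request("http://localhost:8000/ip", method="GET")
with urllib.request.urlopen(req) as resp:
    # headers are set both by geo_headers() and by the add_geo_headers middleware
    print(resp.headers.get("X-IP-ADDRESS"), resp.headers.get("X-COUNTRY"), resp.headers.get("X-CITY"))
    print(json.loads(resp.read().decode("utf-8")))
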
docker-compose.yml (View File)

@@ -4,30 +4,32 @@ services:
     container_name: ip-geo-api
     restart: unless-stopped
     ports:
-      - "${PORT:-8080}:8000"
+      - "${PORT:-8080}:${PORT}"
     healthcheck:
       test:
         [
           "CMD",
           "python",
           "-c",
-          "import urllib.request; import sys; req = urllib.request.Request('http://localhost:8000/health'); sys.exit(0) if urllib.request.urlopen(req).read() == b'OK' else sys.exit(1)",
+          "import json, urllib.request, sys; r = urllib.request.urlopen('http://localhost:${PORT}/health'); d = json.load(r); sys.exit(0) if r.getcode()==200 and d.get('status')=='ok' else sys.exit(1)",
         ]
       interval: 30s
       timeout: 10s
       retries: 3
       start_period: 10s
     command:
       [
         "uvicorn",
         "app.main:app",
         "--host",
         "0.0.0.0",
         "--port",
-        "8000",
+        "${PORT}",
         "--log-config",
         "logging.yml",
-      ]
+        "--proxy-headers",
+        "--forwarded-allow-ips=*",
+      ]
     volumes:
       - ./data:/data
     env_file:

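The healthcheck one-liner above, unrolled for readability: it now requires HTTP 200 and a JSON body with status == "ok" from /health (8000 stands in for the ${PORT} placeholder resolved by compose):

import json
import sys
import urllib.request

# readable equivalent of the inline "python -c ..." healthcheck command
with urllib.request.urlopen("http://localhost:8000/health") as r:
    data = json.load(r)
    sys.exit(0 if r.getcode() == 200 and data.get("status") == "ok" else 1)
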
logging.yml (View File)

@@ -3,24 +3,25 @@ disable_existing_loggers: False
 filters:
   ignore_health:
-    "()": app.logging_filter.IgnoreHealth
+    "()": app.logging_filter.IgnoreHealthAndFavicon
 
 formatters:
+  access:
+    format: '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s'
+    use_colors: true
   default:
+    "()": uvicorn.logging.DefaultFormatter
     format: "%(levelprefix)s %(message)s"
     use_colors: true
-  access:
-    "()": uvicorn.logging.AccessFormatter
-    format: '%(client_addr)s - "%(request_line)s" %(status_code)s'
 
 handlers:
+  default:
+    class: logging.StreamHandler
+    formatter: default
   access:
     class: logging.StreamHandler
     formatter: access
     filters: [ignore_health]
-  default:
-    class: logging.StreamHandler
-    formatter: default
 
 loggers:
   uvicorn: