1st commit

Mateusz Gruszczyński
2025-10-06 08:27:10 +02:00
commit b26b979a6a
16 changed files with 644 additions and 0 deletions

31
.env.example Normal file

@@ -0,0 +1,31 @@
# Provider
GEO_PROVIDER=maxmind
# Database file paths
MAXMIND_DB_PATH=./data/GeoLite2-City.mmdb
IP2LOCATION_DB_PATH=./data/IP2LOCATION.BIN
# Source priority (1→3)
MAXMIND_GITHUB_REPO=P3TERX/GeoLite.mmdb # 1) GitHub Releases (latest)
# GITHUB_TOKEN=ghp_xxx # optional, raises GitHub API rate limits
# MAXMIND_DIRECT_DB_URL=https://...mmdb # 2) direct URL to a .mmdb file
# MAXMIND_LICENSE_KEY=xxxxxxxxxxxxxxxx # 3) official MaxMind download
# Other
MAXMIND_DB_NAME=GeoLite2-City
MAXMIND_DOWNLOAD_URL_TEMPLATE=https://download.maxmind.com/app/geoip_download?edition_id={DBNAME}&license_key={LICENSE_KEY}&suffix=tar.gz
# Updater
UPDATE_INTERVAL_SECONDS=86400
# API and admin (reload)
HOST=0.0.0.0
PORT=8000
ADMIN_USER=admin
ADMIN_PASS=admin123
# Cache
CACHE_MAXSIZE=4096
# Log
LOG_LEVEL=info

53
.gitignore vendored Normal file

@@ -0,0 +1,53 @@
# Python
__pycache__/
*.py[cod]
*.pyo
*.pyd
*.pdb
*.log
*.tmp
# Venv / environment
.env
.venv/
env/
venv/
pip-wheel-metadata/
# IDE
.vscode/
.idea/
*.swp
*.swo
# OS
.DS_Store
Thumbs.db
# Bytecode
*.pyc
*.pyo
*.pyd
# Compiled files
*.so
# Unit test / coverage
.coverage
htmlcov/
.tox/
.pytest_cache/
.cache/
# Data / databases
data/
*.mmdb
*.BIN
*.tar.gz
*.zip
# Docker
*.pid
*.sock
docker-compose.override.yml
data/*.mmdb

8
Dockerfile Normal file

@@ -0,0 +1,8 @@
FROM python:3.13-slim
WORKDIR /app
COPY app/requirements.txt ./app/requirements.txt
RUN apt-get update && apt-get install -y build-essential libmaxminddb0 libmaxminddb-dev wget && rm -rf /var/lib/apt/lists/*
RUN pip install --no-cache-dir -r app/requirements.txt
COPY . /app
ENV PYTHONUNBUFFERED=1
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

1
LICENSE Normal file

@@ -0,0 +1 @@
MIT

34
README.md Normal file

@@ -0,0 +1,34 @@
# IP Geo API
In brief:

- FastAPI service returning geo data for an IP address (MaxMind GeoLite2 or IP2Location).
- LRU cache, IPv4/IPv6 validation, a /reload endpoint protected by BasicAuth.
- An updater downloads/refreshes the database into /data.

Running:

1. Copy `.env.example` -> `.env` and fill in the values.
2. `docker-compose up --build -d`
3. API: `GET /ip/{ip}` or `GET /ip`. Reload: `POST /reload` with BasicAuth (ADMIN_USER/ADMIN_PASS).

Files:

- app/: application code
- scripts/: downloader + updater
- data/: database storage

## Direct MMDB URL (e.g. GitHub Releases)

If you have a full link to a `.mmdb` file (e.g. from the P3TERX/GeoLite.mmdb Releases page),
set `MAXMIND_DIRECT_DB_URL` in `.env`. The updater downloads the file directly, without unpacking an archive.

## Automatic download from GitHub Releases

Set `MAXMIND_GITHUB_REPO` (default `P3TERX/GeoLite.mmdb`). The updater checks `releases/latest` and downloads the first matching `.mmdb` asset (preferring City → Country → ASN).
Optionally set `GITHUB_TOKEN` for higher GitHub API rate limits.

Source priority in the updater:

1) GitHub Releases (latest) → `.mmdb`
2) `MAXMIND_DIRECT_DB_URL`
3) Official MaxMind download via `MAXMIND_LICENSE_KEY` + `MAXMIND_DOWNLOAD_URL_TEMPLATE`
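## Example requests (Python)

A minimal sketch of calling the API and triggering a reload. It assumes the stack from `docker-compose.yml` is running with the default `HOST_PORT` mapping of 8080, and uses the placeholder credentials from `.env.example`:

```python
import requests

BASE = "http://localhost:8080"  # assumption: default HOST_PORT from docker-compose.yml

# Look up a specific address (GET /ip/{ip})
print(requests.get(f"{BASE}/ip/1.1.1.1", timeout=10).json())

# Look up the caller's own address (GET /ip)
print(requests.get(f"{BASE}/ip", timeout=10).json())

# Ask the API to reopen the database after the updater fetched a new file (POST /reload, BasicAuth)
resp = requests.post(f"{BASE}/reload", auth=("admin", "admin123"), timeout=10)
print(resp.status_code, resp.json())
```

Replace `admin`/`admin123` with the ADMIN_USER/ADMIN_PASS values you set in `.env`.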

0
app/__init__.py Normal file

54
app/api.py Normal file

@@ -0,0 +1,54 @@
from fastapi import APIRouter, Request, Depends, HTTPException, status
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from .deps import get_geo
from .config import settings
from .geo import reload_provider
import secrets
import ipaddress
router = APIRouter()
security = HTTPBasic()
def _check_admin(creds: HTTPBasicCredentials):
user = settings.admin_user
pwd = settings.admin_pass
if not user or not pwd:
raise HTTPException(status_code=403, detail='admin credentials not configured')
# constant-time compare
if not (secrets.compare_digest(creds.username, user) and secrets.compare_digest(creds.password, pwd)):
raise HTTPException(status_code=401, detail='invalid credentials', headers={"WWW-Authenticate":"Basic"})
return True
@router.get('/ip')
async def my_ip(request: Request, geo=Depends(get_geo)):
ip = request.client.host
# strip an IPv6 zone index (e.g. fe80::1%eth0) if present
try:
ip = ip.split('%')[0]
except Exception:
pass
return geo.lookup(ip)
@router.get('/ip/{ip_address}')
async def ip_lookup(ip_address: str, geo=Depends(get_geo)):
# validate IP
try:
# allow zone index for IPv6 and strip it for validation
if '%' in ip_address:
addr = ip_address.split('%')[0]
else:
addr = ip_address
ipaddress.ip_address(addr)
except Exception:
raise HTTPException(status_code=400, detail='invalid IP address')
return geo.lookup(addr)
@router.post('/reload')
async def reload(creds: HTTPBasicCredentials = Depends(security)):
_check_admin(creds)
provider = reload_provider()
return {'reloaded': True, 'provider': type(provider).__name__}
@router.get('/health')
async def health():
return {'status':'ok'}

38
app/config.py Normal file

@@ -0,0 +1,38 @@
import os
from pydantic_settings import BaseSettings
from dotenv import load_dotenv
load_dotenv()
class Settings(BaseSettings):
geo_provider: str = os.getenv('GEO_PROVIDER', 'maxmind')
# MaxMind
maxmind_account_id: str | None = os.getenv('MAXMIND_ACCOUNT_ID')
maxmind_license_key: str | None = os.getenv('MAXMIND_LICENSE_KEY')
maxmind_db_name: str = os.getenv('MAXMIND_DB_NAME', 'GeoLite2-City')
maxmind_db_path: str = os.getenv('MAXMIND_DB_PATH', '/data/GeoLite2-City.mmdb')
maxmind_download_url_template: str = os.getenv(
'MAXMIND_DOWNLOAD_URL_TEMPLATE',
'https://download.maxmind.com/app/geoip_download?edition_id={DBNAME}&license_key={LICENSE_KEY}&suffix=tar.gz'
)
maxmind_direct_db_url: str | None = os.getenv('MAXMIND_DIRECT_DB_URL')
maxmind_github_repo: str | None = os.getenv('MAXMIND_GITHUB_REPO')
github_token: str | None = os.getenv('GITHUB_TOKEN')
# IP2Location
ip2location_download_url: str | None = os.getenv('IP2LOCATION_DOWNLOAD_URL')
ip2location_db_path: str = os.getenv('IP2LOCATION_DB_PATH', '/data/IP2LOCATION.BIN')
update_interval_seconds: int = int(os.getenv('UPDATE_INTERVAL_SECONDS', '86400'))
host: str = os.getenv('HOST', '0.0.0.0')
port: int = int(os.getenv('PORT', '8000'))
log_level: str = os.getenv('LOG_LEVEL', 'info')
admin_user: str | None = os.getenv('ADMIN_USER')
admin_pass: str | None = os.getenv('ADMIN_PASS')
cache_maxsize: int = int(os.getenv('CACHE_MAXSIZE', '4096'))
settings = Settings()

6
app/deps.py Normal file

@@ -0,0 +1,6 @@
from functools import lru_cache
from .geo import get_provider_instance
@lru_cache()
def get_geo():
return get_provider_instance()

238
app/geo.py Normal file

@@ -0,0 +1,238 @@
import ipaddress
import threading
from functools import lru_cache, wraps
from typing import Dict
from pathlib import Path
from .config import settings
try:
import geoip2.database
from geoip2.errors import AddressNotFoundError
try:
# geoip2<5
from geoip2.errors import InvalidDatabaseError # type: ignore
except Exception:
# geoip2>=5
from maxminddb.errors import InvalidDatabaseError # type: ignore
except Exception as e:
print("Import geoip2 failed:", e)
geoip2 = None
# fallback aliases so the rest of the module still works
class _TmpErr(Exception): ...
AddressNotFoundError = _TmpErr
InvalidDatabaseError = _TmpErr
try:
import IP2Location
except Exception:
IP2Location = None
class GeoLookupBase:
def lookup(self, ip: str) -> Dict:
raise NotImplementedError
def reload(self):
pass
def is_valid_ip(self, ip: str) -> bool:
try:
ipaddress.ip_address(ip.split("%")[0] if "%" in ip else ip)
return True
except Exception:
return False
def make_cached(func, maxsize: int):
cached = lru_cache(maxsize=maxsize)(func)
@wraps(func)
def wrapper(ip):
return cached(ip)
wrapper.cache_clear = cached.cache_clear # type: ignore[attr-defined]
return wrapper
class MaxMindGeo(GeoLookupBase):
def __init__(self, db_path: str | None = None, cache_maxsize: int = 4096):
if geoip2 is None:
raise RuntimeError("Brak biblioteki geoip2. Zainstaluj `geoip2`")
self.db_path = db_path or settings.maxmind_db_path
self._reader = None
self._db_type = ""
self._lock = threading.RLock()
self._open()
self.lookup_cached = make_cached(self._lookup_inner, cache_maxsize)
def _detect_db_type(self):
"""Próbuje określić typ bazy na podstawie metadanych, nazwy lub próbnych zapytań."""
t = (getattr(self._reader, "metadata", None)
and getattr(self._reader.metadata, "database_type", "")) or ""
if t:
return t.lower()
name = (self.db_path or "").lower()
for key in ("city", "country", "asn"):
if key in name:
return key
probes = [
("city", self._reader.city),
("country", self._reader.country),
("asn", self._reader.asn)
]
test_ip = "1.1.1.1"
for key, fn in probes:
try:
fn(test_ip)
except InvalidDatabaseError:
continue
except AddressNotFoundError:
return key
except Exception:
continue
else:
return key
return ""
def _open(self):
with self._lock:
if not Path(self.db_path).exists():
raise RuntimeError(f"DB not found: {self.db_path}")
if self._reader:
try:
self._reader.close()
except Exception:
pass
self._reader = geoip2.database.Reader(self.db_path)
self._db_type = self._detect_db_type()
print(f"[MaxMindGeo] opened {self.db_path} type={self._db_type or 'unknown'}")
def _lookup_inner(self, ip: str):
t = (self._db_type or "").lower()
if "asn" in t:
rec = self._reader.asn(ip)
return {
"ip": ip,
"asn": {
"number": getattr(rec, "autonomous_system_number", None),
"organization": getattr(rec, "autonomous_system_organization", None),
},
"database_type": self._db_type,
}
if "city" in t:
rec = self._reader.city(ip)
return {
"ip": ip,
"country": {"iso_code": rec.country.iso_code, "name": rec.country.name},
"continent": getattr(rec.continent, "name", None),
"subdivisions": [sub.name for sub in rec.subdivisions],
"city": getattr(rec.city, "name", None),
"location": {
"latitude": getattr(rec.location, "latitude", None),
"longitude": getattr(rec.location, "longitude", None),
"time_zone": getattr(rec.location, "time_zone", None),
},
"postal": getattr(rec.postal, "code", None),
"database_type": self._db_type,
}
if "country" in t:
rec = self._reader.country(ip)
return {
"ip": ip,
"country": {"iso_code": rec.country.iso_code, "name": rec.country.name},
"continent": getattr(rec.continent, "name", None),
"database_type": self._db_type,
}
raise RuntimeError(f"Nieobsługiwany / niewykryty typ bazy: {self._db_type} (plik: {self.db_path})")
def lookup(self, ip: str):
if not self.is_valid_ip(ip):
return {"ip": ip, "error": "invalid IP"}
try:
return self.lookup_cached(ip)
except Exception as e:
return {"ip": ip, "error": str(e)}
def reload(self):
with self._lock:
self._open()
try:
self.lookup_cached.cache_clear() # type: ignore[attr-defined]
except Exception:
pass
class IP2LocationGeo(GeoLookupBase):
def __init__(self, db_path: str | None = None, cache_maxsize: int = 4096):
if IP2Location is None:
raise RuntimeError("Brak biblioteki IP2Location. Zainstaluj `IP2Location`")
self.db_path = db_path or settings.ip2location_db_path
self._lock = threading.RLock()
self._db = IP2Location.IP2Location(self.db_path)
self.lookup_cached = make_cached(self._lookup_inner, cache_maxsize)
def _lookup_inner(self, ip: str):
r = self._db.get_all(ip)
return {
"ip": ip,
"country": {"iso_code": r.country_short, "name": r.country_long},
"region": r.region,
"city": r.city,
"latitude": r.latitude,
"longitude": r.longitude,
"zip_code": r.zipcode,
"timezone": r.timezone,
}
def lookup(self, ip: str):
if not self.is_valid_ip(ip):
return {"ip": ip, "error": "invalid IP"}
try:
return self.lookup_cached(ip)
except Exception as e:
return {"ip": ip, "error": str(e)}
def reload(self):
with self._lock:
try:
self._db = IP2Location.IP2Location(self.db_path)
except Exception:
pass
try:
self.lookup_cached.cache_clear() # type: ignore[attr-defined]
except Exception:
pass
_provider = None
_provider_lock = threading.RLock()
def _create_provider():
provider = settings.geo_provider.lower()
if provider == "ip2location":
return IP2LocationGeo(db_path=settings.ip2location_db_path, cache_maxsize=settings.cache_maxsize)
return MaxMindGeo(db_path=settings.maxmind_db_path, cache_maxsize=settings.cache_maxsize)
def get_provider_instance():
global _provider
with _provider_lock:
if _provider is None:
_provider = _create_provider()
return _provider
def reload_provider():
global _provider
with _provider_lock:
if _provider is None:
_provider = _create_provider()
else:
try:
_provider.reload()
except Exception:
_provider = _create_provider()
return _provider
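A quick way to exercise app/geo.py outside FastAPI (a minimal sketch: it assumes you run it from the repository root with the packages from app/requirements.txt installed and a City database already present at the configured MAXMIND_DB_PATH):

# sanity_check.py — hypothetical helper script, not part of this commit
from app.geo import get_provider_instance, reload_provider

geo = get_provider_instance()   # builds MaxMindGeo or IP2LocationGeo based on GEO_PROVIDER
print(geo.lookup("1.1.1.1"))    # returns a dict, or {"ip": ..., "error": ...} on failure
print(reload_provider())        # reopens the database file and clears the LRU cache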

10
app/main.py Normal file

@@ -0,0 +1,10 @@
from fastapi import FastAPI
from .api import router
from .config import settings
import uvicorn
app = FastAPI(title='IP Geo API')
app.include_router(router)
if __name__ == '__main__':
uvicorn.run('app.main:app', host=settings.host, port=settings.port, log_level=settings.log_level)

8
app/requirements.txt Normal file

@@ -0,0 +1,8 @@
fastapi
uvicorn[standard]
geoip2
python-dotenv
requests
IP2Location
pydantic
pydantic-settings

41
docker-compose.yml Normal file

@@ -0,0 +1,41 @@
services:
api:
build: .
container_name: ip-geo-api
restart: unless-stopped
ports:
- "${HOST_PORT:-8080}:8000"
healthcheck:
test:
[
"CMD",
"python",
"-c",
"import urllib.request; import sys; req = urllib.request.Request('http://localhost:8000/health'); sys.exit(0) if urllib.request.urlopen(req).read() == b'OK' else sys.exit(1)",
]
interval: 30s
timeout: 10s
retries: 3
start_period: 10s
volumes:
- ./data:/data
env_file:
- .env
networks:
- ip-geo-api_network
depends_on:
- updater
updater:
build: .
container_name: ip-geo-updater
restart: unless-stopped
volumes:
- ./data:/data
command: ["python", "scripts/updater.py"]
env_file:
- .env
networks:
ip-geo-api_network:
driver: bridge

60
scripts/download_helpers.py Normal file

@@ -0,0 +1,60 @@
import os
import requests
import tarfile
import shutil
from pathlib import Path
def download_maxmind(license_key: str, db_name: str, dest_path: str, url_template: str):
url = url_template.format(DBNAME=db_name, LICENSE_KEY=license_key)
tmp = Path("/tmp") / "maxmind_download.tar.gz"
r = requests.get(url, stream=True, timeout=60)
r.raise_for_status()
with open(tmp, "wb") as f:
for chunk in r.iter_content(chunk_size=8192):
if chunk:
f.write(chunk)
with tarfile.open(tmp, "r:gz") as tar:
for member in tar.getmembers():
if member.name.endswith(".mmdb"):
member_f = tar.extractfile(member)
if member_f is None:
continue
dest = Path(dest_path)
dest.parent.mkdir(parents=True, exist_ok=True)
with open(dest, "wb") as out_f:
shutil.copyfileobj(member_f, out_f)
return str(dest)
raise RuntimeError("Nie znaleziono pliku .mmdb w archiwum")
def download_file(url: str, dest_path: str):
r = requests.get(url, stream=True, timeout=60)
r.raise_for_status()
Path(dest_path).parent.mkdir(parents=True, exist_ok=True)
with open(dest_path, "wb") as f:
for chunk in r.iter_content(chunk_size=8192):
if chunk:
f.write(chunk)
return dest_path
def github_latest_mmdb(repo: str, token: str | None = None) -> str | None:
"""
Returns the URL of the newest .mmdb asset from releases/latest for the given repo (owner/name).
Preference: City -> Country -> ASN.
"""
api = f"https://api.github.com/repos/{repo}/releases/latest"
headers = {"Accept": "application/vnd.github+json"}
if token:
headers["Authorization"] = f"Bearer {token}"
r = requests.get(api, headers=headers, timeout=30)
r.raise_for_status()
data = r.json()
assets = data.get("assets", [])
urls = [a.get("browser_download_url") for a in assets if (a.get("browser_download_url") or "").lower().endswith(".mmdb")]
if not urls:
return None
lower = [u.lower() for u in urls]
for key in ("city", "country", "asn"):
for i, u in enumerate(lower):
if key in u:
return urls[i]
return urls[0]

60
scripts/updater.py Normal file

@@ -0,0 +1,60 @@
import time
import os
import logging
from scripts.download_helpers import download_maxmind, download_file, github_latest_mmdb
from app.config import settings
logging.basicConfig(level=logging.INFO)
log = logging.getLogger("db_updater")
def update_once():
provider = settings.geo_provider.lower()
try:
if provider == "maxmind":
# 1) GitHub Releases (latest)
repo = os.getenv("MAXMIND_GITHUB_REPO") or "P3TERX/GeoLite.mmdb"
token = os.getenv("GITHUB_TOKEN")
gh_url = None
try:
gh_url = github_latest_mmdb(repo, token)
except Exception as e:
log.warning("GitHub latest check failed: %s", e)
if gh_url:
download_file(gh_url, settings.maxmind_db_path)
log.info("MaxMind DB pobrana z GitHub Releases: %s", gh_url)
return
# 2) Bezpośredni URL do .mmdb
direct_url = os.getenv("MAXMIND_DIRECT_DB_URL")
if direct_url:
download_file(direct_url, settings.maxmind_db_path)
log.info("MaxMind DB pobrana z direct URL: %s", direct_url)
return
# 3) Oficjalne pobieranie MaxMind (wymaga licencji)
key = os.getenv("MAXMIND_LICENSE_KEY")
if not key:
log.error("Brak źródła bazy: ustaw MAXMIND_GITHUB_REPO lub MAXMIND_DIRECT_DB_URL lub MAXMIND_LICENSE_KEY")
return
download_maxmind(key, settings.maxmind_db_name, settings.maxmind_db_path, settings.maxmind_download_url_template)
log.info("MaxMind DB zaktualizowana (MaxMind download)")
elif provider == "ip2location":
url = os.getenv("IP2LOCATION_DOWNLOAD_URL")
if not url:
log.error("Brak IP2LOCATION_DOWNLOAD_URL w env")
return
download_file(url, settings.ip2location_db_path)
log.info("IP2Location DB zaktualizowana")
else:
log.error("Nieznany provider: %s", provider)
except Exception as e:
log.exception("Błąd przy aktualizacji bazy: %s", e)
if __name__ == "__main__":
interval = settings.update_interval_seconds
while True:
update_once()
time.sleep(interval)

2
start.sh Executable file

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
uvicorn app.main:app --host 0.0.0.0 --port 8000