# aukpad.py
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Request, HTTPException
from fastapi.responses import HTMLResponse, RedirectResponse, PlainTextResponse, FileResponse
import json, secrets, string, time, os, threading, asyncio
from collections import defaultdict
from typing import Optional
app = FastAPI()
application = app # alias if you prefer "application"
# Environment variables (all settings are overridable via the environment)
USE_VALKEY = os.getenv("USE_VALKEY", "false").lower() == "true"
VALKEY_URL = os.getenv("VALKEY_URL", "redis://localhost:6379/0")
MAX_TEXT_SIZE = int(os.getenv("MAX_TEXT_SIZE", "1048576")) # 1MB default
MAX_CONNECTIONS_PER_IP = int(os.getenv("MAX_CONNECTIONS_PER_IP", "10"))
RETENTION_HOURS = int(os.getenv("RETENTION_HOURS", "48")) # Default 48 hours
# Valkey/Redis client (initialized later if enabled; stays None on failure)
redis_client = None
# In-memory rooms: {doc_id: {"text": str, "ver": int, "peers": set[WebSocket], "last_access": float}}
# NOTE(review): mutated from both the asyncio event loop and the cleanup thread.
rooms: dict[str, dict] = {}
# Rate limiting: {ip: [timestamp, timestamp, ...]} (sliding window, see check_rate_limit)
rate_limits: dict[str, list] = defaultdict(list)
# Connection tracking: {ip: connection_count} — enforced in the websocket handler
connections_per_ip: dict[str, int] = defaultdict(int)
def random_id(n: int = 4) -> str:
    """Generate a cryptographically random pad identifier of *n* lowercase letters/digits."""
    charset = string.ascii_lowercase + string.digits
    chars = [secrets.choice(charset) for _ in range(n)]
    return "".join(chars)
def init_valkey():
    """Connect the module-level Valkey/Redis client when USE_VALKEY is enabled.

    On a missing redis package or an unreachable server, prints a warning and
    leaves redis_client as None so the app runs with memory-only storage.
    """
    global redis_client
    if not USE_VALKEY:
        return
    try:
        import redis
        redis_client = redis.from_url(VALKEY_URL, decode_responses=True)
        redis_client.ping()  # round-trip to verify the server is reachable
        print(f"Valkey/Redis connected: {VALKEY_URL}")
    except ImportError:
        print("Warning: redis package not installed, falling back to memory-only storage")
        redis_client = None
    except Exception as e:
        print(f"Warning: Failed to connect to Valkey/Redis: {e}")
        redis_client = None
def get_room_data_from_cache(doc_id: str) -> Optional[dict]:
    """Load a room's persisted state from Valkey/Redis.

    Returns the decoded {"text", "ver", "last_access"} dict on a cache hit,
    or None when caching is disabled, the key is absent, or a read error
    occurs (errors are logged, never raised).
    """
    if not redis_client:
        return None
    try:
        raw = redis_client.get(f"room:{doc_id}")
        if raw:
            return json.loads(raw)
    except Exception as e:
        print(f"Cache read error for {doc_id}: {e}")
    return None
def save_room_data_to_cache(doc_id: str, text: str, ver: int):
    """Persist room state to Valkey/Redis with the retention TTL (best effort).

    No-op when caching is disabled; write failures are logged, not raised.
    """
    if not redis_client:
        return
    try:
        payload = {"text": text, "ver": ver, "last_access": time.time()}
        redis_client.setex(f"room:{doc_id}", RETENTION_HOURS * 3600, json.dumps(payload)) # TTL in seconds
    except Exception as e:
        print(f"Cache write error for {doc_id}: {e}")
def update_room_access_time(doc_id: str):
    """Refresh a room's last-access timestamp in memory and in the cache.

    Rewriting the cache entry with setex also resets its TTL, extending the
    pad's retention window. Cache errors are logged and swallowed.
    """
    now = time.time()
    room = rooms.get(doc_id)
    if room is not None:
        room["last_access"] = now
    if not redis_client:
        return
    try:
        raw = redis_client.get(f"room:{doc_id}")
        if raw:
            entry = json.loads(raw)
            entry["last_access"] = now
            redis_client.setex(f"room:{doc_id}", RETENTION_HOURS * 3600, json.dumps(entry)) # Reset TTL
    except Exception as e:
        print(f"Cache access update error for {doc_id}: {e}")
def cleanup_old_rooms():
    """Background loop: hourly, remove in-memory rooms idle past RETENTION_HOURS.

    Runs in a daemon thread (started by startup_event). Only rooms with no
    connected peers are dropped; Valkey/Redis entries expire on their own TTL.
    """
    while True:
        try:
            now = time.time()
            cutoff = now - (RETENTION_HOURS * 3600)  # Convert hours to seconds
            # Fix: snapshot the dict before iterating — the asyncio event-loop
            # thread mutates `rooms` concurrently, and iterating the live dict
            # can raise "dictionary changed size during iteration".
            to_remove = [
                doc_id
                for doc_id, room in list(rooms.items())
                if room.get("last_access", 0) < cutoff and not room.get("peers")
            ]
            for doc_id in to_remove:
                # pop() instead of del: the room may have been removed (or
                # recreated) by another thread between the scan and now.
                if rooms.pop(doc_id, None) is not None:
                    print(f"Cleaned up inactive room: {doc_id}")
            # Valkey/Redis has TTL, so it cleans up automatically
        except Exception as e:
            print(f"Cleanup error: {e}")
        time.sleep(3600)  # Run every hour
def check_rate_limit(client_ip: str, limit: int = 50, window_seconds: float = 3600.0) -> bool:
    """Sliding-window rate limiter keyed by client IP.

    Records the current request and returns True when the caller has made
    fewer than `limit` requests in the past `window_seconds`; returns False
    (without recording the request) once the limit is reached. The defaults
    preserve the original hard-coded 50 requests/hour policy.
    """
    now = time.time()
    window_start = now - window_seconds
    # Drop timestamps that have aged out of the window.
    recent = [t for t in rate_limits[client_ip] if t > window_start]
    rate_limits[client_ip] = recent
    if len(recent) >= limit:
        return False
    # Record this request (mutates the list just stored in rate_limits).
    recent.append(now)
    return True
# Static page body returned by the pad() route for every document URL.
HTML = """
aukpad
"""
@app.get("/favicon.ico", include_in_schema=False)
def favicon():
    """Serve the favicon from the process working directory."""
    return FileResponse("favicon.ico")
@app.get("/", include_in_schema=False)
def root():
    """Redirect visitors to a freshly generated pad URL (307 preserves the HTTP method)."""
    new_pad_path = f"/{random_id()}/"
    return RedirectResponse(url=new_pad_path, status_code=307)
@app.post("/", include_in_schema=False)
async def create_pad_with_content(request: Request):
    """Create a new pad from a raw POST body and return its URL as plain text.

    Designed for CLI usage (e.g. `curl --data-binary @file`): responds with the
    pad URL rather than a redirect. Enforces per-IP rate limiting, non-empty
    UTF-8 content, a null-byte ban, and the MAX_TEXT_SIZE cap.

    Raises HTTPException: 429 (rate limited), 400 (empty / non-UTF-8 / null
    bytes), 413 (payload over MAX_TEXT_SIZE).
    """
    # Get client IP
    client_ip = request.client.host if request.client else "unknown"
    # Check rate limit
    if not check_rate_limit(client_ip):
        raise HTTPException(status_code=429, detail="Rate limit exceeded. Max 50 requests per hour.")
    # Get and validate content
    content = await request.body()
    if not content:
        raise HTTPException(status_code=400, detail="Empty content not allowed")
    try:
        text_content = content.decode('utf-8')
    except UnicodeDecodeError:
        raise HTTPException(status_code=400, detail="Content must be valid UTF-8")
    # Check for null bytes
    if '\x00' in text_content:
        raise HTTPException(status_code=400, detail="Null bytes not allowed")
    # Check text size limit (content is the raw bytes — no need to re-encode)
    if len(content) > MAX_TEXT_SIZE:
        raise HTTPException(status_code=413, detail=f"Content too large. Max size: {MAX_TEXT_SIZE} bytes")
    # Fix: the 4-char random id can collide with a live or cached pad, which
    # would silently overwrite it — keep drawing ids until we find a free one.
    doc_id = random_id()
    while doc_id in rooms or get_room_data_from_cache(doc_id) is not None:
        doc_id = random_id()
    rooms[doc_id] = {"text": text_content, "ver": 1, "peers": set(), "last_access": time.time()}
    # Save to cache if enabled
    save_room_data_to_cache(doc_id, text_content, 1)
    # Return URL instead of redirect for CLI usage
    base_url = str(request.base_url).rstrip('/')
    return PlainTextResponse(f"{base_url}/{doc_id}/\n")
@app.get("/{doc_id}/", response_class=HTMLResponse)
def pad(doc_id: str):
    """Serve the pad page and refresh the room's retention timer."""
    # Update access time when pad is accessed
    update_room_access_time(doc_id)
    return HTMLResponse(HTML)
@app.get("/{doc_id}/raw", response_class=PlainTextResponse)
def get_raw_pad_content(doc_id: str):
    """Return the pad's current text as plain text ("" for unknown pads).

    Prefers the in-memory room; on a miss, falls back to the Valkey/Redis
    cache and, on a hit, hydrates the in-memory room for future requests.
    """
    room = rooms.get(doc_id)
    if room is not None:
        update_room_access_time(doc_id)
        return PlainTextResponse(room["text"])

    cached = get_room_data_from_cache(doc_id)
    if not cached:
        # Unknown pad: serve empty content rather than a 404.
        return PlainTextResponse("")

    text = cached.get("text", "")
    rooms[doc_id] = {
        "text": text,
        "ver": cached.get("ver", 0),
        "peers": set(),
        "last_access": time.time(),
    }
    update_room_access_time(doc_id)
    return PlainTextResponse(text)
async def _broadcast(doc_id: str, message: dict, exclude: WebSocket | None = None):
    """Send `message` (JSON-encoded) to every peer in the room except `exclude`.

    Peers whose send fails are pruned from the room afterwards. No-op when the
    room does not exist.
    """
    room = rooms.get(doc_id)
    if not room:
        return
    payload = json.dumps(message)
    dead = []
    # Fix: iterate over a snapshot — awaiting send_text yields to the event
    # loop, where connect/disconnect handlers may mutate room["peers"], and
    # mutating a set mid-iteration raises RuntimeError.
    for peer in list(room["peers"]):
        if peer is exclude:
            continue
        try:
            await peer.send_text(payload)
        except Exception:
            dead.append(peer)
    for d in dead:
        room["peers"].discard(d)
@app.websocket("/ws/{doc_id}")
async def ws(doc_id: str, ws: WebSocket):
    """Realtime sync endpoint: last-write-wins full-text replication per pad.

    Protocol (JSON text frames):
      server -> client: {"type": "init", "text", "ver"} on connect,
                        {"type": "update", "text", "ver", "clientId"} on edits,
                        {"type": "error", "message"} when input is rejected.
      client -> server: {"type": "edit", "text", "clientId"}.
    """
    # Get client IP for connection limiting
    client_ip = ws.client.host if ws.client else "unknown"
    # Refuse the handshake when this IP already holds too many connections.
    if connections_per_ip[client_ip] >= MAX_CONNECTIONS_PER_IP:
        await ws.close(code=1008, reason="Too many connections from this IP")
        return
    await ws.accept()
    connections_per_ip[client_ip] += 1
    # Hydrate the room from cache on first access (e.g. after a restart).
    if doc_id not in rooms:
        cached_data = get_room_data_from_cache(doc_id)
        if cached_data:
            rooms[doc_id] = {
                "text": cached_data.get("text", ""),
                "ver": cached_data.get("ver", 0),
                "peers": set(),
                "last_access": time.time()
            }
    room = rooms.setdefault(doc_id, {"text": "", "ver": 0, "peers": set(), "last_access": time.time()})
    room["peers"].add(ws)
    # Update access time
    update_room_access_time(doc_id)
    await ws.send_text(json.dumps({"type": "init", "text": room["text"], "ver": room["ver"]}))
    try:
        while True:
            msg = await ws.receive_text()
            # Fix: a malformed frame used to raise out of the loop and kill
            # the connection — reject it and keep the socket open instead.
            try:
                data = json.loads(msg)
            except json.JSONDecodeError:
                await ws.send_text(json.dumps({"type": "error", "message": "Invalid JSON"}))
                continue
            # Valid JSON that is not an edit message (including non-dict
            # payloads, which would otherwise crash on .get) is ignored.
            if not isinstance(data, dict) or data.get("type") != "edit":
                continue
            new_text = str(data.get("text", ""))
            # Check text size limit
            if len(new_text.encode('utf-8')) > MAX_TEXT_SIZE:
                await ws.send_text(json.dumps({"type": "error", "message": f"Text too large. Max size: {MAX_TEXT_SIZE} bytes"}))
                continue
            room["text"] = new_text
            room["ver"] += 1
            room["last_access"] = time.time()
            # Save to cache
            save_room_data_to_cache(doc_id, room["text"], room["ver"])
            await _broadcast(doc_id, {
                "type": "update",
                "text": room["text"],
                "ver": room["ver"],
                "clientId": data.get("clientId")
            })
    except WebSocketDisconnect:
        pass
    finally:
        room["peers"].discard(ws)
        # Decrement connection count for this IP
        connections_per_ip[client_ip] = max(0, connections_per_ip[client_ip] - 1)
# Initialize Valkey/Redis and cleanup thread on startup
@app.on_event("startup")
async def startup_event():
    """App startup hook: connect the optional cache backend and launch the hourly cleanup thread."""
    init_valkey()
    # Daemon thread so it never blocks interpreter shutdown.
    threading.Thread(target=cleanup_old_rooms, daemon=True).start()
    print("Aukpad started with cleanup routine")
# Run locally: uvicorn aukpad:app --reload