From 432ebbf5674ab3e5e7615d76ef33821e385808cd Mon Sep 17 00:00:00 2001
From: BennyFranciscus <268274351+BennyFranciscus@users.noreply.github.com>
Date: Wed, 18 Mar 2026 17:03:06 +0000
Subject: [PATCH] Add Starlette: ASGI framework that powers FastAPI (~12k
 stars)

Starlette 0.46.1 on Uvicorn (uvloop) + Gunicorn multi-worker. Same ASGI
stack as FastAPI but without Pydantic/OpenAPI overhead. Pre-computed
JSON + gzip caches, thread-local SQLite, orjson.
---
 frameworks/starlette/Dockerfile       |   7 ++
 frameworks/starlette/app.py           | 183 ++++++++++++++++++++++++++
 frameworks/starlette/gunicorn_conf.py |   6 +
 frameworks/starlette/meta.json        |  19 +++
 frameworks/starlette/requirements.txt |   5 +
 5 files changed, 220 insertions(+)
 create mode 100644 frameworks/starlette/Dockerfile
 create mode 100644 frameworks/starlette/app.py
 create mode 100644 frameworks/starlette/gunicorn_conf.py
 create mode 100644 frameworks/starlette/meta.json
 create mode 100644 frameworks/starlette/requirements.txt

diff --git a/frameworks/starlette/Dockerfile b/frameworks/starlette/Dockerfile
new file mode 100644
index 00000000..e012b934
--- /dev/null
+++ b/frameworks/starlette/Dockerfile
@@ -0,0 +1,7 @@
+FROM python:3.13-slim
+WORKDIR /app
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+COPY . .
+EXPOSE 8080
+CMD ["gunicorn", "-c", "gunicorn_conf.py", "app:app"]
diff --git a/frameworks/starlette/app.py b/frameworks/starlette/app.py
new file mode 100644
index 00000000..37a9c9f4
--- /dev/null
+++ b/frameworks/starlette/app.py
@@ -0,0 +1,183 @@
+import gzip
+import json
+import os
+import sqlite3
+import threading
+
+import orjson
+from starlette.applications import Starlette
+from starlette.requests import Request
+from starlette.responses import Response
+from starlette.routing import Route
+
+# ── Dataset ──────────────────────────────────────────────────────────
+
+
+def _with_totals(raw: list[dict]) -> list[dict]:
+    """Copy each item and add a `total` field (price * quantity, 2 dp)."""
+    items = []
+    for d in raw:
+        item = dict(d)
+        item["total"] = round(d["price"] * d["quantity"] * 100) / 100
+        items.append(item)
+    return items
+
+
+dataset_items = None
+dataset_path = os.environ.get("DATASET_PATH", "/data/dataset.json")
+try:
+    with open(dataset_path) as f:
+        dataset_items = json.load(f)
+except Exception:
+    pass  # dataset is optional; /json answers 500 when it is missing
+
+# Pre-computed JSON for /json endpoint
+json_buf: bytes | None = None
+if dataset_items is not None:
+    json_buf = orjson.dumps(
+        {"items": _with_totals(dataset_items), "count": len(dataset_items)}
+    )
+
+# Large dataset for compression (pre-serialised + pre-compressed)
+large_json_buf: bytes | None = None
+compressed_buf: bytes | None = None
+try:
+    with open("/data/dataset-large.json") as f:
+        raw = json.load(f)
+    large_json_buf = orjson.dumps({"items": _with_totals(raw), "count": len(raw)})
+    compressed_buf = gzip.compress(large_json_buf, compresslevel=1)
+except Exception:
+    pass  # large dataset is optional; /compression answers 500 when missing
+
+# ── SQLite (thread-local) ────────────────────────────────────────────
+db_available = os.path.exists("/data/benchmark.db")
+DB_QUERY = (
+    "SELECT id, name, category, price, quantity, active, tags, rating_score, rating_count "
+    "FROM items WHERE price BETWEEN ? AND ? LIMIT 50"
+)
+_local = threading.local()
+
+
+def _get_db() -> sqlite3.Connection:
+    """Return this thread's cached connection, creating it on first use."""
+    conn = getattr(_local, "conn", None)
+    if conn is None:
+        # Read-only URI: the benchmark endpoints only ever SELECT.
+        conn = sqlite3.connect("file:/data/benchmark.db?mode=ro", uri=True)
+        conn.execute("PRAGMA mmap_size=268435456")
+        conn.row_factory = sqlite3.Row
+        _local.conn = conn
+    return conn
+
+
+# ── Helpers ──────────────────────────────────────────────────────────
+SERVER_HEADER = "starlette"
+
+
+def _text(body: str | bytes, status: int = 200) -> Response:
+    return Response(
+        content=body,
+        status_code=status,
+        media_type="text/plain",
+        headers={"Server": SERVER_HEADER},
+    )
+
+
+def _json_resp(body: bytes, status: int = 200, extra_headers: dict | None = None) -> Response:
+    headers = {"Server": SERVER_HEADER}
+    if extra_headers:
+        headers.update(extra_headers)
+    return Response(content=body, status_code=status, media_type="application/json", headers=headers)
+
+
+# ── Routes ───────────────────────────────────────────────────────────
+async def pipeline(request: Request) -> Response:
+    return _text(b"ok")
+
+
+async def baseline11(request: Request) -> Response:
+    """Sum integer query params; on POST also add the integer request body."""
+    total = 0
+    for v in request.query_params.values():
+        try:
+            total += int(v)
+        except ValueError:
+            pass
+    if request.method == "POST":
+        body = await request.body()
+        if body:
+            try:
+                total += int(body.strip())
+            except ValueError:
+                pass
+    return _text(str(total))
+
+
+async def baseline2(request: Request) -> Response:
+    """Sum integer query params (GET-only variant of baseline11)."""
+    total = 0
+    for v in request.query_params.values():
+        try:
+            total += int(v)
+        except ValueError:
+            pass
+    return _text(str(total))
+
+
+async def json_endpoint(request: Request) -> Response:
+    if json_buf is None:
+        return _text("No dataset", 500)
+    return _json_resp(json_buf)
+
+
+async def compression_endpoint(request: Request) -> Response:
+    if compressed_buf is None:
+        return _text("No dataset", 500)
+    return _json_resp(compressed_buf, extra_headers={"Content-Encoding": "gzip"})
+
+
+async def db_endpoint(request: Request) -> Response:
+    if not db_available:
+        return _json_resp(b'{"items":[],"count":0}')
+    try:
+        min_val = float(request.query_params.get("min", 10))
+        max_val = float(request.query_params.get("max", 50))
+    except ValueError:
+        min_val, max_val = 10.0, 50.0  # malformed params fall back to defaults
+    conn = _get_db()
+    rows = conn.execute(DB_QUERY, (min_val, max_val)).fetchall()
+    items = []
+    for r in rows:
+        items.append(
+            {
+                "id": r["id"],
+                "name": r["name"],
+                "category": r["category"],
+                "price": r["price"],
+                "quantity": r["quantity"],
+                "active": bool(r["active"]),
+                "tags": json.loads(r["tags"]),
+                "rating": {"score": r["rating_score"], "count": r["rating_count"]},
+            }
+        )
+    body = orjson.dumps({"items": items, "count": len(items)})
+    return _json_resp(body)
+
+
+async def upload_endpoint(request: Request) -> Response:
+    data = await request.body()
+    return _text(str(len(data)))
+
+
+# ── App ──────────────────────────────────────────────────────────────
+routes = [
+    Route("/pipeline", pipeline, methods=["GET"]),
+    Route("/baseline11", baseline11, methods=["GET", "POST"]),
+    Route("/baseline2", baseline2, methods=["GET"]),
+    Route("/json", json_endpoint, methods=["GET"]),
+    Route("/compression", compression_endpoint, methods=["GET"]),
+    Route("/db", db_endpoint, methods=["GET"]),
+    Route("/upload", upload_endpoint, methods=["POST"]),
+]
+
+app = Starlette(routes=routes)
diff --git a/frameworks/starlette/gunicorn_conf.py b/frameworks/starlette/gunicorn_conf.py
new file mode 100644
index 00000000..cf84fe46
--- /dev/null
+++ b/frameworks/starlette/gunicorn_conf.py
@@ -0,0 +1,6 @@
+import os
+
+bind = "0.0.0.0:8080"
+workers = len(os.sched_getaffinity(0)) * 2
+worker_class = "uvicorn.workers.UvicornWorker"
+keepalive = 120
diff --git a/frameworks/starlette/meta.json b/frameworks/starlette/meta.json
new file mode 100644
index 00000000..38615308
--- /dev/null
+++ b/frameworks/starlette/meta.json
@@ -0,0 +1,19 @@
+{
+  "display_name": "Starlette",
+  "language": "Python",
+  "type": "framework",
+  "engine": "uvicorn",
+  "description": "Starlette ASGI framework on Uvicorn (uvloop), multi-worker via gunicorn. The foundation FastAPI is built on.",
+  "repo": "https://github.com/encode/starlette",
+  "enabled": true,
+  "tests": [
+    "baseline",
+    "pipelined",
+    "noisy",
+    "limited-conn",
+    "json",
+    "upload",
+    "compression",
+    "mixed"
+  ]
+}
diff --git a/frameworks/starlette/requirements.txt b/frameworks/starlette/requirements.txt
new file mode 100644
index 00000000..ffdd15fd
--- /dev/null
+++ b/frameworks/starlette/requirements.txt
@@ -0,0 +1,5 @@
+starlette==0.46.1
+uvicorn[standard]==0.34.0
+gunicorn==23.0.0
+uvloop==0.21.0
+orjson==3.10.15