# Server URLs (ensure these match your running servers)
FLASK_URL = "http://127.0.0.1:3000/"
FASTAPI_URL = "http://127.0.0.1:8000/"
-NUM_REQUESTS = 100
+# NUM_REQUESTS = 100 # Removed, will be passed as argument
def fetch_url_sync(url):
try:
- response = requests.get(url, timeout=5) # Added timeout to prevent hanging
- response.raise_for_status() # Raise an exception for bad status codes
+ response = requests.get(url, timeout=10) # Increased timeout for potentially more requests
+ response.raise_for_status()
return response.status_code
except requests.exceptions.RequestException as e:
- print(f"Request to {url} failed: {e}")
+ # print(f"Request to {url} failed: {e}") # Silenced for cleaner benchmark output
return None
-def run_flask_benchmark():
- print(f"Starting Flask benchmark: {NUM_REQUESTS} requests to {FLASK_URL}...")
- print("[DIAG-BRB-FLASK] Running requests SEQUENTIALLY for diagnosis.", flush=True)
+def run_flask_benchmark(num_requests):
+ print(f"Starting Flask benchmark: {num_requests} requests to {FLASK_URL}...")
+ # print("[DIAG-BRB-FLASK] Running requests SEQUENTIALLY for diagnosis.", flush=True) # Can be verbose
start_time = time.perf_counter()
results_list = []
- successful_so_far = 0
+ # successful_so_far = 0 # Not strictly needed here
- for i in range(NUM_REQUESTS):
+ for i in range(num_requests):
try:
- status_code = fetch_url_sync(FLASK_URL) # Direct call
- print(f"REQ_STATUS:{status_code}", flush=True) # New progress line
+ status_code = fetch_url_sync(FLASK_URL)
+ print(f"REQ_STATUS:{status_code}", flush=True) # Progress for the calling script to count
results_list.append(status_code)
- if status_code == 200:
- successful_so_far += 1
- print(f"[DIAG-BRB-FLASK] Request {i+1}/{NUM_REQUESTS} result: {status_code}", flush=True)
+ # if status_code == 200:
+ # successful_so_far += 1
+ # print(f"[DIAG-BRB-FLASK] Request {i+1}/{num_requests} result: {status_code}", flush=True)
except Exception as e:
- print(f"[DIAG-BRB-FLASK] Request {i+1}/{NUM_REQUESTS} failed with exception: {e}", flush=True)
+ # print(f"[DIAG-BRB-FLASK] Request {i+1}/{num_requests} failed with exception: {e}", flush=True)
results_list.append(None)
end_time = time.perf_counter()
total_time = end_time - start_time
successful_requests = sum(1 for r in results_list if r == 200)
- print(f"Final Flask benchmark summary: {successful_requests}/{NUM_REQUESTS} successful requests in {total_time:.2f} seconds.")
+ print(f"Final Flask benchmark summary: {successful_requests}/{num_requests} successful requests in {total_time:.2f} seconds.")
return total_time
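+# Note: the Flask benchmark issues its requests sequentially, while the FastAPI
+# benchmark below fires them all concurrently via asyncio.gather, so the two
+# totals measure different things and are not directly comparable.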
async def fetch_url_async(client, url):
try:
- response = await client.get(url) # REMOVED timeout=10
+        response = await client.get(url, timeout=10)  # Re-added explicit timeout (httpx defaults to 5 s)
response.raise_for_status()
return response.status_code
-    except httpx.RequestError as e:
+    except httpx.HTTPError as e:  # HTTPStatusError from raise_for_status() is not a RequestError
- print(f"Request to {url} failed: {e}")
+ # print(f"Request to {url} failed: {e}") # Silenced
return None
-async def run_fastapi_benchmark_async():
- print(f"Starting FastAPI benchmark: {NUM_REQUESTS} requests to {FASTAPI_URL}...")
+async def run_fastapi_benchmark_async(num_requests):
+ print(f"Starting FastAPI benchmark: {num_requests} requests to {FASTAPI_URL}...")
start_time = time.perf_counter()
async with httpx.AsyncClient() as client:
- tasks = [fetch_url_async(client, FASTAPI_URL) for _ in range(NUM_REQUESTS)]
+ tasks = [fetch_url_async(client, FASTAPI_URL) for _ in range(num_requests)]
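+        # httpx.AsyncClient defaults to a pool of 100 connections, so these
+        # tasks run with bounded concurrency rather than truly all at once.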
results = await asyncio.gather(*tasks)
end_time = time.perf_counter()
total_time = end_time - start_time
successful_requests = sum(1 for r in results if r == 200)
- print(f"FastAPI benchmark: {successful_requests}/{NUM_REQUESTS} successful requests in {total_time:.2f} seconds.")
+ print(f"FastAPI benchmark: {successful_requests}/{num_requests} successful requests in {total_time:.2f} seconds.")
return total_time
-def run_fastapi_benchmark():
- return asyncio.run(run_fastapi_benchmark_async())
+def run_fastapi_benchmark(num_requests):
+ return asyncio.run(run_fastapi_benchmark_async(num_requests))
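+# asyncio.run() creates a fresh event loop per call, which is fine here because
+# each invocation of this script performs a single benchmark run.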
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run web server benchmarks.")
choices=["flask", "fastapi"],
help="Specify the framework to benchmark (flask or fastapi)"
)
+ parser.add_argument(
+ "num_requests",
+ type=int,
+ help="Number of requests to perform"
+ )
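+    # Example invocation (mirrors how the runner script below calls this one):
+    #   python benchmark/run_benchmark.py flask 1000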
args = parser.parse_args()
if args.framework == "flask":
- run_flask_benchmark()
+ run_flask_benchmark(args.num_requests)
elif args.framework == "fastapi":
- run_fastapi_benchmark()
+ run_fastapi_benchmark(args.num_requests)
else:
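+        # Unreachable in practice: argparse's choices already rejects other values.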
print("Invalid framework specified. Choose 'flask' or 'fastapi'.")
\ No newline at end of file
--- /dev/null
+import subprocess
+import time
+import re
+import requests # pip install requests
+import webbrowser
+from rich.console import Console
+from rich.table import Table
+import sys
+import os
+
+# --- Configuration ------------------------------------------------------
+FLASK_SERVER_URL = "http://127.0.0.1:3000/"
+FASTAPI_SERVER_URL = "http://127.0.0.1:8000/"
+BENCHMARK_SCRIPT_PATH = "benchmark/run_benchmark.py"  # issues the HTTP requests; any artificial delays live in the app modules
+NUM_REQUESTS_EXPECTED = 1000
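+# sys.executable keeps child processes on the same interpreter/venv as this script.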
+PYTHON_EXE = sys.executable
+
+# ------------------------------------------------------------------------
+console = Console()
+
+# -------------------------- helpers -------------------------------------
+def start_server(command_args, health_check_url, server_name, cwd=None):
+ """Start server and wait until a 200 health check is returned."""
+ console.print(f"[yellow]Starting {server_name} server (No Restrictions Test)...[/yellow]")
+ popen_kwargs = dict(cwd=cwd, text=True, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)
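+    # Launch uvicorn via "python -m uvicorn" so it runs under the same
+    # interpreter/venv even when the uvicorn executable is not on PATH;
+    # explicit .exe paths are assumed to be directly runnable.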
+ if "uvicorn" in command_args[0] and not command_args[0].endswith(".exe"):
+ process = subprocess.Popen([PYTHON_EXE, "-m"] + command_args, **popen_kwargs)
+ else:
+ process = subprocess.Popen(command_args, **popen_kwargs)
+
+ max_wait = 30
+ start_t = time.time()
+    while time.time() - start_t < max_wait:
+        try:
+            if requests.get(health_check_url, timeout=3).status_code == 200:
+                console.print(f"[green]{server_name} ready.[/green]")
+                return process
+        except requests.RequestException:
+            pass
+        time.sleep(0.3)  # brief pause between polls, even after a non-200 response
+ console.print(f"[red]{server_name} failed to start within {max_wait}s.[/red]")
+ process.terminate()
+ return None
+
+def stop_server(proc, name):
+ if not proc:
+ return
+ console.print(f"[yellow]Stopping {name}…[/yellow]")
+ proc.terminate()
+ try:
+ proc.wait(timeout=8)
+ except subprocess.TimeoutExpired:
+ proc.kill()
+ console.print(f"[green]{name} stopped.[/green]")
+
+def run_benchmark_script(framework_arg):
+ # This function remains largely the same, as benchmark/run_benchmark.py handles the requests.
+ # The "no restrictions" aspect is handled by running different app_*.py files.
+ console.print(f"Running benchmark for [bold]{framework_arg}[/bold] (No Restrictions Test) with {NUM_REQUESTS_EXPECTED} requests...")
+ cmd = [PYTHON_EXE, BENCHMARK_SCRIPT_PATH, framework_arg, str(NUM_REQUESTS_EXPECTED)]
+
+    # Flask keeps the detailed live-progress handling (REQ_STATUS lines streamed
+    # via Popen); FastAPI only needs the final summary line, so it uses subprocess.run.
+ if framework_arg.lower() == "flask":
+ final_summary_line = None
+ requests_done_count = 0
+ progress_line_printed = False
+ try:
+            process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                                       text=True, bufsize=1, encoding='utf-8')
+ if process.stdout:
+ for line in iter(process.stdout.readline, ''):
+ line = line.strip()
+ if not line: continue
+ if line.startswith("REQ_STATUS:"):
+ requests_done_count += 1
+ print(f"\rFlask progress: Handled {requests_done_count}/{NUM_REQUESTS_EXPECTED} requests...", end="", flush=True)
+ progress_line_printed = True
+ elif line.startswith("[DIAG-BRB-FLASK]"):
+ if progress_line_printed: print("\r" + " " * 80 + "\r", end="", flush=True)
+ print(line, flush=True)
+ if progress_line_printed: print(f"\rFlask progress: Handled {requests_done_count}/{NUM_REQUESTS_EXPECTED} requests...", end="", flush=True)
+ elif "Final Flask benchmark summary:" in line:
+ final_summary_line = line
+ if progress_line_printed: print("\r" + " " * 80 + "\r", end="", flush=True)
+ process.stdout.close()
+ if progress_line_printed and not final_summary_line: print("\r" + " " * 80 + "\r", end="", flush=True)
+
+ stderr_output_list = []
+ if process.stderr:
+ for line in iter(process.stderr.readline, ''):
+ line = line.strip()
+ if line: stderr_output_list.append(line)
+ process.stderr.close()
+ process.wait(timeout=600)
+            if process.returncode != 0:
+                console.print(f"[red]{framework_arg} benchmark script failed (code {process.returncode})[/red]")
+                if stderr_output_list:
+                    console.print("[red]STDERR:[/red]")
+                    for err_line in stderr_output_list:
+                        console.print(f"[red]{err_line}[/red]")
+                return None
+            if final_summary_line:
+                return final_summary_line
+            console.print(f"[red]No summary line for {framework_arg}.[/red]")
+            if stderr_output_list:
+                console.print("[red]STDERR:[/red]")
+                for err_line in stderr_output_list:
+                    console.print(f"[red]{err_line}[/red]")
+            return None
+ except subprocess.TimeoutExpired:
+ console.print(f"[red]Benchmark for {framework_arg} timed out.[/red]")
+            if process.poll() is None:
+                process.kill()
+                process.wait()
+ return None
+ except Exception as e:
+ console.print(f"[red]Error running Popen benchmark for {framework_arg}: {e}[/red]")
+ return None
+ else: # For FastAPI
+ try:
+ result = subprocess.run(cmd, text=True, capture_output=True, timeout=600, check=False, encoding='utf-8')
+            if result.returncode != 0:
+                console.print(f"[red]{framework_arg} benchmark failed (code {result.returncode}).[/red]")
+                if result.stderr:
+                    console.print(f"STDERR:\n{result.stderr.strip()}")
+                return None
+            if result.stdout and result.stdout.strip():
+                # The summary is the last line the benchmark script prints.
+                return result.stdout.strip().splitlines()[-1]
+            console.print(f"[red]No stdout from {framework_arg}.[/red]")
+            if result.stderr and result.stderr.strip():
+                console.print(f"STDERR:\n{result.stderr.strip()}")
+            return None
+ except subprocess.TimeoutExpired:
+ console.print(f"[red]Benchmark for {framework_arg} (subprocess.run) timed out.[/red]")
+ return None
+ except Exception as e:
+ console.print(f"[red]Error running subprocess.run benchmark for {framework_arg}: {e}[/red]")
+ return None
+
+def parse_benchmark(line):
+ m = re.search(r"(\d+)/(\d+) successful requests in ([\d.]+) seconds", line)
+ if not m:
+ return None
+ succ, total, tsec = map(float, m.groups())
+ return {"successful": f"{int(succ)}/{int(total)}", "total_time": tsec}
+
+def display_table(rows):
+ tbl = Table(title="Benchmark Summary - NO RESTRICTIONS", show_lines=True, header_style="bold magenta")
+ tbl.add_column("Framework", style="cyan")
+ tbl.add_column("Server Config", style="white")
+ tbl.add_column("Artificial Delay", style="green")
+ tbl.add_column("#Reqs", justify="right")
+ tbl.add_column("Success", justify="right")
+ tbl.add_column("Total s", justify="right", style="yellow")
+ tbl.add_column("Avg s/req", justify="right", style="blue")
+ for r in rows:
+ avg_time = r["total_time"] / NUM_REQUESTS_EXPECTED if NUM_REQUESTS_EXPECTED > 0 else 0
+ tbl.add_row(r["framework"], r["config"], r["delay"],
+ str(NUM_REQUESTS_EXPECTED), r["successful"],
+ f"{r['total_time']:.2f}", f"{avg_time:.4f}") # Increased precision for avg
+ console.print(tbl)
+
+# --------------------------- scenarios ----------------------------------
+SCENARIOS = [
+ {
+ "name": "FastAPI (No Delay)",
+ "config": "Uvicorn, async (default)",
+ "delay": "None",
+ "cmd": ["uvicorn", "app_fastapi.app_no_delay:app", "--host", "0.0.0.0",
+ "--port", "8000", "--log-level", "warning"],
+ "url": FASTAPI_SERVER_URL,
+ "bench_arg": "fastapi", # benchmark/run_benchmark.py uses this to pick the URL/method
+ },
+ {
+ "name": "Flask (No Delay, Threaded)",
+ "config": "Werkzeug (threaded=True)",
+ "delay": "None",
+ "cmd": [PYTHON_EXE, "app_flask/flask_application_no_delay.py"],
+ "url": FLASK_SERVER_URL,
+ "bench_arg": "flask", # benchmark/run_benchmark.py uses this to pick the URL/method
+ }
+]
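+
+# Note: the app modules referenced above are not part of this diff. As a rough
+# sketch (an assumption, not the actual files), app_fastapi/app_no_delay.py is
+# expected to look like:
+#
+#     from fastapi import FastAPI
+#     app = FastAPI()
+#
+#     @app.get("/")
+#     async def index():
+#         return {"status": "ok"}
+#
+# and app_flask/flask_application_no_delay.py like this (port 3000 to match
+# FLASK_SERVER_URL, threaded=True to match the "Server Config" label):
+#
+#     from flask import Flask
+#     app = Flask(__name__)
+#
+#     @app.route("/")
+#     def index():
+#         return "ok"
+#
+#     if __name__ == "__main__":
+#         app.run(host="0.0.0.0", port=3000, threaded=True)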
+
+# ----------------------------- main -------------------------------------
+if __name__ == "__main__":
+ console.print("[bold underline]Automated Web Framework Benchmark (NO RESTRICTIONS)[/bold underline]\n")
+ results = []
+ root = os.getcwd()
+
+ for i, sc in enumerate(SCENARIOS, 1):
+ console.rule(f"[cyan]Scenario {i}/{len(SCENARIOS)} – {sc['name']}[/cyan]")
+ srv = start_server(sc["cmd"], sc["url"], sc["name"], cwd=root)
+ if not srv:
+ console.print(f"[red]Skipping benchmark for {sc['name']} as server failed to start.[/red]")
+ continue
+ try:
+ # No artificial grace period needed as apps have no sleep()
+ # if sc["name"].lower().startswith("flask"):
+ # time.sleep(2)
+ line = run_benchmark_script(sc["bench_arg"])
+ parsed = parse_benchmark(line) if line else None
+ if parsed:
+ results.append({"framework": sc["name"], "config": sc["config"],
+ "delay": sc["delay"], **parsed})
+ # Optionally, open browser after benchmark. Keeping it for consistency.
+ try:
+ console.print(f"[blue]Opening {sc['name']} page at {sc['url']} in browser...[/blue]")
+ webbrowser.open(sc["url"])
+ console.print(f"[blue]Keeping server alive for 3 seconds to view the page...[/blue]")
+ time.sleep(3) # Reduced delay as pages should load faster
+ except Exception as e:
+ console.print(f"[yellow]Could not open browser for {sc['name']}: {e}[/yellow]")
+ else:
+ console.print(f"[yellow]No parsed benchmark results for {sc['name']}.[/yellow]")
+ finally:
+ stop_server(srv, sc["name"])
+ console.print() # Newline after each scenario
+
+ if results:
+ display_table(results)
+ else:
+ console.print("[yellow]No benchmark results were collected.[/yellow]")
+ console.print("\n[bold]No Restrictions Benchmark run finished.[/bold]")