v12
author Sam Mirazi <sasan345@gmail.com>
Sun, 1 Jun 2025 18:49:05 +0000 (11:49 -0700)
committer Sam Mirazi <sasan345@gmail.com>
Sun, 1 Jun 2025 18:49:05 +0000 (11:49 -0700)
benchmark/run_benchmark.py
o3.txt [deleted file]
show_benchmark_table.py

index 06508e7b46b1ba708bc3156583e23e429e9cfd30..9975e2dac0e68ce4f64076887fd3e5cfe344c10a 100644 (file)
@@ -30,21 +30,17 @@ def run_flask_benchmark():
     successful_so_far = 0
 
     for i in range(NUM_REQUESTS):
-        print(f"[DIAG-BRB-FLASK] Processing request {i+1}/{NUM_REQUESTS}", flush=True)
         try:
             status_code = fetch_url_sync(FLASK_URL) # Direct call
+            print(f"REQ_STATUS:{status_code}", flush=True) # New progress line
             results_list.append(status_code)
             if status_code == 200:
                 successful_so_far += 1
-            print(f"[DIAG-BRB-FLASK] Request {i+1} result: {status_code}", flush=True)
+            print(f"[DIAG-BRB-FLASK] Request {i+1}/{NUM_REQUESTS} result: {status_code}", flush=True)
         except Exception as e:
-            print(f"[DIAG-BRB-FLASK] Request {i+1} generated an exception: {e}", flush=True)
+            print(f"[DIAG-BRB-FLASK] Request {i+1}/{NUM_REQUESTS} failed with exception: {e}", flush=True)
             results_list.append(None)
 
-        if (i + 1) % 10 == 0 and (i + 1) < NUM_REQUESTS:
-            print(f"Flask progress: Handled {i+1}/{NUM_REQUESTS} requests... ({successful_so_far} successful so far)")
-            sys.stdout.flush()
-    
     end_time = time.perf_counter()
     total_time = end_time - start_time
     successful_requests = sum(1 for r in results_list if r == 200)
diff --git a/o3.txt b/o3.txt
deleted file mode 100644 (file)
index 4cd7626..0000000
--- a/o3.txt
+++ /dev/null
@@ -1,221 +0,0 @@
-### 🔍 Root cause
-
-The Flask server doesn’t “stall” because of Flask itself—it blocks on its own **`stdout`/`stderr` pipe**.
-
-* `show_benchmark_table.py` starts the Flask app with
-
-  ```python
-  subprocess.Popen(..., stdout=subprocess.PIPE, stderr=subprocess.PIPE)  # two pipes
-  ```
-
-* `flask_application.py` prints **two lines per request** (request-received / request-finished).
-* Because `show_benchmark_table.py` never reads those pipes, the OS-level pipe buffer (≈ 64 kB on Windows) fills after \~50 prints.
-* When the buffer is full the child process blocks on the very next `print`, so from the outside it looks as if the server “hangs” after request ≈ 52.
-
-The same Flask instance runs perfectly when started manually because its `stdout` inherits the console (no pipe, so no blocking).
-
----
-
-### ✅ Fix
-
-**Don’t pipe the Flask server’s output unless you actively drain it.**
-The quickest / simplest change: replace the two `PIPE`s with `None` (inherit parent console) or `subprocess.DEVNULL`.
-
-```python
-# --- in show_benchmark_table.py ----------------------------------------
-def start_server(command_args, health_check_url, server_name, cwd=None):
-    """Starts a server as a subprocess and waits for it to be ready."""
-    console.print(f"[yellow]Starting {server_name} server...[/yellow]")
-
-    # choose how to launch
-    popen_kwargs = dict(cwd=cwd, text=True)
-
-    # 🚩  STREAM HANDLING – avoid blocking
-    popen_kwargs["stdout"] = subprocess.DEVNULL        # or None
-    popen_kwargs["stderr"] = subprocess.STDOUT         # merge & discard
-
-    # handle “python -m uvicorn …” vs direct exe the same way
-    if "uvicorn" in command_args[0] and not command_args[0].endswith(".exe"):
-        process = subprocess.Popen([PYTHON_EXE, "-m"] + command_args, **popen_kwargs)
-    else:
-        process = subprocess.Popen(command_args, **popen_kwargs)
-```
-
-That’s the only change required—the benchmark then completes 100/100 requests.
-
----
-
-### 🛠️ Full updated `show_benchmark_table.py`
-
-> One complete file, ready to overwrite the old one.
-
-```python
-import subprocess
-import time
-import re
-import requests  # pip install requests
-from rich.console import Console
-from rich.table import Table
-import sys
-import os
-
-# --- Configuration ------------------------------------------------------
-FLASK_SERVER_URL = "http://127.0.0.1:3000/"
-FASTAPI_SERVER_URL = "http://127.0.0.1:8000/"
-BENCHMARK_SCRIPT_PATH = "benchmark/run_benchmark.py"
-NUM_REQUESTS_EXPECTED = 100
-PYTHON_EXE = sys.executable
-
-# ------------------------------------------------------------------------
-console = Console()
-
-# -------------------------- helpers -------------------------------------
-def start_server(command_args, health_check_url, server_name, cwd=None):
-    """Start server and wait until a 200 health check is returned."""
-    console.print(f"[yellow]Starting {server_name} server…[/yellow]")
-
-    # --- STREAM HANDLING: inherit console so the child can always write
-    popen_kwargs = dict(cwd=cwd, text=True,
-                        stdout=subprocess.DEVNULL,
-                        stderr=subprocess.STDOUT)
-
-    # run either as  “python -m uvicorn …”  or plain exe
-    if "uvicorn" in command_args[0] and not command_args[0].endswith(".exe"):
-        process = subprocess.Popen([PYTHON_EXE, "-m"] + command_args, **popen_kwargs)
-    else:
-        process = subprocess.Popen(command_args, **popen_kwargs)
-
-    max_wait = 30
-    start_t = time.time()
-    while time.time() - start_t < max_wait:
-        try:
-            if requests.get(health_check_url, timeout=3).status_code == 200:
-                console.print(f"[green]{server_name} ready.[/green]")
-                return process
-        except requests.RequestException:
-            time.sleep(0.3)
-    console.print(f"[red]{server_name} failed to start within {max_wait}s.[/red]")
-    process.terminate()
-    return None
-
-def stop_server(proc, name):
-    if not proc:
-        return
-    console.print(f"[yellow]Stopping {name}…[/yellow]")
-    proc.terminate()
-    try:
-        proc.wait(timeout=8)
-    except subprocess.TimeoutExpired:
-        proc.kill()
-    console.print(f"[green]{name} stopped.[/green]")
-
-def run_benchmark_script(framework_arg):
-    console.print(f"Running benchmark for [bold]{framework_arg}[/bold]…")
-    cmd = [PYTHON_EXE, BENCHMARK_SCRIPT_PATH, framework_arg]
-    result = subprocess.run(cmd, text=True, capture_output=True, timeout=600)
-    if result.returncode:
-        console.print(f"[red]{framework_arg} benchmark failed.[/red]")
-        console.print(result.stderr)
-        return None
-    last_line = result.stdout.strip().splitlines()[-1]
-    return last_line
-
-def parse_benchmark(line):
-    m = re.search(r"(\d+)/(\d+) successful requests in ([\d.]+) seconds", line)
-    if not m:
-        return None
-    succ, total, tsec = map(float, m.groups())
-    return {"successful": f"{int(succ)}/{int(total)}", "total_time": tsec}
-
-def display_table(rows):
-    tbl = Table(title="Benchmark Summary", show_lines=True, header_style="bold magenta")
-    tbl.add_column("Framework", style="cyan")
-    tbl.add_column("Server", style="white")
-    tbl.add_column("Delay", style="green")
-    tbl.add_column("#Reqs", justify="right")
-    tbl.add_column("Success", justify="right")
-    tbl.add_column("Total s", justify="right", style="yellow")
-    tbl.add_column("Avg s/req", justify="right", style="blue")
-    for r in rows:
-        avg = r["total_time"] / NUM_REQUESTS_EXPECTED
-        tbl.add_row(r["framework"], r["config"], r["delay"],
-                    str(NUM_REQUESTS_EXPECTED), r["successful"],
-                    f"{r['total_time']:.2f}", f"{avg:.3f}")
-    console.print(tbl)
-
-# --------------------------- scenarios ----------------------------------
-SCENARIOS = [
-    {
-        "name": "FastAPI",
-        "config": "Uvicorn, async",
-        "delay": "0.3 s asyncio.sleep",
-        "cmd": ["uvicorn", "app_fastapi.app:app", "--host", "0.0.0.0",
-                "--port", "8000", "--log-level", "warning"],
-        "url": FASTAPI_SERVER_URL,
-        "bench_arg": "fastapi",
-    },
-    {
-        "name": "Flask",
-        "config": "Single-threaded",
-        "delay": "0.3 s time.sleep",
-        "cmd": [PYTHON_EXE, "app_flask/flask_application.py"],
-        "url": FLASK_SERVER_URL,
-        "bench_arg": "flask",
-    }
-]
-
-# ----------------------------- main -------------------------------------
-if __name__ == "__main__":
-    console.print("[bold underline]Automated Web Framework Benchmark[/bold underline]\n")
-    results = []
-    root = os.getcwd()
-
-    for i, sc in enumerate(SCENARIOS, 1):
-        console.rule(f"[cyan]Scenario {i}/{len(SCENARIOS)} – {sc['name']}[/cyan]")
-        srv = start_server(sc["cmd"], sc["url"], sc["name"], cwd=root)
-        if not srv:
-            continue
-        try:
-            if sc["name"].lower() == "flask":
-                time.sleep(2)  # tiny grace period
-            line = run_benchmark_script(sc["bench_arg"])
-            parsed = parse_benchmark(line) if line else None
-            if parsed:
-                results.append({"framework": sc["name"], "config": sc["config"],
-                                "delay": sc["delay"], **parsed})
-        finally:
-            stop_server(srv, sc["name"])
-        console.print()
-
-    if results:
-        display_table(results)
-    console.print("\n[bold]Benchmark run finished.[/bold]")
-```
-
----
-
-### 🔄 Verification steps
-
-1. **Activate the venv**
-
-   ```powershell
-   cd "…/Hacker Dojo"
-   .\.venv\Scripts\activate
-   ```
-2. **Overwrite `show_benchmark_table.py` with the file above.**
-3. **Run benchmark again**
-
-   ```bash
-   python show_benchmark_table.py
-   ```
-
-   *Flask now reports 100/100 successful requests; FastAPI remains unchanged.*
-
----
-
-### ✍️ Notes / future improvements
-
-* If you still want to capture server logs, start a background thread to continuously read `process.stdout` instead of discarding it.
-* To compare “fair” concurrency you might additionally test Flask with `threaded=True` or via `waitress`, but that’s independent of the stall bug we just fixed.
-
-Let me know if anything needs tweaking!
index e55ab14da9d2c55663c3c51ede1565f7748d60ab..0bb3e7f6c125390b6e2cf035d7210792082c0bb7 100644 (file)
@@ -60,13 +60,113 @@ def stop_server(proc, name):
 def run_benchmark_script(framework_arg):
     console.print(f"Running benchmark for [bold]{framework_arg}[/bold]…")
     cmd = [PYTHON_EXE, BENCHMARK_SCRIPT_PATH, framework_arg]
-    result = subprocess.run(cmd, text=True, capture_output=True, timeout=600)
-    if result.returncode:
-        console.print(f"[red]{framework_arg} benchmark failed.[/red]")
-        console.print(result.stderr)
-        return None
-    last_line = result.stdout.strip().splitlines()[-1]
-    return last_line
+
+    if framework_arg.lower() == "flask":
+        final_summary_line = None
+        requests_done_count = 0
+        progress_line_printed = False
+        try:
+            # Ensure encoding is specified for Popen for consistent text handling
+            process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, bufsize=1, universal_newlines=True, encoding='utf-8')
+            
+            if process.stdout:
+                for line in iter(process.stdout.readline, ''):
+                    line = line.strip()
+                    if not line: 
+                        continue
+
+                    if line.startswith("REQ_STATUS:"):
+                        requests_done_count += 1
+                        # Using carriage return to update the line in place
+                        print(f"\rFlask progress: Handled {requests_done_count}/{NUM_REQUESTS_EXPECTED} requests...", end="", flush=True)
+                        progress_line_printed = True
+                    elif line.startswith("[DIAG-BRB-FLASK]"):
+                        if progress_line_printed:
+                            # Clear the progress line before printing diagnostic output
+                            print("\r" + " " * 80 + "\r", end="", flush=True) 
+                        print(line, flush=True) # Print diagnostic line
+                        if progress_line_printed:
+                            # Reprint the progress line after diagnostic output
+                            print(f"\rFlask progress: Handled {requests_done_count}/{NUM_REQUESTS_EXPECTED} requests...", end="", flush=True)
+                    elif "Final Flask benchmark summary:" in line:
+                        final_summary_line = line
+                        if progress_line_printed:
+                             # Clear the progress line before finishing
+                            print("\r" + " " * 80 + "\r", end="", flush=True)
+                        # The summary line itself will be printed by the main logic if needed, or parsed
+
+                process.stdout.close()
+            
+            # After the loop, if progress was printed, clear it finally
+            # This handles cases where the process ends without a final summary line immediately after progress
+            if progress_line_printed and not final_summary_line:
+                 print("\r" + " " * 80 + "\r", end="", flush=True)
+
+            stderr_output_list = []
+            if process.stderr:
+                for line in iter(process.stderr.readline, ''):
+                    line = line.strip()
+                    if line:
+                        stderr_output_list.append(line)
+                process.stderr.close()
+
+            process.wait(timeout=600) 
+
+            if process.returncode != 0:
+                console.print(f"[red]{framework_arg} benchmark script failed with return code {process.returncode}[/red]")
+                if stderr_output_list:
+                    console.print("[red]STDERR:[/red]")
+                    for err_line in stderr_output_list:
+                        console.print(f"[red]{err_line}[/red]")
+                return None
+            
+            if final_summary_line:
+                return final_summary_line
+            else:
+                console.print(f"[red]Could not find the final summary line for {framework_arg} in Popen benchmark output.[/red]")
+                if stderr_output_list:
+                    console.print("[red]STDERR output during Popen execution was:[/red]")
+                    for err_line in stderr_output_list:
+                        console.print(f"[red]{err_line}[/red]")
+                return None
+
+        except subprocess.TimeoutExpired:
+            console.print(f"[red]Benchmark for {framework_arg} (Popen path) timed out.[/red]")
+            if process.poll() is None: # Check if process is still running
+                process.kill()
+                process.wait()
+            return None
+        except Exception as e:
+            console.print(f"[red]An unexpected error occurred while running Popen benchmark for {framework_arg}: {e}[/red]")
+            return None
+            
+    else:  # For FastAPI or any other framework not needing live progress
+        try:
+            result = subprocess.run(cmd, text=True, capture_output=True, timeout=600, check=False, encoding='utf-8')
+            if result.returncode != 0:
+                console.print(f"[red]{framework_arg} benchmark failed with subprocess.run.[/red]")
+                if result.stderr:
+                    console.print(f"STDERR:\n{result.stderr.strip()}")
+                return None
+            
+            if result.stdout and result.stdout.strip():
+                lines = result.stdout.strip().splitlines()
+                if lines:
+                    return lines[-1] # Return the last line, expected to be the summary
+                else:
+                    console.print(f"[red]No lines in stdout from {framework_arg} benchmark script (subprocess.run path).[/red]")
+                    return None
+            else:
+                console.print(f"[red]No stdout from {framework_arg} benchmark script (subprocess.run path).[/red]")
+                if result.stderr and result.stderr.strip():
+                     console.print(f"STDERR:\n{result.stderr.strip()}")
+                return None
+        except subprocess.TimeoutExpired:
+            console.print(f"[red]Benchmark for {framework_arg} (subprocess.run path) timed out.[/red]")
+            return None
+        except Exception as e:
+            console.print(f"[red]An unexpected error occurred while running subprocess.run benchmark for {framework_arg}: {e}[/red]")
+            return None
 
 def parse_benchmark(line):
     m = re.search(r"(\d+)/(\d+) successful requests in ([\d.]+) seconds", line)